Diffstat
 app/controllers/concerns/uploads_actions.rb | 61
 app/controllers/groups/uploads_controller.rb | 30
 app/controllers/projects/lfs_storage_controller.rb | 2
 app/controllers/projects/uploads_controller.rb | 21
 app/controllers/uploads_controller.rb | 75
 app/models/appearance.rb | 1
 app/models/ci/build.rb | 2
 app/models/concerns/avatarable.rb | 24
 app/models/group.rb | 20
 app/models/lfs_object.rb | 2
 app/models/note.rb | 1
 app/models/project.rb | 14
 app/models/upload.rb | 50
 app/models/user.rb | 16
 app/services/projects/hashed_storage/migrate_attachments_service.rb | 4
 app/uploaders/attachment_uploader.rb | 10
 app/uploaders/avatar_uploader.rb | 19
 app/uploaders/file_mover.rb | 6
 app/uploaders/file_uploader.rb | 122
 app/uploaders/gitlab_uploader.rb | 79
 app/uploaders/job_artifact_uploader.rb | 19
 app/uploaders/legacy_artifact_uploader.rb | 15
 app/uploaders/lfs_object_uploader.rb | 20
 app/uploaders/namespace_file_uploader.rb | 25
 app/uploaders/object_store_uploader.rb | 215
 app/uploaders/personal_file_uploader.rb | 43
 app/uploaders/records_uploads.rb | 80
 app/uploaders/uploader_helper.rb | 9
 app/uploaders/workhorse.rb | 7
 app/workers/object_storage_upload_worker.rb | 16
 app/workers/upload_checksum_worker.rb | 2
 changelogs/unreleased-ee/4163-move-uploads-to-object-storage.yml | 5
 config/gitlab.yml.example | 29
 config/initializers/1_settings.rb | 32
 db/migrate/20171214144320_add_store_column_to_uploads.rb | 12
 db/migrate/20180119135717_add_uploader_index_to_uploads.rb | 20
 db/schema.rb | 3
 doc/development/file_storage.md | 104
 ee/app/models/ee/ci/job_artifact.rb | 25
 ee/app/models/ee/lfs_object.rb | 23
 ee/app/models/geo/fdw/ci/job_artifact.rb | 11
 ee/app/models/geo/fdw/lfs_object.rb | 9
 ee/app/services/geo/files_expire_service.rb | 77
 ee/app/services/geo/hashed_storage_attachments_migration_service.rb | 55
 ee/app/services/geo/job_artifact_deleted_event_store.rb | 48
 ee/app/services/geo/lfs_object_deleted_event_store.rb | 49
 ee/app/uploaders/object_storage.rb | 265
 ee/lib/gitlab/geo/file_transfer.rb | 24
 ee/lib/gitlab/geo/log_cursor/daemon.rb | 266
 lib/api/runner.rb | 6
 lib/backup/artifacts.rb | 2
 lib/gitlab/background_migration/populate_untracked_uploads.rb | 2
 lib/gitlab/background_migration/prepare_untracked_uploads.rb | 9
 lib/gitlab/gfm/uploads_rewriter.rb | 2
 lib/gitlab/import_export/uploads_saver.rb | 8
 lib/gitlab/uploads_transfer.rb | 2
 lib/gitlab/workhorse.rb | 4
 lib/tasks/gitlab/artifacts.rake | 4
 lib/tasks/gitlab/lfs.rake | 2
 spec/controllers/groups/uploads_controller_spec.rb | 4
 spec/controllers/projects/artifacts_controller_spec.rb | 6
 spec/controllers/projects/raw_controller_spec.rb | 4
 spec/controllers/uploads_controller_spec.rb | 13
 spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb | 270
 spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb | 22
 spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb | 414
 spec/ee/spec/models/ee/lfs_object_spec.rb | 8
 spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb | 50
 spec/ee/spec/services/geo/file_download_service_spec.rb | 227
 spec/ee/spec/services/geo/files_expire_service_spec.rb | 51
 spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb | 83
 spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb | 291
 spec/ee/workers/object_storage_upload_worker_spec.rb | 4
 spec/factories/ci/job_artifacts.rb | 2
 spec/factories/geo/event_log.rb | 121
 spec/factories/groups.rb | 2
 spec/factories/notes.rb | 4
 spec/factories/projects.rb | 2
 spec/factories/uploads.rb | 27
 spec/factories/users.rb | 2
 spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb | 57
 spec/lib/gitlab/gfm/uploads_rewriter_spec.rb | 2
 spec/lib/gitlab/import_export/uploads_restorer_spec.rb | 9
 spec/lib/gitlab/import_export/uploads_saver_spec.rb | 4
 spec/migrations/remove_empty_fork_networks_spec.rb | 4
 spec/models/namespace_spec.rb | 2
 spec/models/upload_spec.rb | 73
 spec/requests/api/runner_spec.rb | 8
 spec/requests/lfs_http_spec.rb | 8
 spec/services/issues/move_service_spec.rb | 2
 spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb | 4
 spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb | 62
 spec/support/shared_examples/uploaders/object_storage_shared_examples.rb | 126
 spec/support/stub_object_storage.rb | 7
 spec/support/test_env.rb | 2
 spec/support/track_untracked_uploads_helpers.rb | 2
 spec/tasks/gitlab/artifacts_rake_spec.rb | 32
 spec/tasks/gitlab/lfs_rake_spec.rb | 4
 spec/uploaders/attachment_uploader_spec.rb | 41
 spec/uploaders/avatar_uploader_spec.rb | 44
 spec/uploaders/file_mover_spec.rb | 18
 spec/uploaders/file_uploader_spec.rb | 128
 spec/uploaders/job_artifact_uploader_spec.rb | 46
 spec/uploaders/legacy_artifact_uploader_spec.rb | 52
 spec/uploaders/lfs_object_uploader_spec.rb | 43
 spec/uploaders/namespace_file_uploader_spec.rb | 36
 spec/uploaders/object_storage_spec.rb | 350
 spec/uploaders/object_store_uploader_spec.rb | 315
 spec/uploaders/personal_file_uploader_spec.rb | 45
 spec/uploaders/records_uploads_spec.rb | 73
 spec/workers/upload_checksum_worker_spec.rb | 29
 111 files changed, 3972 insertions(+), 1371 deletions(-)
diff --git a/app/controllers/concerns/uploads_actions.rb b/app/controllers/concerns/uploads_actions.rb
index a6fb1f40001..61554029d09 100644
--- a/app/controllers/concerns/uploads_actions.rb
+++ b/app/controllers/concerns/uploads_actions.rb
@@ -1,6 +1,8 @@
module UploadsActions
include Gitlab::Utils::StrongMemoize
+ UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze
+
def create
link_to_file = UploadService.new(model, params[:file], uploader_class).execute
@@ -17,34 +19,71 @@ module UploadsActions
end
end
+ # This should either
+ # - send the file directly
+ # - or redirect to its URL
+ #
def show
return render_404 unless uploader.exists?
- disposition = uploader.image_or_video? ? 'inline' : 'attachment'
-
- expires_in 0.seconds, must_revalidate: true, private: true
+ if uploader.file_storage?
+ disposition = uploader.image_or_video? ? 'inline' : 'attachment'
+ expires_in 0.seconds, must_revalidate: true, private: true
- send_file uploader.file.path, disposition: disposition
+ send_file uploader.file.path, disposition: disposition
+ else
+ redirect_to uploader.url
+ end
end
private
+ def uploader_class
+ raise NotImplementedError
+ end
+
+ def upload_mount
+ mounted_as = params[:mounted_as]
+ mounted_as if UPLOAD_MOUNTS.include?(mounted_as)
+ end
+
+ def uploader_mounted?
+ upload_model_class < CarrierWave::Mount::Extension && !upload_mount.nil?
+ end
+
def uploader
strong_memoize(:uploader) do
- return if show_model.nil?
+ if uploader_mounted?
+ model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
+ else
+ build_uploader_from_upload || build_uploader_from_params
+ end
+ end
+ end
- file_uploader = FileUploader.new(show_model, params[:secret])
- file_uploader.retrieve_from_store!(params[:filename])
+ def build_uploader_from_upload
+ return nil unless params[:secret] && params[:filename]
- file_uploader
- end
+ upload_path = uploader_class.upload_path(params[:secret], params[:filename])
+ upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_path)
+ upload&.build_uploader
+ end
+
+ def build_uploader_from_params
+ uploader = uploader_class.new(model, params[:secret])
+ uploader.retrieve_from_store!(params[:filename])
+ uploader
end
def image_or_video?
uploader && uploader.exists? && uploader.image_or_video?
end
- def uploader_class
- FileUploader
+ def find_model
+ nil
+ end
+
+ def model
+ strong_memoize(:model) { find_model }
end
end
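
The rewritten concern resolves an uploader in three steps: a CarrierWave mount on the model when `mounted_as` is one of the allowed `UPLOAD_MOUNTS`; otherwise the canonical `Upload` row, which knows whether the file is local or remote; otherwise a rebuild from the URL parameters alone. A minimal sketch of the non-mounted path, assuming a hypothetical `project` model and made-up params:

    params = { secret: "0123456789abcdef0123456789abcdef", filename: "screenshot.png" }

    # Preferred: look up the Upload row recorded by RecordsUploads
    upload_path = FileUploader.upload_path(params[:secret], params[:filename])
    upload = Upload.find_by(uploader: "FileUploader", path: upload_path)
    uploader = upload&.build_uploader

    # Fallback: rebuild the uploader from the URL parts alone
    uploader ||= FileUploader.new(project, params[:secret]).tap do |u|
      u.retrieve_from_store!(params[:filename])
    end

    # #show then serves the file directly when it is on local storage,
    # and redirects to uploader.url otherwise.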
diff --git a/app/controllers/groups/uploads_controller.rb b/app/controllers/groups/uploads_controller.rb
index e6bd9806401..f1578f75e88 100644
--- a/app/controllers/groups/uploads_controller.rb
+++ b/app/controllers/groups/uploads_controller.rb
@@ -7,29 +7,23 @@ class Groups::UploadsController < Groups::ApplicationController
private
- def show_model
- strong_memoize(:show_model) do
- group_id = params[:group_id]
-
- Group.find_by_full_path(group_id)
- end
+ def upload_model_class
+ Group
end
- def authorize_upload_file!
- render_404 unless can?(current_user, :upload_file, group)
+ def uploader_class
+ NamespaceFileUploader
end
- def uploader
- strong_memoize(:uploader) do
- file_uploader = uploader_class.new(show_model, params[:secret])
- file_uploader.retrieve_from_store!(params[:filename])
- file_uploader
- end
- end
+ def find_model
+ return @group if @group
- def uploader_class
- NamespaceFileUploader
+ group_id = params[:group_id]
+
+ Group.find_by_full_path(group_id)
end
- alias_method :model, :group
+ def authorize_upload_file!
+ render_404 unless can?(current_user, :upload_file, group)
+ end
end
diff --git a/app/controllers/projects/lfs_storage_controller.rb b/app/controllers/projects/lfs_storage_controller.rb
index 5b0f3d11d9e..88fc373945a 100644
--- a/app/controllers/projects/lfs_storage_controller.rb
+++ b/app/controllers/projects/lfs_storage_controller.rb
@@ -61,7 +61,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
def store_file(oid, size, tmp_file)
# Define tmp_file_path early because we use it in "ensure"
- tmp_file_path = File.join("#{Gitlab.config.lfs.storage_path}/tmp/upload", tmp_file)
+ tmp_file_path = File.join(LfsObjectUploader.workhorse_upload_path, tmp_file)
object = LfsObject.find_or_create_by(oid: oid, size: size)
file_exists = object.file.exists? || move_tmp_file_to_storage(object, tmp_file_path)
diff --git a/app/controllers/projects/uploads_controller.rb b/app/controllers/projects/uploads_controller.rb
index 4685bbe80b4..f5cf089ad98 100644
--- a/app/controllers/projects/uploads_controller.rb
+++ b/app/controllers/projects/uploads_controller.rb
@@ -1,6 +1,7 @@
class Projects::UploadsController < Projects::ApplicationController
include UploadsActions
+ # These will kick you out if you don't have access.
skip_before_action :project, :repository,
if: -> { action_name == 'show' && image_or_video? }
@@ -8,14 +9,20 @@ class Projects::UploadsController < Projects::ApplicationController
private
- def show_model
- strong_memoize(:show_model) do
- namespace = params[:namespace_id]
- id = params[:project_id]
+ def upload_model_class
+ Project
+ end
- Project.find_by_full_path("#{namespace}/#{id}")
- end
+ def uploader_class
+ FileUploader
end
- alias_method :model, :project
+ def find_model
+ return @project if @project
+
+ namespace = params[:namespace_id]
+ id = params[:project_id]
+
+ Project.find_by_full_path("#{namespace}/#{id}")
+ end
end
diff --git a/app/controllers/uploads_controller.rb b/app/controllers/uploads_controller.rb
index 16a74f82d3f..3d227b0a955 100644
--- a/app/controllers/uploads_controller.rb
+++ b/app/controllers/uploads_controller.rb
@@ -1,19 +1,34 @@
class UploadsController < ApplicationController
include UploadsActions
+ UnknownUploadModelError = Class.new(StandardError)
+
+ MODEL_CLASSES = {
+ "user" => User,
+ "project" => Project,
+ "note" => Note,
+ "group" => Group,
+ "appearance" => Appearance,
+ "personal_snippet" => PersonalSnippet,
+ nil => PersonalSnippet
+ }.freeze
+
+ rescue_from UnknownUploadModelError, with: :render_404
+
skip_before_action :authenticate_user!
+ before_action :upload_mount_satisfied?
before_action :find_model
before_action :authorize_access!, only: [:show]
before_action :authorize_create_access!, only: [:create]
- private
+ def uploader_class
+ PersonalFileUploader
+ end
def find_model
return nil unless params[:id]
- return render_404 unless upload_model && upload_mount
-
- @model = upload_model.find(params[:id])
+ upload_model_class.find(params[:id])
end
def authorize_access!
@@ -53,55 +68,17 @@ class UploadsController < ApplicationController
end
end
- def upload_model
- upload_models = {
- "user" => User,
- "project" => Project,
- "note" => Note,
- "group" => Group,
- "appearance" => Appearance,
- "personal_snippet" => PersonalSnippet
- }
-
- upload_models[params[:model]]
- end
-
- def upload_mount
- return true unless params[:mounted_as]
-
- upload_mounts = %w(avatar attachment file logo header_logo)
-
- if upload_mounts.include?(params[:mounted_as])
- params[:mounted_as]
- end
+ def upload_model_class
+ MODEL_CLASSES[params[:model]] || raise(UnknownUploadModelError)
end
- def uploader
- return @uploader if defined?(@uploader)
-
- case model
- when nil
- @uploader = PersonalFileUploader.new(nil, params[:secret])
-
- @uploader.retrieve_from_store!(params[:filename])
- when PersonalSnippet
- @uploader = PersonalFileUploader.new(model, params[:secret])
-
- @uploader.retrieve_from_store!(params[:filename])
- else
- @uploader = @model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
-
- redirect_to @uploader.url unless @uploader.file_storage?
- end
-
- @uploader
+ def upload_model_class_has_mounts?
+ upload_model_class < CarrierWave::Mount::Extension
end
- def uploader_class
- PersonalFileUploader
- end
+ def upload_mount_satisfied?
+ return true unless upload_model_class_has_mounts?
- def model
- @model ||= find_model
+ upload_model_class.uploader_options.has_key?(upload_mount)
end
end
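
The model lookup is now data-driven, and an unrecognized `:model` parameter raises an error that is rescued into a 404. Roughly:

    UploadsController::MODEL_CLASSES["user"] # => User
    UploadsController::MODEL_CLASSES[nil]    # => PersonalSnippet (no :model param given)
    UploadsController::MODEL_CLASSES["junk"] # => nil, so upload_model_class raises
                                             #    UnknownUploadModelError -> render_404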
diff --git a/app/models/appearance.rb b/app/models/appearance.rb
index 76cfe28742a..dcd14c08f3c 100644
--- a/app/models/appearance.rb
+++ b/app/models/appearance.rb
@@ -11,6 +11,7 @@ class Appearance < ActiveRecord::Base
mount_uploader :logo, AttachmentUploader
mount_uploader :header_logo, AttachmentUploader
+
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
CACHE_KEY = 'current_appearance'.freeze
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index b65daa376d2..4eeccd4d934 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -45,7 +45,7 @@ module Ci
end
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
- scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::LOCAL_STORE]) }
+ scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) }
diff --git a/app/models/concerns/avatarable.rb b/app/models/concerns/avatarable.rb
index 10659030910..d35e37935fb 100644
--- a/app/models/concerns/avatarable.rb
+++ b/app/models/concerns/avatarable.rb
@@ -1,6 +1,30 @@
module Avatarable
extend ActiveSupport::Concern
+ included do
+ prepend ShadowMethods
+
+ validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
+ validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
+
+ mount_uploader :avatar, AvatarUploader
+ end
+
+ module ShadowMethods
+ def avatar_url(**args)
+ # We use avatar_path instead of overriding avatar_url because of carrierwave.
+ # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
+
+ avatar_path(only_path: args.fetch(:only_path, true)) || super
+ end
+ end
+
+ def avatar_type
+ unless self.avatar.image?
+ self.errors.add :avatar, "only images allowed"
+ end
+ end
+
def avatar_path(only_path: true)
return unless self[:avatar].present?
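
The `prepend` here matters: `ShadowMethods` is inserted ahead of the including class in the ancestor chain, so the concern's `avatar_url` runs first and can still reach the class's own implementation through `super`. A self-contained sketch of the mechanism (the stub class and URL are invented):

    module Avatarable
      module ShadowMethods
        def avatar_url(**args)
          avatar_path(only_path: args.fetch(:only_path, true)) || super
        end
      end
    end

    class User
      prepend Avatarable::ShadowMethods

      def avatar_url(**args)
        "https://gravatar.example/fallback.png" # class-level fallback, e.g. Gravatar
      end

      def avatar_path(only_path: true)
        nil # pretend no avatar has been uploaded
      end
    end

    User.new.avatar_url # => "https://gravatar.example/fallback.png" (reached via super)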
diff --git a/app/models/group.rb b/app/models/group.rb
index fddace03387..5d1e2f62982 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -29,18 +29,14 @@ class Group < Namespace
has_many :variables, class_name: 'Ci::GroupVariable'
has_many :custom_attributes, class_name: 'GroupCustomAttribute'
- validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
+ has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+
validate :visibility_level_allowed_by_projects
validate :visibility_level_allowed_by_sub_groups
validate :visibility_level_allowed_by_parent
- validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
-
validates :two_factor_grace_period, presence: true, numericality: { greater_than_or_equal_to: 0 }
- mount_uploader :avatar, AvatarUploader
- has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
-
after_create :post_create_hook
after_destroy :post_destroy_hook
after_save :update_two_factor_requirement
@@ -116,12 +112,6 @@ class Group < Namespace
visibility_level_allowed_by_sub_groups?(level)
end
- def avatar_url(**args)
- # We use avatar_path instead of overriding avatar_url because of carrierwave.
- # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
- avatar_path(args)
- end
-
def lfs_enabled?
return false unless Gitlab.config.lfs.enabled
return Gitlab.config.lfs.enabled if self[:lfs_enabled].nil?
@@ -193,12 +183,6 @@ class Group < Namespace
owners.include?(user) && owners.size == 1
end
- def avatar_type
- unless self.avatar.image?
- self.errors.add :avatar, "only images allowed"
- end
- end
-
def post_create_hook
Gitlab::AppLogger.info("Group \"#{name}\" was created")
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index 6ad792aab30..65c157d61ca 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -7,7 +7,7 @@ class LfsObject < ActiveRecord::Base
validates :oid, presence: true, uniqueness: true
- scope :with_files_stored_locally, ->() { where(file_store: [nil, LfsObjectUploader::LOCAL_STORE]) }
+ scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
mount_uploader :file, LfsObjectUploader
diff --git a/app/models/note.rb b/app/models/note.rb
index 184fbd5f5ae..a84db8982e5 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -88,6 +88,7 @@ class Note < ActiveRecord::Base
end
end
+ # @deprecated attachments are handled by the MarkdownUploader
mount_uploader :attachment, AttachmentUploader
# Scopes
diff --git a/app/models/project.rb b/app/models/project.rb
index fbe65e700a4..b3c2b599129 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -255,9 +255,6 @@ class Project < ActiveRecord::Base
validates :star_count, numericality: { greater_than_or_equal_to: 0 }
validate :check_limit, on: :create
validate :check_repository_path_availability, on: :update, if: ->(project) { project.renamed? }
- validate :avatar_type,
- if: ->(project) { project.avatar.present? && project.avatar_changed? }
- validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
validate :visibility_level_allowed_by_group
validate :visibility_level_allowed_as_fork
validate :check_wiki_path_conflict
@@ -265,7 +262,6 @@ class Project < ActiveRecord::Base
presence: true,
inclusion: { in: ->(_object) { Gitlab.config.repositories.storages.keys } }
- mount_uploader :avatar, AvatarUploader
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
# Scopes
@@ -917,20 +913,12 @@ class Project < ActiveRecord::Base
issues_tracker.to_param == 'jira'
end
- def avatar_type
- unless self.avatar.image?
- self.errors.add :avatar, 'only images allowed'
- end
- end
-
def avatar_in_git
repository.avatar
end
def avatar_url(**args)
- # We use avatar_path instead of overriding avatar_url because of carrierwave.
- # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
- avatar_path(args) || (Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git)
+ Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git
end
# For compatibility with old code
diff --git a/app/models/upload.rb b/app/models/upload.rb
index f194d7bdb80..e227baea994 100644
--- a/app/models/upload.rb
+++ b/app/models/upload.rb
@@ -9,44 +9,52 @@ class Upload < ActiveRecord::Base
validates :model, presence: true
validates :uploader, presence: true
- before_save :calculate_checksum, if: :foreground_checksum?
- after_commit :schedule_checksum, unless: :foreground_checksum?
+ before_save :calculate_checksum!, if: :foreground_checksummable?
+ after_commit :schedule_checksum, if: :checksummable?
- def self.remove_path(path)
- where(path: path).destroy_all
- end
-
- def self.record(uploader)
- remove_path(uploader.relative_path)
-
- create(
- size: uploader.file.size,
- path: uploader.relative_path,
- model: uploader.model,
- uploader: uploader.class.to_s
- )
+ def self.hexdigest(path)
+ Digest::SHA256.file(path).hexdigest
end
def absolute_path
+ raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
end
- def calculate_checksum
- return unless exist?
+ def calculate_checksum!
+ self.checksum = nil
+ return unless checksummable?
self.checksum = Digest::SHA256.file(absolute_path).hexdigest
end
+ def build_uploader
+ uploader_class.new(model).tap do |uploader|
+ uploader.upload = self
+ uploader.retrieve_from_store!(identifier)
+ end
+ end
+
def exist?
File.exist?(absolute_path)
end
private
- def foreground_checksum?
- size <= CHECKSUM_THRESHOLD
+ def checksummable?
+ checksum.nil? && local? && exist?
+ end
+
+ def local?
+ return true if store.nil?
+
+ store == ObjectStorage::Store::LOCAL
+ end
+
+ def foreground_checksummable?
+ checksummable? && size <= CHECKSUM_THRESHOLD
end
def schedule_checksum
@@ -57,6 +65,10 @@ class Upload < ActiveRecord::Base
!path.start_with?('/')
end
+ def identifier
+ File.basename(path)
+ end
+
def uploader_class
Object.const_get(uploader)
end
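
The checksum lifecycle is now split by store and size: only local files can be hashed, small ones inline before save, the rest via a worker after commit. Illustrative only, using the names from the diff:

    upload = Upload.new(path: "uploads/-/system/user/avatar/1/avatar.png",
                        store: ObjectStorage::Store::LOCAL)

    # before_save: calculate_checksum! runs only when checksummable? holds
    # (local, file exists, checksum still nil) AND size <= CHECKSUM_THRESHOLD.
    # after_commit: anything still checksummable? (checksum left nil) is handed
    # to UploadChecksumWorker instead.
    #
    # Remote uploads never qualify: Upload#absolute_path now raises
    # ObjectStorage::RemoteStoreError for them instead of returning a bogus path.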
diff --git a/app/models/user.rb b/app/models/user.rb
index 4484ee9ff4c..eb6d12b5ec5 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -134,6 +134,7 @@ class User < ActiveRecord::Base
has_many :assigned_merge_requests, dependent: :nullify, foreign_key: :assignee_id, class_name: "MergeRequest" # rubocop:disable Cop/ActiveRecordDependent
has_many :custom_attributes, class_name: 'UserCustomAttribute'
+ has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
#
# Validations
@@ -156,12 +157,10 @@ class User < ActiveRecord::Base
validate :namespace_uniq, if: :username_changed?
validate :namespace_move_dir_allowed, if: :username_changed?
- validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validate :unique_email, if: :email_changed?
validate :owns_notification_email, if: :notification_email_changed?
validate :owns_public_email, if: :public_email_changed?
validate :signup_domain_valid?, on: :create, if: ->(user) { !user.created_by_id }
- validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
before_validation :sanitize_attrs
before_validation :set_notification_email, if: :email_changed?
@@ -223,9 +222,6 @@ class User < ActiveRecord::Base
end
end
- mount_uploader :avatar, AvatarUploader
- has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
-
# Scopes
scope :admins, -> { where(admin: true) }
scope :blocked, -> { with_states(:blocked, :ldap_blocked) }
@@ -521,12 +517,6 @@ class User < ActiveRecord::Base
end
end
- def avatar_type
- unless avatar.image?
- errors.add :avatar, "only images allowed"
- end
- end
-
def unique_email
if !emails.exists?(email: email) && Email.exists?(email: email)
errors.add(:email, 'has already been taken')
@@ -854,9 +844,7 @@ class User < ActiveRecord::Base
end
def avatar_url(size: nil, scale: 2, **args)
- # We use avatar_path instead of overriding avatar_url because of carrierwave.
- # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
- avatar_path(args) || GravatarService.new.execute(email, size, scale, username: username)
+ GravatarService.new.execute(email, size, scale, username: username)
end
def primary_email_verified?
diff --git a/app/services/projects/hashed_storage/migrate_attachments_service.rb b/app/services/projects/hashed_storage/migrate_attachments_service.rb
index f8aaec8a9c0..bc897d891d5 100644
--- a/app/services/projects/hashed_storage/migrate_attachments_service.rb
+++ b/app/services/projects/hashed_storage/migrate_attachments_service.rb
@@ -14,9 +14,9 @@ module Projects
@old_path = project.full_path
@new_path = project.disk_path
- origin = FileUploader.dynamic_path_segment(project)
+ origin = FileUploader.absolute_base_dir(project)
project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
- target = FileUploader.dynamic_path_segment(project)
+ target = FileUploader.absolute_base_dir(project)
result = move_folder!(origin, target)
project.save!
diff --git a/app/uploaders/attachment_uploader.rb b/app/uploaders/attachment_uploader.rb
index 109eb2fea0b..cd819dc9bff 100644
--- a/app/uploaders/attachment_uploader.rb
+++ b/app/uploaders/attachment_uploader.rb
@@ -1,10 +1,12 @@
class AttachmentUploader < GitlabUploader
- include RecordsUploads
+ include RecordsUploads::Concern
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
include UploaderHelper
- storage :file
+ private
- def store_dir
- "#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
+ def dynamic_segment
+ File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
end
end
diff --git a/app/uploaders/avatar_uploader.rb b/app/uploaders/avatar_uploader.rb
index cbb79376d5f..5848e6c6994 100644
--- a/app/uploaders/avatar_uploader.rb
+++ b/app/uploaders/avatar_uploader.rb
@@ -1,20 +1,13 @@
class AvatarUploader < GitlabUploader
- include RecordsUploads
include UploaderHelper
-
- storage :file
-
- def store_dir
- "#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
- end
+ include RecordsUploads::Concern
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
def exists?
model.avatar.file && model.avatar.file.present?
end
- # We set move_to_store and move_to_cache to 'false' to prevent stealing
- # the avatar file from a project when forking it.
- # https://gitlab.com/gitlab-org/gitlab-ce/issues/26158
def move_to_store
false
end
@@ -22,4 +15,10 @@ class AvatarUploader < GitlabUploader
def move_to_cache
false
end
+
+ private
+
+ def dynamic_segment
+ File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
+ end
end
diff --git a/app/uploaders/file_mover.rb b/app/uploaders/file_mover.rb
index 00c2888d224..f37567d6141 100644
--- a/app/uploaders/file_mover.rb
+++ b/app/uploaders/file_mover.rb
@@ -21,13 +21,11 @@ class FileMover
end
def update_markdown
- updated_text = model.read_attribute(update_field).gsub(temp_file_uploader.to_markdown, uploader.to_markdown)
+ updated_text = model.read_attribute(update_field)
+ .gsub(temp_file_uploader.markdown_link, uploader.markdown_link)
model.update_attribute(update_field, updated_text)
-
- true
rescue
revert
-
false
end
diff --git a/app/uploaders/file_uploader.rb b/app/uploaders/file_uploader.rb
index 0b591e3bbbb..81952dacce4 100644
--- a/app/uploaders/file_uploader.rb
+++ b/app/uploaders/file_uploader.rb
@@ -1,23 +1,40 @@
+# This class breaks the actual CarrierWave concept.
+# Every uploader should use a base_dir that is model agnostic so we can build
+# back URLs from base_dir-relative paths saved in the `Upload` model.
+#
+# As the `.base_dir` is model dependent and **not** saved in the upload model (see #upload_path),
+# there is no way to build back the correct file path without the model, which defies the
+# CarrierWave way of storing files.
+#
class FileUploader < GitlabUploader
- include RecordsUploads
include UploaderHelper
+ include RecordsUploads::Concern
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
+ DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}
- storage :file
+ attr_accessor :model
+
+ def self.root
+ File.join(options.storage_path, 'uploads')
+ end
- def self.absolute_path(upload_record)
+ def self.absolute_path(upload)
File.join(
- self.dynamic_path_segment(upload_record.model),
- upload_record.path
+ absolute_base_dir(upload.model),
+ upload.path # already contains the dynamic_segment, see #upload_path
)
end
- # Not using `GitlabUploader.base_dir` because all project namespaces are in
- # the `public/uploads` dir.
- #
- def self.base_dir
- root_dir
+ def self.base_dir(model)
+ model_path_segment(model)
+ end
+
+ # used in migrations and import/exports
+ def self.absolute_base_dir(model)
+ File.join(root, base_dir(model))
end
# Returns the part of `store_dir` that can change based on the model's current
@@ -29,63 +46,94 @@ class FileUploader < GitlabUploader
# model - Object that responds to `full_path` and `disk_path`
#
# Returns a String without a trailing slash
- def self.dynamic_path_segment(model)
+ def self.model_path_segment(model)
if model.hashed_storage?(:attachments)
- dynamic_path_builder(model.disk_path)
+ model.disk_path
else
- dynamic_path_builder(model.full_path)
+ model.full_path
end
end
- # Auxiliary method to build dynamic path segment when not using a project model
- #
- # Prefer to use the `.dynamic_path_segment` as it includes Hashed Storage specific logic
- def self.dynamic_path_builder(path)
- File.join(CarrierWave.root, base_dir, path)
+ def self.upload_path(secret, identifier)
+ File.join(secret, identifier)
end
- attr_accessor :model
- attr_reader :secret
+ def self.generate_secret
+ SecureRandom.hex
+ end
def initialize(model, secret = nil)
@model = model
- @secret = secret || generate_secret
+ @secret = secret
end
- def store_dir
- File.join(dynamic_path_segment, @secret)
+ def base_dir
+ self.class.base_dir(@model)
end
- def relative_path
- self.file.path.sub("#{dynamic_path_segment}/", '')
+ # We don't need to know the actual path: an uploader instance should be
+ # able to yield the file content on demand, so we build the digest from that.
+ def absolute_path
+ self.class.absolute_path(@upload)
end
- def to_markdown
- to_h[:markdown]
+ def upload_path
+ self.class.upload_path(dynamic_segment, identifier)
end
- def to_h
- filename = image_or_video? ? self.file.basename : self.file.filename
- escaped_filename = filename.gsub("]", "\\]")
+ def model_path_segment
+ self.class.model_path_segment(@model)
+ end
- markdown = "[#{escaped_filename}](#{secure_url})"
+ def store_dir
+ File.join(base_dir, dynamic_segment)
+ end
+
+ def markdown_link
+ markdown = "[#{markdown_name}](#{secure_url})"
markdown.prepend("!") if image_or_video? || dangerous?
+ markdown
+ end
+ def to_h
{
- alt: filename,
+ alt: markdown_name,
url: secure_url,
- markdown: markdown
+ markdown: markdown_link
}
end
+ def filename
+ self.file.filename
+ end
+
+ # the upload does not hold the secret, but holds the path
+ # which contains the secret: extract it
+ def upload=(value)
+ if matches = DYNAMIC_PATH_PATTERN.match(value.path)
+ @secret = matches[:secret]
+ @identifier = matches[:identifier]
+ end
+
+ super
+ end
+
+ def secret
+ @secret ||= self.class.generate_secret
+ end
+
private
- def dynamic_path_segment
- self.class.dynamic_path_segment(model)
+ def markdown_name
+ (image_or_video? ? File.basename(filename, File.extname(filename)) : filename).gsub("]", "\\]")
end
- def generate_secret
- SecureRandom.hex
+ def identifier
+ @identifier ||= filename
+ end
+
+ def dynamic_segment
+ secret
end
def secure_url
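
Because the `Upload` row stores only the path, the secret must be recoverable from it; `#upload=` re-extracts both parts on the way back in. A round-trip sketch (values invented):

    secret = SecureRandom.hex # 32 hex characters
    path   = FileUploader.upload_path(secret, "design.pdf") # => "#{secret}/design.pdf"

    match = FileUploader::DYNAMIC_PATH_PATTERN.match(path)
    match[:secret]     # => secret
    match[:identifier] # => "design.pdf"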
diff --git a/app/uploaders/gitlab_uploader.rb b/app/uploaders/gitlab_uploader.rb
index 7f72b3ce471..ba2ceb0c8cf 100644
--- a/app/uploaders/gitlab_uploader.rb
+++ b/app/uploaders/gitlab_uploader.rb
@@ -1,64 +1,56 @@
class GitlabUploader < CarrierWave::Uploader::Base
- def self.absolute_path(upload_record)
- File.join(CarrierWave.root, upload_record.path)
- end
+ class_attribute :options
- def self.root_dir
- 'uploads'
- end
+ class << self
+ # DSL setter
+ def storage_options(options)
+ self.options = options
+ end
- # When object storage is used, keep the `root_dir` as `base_dir`.
- # The files aren't really in folders there, they just have a name.
- # The files that contain user input in their name, also contain a hash, so
- # the names are still unique
- #
- # This method is overridden in the `FileUploader`
- def self.base_dir
- return root_dir unless file_storage?
+ def root
+ options.storage_path
+ end
- File.join(root_dir, '-', 'system')
- end
+ # represent the directory namespacing at the class level
+ def base_dir
+ options.fetch('base_dir', '')
+ end
- def self.file_storage?
- self.storage == CarrierWave::Storage::File
+ def file_storage?
+ storage == CarrierWave::Storage::File
+ end
+
+ def absolute_path(upload_record)
+ File.join(root, upload_record.path)
+ end
end
+ storage_options Gitlab.config.uploads
+
delegate :base_dir, :file_storage?, to: :class
def file_cache_storage?
cache_storage.is_a?(CarrierWave::Storage::File)
end
- # Reduce disk IO
def move_to_cache
- true
+ file_storage?
end
- # Reduce disk IO
def move_to_store
- true
- end
-
- # Designed to be overridden by child uploaders that have a dynamic path
- # segment -- that is, a path that changes based on mutable attributes of its
- # associated model
- #
- # For example, `FileUploader` builds the storage path based on the associated
- # project model's `path_with_namespace` value, which can change when the
- # project or its containing namespace is moved or renamed.
- def relative_path
- self.file.path.sub("#{root}/", '')
+ file_storage?
end
def exists?
file.present?
end
- # Override this if you don't want to save files by default to the Rails.root directory
+ def cache_dir
+ File.join(root, base_dir, 'tmp/cache')
+ end
+
def work_dir
- # Default path set by CarrierWave:
- # https://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L182
- CarrierWave.tmp_path
+ File.join(root, base_dir, 'tmp/work')
end
def filename
@@ -67,6 +59,17 @@ class GitlabUploader < CarrierWave::Uploader::Base
private
+ # Designed to be overridden by child uploaders that have a dynamic path
+ # segment -- that is, a path that changes based on mutable attributes of its
+ # associated model
+ #
+ # For example, `FileUploader` builds the storage path based on the associated
+ # project model's `path_with_namespace` value, which can change when the
+ # project or its containing namespace is moved or renamed.
+ def dynamic_segment
+ raise(NotImplementedError)
+ end
+
# To prevent files from moving across filesystems, override the default
# implementation:
# http://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L181-L183
@@ -74,6 +77,6 @@ class GitlabUploader < CarrierWave::Uploader::Base
# To be safe, keep this directory outside of the cache directory
# because calling CarrierWave.clean_cache_files! will remove any files in
# the cache directory.
- File.join(work_dir, @cache_id, version_name.to_s, for_file)
+ File.join(work_dir, cache_id, version_name.to_s, for_file)
end
end
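
`storage_options` is a small class-level DSL: every path the uploader touches is now derived from a single config object instead of hard-coded constants. A sketch of a subclass, assuming a config that responds like `Settings.uploads`:

    class ExampleUploader < GitlabUploader
      storage_options Gitlab.config.uploads # provides .root and .base_dir

      private

      # each concrete uploader supplies its own dynamic path segment
      def dynamic_segment
        File.join(model.class.to_s.underscore, model.id.to_s)
      end
    end

    ExampleUploader.root          # e.g. "/home/git/gitlab/public"
    ExampleUploader.base_dir      # e.g. "uploads/-/system"
    ExampleUploader.new.cache_dir # "<root>/<base_dir>/tmp/cache"
    ExampleUploader.new.work_dir  # "<root>/<base_dir>/tmp/work"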
diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb
index a0757dbe6b2..3ad3e6ea32b 100644
--- a/app/uploaders/job_artifact_uploader.rb
+++ b/app/uploaders/job_artifact_uploader.rb
@@ -1,13 +1,8 @@
-class JobArtifactUploader < ObjectStoreUploader
- storage_options Gitlab.config.artifacts
-
- def self.local_store_path
- Gitlab.config.artifacts.path
- end
+class JobArtifactUploader < GitlabUploader
+ extend Workhorse::UploadPath
+ include ObjectStorage::Concern
- def self.artifacts_upload_path
- File.join(self.local_store_path, 'tmp/uploads/')
- end
+ storage_options Gitlab.config.artifacts
def size
return super if model.size.nil?
@@ -15,9 +10,13 @@ class JobArtifactUploader < ObjectStoreUploader
model.size
end
+ def store_dir
+ dynamic_segment
+ end
+
private
- def default_path
+ def dynamic_segment
creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
diff --git a/app/uploaders/legacy_artifact_uploader.rb b/app/uploaders/legacy_artifact_uploader.rb
index 476a46c1754..b726b053493 100644
--- a/app/uploaders/legacy_artifact_uploader.rb
+++ b/app/uploaders/legacy_artifact_uploader.rb
@@ -1,17 +1,16 @@
-class LegacyArtifactUploader < ObjectStoreUploader
- storage_options Gitlab.config.artifacts
+class LegacyArtifactUploader < GitlabUploader
+ extend Workhorse::UploadPath
+ include ObjectStorage::Concern
- def self.local_store_path
- Gitlab.config.artifacts.path
- end
+ storage_options Gitlab.config.artifacts
- def self.artifacts_upload_path
- File.join(self.local_store_path, 'tmp/uploads/')
+ def store_dir
+ dynamic_segment
end
private
- def default_path
+ def dynamic_segment
File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
end
end
diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb
index fa42e4710b7..e7cce1bbb0a 100644
--- a/app/uploaders/lfs_object_uploader.rb
+++ b/app/uploaders/lfs_object_uploader.rb
@@ -1,17 +1,25 @@
-class LfsObjectUploader < ObjectStoreUploader
- storage_options Gitlab.config.lfs
+class LfsObjectUploader < GitlabUploader
+ extend Workhorse::UploadPath
+ include ObjectStorage::Concern
- def self.local_store_path
- Gitlab.config.lfs.storage_path
+ # LfsObject files are in `tmp/upload` instead of `tmp/uploads`
+ def self.workhorse_upload_path
+ File.join(root, 'tmp/upload')
end
+ storage_options Gitlab.config.lfs
+
def filename
model.oid[4..-1]
end
+ def store_dir
+ dynamic_segment
+ end
+
private
- def default_path
- "#{model.oid[0, 2]}/#{model.oid[2, 2]}"
+ def dynamic_segment
+ File.join(model.oid[0, 2], model.oid[2, 2])
end
end
diff --git a/app/uploaders/namespace_file_uploader.rb b/app/uploaders/namespace_file_uploader.rb
index 672126e9ec2..269415b1926 100644
--- a/app/uploaders/namespace_file_uploader.rb
+++ b/app/uploaders/namespace_file_uploader.rb
@@ -1,15 +1,26 @@
class NamespaceFileUploader < FileUploader
- def self.base_dir
- File.join(root_dir, '-', 'system', 'namespace')
+ # Re-Override
+ def self.root
+ options.storage_path
end
- def self.dynamic_path_segment(model)
- dynamic_path_builder(model.id.to_s)
+ def self.base_dir(model)
+ File.join(options.base_dir, 'namespace', model_path_segment(model))
end
- private
+ def self.model_path_segment(model)
+ File.join(model.id.to_s)
+ end
+
+ # Re-Override
+ def store_dir
+ store_dirs[object_store]
+ end
- def secure_url
- File.join('/uploads', @secret, file.filename)
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join('namespace', model_path_segment, dynamic_segment)
+ }
end
end
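
`store_dirs` makes the local/remote split explicit: the same upload maps to a namespaced path on disk but a flatter key in the bucket. For a hypothetical group with id 42:

    uploader = NamespaceFileUploader.new(group, secret)
    uploader.store_dirs
    # => {
    #      ObjectStorage::Store::LOCAL  => "uploads/-/system/namespace/42/#{secret}",
    #      ObjectStorage::Store::REMOTE => "namespace/42/#{secret}"
    #    }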
diff --git a/app/uploaders/object_store_uploader.rb b/app/uploaders/object_store_uploader.rb
deleted file mode 100644
index bb25dc4219f..00000000000
--- a/app/uploaders/object_store_uploader.rb
+++ /dev/null
@@ -1,215 +0,0 @@
-require 'fog/aws'
-require 'carrierwave/storage/fog'
-
-class ObjectStoreUploader < GitlabUploader
- before :store, :set_default_local_store
- before :store, :verify_license!
-
- LOCAL_STORE = 1
- REMOTE_STORE = 2
-
- class << self
- def storage_options(options)
- @storage_options = options
- end
-
- def object_store_options
- @storage_options&.object_store
- end
-
- def object_store_enabled?
- object_store_options&.enabled
- end
-
- def background_upload_enabled?
- object_store_options&.background_upload
- end
-
- def object_store_credentials
- @object_store_credentials ||= object_store_options&.connection&.to_hash&.deep_symbolize_keys
- end
-
- def object_store_directory
- object_store_options&.remote_directory
- end
-
- def local_store_path
- raise NotImplementedError
- end
- end
-
- def file_storage?
- storage.is_a?(CarrierWave::Storage::File)
- end
-
- def file_cache_storage?
- cache_storage.is_a?(CarrierWave::Storage::File)
- end
-
- def real_object_store
- model.public_send(store_serialization_column) # rubocop:disable GitlabSecurity/PublicSend
- end
-
- def object_store
- subject.public_send(:"#{field}_store")
- end
-
- def object_store=(value)
- @storage = nil
- model.public_send(:"#{store_serialization_column}=", value) # rubocop:disable GitlabSecurity/PublicSend
- end
-
- def store_dir
- if file_storage?
- default_local_path
- else
- default_path
- end
- end
-
- def use_file
- if file_storage?
- return yield path
- end
-
- begin
- cache_stored_file!
- yield cache_path
- ensure
- cache_storage.delete_dir!(cache_path(nil))
- end
- end
-
- def filename
- super || file&.filename
- end
-
- def migrate!(new_store)
- raise 'Undefined new store' unless new_store
-
- return unless object_store != new_store
- return unless file
-
- old_file = file
- old_store = object_store
-
- # for moving remote file we need to first store it locally
- cache_stored_file! unless file_storage?
-
- # change storage
- self.object_store = new_store
-
- with_callbacks(:store, file) do
- storage.store!(file).tap do |new_file|
- # since we change storage store the new storage
- # in case of failure delete new file
- begin
- model.save!
- rescue => e
- new_file.delete
- self.object_store = old_store
- raise e
- end
-
- old_file.delete
- end
- end
- end
-
- def schedule_migration_to_object_storage(*args)
- return unless self.class.object_store_enabled?
- return unless self.class.background_upload_enabled?
- return unless self.licensed?
- return unless self.file_storage?
-
- ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
- end
-
- def fog_directory
- self.class.object_store_options.remote_directory
- end
-
- def fog_credentials
- self.class.object_store_options.connection
- end
-
- def fog_public
- false
- end
-
- def move_to_store
- file.try(:storage) == storage
- end
-
- def move_to_cache
- file.try(:storage) == cache_storage
- end
-
- # We block storing artifacts on Object Storage, not receiving
- def verify_license!(new_file)
- return if file_storage?
-
- raise 'Object Storage feature is missing' unless licensed?
- end
-
- def exists?
- file.try(:exists?)
- end
-
- def cache_dir
- File.join(self.class.local_store_path, 'tmp/cache')
- end
-
- # Override this if you don't want to save local files by default to the Rails.root directory
- def work_dir
- # Default path set by CarrierWave:
- # https://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L182
- # CarrierWave.tmp_path
- File.join(self.class.local_store_path, 'tmp/work')
- end
-
- def licensed?
- License.feature_available?(:object_storage)
- end
-
- private
-
- def set_default_local_store(new_file)
- self.object_store = LOCAL_STORE unless self.object_store
- end
-
- def default_local_path
- File.join(self.class.local_store_path, default_path)
- end
-
- def default_path
- raise NotImplementedError
- end
-
- def serialization_column
- model.class.uploader_option(mounted_as, :mount_on) || mounted_as
- end
-
- def store_serialization_column
- :"#{serialization_column}_store"
- end
-
- def storage
- @storage ||=
- if object_store == REMOTE_STORE
- remote_storage
- else
- local_storage
- end
- end
-
- def remote_storage
- raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
-
- CarrierWave::Storage::Fog.new(self)
- end
-
- def local_storage
- CarrierWave::Storage::File.new(self)
- end
-end
diff --git a/app/uploaders/personal_file_uploader.rb b/app/uploaders/personal_file_uploader.rb
index 3298ad104ec..440972affec 100644
--- a/app/uploaders/personal_file_uploader.rb
+++ b/app/uploaders/personal_file_uploader.rb
@@ -1,23 +1,40 @@
class PersonalFileUploader < FileUploader
- def self.dynamic_path_segment(model)
- File.join(CarrierWave.root, model_path(model))
+ # Re-Override
+ def self.root
+ options.storage_path
end
- def self.base_dir
- File.join(root_dir, '-', 'system')
+ def self.base_dir(model)
+ File.join(options.base_dir, model_path_segment(model))
end
- private
+ def self.model_path_segment(model)
+ return 'temp/' unless model
- def secure_url
- File.join(self.class.model_path(model), secret, file.filename)
+ File.join(model.class.to_s.underscore, model.id.to_s)
+ end
+
+ def object_store
+ return Store::LOCAL unless model
+
+ super
+ end
+
+ # Revert-Override
+ def store_dir
+ store_dirs[object_store]
+ end
+
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join(model_path_segment, dynamic_segment)
+ }
end
- def self.model_path(model)
- if model
- File.join("/#{base_dir}", model.class.to_s.underscore, model.id.to_s)
- else
- File.join("/#{base_dir}", 'temp')
- end
+ private
+
+ def secure_url
+ File.join('/', base_dir, secret, file.filename)
end
end
diff --git a/app/uploaders/records_uploads.rb b/app/uploaders/records_uploads.rb
index feb4f04d7b7..dfb8dccec57 100644
--- a/app/uploaders/records_uploads.rb
+++ b/app/uploaders/records_uploads.rb
@@ -1,35 +1,61 @@
module RecordsUploads
- extend ActiveSupport::Concern
+ module Concern
+ extend ActiveSupport::Concern
- included do
- after :store, :record_upload
- before :remove, :destroy_upload
- end
+ attr_accessor :upload
- # After storing an attachment, create a corresponding Upload record
- #
- # NOTE: We're ignoring the argument passed to this callback because we want
- # the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the
- # `Tempfile` object the callback gets.
- #
- # Called `after :store`
- def record_upload(_tempfile = nil)
- return unless model
- return unless file_storage?
- return unless file.exists?
-
- Upload.record(self)
- end
+ included do
+ after :store, :record_upload
+ before :remove, :destroy_upload
+ end
+
+ # After storing an attachment, create a corresponding Upload record
+ #
+ # NOTE: We're ignoring the argument passed to this callback because we want
+ # the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the
+ # `Tempfile` object the callback gets.
+ #
+ # Called `after :store`
+ def record_upload(_tempfile = nil)
+ return unless model
+ return unless file && file.exists?
+
+ Upload.transaction do
+ uploads.where(path: upload_path).delete_all
+ upload.destroy! if upload
+
+ self.upload = build_upload_from_uploader(self)
+ upload.save!
+ end
+ end
+
+ def upload_path
+ File.join(store_dir, filename.to_s)
+ end
+
+ private
+
+ def uploads
+ Upload.order(id: :desc).where(uploader: self.class.to_s)
+ end
- private
+ def build_upload_from_uploader(uploader)
+ Upload.new(
+ size: uploader.file.size,
+ path: uploader.upload_path,
+ model: uploader.model,
+ uploader: uploader.class.to_s
+ )
+ end
- # Before removing an attachment, destroy any Upload records at the same path
- #
- # Called `before :remove`
- def destroy_upload(*args)
- return unless file_storage?
- return unless file
+ # Before removing an attachment, destroy any Upload records at the same path
+ #
+ # Called `before :remove`
+ def destroy_upload(*args)
+ return unless file && file.exists?
- Upload.remove_path(relative_path)
+ self.upload = nil
+ uploads.where(path: upload_path).delete_all
+ end
end
end
diff --git a/app/uploaders/uploader_helper.rb b/app/uploaders/uploader_helper.rb
index 7635c20ab3a..fd446d31092 100644
--- a/app/uploaders/uploader_helper.rb
+++ b/app/uploaders/uploader_helper.rb
@@ -32,14 +32,7 @@ module UploaderHelper
def extension_match?(extensions)
return false unless file
- extension =
- if file.respond_to?(:extension)
- file.extension
- else
- # Not all CarrierWave storages respond to :extension
- File.extname(file.path).delete('.')
- end
-
+ extension = file.try(:extension) || File.extname(file.path).delete('.')
extensions.include?(extension.downcase)
end
end
diff --git a/app/uploaders/workhorse.rb b/app/uploaders/workhorse.rb
new file mode 100644
index 00000000000..782032cf516
--- /dev/null
+++ b/app/uploaders/workhorse.rb
@@ -0,0 +1,7 @@
+module Workhorse
+ module UploadPath
+ def workhorse_upload_path
+ File.join(root, base_dir, 'tmp/uploads')
+ end
+ end
+end
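
Extending this module gives each uploader class a `workhorse_upload_path` derived from its own `root` and `base_dir`, replacing the per-class `artifacts_upload_path` helpers and hard-coded tmp paths removed above. Usage sketch (actual paths depend on local config):

    JobArtifactUploader.workhorse_upload_path # => "<artifacts root>/tmp/uploads"
    LfsObjectUploader.workhorse_upload_path   # => "<lfs root>/tmp/upload" (overridden, see above)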
diff --git a/app/workers/object_storage_upload_worker.rb b/app/workers/object_storage_upload_worker.rb
index 0b9411ff2df..e087261770f 100644
--- a/app/workers/object_storage_upload_worker.rb
+++ b/app/workers/object_storage_upload_worker.rb
@@ -8,16 +8,16 @@ class ObjectStorageUploadWorker
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
+ return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
+ return unless uploader_class.licensed?
return unless uploader_class.background_upload_enabled?
- subject = subject_class.find_by(id: subject_id)
- return unless subject
-
- file = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
-
- return unless file.licensed?
-
- file.migrate!(uploader_class::REMOTE_STORE)
+ subject = subject_class.find(subject_id)
+ uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
+ uploader.migrate!(ObjectStorage::Store::REMOTE)
+ rescue ActiveRecord::RecordNotFound
+ # do not retry when the record does not exist
+ Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
end
end
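
For reference, the enqueue side (as the deleted ObjectStoreUploader did, and the new `ObjectStorage::Concern` presumably still does) passes class names rather than objects, so the worker re-resolves everything at run time:

    # hypothetical call site, mirroring schedule_migration_to_object_storage
    ObjectStorageUploadWorker.perform_async("AvatarUploader", "User", :avatar, user.id)

    # perform then boils down to:
    #   User.find(user.id).avatar.migrate!(ObjectStorage::Store::REMOTE)
    # after bailing out early when object storage, the license,
    # or background upload is disabled.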
diff --git a/app/workers/upload_checksum_worker.rb b/app/workers/upload_checksum_worker.rb
index 9222760c031..65d40336f18 100644
--- a/app/workers/upload_checksum_worker.rb
+++ b/app/workers/upload_checksum_worker.rb
@@ -3,7 +3,7 @@ class UploadChecksumWorker
def perform(upload_id)
upload = Upload.find(upload_id)
- upload.calculate_checksum
+ upload.calculate_checksum!
upload.save!
rescue ActiveRecord::RecordNotFound
Rails.logger.error("UploadChecksumWorker: couldn't find upload #{upload_id}, skipping")
diff --git a/changelogs/unreleased-ee/4163-move-uploads-to-object-storage.yml b/changelogs/unreleased-ee/4163-move-uploads-to-object-storage.yml
new file mode 100644
index 00000000000..18910f0d97b
--- /dev/null
+++ b/changelogs/unreleased-ee/4163-move-uploads-to-object-storage.yml
@@ -0,0 +1,5 @@
+---
+title: Add object storage support for uploads.
+merge_request: 3867
+author:
+type: added
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index cab72032d22..c360c42509a 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -174,6 +174,25 @@ production: &base
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
+ ## Uploads (attachments, avatars, etc...)
+ uploads:
+ # The location where uploads objects are stored (default: public/).
+ # storage_path: public/
+ # base_dir: uploads/-/system
+ object_store:
+ enabled: true
+ remote_directory: uploads # Bucket name
+ # background_upload: false # Temporary option to limit automatic upload (Default: true)
+ connection:
+ provider: AWS
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
+ # Use the following options to configure an AWS compatible host
+ # host: 'localhost' # default: s3.amazonaws.com
+ # endpoint: 'http://127.0.0.1:9000' # default: nil
+ # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
+
## GitLab Pages
pages:
enabled: false
@@ -686,6 +705,16 @@ test:
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
+ uploads:
+ storage_path: tmp/tests/public
+ enabled: true
+ object_store:
+ enabled: false
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
gitlab:
host: localhost
port: 80
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index b0cfd50233a..ab953583df9 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -298,13 +298,15 @@ Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled']
#
Settings['artifacts'] ||= Settingslogic.new({})
Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
-Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
-Settings.artifacts['max_size'] ||= 100 # in megabytes
+Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts"))
+# Settings.artifacts['path'] is deprecated, use `storage_path` instead
+Settings.artifacts['path'] = Settings.artifacts['storage_path']
+Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
-Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
-Settings.artifacts['object_store']['remote_directory'] ||= nil
-Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
+Settings.artifacts['object_store']['enabled'] ||= false
+Settings.artifacts['object_store']['remote_directory'] ||= nil
+Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
@@ -342,15 +344,27 @@ Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pa
Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
-
Settings.lfs['object_store'] ||= Settingslogic.new({})
-Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
-Settings.lfs['object_store']['remote_directory'] ||= nil
-Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
+Settings.lfs['object_store']['enabled'] ||= false
+Settings.lfs['object_store']['remote_directory'] ||= nil
+Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.lfs['object_store']['connection']&.deep_stringify_keys!
#
+# Uploads
+#
+Settings['uploads'] ||= Settingslogic.new({})
+Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
+Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
+Settings.uploads['object_store'] ||= Settingslogic.new({})
+Settings.uploads['object_store']['enabled'] ||= false
+Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
+Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.uploads['object_store']['connection']&.deep_stringify_keys!
+
+#
# Mattermost
#
Settings['mattermost'] ||= Settingslogic.new({})
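
Note that `||=` is only safe for these defaults when `false` is not a meaningful stored value, which is why the `background_upload` flags keep an explicit nil check:

    flag = false   # an admin explicitly disabled background upload
    flag ||= true  # wrong: false is falsey, so the "default" flips it to true

    flag = false
    flag = true if flag.nil? # correct: an explicit false survives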
diff --git a/db/migrate/20171214144320_add_store_column_to_uploads.rb b/db/migrate/20171214144320_add_store_column_to_uploads.rb
new file mode 100644
index 00000000000..bad20dcdbcf
--- /dev/null
+++ b/db/migrate/20171214144320_add_store_column_to_uploads.rb
@@ -0,0 +1,12 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddStoreColumnToUploads < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column :uploads, :store, :integer
+ end
+end
diff --git a/db/migrate/20180119135717_add_uploader_index_to_uploads.rb b/db/migrate/20180119135717_add_uploader_index_to_uploads.rb
new file mode 100644
index 00000000000..a678c3d049f
--- /dev/null
+++ b/db/migrate/20180119135717_add_uploader_index_to_uploads.rb
@@ -0,0 +1,20 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddUploaderIndexToUploads < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ remove_concurrent_index :uploads, :path
+ add_concurrent_index :uploads, [:uploader, :path], using: :btree
+ end
+
+ def down
+ remove_concurrent_index :uploads, [:uploader, :path]
+ add_concurrent_index :uploads, :path, using: :btree
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 02c44bccc61..b6800ff926e 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -1760,11 +1760,12 @@ ActiveRecord::Schema.define(version: 20171230123729) do
t.string "model_type"
t.string "uploader", null: false
t.datetime "created_at", null: false
+ t.integer "store"
end
add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
add_index "uploads", ["model_id", "model_type"], name: "index_uploads_on_model_id_and_model_type", using: :btree
- add_index "uploads", ["path"], name: "index_uploads_on_path", using: :btree
+ add_index "uploads", ["uploader", "path"], name: "index_uploads_on_uploader_and_path", using: :btree
create_table "user_agent_details", force: :cascade do |t|
t.string "user_agent", null: false
diff --git a/doc/development/file_storage.md b/doc/development/file_storage.md
index cf00e24e11a..76354b92820 100644
--- a/doc/development/file_storage.md
+++ b/doc/development/file_storage.md
@@ -14,8 +14,8 @@ There are many places where file uploading is used, according to contexts:
- User snippet attachments
* Project
- Project avatars
- - Issues/MR Markdown attachments
- - Issues/MR Legacy Markdown attachments
+ - Issues/MR/Notes Markdown attachments
+ - Issues/MR/Notes Legacy Markdown attachments
- CI Build Artifacts
- LFS Objects
@@ -25,7 +25,7 @@ There are many places where file uploading is used, according to contexts:
GitLab started saving everything on local disk. While directory locations have changed from previous versions,
they are still not 100% standardized. You can see them below:
-| Description | In DB? | Relative path | Uploader class | model_type |
+| Description | In DB? | Relative path (from CarrierWave.root) | Uploader class | model_type |
| ------------------------------------- | ------ | ----------------------------------------------------------- | ---------------------- | ---------- |
| Instance logo | yes | uploads/-/system/appearance/logo/:id/:filename | `AttachmentUploader` | Appearance |
| Header logo | yes | uploads/-/system/appearance/header_logo/:id/:filename | `AttachmentUploader` | Appearance |
@@ -33,17 +33,107 @@ they are still not 100% standardized. You can see them below:
| User avatars | yes | uploads/-/system/user/avatar/:id/:filename | `AvatarUploader` | User |
| User snippet attachments | yes | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet |
| Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project |
-| Issues/MR Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
-| Issues/MR Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
+| Issues/MR/Notes Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
+| Issues/MR/Notes Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
| CI Artifacts (CE) | yes | shared/artifacts/:year_:month/:project_id/:id | `ArtifactUploader` | Ci::Build |
| LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject |
CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`
-while in EE they inherit the `ObjectStoreUploader` and store files in and S3 API compatible object store.
+while in EE they include the `ObjectStorage` concern and store files in an S3 API-compatible object store.
-In the case of Issues/MR Markdown attachments, there is a different approach using the [Hashed Storage] layout,
+In the case of Issues/MR/Notes Markdown attachments, there is a different approach using the [Hashed Storage] layout:
instead of basing the path on the mutable `:project_path_with_namespace`, it is possible to use the
hash of the project ID instead, once the project has migrated to the new approach (introduced in 10.2).
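+
+As an illustration only (a sketch, assuming hashed storage derives attachment paths from the SHA-256 of the project ID; `project` is a placeholder):
+
+```ruby
+require 'digest'
+
+# Derive a stable path from the immutable project ID instead of the
+# mutable :project_path_with_namespace.
+disk_hash = Digest::SHA2.hexdigest(project.id.to_s)
+path = File.join('@hashed', disk_hash[0..1], disk_hash[2..3], disk_hash)
+```
+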
+### Path segments
+
+Files are stored at multiple locations and use different path schemes.
+All the `GitlabUploader` derived classes should comply with this path segment schema:
+
+```
+| GitlabUploader
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `<gitlab_root>/public/` | `uploads/-/system/` | `user/avatar/:id/` | `:filename` |
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `CarrierWave.root` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
+| | `CarrierWave::Uploader#store_dir` | |
+
+| FileUploader
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `<gitlab_root>/shared/` | `artifacts/` | `:year_:month/:id` | `:filename` |
+| `<gitlab_root>/shared/` | `snippets/` | `:secret/` | `:filename` |
+| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
+| `CarrierWave.root` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
+| | `CarrierWave::Uploader#store_dir` | |
+| | | `FileUploader#upload_path` |
+
+| ObjectStorage::Concern (store = remote)
+| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
+| `<bucket_name>` | <ignored> | `user/avatar/:id/` | `:filename` |
+| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
+| `#fog_directory` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
+| | | `ObjectStorage::Concern#store_dir` | |
+| | | `ObjectStorage::Concern#upload_path` |
+```
+
+The `RecordsUploads::Concern` creates an `Upload` entry for every file stored by a `GitlabUploader`, persisting the dynamic parts of the path using
+`GitlabUploader#dynamic_path`. You may then use `Upload#build_uploader` to manipulate the file.
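+
+For illustration, a minimal sketch (assuming an avatar `Upload` row exists for `user`; names are illustrative):
+
+```ruby
+# Fetch the persisted metadata and rebuild an uploader for the file.
+upload = Upload.find_by(model: user, uploader: 'AvatarUploader')
+uploader = upload.build_uploader # manipulate the file through the uploader
+```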
+
+## Object Storage
+
+By including `ObjectStorage::Concern` in a `GitlabUploader`-derived class, you add object storage support to that uploader. To enable it,
+you need to either 1) include `RecordsUploads::Concern` and prepend `ObjectStorage::Extension::RecordsUploads`, or 2) mount the uploader and create a new field named `<mount>_store`.
+
+`CarrierWave::Uploader#store_dir` is overridden to:
+
+ - `GitlabUploader.base_dir` + `GitlabUploader#dynamic_segment` when the store is LOCAL
+ - `GitlabUploader#dynamic_segment` when the store is REMOTE (the bucket name is used as the namespace)
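+
+For example (an illustrative sketch for an avatar uploader, assuming the default `base_dir` above):
+
+```ruby
+# The same uploader resolves to a different directory per store:
+uploader.object_store = ObjectStorage::Store::LOCAL
+uploader.store_dir # => "uploads/-/system/user/avatar/1"
+
+uploader.object_store = ObjectStorage::Store::REMOTE
+uploader.store_dir # => "user/avatar/1" (namespaced by the bucket)
+```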
+
+### Using `ObjectStorage::Extension::RecordsUploads`
+
+> Note: this concern will automatically include `RecordsUploads::Concern` if not already included.
+
+The `ObjectStorage::Concern` uploader will search for the matching `Upload` to select the correct object store. The `Upload` is mapped using `#store_dirs + identifier` for each store (LOCAL/REMOTE).
+
+```ruby
+class SongUploader < GitlabUploader
+ include RecordsUploads::Concern
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
+
+ ...
+end
+
+class Thing < ActiveRecord::Base
+ mount_uploader :theme, SongUploader # we have a great theme song!
+
+ ...
+end
+```
+
+### Using a mounted uploader
+
+The `ObjectStorage::Concern` will query the `model.<mount>_store` attribute to select the correct object store.
+This column must be present in the model schema.
+
+```ruby
+class SongUploader < GitlabUploader
+ include ObjectStorage::Concern
+
+ ...
+end
+
+class Thing < ActiveRecord::Base
+ # `theme_store` is an integer column on the model
+ mount_uploader :theme, SongUploader # we have a great theme song!
+
+ def theme_store
+ super || ObjectStorage::Store::LOCAL
+ end
+
+ ...
+end
+```
+
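+Once a store column is wired up, a file can be moved between stores with `ObjectStorage::Concern#migrate!` (a sketch, reusing the illustrative `Thing`/`theme` names from above and assuming object storage is enabled and licensed):
+
+```ruby
+thing = Thing.find(1)
+# Upload the file to the remote store, persist the new store value to
+# `theme_store`, and delete the migrated local copy.
+thing.theme.migrate!(ObjectStorage::Store::REMOTE)
+thing.theme_store # => ObjectStorage::Store::REMOTE
+```
+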
[CarrierWave]: https://github.com/carrierwaveuploader/carrierwave
[Hashed Storage]: ../administration/repository_storage_types.md
diff --git a/ee/app/models/ee/ci/job_artifact.rb b/ee/app/models/ee/ci/job_artifact.rb
new file mode 100644
index 00000000000..02c6715f447
--- /dev/null
+++ b/ee/app/models/ee/ci/job_artifact.rb
@@ -0,0 +1,25 @@
+module EE
+ # CI::JobArtifact EE mixin
+ #
+ # This module is intended to encapsulate EE-specific model logic
+ # and be prepended in the `Ci::JobArtifact` model
+ module Ci::JobArtifact
+ extend ActiveSupport::Concern
+
+ prepended do
+ after_destroy :log_geo_event
+
+ scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) }
+ end
+
+ def local_store?
+ [nil, JobArtifactUploader::Store::LOCAL].include?(self.file_store)
+ end
+
+ private
+
+ def log_geo_event
+ ::Geo::JobArtifactDeletedEventStore.new(self).create
+ end
+ end
+end
diff --git a/ee/app/models/ee/lfs_object.rb b/ee/app/models/ee/lfs_object.rb
new file mode 100644
index 00000000000..6962c2bea4f
--- /dev/null
+++ b/ee/app/models/ee/lfs_object.rb
@@ -0,0 +1,23 @@
+module EE
+ # LFS Object EE mixin
+ #
+ # This module is intended to encapsulate EE-specific model logic
+ # and be prepended in the `LfsObject` model
+ module LfsObject
+ extend ActiveSupport::Concern
+
+ prepended do
+ after_destroy :log_geo_event
+ end
+
+ def local_store?
+ [nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
+ end
+
+ private
+
+ def log_geo_event
+ ::Geo::LfsObjectDeletedEventStore.new(self).create
+ end
+ end
+end
diff --git a/ee/app/models/geo/fdw/ci/job_artifact.rb b/ee/app/models/geo/fdw/ci/job_artifact.rb
new file mode 100644
index 00000000000..eaca84b332e
--- /dev/null
+++ b/ee/app/models/geo/fdw/ci/job_artifact.rb
@@ -0,0 +1,11 @@
+module Geo
+ module Fdw
+ module Ci
+ class JobArtifact < ::Geo::BaseFdw
+ self.table_name = Gitlab::Geo.fdw_table('ci_job_artifacts')
+
+ scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) }
+ end
+ end
+ end
+end
diff --git a/ee/app/models/geo/fdw/lfs_object.rb b/ee/app/models/geo/fdw/lfs_object.rb
new file mode 100644
index 00000000000..18aae28518d
--- /dev/null
+++ b/ee/app/models/geo/fdw/lfs_object.rb
@@ -0,0 +1,9 @@
+module Geo
+ module Fdw
+ class LfsObject < ::Geo::BaseFdw
+ self.table_name = Gitlab::Geo.fdw_table('lfs_objects')
+
+ scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
+ end
+ end
+end
diff --git a/ee/app/services/geo/files_expire_service.rb b/ee/app/services/geo/files_expire_service.rb
new file mode 100644
index 00000000000..e3604674d85
--- /dev/null
+++ b/ee/app/services/geo/files_expire_service.rb
@@ -0,0 +1,77 @@
+module Geo
+ class FilesExpireService
+ include ::Gitlab::Geo::LogHelpers
+
+ BATCH_SIZE = 500
+
+ attr_reader :project, :old_full_path
+
+ def initialize(project, old_full_path)
+ @project = project
+ @old_full_path = old_full_path
+ end
+
+ # Expire already replicated uploads
+ #
+ # This is a fallback solution to support projects that haven't migrated to hashed storage yet.
+ #
+ # Note: Unless we add some locking mechanism, this will be best effort only:
+ # files that are being replicated during this execution will not
+ # be expired.
+ #
+ # The long-term solution is to use hashed storage.
+ def execute
+ return unless Gitlab::Geo.secondary?
+
+ uploads = finder.find_project_uploads(project)
+ log_info("Expiring replicated attachments after project rename", count: uploads.count)
+
+ schedule_file_removal(uploads)
+ mark_for_resync!
+ end
+
+ # Project's base directory for attachments storage
+ #
+ # @return base directory where all uploads for the project are stored
+ def base_dir
+ @base_dir ||= File.join(FileUploader.root, old_full_path)
+ end
+
+ private
+
+ def schedule_file_removal(uploads)
+ paths_to_remove = uploads.find_each(batch_size: BATCH_SIZE).each_with_object([]) do |upload, to_remove|
+ file_path = File.join(base_dir, upload.path)
+
+ if File.exist?(file_path)
+ to_remove << [file_path]
+
+ log_info("Scheduled to remove file", file_path: file_path)
+ end
+ end
+
+ Geo::FileRemovalWorker.bulk_perform_async(paths_to_remove)
+ end
+
+ def mark_for_resync!
+ finder.find_file_registries_uploads(project).delete_all
+ end
+
+ def finder
+ @finder ||= ::Geo::ExpireUploadsFinder.new
+ end
+
+ # This is called by LogHelpers to build json log with context info
+ #
+ # @see ::Gitlab::Geo::LogHelpers
+ def base_log_data(message)
+ {
+ class: self.class.name,
+ project_id: project.id,
+ project_path: project.full_path,
+ project_old_path: old_full_path,
+ message: message
+ }
+ end
+ end
+end
diff --git a/ee/app/services/geo/hashed_storage_attachments_migration_service.rb b/ee/app/services/geo/hashed_storage_attachments_migration_service.rb
new file mode 100644
index 00000000000..d967d8f6d5e
--- /dev/null
+++ b/ee/app/services/geo/hashed_storage_attachments_migration_service.rb
@@ -0,0 +1,55 @@
+module Geo
+ AttachmentMigrationError = Class.new(StandardError)
+
+ class HashedStorageAttachmentsMigrationService
+ include ::Gitlab::Geo::LogHelpers
+
+ attr_reader :project_id, :old_attachments_path, :new_attachments_path
+
+ def initialize(project_id, old_attachments_path:, new_attachments_path:)
+ @project_id = project_id
+ @old_attachments_path = old_attachments_path
+ @new_attachments_path = new_attachments_path
+ end
+
+ def async_execute
+ Geo::HashedStorageAttachmentsMigrationWorker.perform_async(
+ project_id,
+ old_attachments_path,
+ new_attachments_path
+ )
+ end
+
+ def execute
+ origin = File.join(FileUploader.root, old_attachments_path)
+ target = File.join(FileUploader.root, new_attachments_path)
+ move_folder!(origin, target)
+ end
+
+ private
+
+ def project
+ @project ||= Project.find(project_id)
+ end
+
+ def move_folder!(old_path, new_path)
+ unless File.directory?(old_path)
+ log_info("Skipped attachments migration to Hashed Storage, source path doesn't exist or is not a directory", project_id: project.id, source: old_path, target: new_path)
+ return
+ end
+
+ if File.exist?(new_path)
+ log_error("Cannot migrate attachments to Hashed Storage, target path already exist", project_id: project.id, source: old_path, target: new_path)
+ raise AttachmentMigrationError, "Target path '#{new_path}' already exist"
+ end
+
+ # Create hashed storage base path folder
+ FileUtils.mkdir_p(File.dirname(new_path))
+
+ FileUtils.mv(old_path, new_path)
+ log_info("Migrated project attachments to Hashed Storage", project_id: project.id, source: old_path, target: new_path)
+
+ true
+ end
+ end
+end
diff --git a/ee/app/services/geo/job_artifact_deleted_event_store.rb b/ee/app/services/geo/job_artifact_deleted_event_store.rb
new file mode 100644
index 00000000000..7455773985c
--- /dev/null
+++ b/ee/app/services/geo/job_artifact_deleted_event_store.rb
@@ -0,0 +1,48 @@
+module Geo
+ class JobArtifactDeletedEventStore < EventStore
+ self.event_type = :job_artifact_deleted_event
+
+ attr_reader :job_artifact
+
+ def initialize(job_artifact)
+ @job_artifact = job_artifact
+ end
+
+ def create
+ return unless job_artifact.local_store?
+
+ super
+ end
+
+ private
+
+ def build_event
+ Geo::JobArtifactDeletedEvent.new(
+ job_artifact: job_artifact,
+ file_path: relative_file_path
+ )
+ end
+
+ def local_store_path
+ Pathname.new(JobArtifactUploader.root)
+ end
+
+ def relative_file_path
+ return unless job_artifact.file.present?
+
+ Pathname.new(job_artifact.file.path).relative_path_from(local_store_path)
+ end
+
+ # This is called by ProjectLogHelpers to build json log with context info
+ #
+ # @see ::Gitlab::Geo::ProjectLogHelpers
+ def base_log_data(message)
+ {
+ class: self.class.name,
+ job_artifact_id: job_artifact.id,
+ file_path: job_artifact.file.path,
+ message: message
+ }
+ end
+ end
+end
diff --git a/ee/app/services/geo/lfs_object_deleted_event_store.rb b/ee/app/services/geo/lfs_object_deleted_event_store.rb
new file mode 100644
index 00000000000..9eb47f91472
--- /dev/null
+++ b/ee/app/services/geo/lfs_object_deleted_event_store.rb
@@ -0,0 +1,49 @@
+module Geo
+ class LfsObjectDeletedEventStore < EventStore
+ self.event_type = :lfs_object_deleted_event
+
+ attr_reader :lfs_object
+
+ def initialize(lfs_object)
+ @lfs_object = lfs_object
+ end
+
+ def create
+ return unless lfs_object.local_store?
+
+ super
+ end
+
+ private
+
+ def build_event
+ Geo::LfsObjectDeletedEvent.new(
+ lfs_object: lfs_object,
+ oid: lfs_object.oid,
+ file_path: relative_file_path
+ )
+ end
+
+ def local_store_path
+ Pathname.new(LfsObjectUploader.root)
+ end
+
+ def relative_file_path
+ return unless lfs_object.file.present?
+
+ Pathname.new(lfs_object.file.path).relative_path_from(local_store_path)
+ end
+
+ # This is called by ProjectLogHelpers to build json log with context info
+ #
+ # @see ::Gitlab::Geo::ProjectLogHelpers
+ def base_log_data(message)
+ {
+ class: self.class.name,
+ lfs_object_id: lfs_object.id,
+ file_path: lfs_object.file.path,
+ message: message
+ }
+ end
+ end
+end
diff --git a/ee/app/uploaders/object_storage.rb b/ee/app/uploaders/object_storage.rb
new file mode 100644
index 00000000000..e5b087524f5
--- /dev/null
+++ b/ee/app/uploaders/object_storage.rb
@@ -0,0 +1,265 @@
+require 'fog/aws'
+require 'carrierwave/storage/fog'
+
+#
+# This concern should add object storage support
+# to the GitlabUploader class
+#
+module ObjectStorage
+ RemoteStoreError = Class.new(StandardError)
+ UnknownStoreError = Class.new(StandardError)
+ ObjectStoreUnavailable = Class.new(StandardError)
+
+ module Store
+ LOCAL = 1
+ REMOTE = 2
+ end
+
+ module Extension
+ # this extension is the glue between the ObjectStorage::Concern and RecordsUploads::Concern
+ module RecordsUploads
+ extend ActiveSupport::Concern
+
+ prepended do |base|
+ raise ObjectStoreUnavailable, "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
+
+ base.include(::RecordsUploads::Concern)
+ end
+
+ def retrieve_from_store!(identifier)
+ paths = store_dirs.map { |_store, path| File.join(path, identifier) }
+
+ unless current_upload_satisfies?(paths, model)
+ # the upload we already have isn't right, find the correct one
+ self.upload = uploads.find_by(model: model, path: paths)
+ end
+
+ super
+ end
+
+ def build_upload_from_uploader(uploader)
+ super.tap { |upload| upload.store = object_store }
+ end
+
+ def upload=(upload)
+ return unless upload
+
+ self.object_store = upload.store
+ super
+ end
+
+ private
+
+ def current_upload_satisfies?(paths, model)
+ return false unless upload
+ return false unless model
+
+ paths.include?(upload.path) &&
+ upload.model_id == model.id &&
+ upload.model_type == model.class.base_class.sti_name
+ end
+ end
+ end
+
+ module Concern
+ extend ActiveSupport::Concern
+
+ included do |base|
+ base.include(ObjectStorage)
+
+ before :store, :verify_license!
+ after :migrate, :delete_migrated_file
+ end
+
+ class_methods do
+ def object_store_options
+ options.object_store
+ end
+
+ def object_store_enabled?
+ object_store_options.enabled
+ end
+
+ def background_upload_enabled?
+ object_store_options.background_upload
+ end
+
+ def object_store_credentials
+ object_store_options.connection.to_hash.deep_symbolize_keys
+ end
+
+ def remote_store_path
+ object_store_options.remote_directory
+ end
+
+ def licensed?
+ License.feature_available?(:object_storage)
+ end
+ end
+
+ def file_storage?
+ storage.is_a?(CarrierWave::Storage::File)
+ end
+
+ def file_cache_storage?
+ cache_storage.is_a?(CarrierWave::Storage::File)
+ end
+
+ def object_store
+ @object_store ||= model.try(store_serialization_column) || Store::LOCAL
+ end
+
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def object_store=(value)
+ @object_store = value || Store::LOCAL
+ @storage = storage_for(object_store)
+ end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
+
+ # Return true if the current file is part of the model (i.e. is mounted in the model)
+ #
+ def persist_object_store?
+ model.respond_to?(:"#{store_serialization_column}=")
+ end
+
+ # Save the current @object_store to the model <mounted_as>_store column
+ def persist_object_store!
+ return unless persist_object_store?
+
+ updated = model.update_column(store_serialization_column, object_store)
+ raise ActiveRecord::ActiveRecordError unless updated
+ end
+
+ def use_file
+ if file_storage?
+ return yield path
+ end
+
+ begin
+ cache_stored_file!
+ yield cache_path
+ ensure
+ cache_storage.delete_dir!(cache_path(nil))
+ end
+ end
+
+ def filename
+ super || file&.filename
+ end
+
+ #
+ # Move the file to another store
+ #
+ # new_store: Enum (Store::LOCAL, Store::REMOTE)
+ #
+ def migrate!(new_store)
+ return if object_store == new_store
+ return unless file
+
+ new_file = nil
+ file_to_delete = file
+ from_object_store = object_store
+ self.object_store = new_store # changes the storage and file
+
+ cache_stored_file! if file_storage?
+
+ with_callbacks(:migrate, file_to_delete) do
+ with_callbacks(:store, file_to_delete) do # for #store_versions!
+ new_file = storage.store!(file)
+ persist_object_store!
+ self.file = new_file
+ end
+ end
+
+ file
+ rescue => e
+ # in case of failure delete new file
+ new_file.delete unless new_file.nil?
+ # revert back to the old file
+ self.object_store = from_object_store
+ self.file = file_to_delete
+ raise e
+ end
+
+ def schedule_migration_to_object_storage(*args)
+ return unless self.class.object_store_enabled?
+ return unless self.class.background_upload_enabled?
+ return unless self.class.licensed?
+ return unless self.file_storage?
+
+ ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
+ end
+
+ def fog_directory
+ self.class.remote_store_path
+ end
+
+ def fog_credentials
+ self.class.object_store_credentials
+ end
+
+ def fog_public
+ false
+ end
+
+ def delete_migrated_file(migrated_file)
+ migrated_file.delete if exists?
+ end
+
+ def verify_license!(_file)
+ return if file_storage?
+
+ raise 'Object Storage feature is missing' unless self.class.licensed?
+ end
+
+ def exists?
+ file.present?
+ end
+
+ def store_dir(store = nil)
+ store_dirs[store || object_store]
+ end
+
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join(dynamic_segment)
+ }
+ end
+
+ private
+
+ # this is a hack around CarrierWave. The #migrate method needs to be
+ # able to force the current file to the migrated file upon success.
+ def file=(file)
+ @file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ def serialization_column
+ model.class.uploader_options.dig(mounted_as, :mount_on) || mounted_as
+ end
+
+ # Returns the column where the 'store' is saved
+ # defaults to 'store'
+ def store_serialization_column
+ [serialization_column, 'store'].compact.join('_').to_sym
+ end
+
+ def storage
+ @storage ||= storage_for(object_store)
+ end
+
+ def storage_for(store)
+ case store
+ when Store::REMOTE
+ raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
+
+ CarrierWave::Storage::Fog.new(self)
+ when Store::LOCAL
+ CarrierWave::Storage::File.new(self)
+ else
+ raise UnknownStoreError
+ end
+ end
+ end
+end
diff --git a/ee/lib/gitlab/geo/file_transfer.rb b/ee/lib/gitlab/geo/file_transfer.rb
new file mode 100644
index 00000000000..16db6f2d448
--- /dev/null
+++ b/ee/lib/gitlab/geo/file_transfer.rb
@@ -0,0 +1,24 @@
+module Gitlab
+ module Geo
+ class FileTransfer < Transfer
+ def initialize(file_type, upload)
+ @file_type = file_type
+ @file_id = upload.id
+ @filename = upload.absolute_path
+ @request_data = build_request_data(upload)
+ rescue ObjectStorage::RemoteStoreError
+ Rails.logger.warn "Cannot transfer a remote object."
+ end
+
+ private
+
+ def build_request_data(upload)
+ {
+ id: upload.model_id,
+ type: upload.model_type,
+ checksum: upload.checksum
+ }
+ end
+ end
+ end
+end
diff --git a/ee/lib/gitlab/geo/log_cursor/daemon.rb b/ee/lib/gitlab/geo/log_cursor/daemon.rb
new file mode 100644
index 00000000000..d4596286641
--- /dev/null
+++ b/ee/lib/gitlab/geo/log_cursor/daemon.rb
@@ -0,0 +1,266 @@
+module Gitlab
+ module Geo
+ module LogCursor
+ class Daemon
+ VERSION = '0.2.0'.freeze
+ BATCH_SIZE = 250
+ SECONDARY_CHECK_INTERVAL = 1.minute
+
+ attr_reader :options
+
+ def initialize(options = {})
+ @options = options
+ @exit = false
+ logger.geo_logger.build.level = options[:debug] ? :debug : Rails.logger.level
+ end
+
+ def run!
+ trap_signals
+
+ until exit?
+ # Prevent the node from processing events unless it's a secondary
+ unless Geo.secondary?
+ sleep(SECONDARY_CHECK_INTERVAL)
+ next
+ end
+
+ lease = Lease.try_obtain_with_ttl { run_once! }
+
+ return if exit?
+
+ # When no new event is found, sleep for a few moments
+ arbitrary_sleep(lease[:ttl])
+ end
+ end
+
+ def run_once!
+ LogCursor::Events.fetch_in_batches { |batch| handle_events(batch) }
+ end
+
+ def handle_events(batch)
+ batch.each do |event_log|
+ next unless can_replay?(event_log)
+
+ begin
+ event = event_log.event
+ handler = "handle_#{event.class.name.demodulize.underscore}"
+
+ __send__(handler, event, event_log.created_at) # rubocop:disable GitlabSecurity/PublicSend
+ rescue NoMethodError => e
+ logger.error(e.message)
+ raise e
+ end
+ end
+ end
+
+ private
+
+ def trap_signals
+ trap(:TERM) do
+ quit!
+ end
+ trap(:INT) do
+ quit!
+ end
+ end
+
+ # Safe shutdown
+ def quit!
+ $stdout.puts 'Exiting...'
+
+ @exit = true
+ end
+
+ def exit?
+ @exit
+ end
+
+ def can_replay?(event_log)
+ return true if event_log.project_id.nil?
+
+ Gitlab::Geo.current_node&.projects_include?(event_log.project_id)
+ end
+
+ def handle_repository_created_event(event, created_at)
+ registry = find_or_initialize_registry(event.project_id, resync_repository: true, resync_wiki: event.wiki_path.present?)
+
+ logger.event_info(
+ created_at,
+ message: 'Repository created',
+ project_id: event.project_id,
+ repo_path: event.repo_path,
+ wiki_path: event.wiki_path,
+ resync_repository: registry.resync_repository,
+ resync_wiki: registry.resync_wiki)
+
+ registry.save!
+
+ ::Geo::ProjectSyncWorker.perform_async(event.project_id, Time.now)
+ end
+
+ def handle_repository_updated_event(event, created_at)
+ registry = find_or_initialize_registry(event.project_id, "resync_#{event.source}" => true)
+
+ logger.event_info(
+ created_at,
+ message: 'Repository update',
+ project_id: event.project_id,
+ source: event.source,
+ resync_repository: registry.resync_repository,
+ resync_wiki: registry.resync_wiki)
+
+ registry.save!
+
+ ::Geo::ProjectSyncWorker.perform_async(event.project_id, Time.now)
+ end
+
+ def handle_repository_deleted_event(event, created_at)
+ job_id = ::Geo::RepositoryDestroyService
+ .new(event.project_id, event.deleted_project_name, event.deleted_path, event.repository_storage_name)
+ .async_execute
+
+ logger.event_info(
+ created_at,
+ message: 'Deleted project',
+ project_id: event.project_id,
+ repository_storage_name: event.repository_storage_name,
+ disk_path: event.deleted_path,
+ job_id: job_id)
+
+ # No need to create a project entry if it doesn't exist
+ ::Geo::ProjectRegistry.where(project_id: event.project_id).delete_all
+ end
+
+ def handle_repositories_changed_event(event, created_at)
+ return unless Gitlab::Geo.current_node.id == event.geo_node_id
+
+ job_id = ::Geo::RepositoriesCleanUpWorker.perform_in(1.hour, event.geo_node_id)
+
+ if job_id
+ logger.info('Scheduled repositories clean up for Geo node', geo_node_id: event.geo_node_id, job_id: job_id)
+ else
+ logger.error('Could not schedule repositories clean up for Geo node', geo_node_id: event.geo_node_id)
+ end
+ end
+
+ def handle_repository_renamed_event(event, created_at)
+ return unless event.project_id
+
+ old_path = event.old_path_with_namespace
+ new_path = event.new_path_with_namespace
+
+ job_id = ::Geo::RenameRepositoryService
+ .new(event.project_id, old_path, new_path)
+ .async_execute
+
+ logger.event_info(
+ created_at,
+ message: 'Renaming project',
+ project_id: event.project_id,
+ old_path: old_path,
+ new_path: new_path,
+ job_id: job_id)
+ end
+
+ def handle_hashed_storage_migrated_event(event, created_at)
+ return unless event.project_id
+
+ job_id = ::Geo::HashedStorageMigrationService.new(
+ event.project_id,
+ old_disk_path: event.old_disk_path,
+ new_disk_path: event.new_disk_path,
+ old_storage_version: event.old_storage_version
+ ).async_execute
+
+ logger.event_info(
+ created_at,
+ message: 'Migrating project to hashed storage',
+ project_id: event.project_id,
+ old_storage_version: event.old_storage_version,
+ new_storage_version: event.new_storage_version,
+ old_disk_path: event.old_disk_path,
+ new_disk_path: event.new_disk_path,
+ job_id: job_id)
+ end
+
+ def handle_hashed_storage_attachments_event(event, created_at)
+ job_id = ::Geo::HashedStorageAttachmentsMigrationService.new(
+ event.project_id,
+ old_attachments_path: event.old_attachments_path,
+ new_attachments_path: event.new_attachments_path
+ ).async_execute
+
+ logger.event_info(
+ created_at,
+ message: 'Migrating attachments to hashed storage',
+ project_id: event.project_id,
+ old_attachments_path: event.old_attachments_path,
+ new_attachments_path: event.new_attachments_path,
+ job_id: job_id
+ )
+ end
+
+ def handle_lfs_object_deleted_event(event, created_at)
+ file_path = File.join(LfsObjectUploader.root, event.file_path)
+
+ job_id = ::Geo::FileRemovalWorker.perform_async(file_path)
+
+ logger.event_info(
+ created_at,
+ message: 'Deleted LFS object',
+ oid: event.oid,
+ file_id: event.lfs_object_id,
+ file_path: file_path,
+ job_id: job_id)
+
+ ::Geo::FileRegistry.lfs_objects.where(file_id: event.lfs_object_id).delete_all
+ end
+
+ def handle_job_artifact_deleted_event(event, created_at)
+ file_registry_job_artifacts = ::Geo::FileRegistry.job_artifacts.where(file_id: event.job_artifact_id)
+ return unless file_registry_job_artifacts.any? # avoid race condition
+
+ file_path = File.join(::JobArtifactUploader.root, event.file_path)
+
+ if File.file?(file_path)
+ deleted = delete_file(file_path) # delete synchronously to ensure consistency
+ return unless deleted # do not delete file from registry if deletion failed
+ end
+
+ logger.event_info(
+ created_at,
+ message: 'Deleted job artifact',
+ file_id: event.job_artifact_id,
+ file_path: file_path)
+
+ file_registry_job_artifacts.delete_all
+ end
+
+ def find_or_initialize_registry(project_id, attrs)
+ registry = ::Geo::ProjectRegistry.find_or_initialize_by(project_id: project_id)
+ registry.assign_attributes(attrs)
+ registry
+ end
+
+ def delete_file(path)
+ File.delete(path)
+ rescue => ex
+ logger.error("Failed to remove file", exception: ex.class.name, details: ex.message, filename: path)
+ false
+ end
+
+ # Sleeps for the TTL that remains on the lease plus some random seconds.
+ #
+ # This allows multiple GeoLogCursors to randomly process a batch of events,
+ # without favouring the shortest path (or latency).
+ def arbitrary_sleep(delay)
+ sleep(delay + rand(1..20) * 0.1)
+ end
+
+ def logger
+ Gitlab::Geo::LogCursor::Logger
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index 80feb629d54..1f80646a2ea 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -215,9 +215,9 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
- artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
- artifacts = uploaded_file(:file, artifacts_upload_path)
- metadata = uploaded_file(:metadata, artifacts_upload_path)
+ workhorse_upload_path = JobArtifactUploader.workhorse_upload_path
+ artifacts = uploaded_file(:file, workhorse_upload_path)
+ metadata = uploaded_file(:metadata, workhorse_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb
index 7a582a20056..4383124d150 100644
--- a/lib/backup/artifacts.rb
+++ b/lib/backup/artifacts.rb
@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
- super('artifacts', LegacyArtifactUploader.local_store_path)
+ super('artifacts', JobArtifactUploader.root)
end
def create_files_dir
diff --git a/lib/gitlab/background_migration/populate_untracked_uploads.rb b/lib/gitlab/background_migration/populate_untracked_uploads.rb
index 81e95e5832d..759bdeb4bb3 100644
--- a/lib/gitlab/background_migration/populate_untracked_uploads.rb
+++ b/lib/gitlab/background_migration/populate_untracked_uploads.rb
@@ -143,7 +143,7 @@ module Gitlab
end
def absolute_path
- File.join(CarrierWave.root, path)
+ File.join(Gitlab.config.uploads.storage_path, path)
end
end
diff --git a/lib/gitlab/background_migration/prepare_untracked_uploads.rb b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
index 476c46341ae..8d126a34dff 100644
--- a/lib/gitlab/background_migration/prepare_untracked_uploads.rb
+++ b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
@@ -10,9 +10,12 @@ module Gitlab
FIND_BATCH_SIZE = 500
RELATIVE_UPLOAD_DIR = "uploads".freeze
- ABSOLUTE_UPLOAD_DIR = "#{CarrierWave.root}/#{RELATIVE_UPLOAD_DIR}".freeze
+ ABSOLUTE_UPLOAD_DIR = File.join(
+ Gitlab.config.uploads.storage_path,
+ RELATIVE_UPLOAD_DIR
+ )
FOLLOW_UP_MIGRATION = 'PopulateUntrackedUploads'.freeze
- START_WITH_CARRIERWAVE_ROOT_REGEX = %r{\A#{CarrierWave.root}/}
+ START_WITH_ROOT_REGEX = %r{\A#{Gitlab.config.uploads.storage_path}/}
EXCLUDED_HASHED_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/@hashed/*".freeze
EXCLUDED_TMP_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/tmp/*".freeze
@@ -80,7 +83,7 @@ module Gitlab
paths = []
stdout.each_line("\0") do |line|
- paths << line.chomp("\0").sub(START_WITH_CARRIERWAVE_ROOT_REGEX, '')
+ paths << line.chomp("\0").sub(START_WITH_ROOT_REGEX, '')
if paths.size >= batch_size
yield(paths)
diff --git a/lib/gitlab/gfm/uploads_rewriter.rb b/lib/gitlab/gfm/uploads_rewriter.rb
index 8fab5489616..3fdc3c27f73 100644
--- a/lib/gitlab/gfm/uploads_rewriter.rb
+++ b/lib/gitlab/gfm/uploads_rewriter.rb
@@ -27,7 +27,7 @@ module Gitlab
with_link_in_tmp_dir(file.file) do |open_tmp_file|
new_uploader.store!(open_tmp_file)
end
- new_uploader.to_markdown
+ new_uploader.markdown_link
end
end
diff --git a/lib/gitlab/import_export/uploads_saver.rb b/lib/gitlab/import_export/uploads_saver.rb
index 627a487d577..2f08dda55fd 100644
--- a/lib/gitlab/import_export/uploads_saver.rb
+++ b/lib/gitlab/import_export/uploads_saver.rb
@@ -17,15 +17,13 @@ module Gitlab
false
end
- private
+ def uploads_path
+ FileUploader.absolute_base_dir(@project)
+ end
def uploads_export_path
File.join(@shared.export_path, 'uploads')
end
-
- def uploads_path
- FileUploader.dynamic_path_segment(@project)
- end
end
end
end
diff --git a/lib/gitlab/uploads_transfer.rb b/lib/gitlab/uploads_transfer.rb
index b5f41240529..7d7400bdabf 100644
--- a/lib/gitlab/uploads_transfer.rb
+++ b/lib/gitlab/uploads_transfer.rb
@@ -1,7 +1,7 @@
module Gitlab
class UploadsTransfer < ProjectTransfer
def root_dir
- File.join(CarrierWave.root, FileUploader.base_dir)
+ FileUploader.root
end
end
end
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index 5ab6cd5a4ef..dfe8acd4833 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -51,14 +51,14 @@ module Gitlab
def lfs_upload_ok(oid, size)
{
- StoreLFSPath: "#{Gitlab.config.lfs.storage_path}/tmp/upload",
+ StoreLFSPath: LfsObjectUploader.workhorse_upload_path,
LfsOid: oid,
LfsSize: size
}
end
def artifact_upload_ok
- { TempPath: JobArtifactUploader.artifacts_upload_path }
+ { TempPath: JobArtifactUploader.workhorse_upload_path }
end
def send_git_blob(repository, blob)
diff --git a/lib/tasks/gitlab/artifacts.rake b/lib/tasks/gitlab/artifacts.rake
index 494317d99c7..bfca4bfb3f7 100644
--- a/lib/tasks/gitlab/artifacts.rake
+++ b/lib/tasks/gitlab/artifacts.rake
@@ -12,8 +12,8 @@ namespace :gitlab do
.with_artifacts_stored_locally
.find_each(batch_size: 10) do |build|
begin
- build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)
- build.artifacts_metadata.migrate!(ObjectStoreUploader::REMOTE_STORE)
+ build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
+ build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
rescue => e
diff --git a/lib/tasks/gitlab/lfs.rake b/lib/tasks/gitlab/lfs.rake
index c17c05f8589..a45e5ca91e0 100644
--- a/lib/tasks/gitlab/lfs.rake
+++ b/lib/tasks/gitlab/lfs.rake
@@ -10,7 +10,7 @@ namespace :gitlab do
LfsObject.with_files_stored_locally
.find_each(batch_size: 10) do |lfs_object|
begin
- lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
rescue => e
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 67a11e56e94..6a1869d1a48 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -6,5 +6,7 @@ describe Groups::UploadsController do
{ group_id: model }
end
- it_behaves_like 'handle uploads'
+ it_behaves_like 'handle uploads' do
+ let(:uploader_class) { NamespaceFileUploader }
+ end
end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 46d618fa682..4ea6f869aa3 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -145,8 +145,8 @@ describe Projects::ArtifactsController do
context 'when using local file storage' do
it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
- let(:store) { ObjectStoreUploader::LOCAL_STORE }
- let(:archive_path) { JobArtifactUploader.local_store_path }
+ let(:store) { ObjectStorage::Store::LOCAL }
+ let(:archive_path) { JobArtifactUploader.root }
end
end
@@ -158,7 +158,7 @@ describe Projects::ArtifactsController do
it_behaves_like 'a valid file' do
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
- let(:store) { ObjectStoreUploader::REMOTE_STORE }
+ let(:store) { ObjectStorage::Store::REMOTE }
let(:archive_path) { 'https://' }
end
end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index e4310a4847b..08e2ccf893a 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -47,7 +47,7 @@ describe Projects::RawController do
end
it 'serves the file' do
- expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
+ expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
@@ -58,7 +58,7 @@ describe Projects::RawController do
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
stub_lfs_object_storage
- lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect to file' do
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index b1f601a19e5..376b229ffc9 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -180,6 +180,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
+
response
end
end
@@ -196,6 +197,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
+
response
end
end
@@ -220,6 +222,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+
response
end
end
@@ -239,6 +242,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+
response
end
end
@@ -291,6 +295,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
+
response
end
end
@@ -322,6 +327,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+
response
end
end
@@ -341,6 +347,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+
response
end
end
@@ -384,6 +391,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
+
response
end
end
@@ -420,6 +428,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+
response
end
end
@@ -439,6 +448,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+
response
end
end
@@ -491,6 +501,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
+
response
end
end
@@ -522,6 +533,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png'
+
response
end
end
@@ -541,6 +553,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png'
+
response
end
end
diff --git a/spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb b/spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb
new file mode 100644
index 00000000000..9f0f5f2ab87
--- /dev/null
+++ b/spec/ee/spec/finders/geo/attachment_registry_finder_spec.rb
@@ -0,0 +1,270 @@
+require 'spec_helper'
+
+describe Geo::AttachmentRegistryFinder, :geo do
+ include ::EE::GeoHelpers
+
+ let(:secondary) { create(:geo_node) }
+
+ let(:synced_group) { create(:group) }
+ let(:synced_subgroup) { create(:group, parent: synced_group) }
+ let(:unsynced_group) { create(:group) }
+ let(:synced_project) { create(:project, group: synced_group) }
+ let(:unsynced_project) { create(:project, group: unsynced_group, repository_storage: 'broken') }
+
+ let!(:upload_1) { create(:upload, model: synced_group) }
+ let!(:upload_2) { create(:upload, model: unsynced_group) }
+ let!(:upload_3) { create(:upload, :issuable_upload, model: synced_project) }
+ let!(:upload_4) { create(:upload, model: unsynced_project) }
+ let(:upload_5) { create(:upload, model: synced_project) }
+ let(:upload_6) { create(:upload, :personal_snippet_upload) }
+ let(:upload_7) { create(:upload, model: synced_subgroup) }
+ let(:lfs_object) { create(:lfs_object) }
+
+ subject { described_class.new(current_node: secondary) }
+
+ before do
+ stub_current_geo_node(secondary)
+ end
+
+ # Disable transactions via :delete method because a foreign table
+ # can't see changes inside a transaction of a different connection.
+ context 'FDW', :delete do
+ before do
+ skip('FDW is not configured') if Gitlab::Database.postgresql? && !Gitlab::Geo.fdw?
+ end
+
+ describe '#find_synced_attachments' do
+ it 'delegates to #fdw_find_synced_attachments' do
+ expect(subject).to receive(:fdw_find_synced_attachments).and_call_original
+
+ subject.find_synced_attachments
+ end
+
+ it 'returns synced avatars, attachments, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ synced_attachments = subject.find_synced_attachments
+
+ expect(synced_attachments.pluck(:id)).to match_array([upload_1.id, upload_2.id, upload_6.id, upload_7.id])
+ end
+
+ context 'with selective sync' do
+ it 'falls back to legacy queries' do
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+ expect(subject).to receive(:legacy_find_synced_attachments)
+
+ subject.find_synced_attachments
+ end
+ end
+ end
+
+ describe '#find_failed_attachments' do
+ it 'delegates to #fdw_find_failed_attachments' do
+ expect(subject).to receive(:fdw_find_failed_attachments).and_call_original
+
+ subject.find_failed_attachments
+ end
+
+ it 'returns failed avatars, attachments, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+ failed_attachments = subject.find_failed_attachments
+
+ expect(failed_attachments.pluck(:id)).to match_array([upload_3.id, upload_6.id, upload_7.id])
+ end
+
+ context 'with selective sync' do
+ it 'falls back to legacy queries' do
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+ expect(subject).to receive(:legacy_find_failed_attachments)
+
+ subject.find_failed_attachments
+ end
+ end
+ end
+
+ describe '#find_unsynced_attachments' do
+ it 'delegates to #fdw_find_unsynced_attachments' do
+ expect(subject).to receive(:fdw_find_unsynced_attachments).and_call_original
+
+ subject.find_unsynced_attachments(batch_size: 10)
+ end
+
+ it 'returns uploads without an entry on the tracking database' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+ uploads = subject.find_unsynced_attachments(batch_size: 10)
+
+ expect(uploads.map(&:id)).to match_array([upload_2.id, upload_3.id, upload_4.id])
+ end
+
+ it 'excludes uploads passed in the except_registry_ids option' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+ uploads = subject.find_unsynced_attachments(batch_size: 10, except_registry_ids: [upload_2.id])
+
+ expect(uploads.map(&:id)).to match_array([upload_3.id, upload_4.id])
+ end
+ end
+ end
+
+ context 'Legacy' do
+ before do
+ allow(Gitlab::Geo).to receive(:fdw?).and_return(false)
+ end
+
+ describe '#find_synced_attachments' do
+ it 'delegates to #legacy_find_synced_attachments' do
+ expect(subject).to receive(:legacy_find_synced_attachments).and_call_original
+
+ subject.find_synced_attachments
+ end
+
+ it 'returns synced avatars, attachments, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ synced_attachments = subject.find_synced_attachments
+
+ expect(synced_attachments).to match_array([upload_1, upload_2, upload_6, upload_7])
+ end
+
+ context 'with selective sync by namespace' do
+ it 'returns synced avatars, attachments, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id)
+ create(:geo_file_registry, :avatar, file_id: upload_4.id)
+ create(:geo_file_registry, :avatar, file_id: upload_5.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+ synced_attachments = subject.find_synced_attachments
+
+ expect(synced_attachments).to match_array([upload_1, upload_3, upload_6, upload_7])
+ end
+ end
+
+ context 'with selective sync by shard' do
+ it 'returns synced avatars, attachments, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id)
+ create(:geo_file_registry, :avatar, file_id: upload_4.id)
+ create(:geo_file_registry, :avatar, file_id: upload_5.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ secondary.update!(selective_sync_type: 'shards', selective_sync_shards: ['default'])
+
+ synced_attachments = subject.find_synced_attachments
+
+ expect(synced_attachments).to match_array([upload_1, upload_3, upload_6])
+ end
+ end
+ end
+
+ describe '#find_failed_attachments' do
+ it 'delegates to #legacy_find_failed_attachments' do
+ expect(subject).to receive(:legacy_find_failed_attachments).and_call_original
+
+ subject.find_failed_attachments
+ end
+
+ it 'returns failed avatars, attachments, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+ failed_attachments = subject.find_failed_attachments
+
+ expect(failed_attachments).to match_array([upload_3, upload_6, upload_7])
+ end
+
+ context 'with selective sync by namespace' do
+ it 'returns failed avatars, attachments, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_4.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_5.id)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+
+ failed_attachments = subject.find_failed_attachments
+
+ expect(failed_attachments).to match_array([upload_1, upload_3, upload_6, upload_7])
+ end
+ end
+
+ context 'with selective sync by shard' do
+ it 'returns failed avatars, attachments, personal snippets and files' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_2.id)
+ create(:geo_file_registry, :avatar, file_id: upload_3.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_4.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_5.id)
+ create(:geo_file_registry, :avatar, file_id: upload_6.id, success: false)
+ create(:geo_file_registry, :avatar, file_id: upload_7.id, success: false)
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id, success: false)
+
+ secondary.update!(selective_sync_type: 'shards', selective_sync_shards: ['default'])
+
+ failed_attachments = subject.find_failed_attachments
+
+ expect(failed_attachments).to match_array([upload_1, upload_3, upload_6])
+ end
+ end
+ end
+
+ describe '#find_unsynced_attachments' do
+ it 'delegates to #legacy_find_unsynced_attachments' do
+ expect(subject).to receive(:legacy_find_unsynced_attachments).and_call_original
+
+ subject.find_unsynced_attachments(batch_size: 10)
+ end
+
+ it 'returns uploads without an entry on the tracking database' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+ uploads = subject.find_unsynced_attachments(batch_size: 10)
+
+ expect(uploads).to match_array([upload_2, upload_3, upload_4])
+ end
+
+ it 'excludes uploads passed in the except_registry_ids option' do
+ create(:geo_file_registry, :avatar, file_id: upload_1.id, success: true)
+
+ uploads = subject.find_unsynced_attachments(batch_size: 10, except_registry_ids: [upload_2.id])
+
+ expect(uploads).to match_array([upload_3, upload_4])
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb b/spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb
new file mode 100644
index 00000000000..4cb2a1ec08f
--- /dev/null
+++ b/spec/ee/spec/lib/gitlab/geo/file_transfer_spec.rb
@@ -0,0 +1,22 @@
+require 'spec_helper'
+
+describe Gitlab::Geo::FileTransfer do
+ let(:user) { create(:user, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+ let(:upload) { Upload.find_by(model: user, uploader: 'AvatarUploader') }
+
+ subject { described_class.new(:file, upload) }
+
+ describe '#execute' do
+ context 'user avatar' do
+ it 'sets an absolute path' do
+ expect(subject.file_type).to eq(:file)
+ expect(subject.file_id).to eq(upload.id)
+ expect(subject.filename).to eq(upload.absolute_path)
+ expect(Pathname.new(subject.filename).absolute?).to be_truthy
+ expect(subject.request_data).to eq({ id: upload.model_id,
+ type: 'User',
+ checksum: upload.checksum })
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb b/spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb
new file mode 100644
index 00000000000..af475a966a0
--- /dev/null
+++ b/spec/ee/spec/lib/gitlab/geo/log_cursor/daemon_spec.rb
@@ -0,0 +1,414 @@
+require 'spec_helper'
+
+describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared_state do
+ include ::EE::GeoHelpers
+
+ set(:primary) { create(:geo_node, :primary) }
+ set(:secondary) { create(:geo_node) }
+
+ let(:options) { {} }
+ subject(:daemon) { described_class.new(options) }
+
+ around do |example|
+ Sidekiq::Testing.fake! { example.run }
+ end
+
+ before do
+ stub_current_geo_node(secondary)
+
+ allow(daemon).to receive(:trap_signals)
+ allow(daemon).to receive(:arbitrary_sleep).and_return(0.1)
+ end
+
+ describe '#run!' do
+ it 'traps signals' do
+ is_expected.to receive(:exit?).and_return(true)
+ is_expected.to receive(:trap_signals)
+
+ daemon.run!
+ end
+
+ it 'delegates to #run_once! in a loop' do
+ is_expected.to receive(:exit?).and_return(false, false, false, true)
+ is_expected.to receive(:run_once!).twice
+
+ daemon.run!
+ end
+
+ it 'skips execution if cannot achieve a lease' do
+ is_expected.to receive(:exit?).and_return(false, true)
+ is_expected.not_to receive(:run_once!)
+ expect_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain_with_ttl).and_return({ ttl: 1, uuid: false })
+
+ daemon.run!
+ end
+
+ it 'skips execution if not a Geo node' do
+ stub_current_geo_node(nil)
+
+ is_expected.to receive(:exit?).and_return(false, true)
+ is_expected.to receive(:sleep).with(1.minute)
+ is_expected.not_to receive(:run_once!)
+
+ daemon.run!
+ end
+
+ it 'skips execution if the current node is a primary' do
+ stub_current_geo_node(primary)
+
+ is_expected.to receive(:exit?).and_return(false, true)
+ is_expected.to receive(:sleep).with(1.minute)
+ is_expected.not_to receive(:run_once!)
+
+ daemon.run!
+ end
+ end
+
+ describe '#run_once!' do
+ context 'when replaying a repository created event' do
+ let(:project) { create(:project) }
+ let(:repository_created_event) { create(:geo_repository_created_event, project: project) }
+ let(:event_log) { create(:geo_event_log, repository_created_event: repository_created_event) }
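+ # Pointing the cursor state at the previous event id makes run_once!
+ # replay exactly the event created above.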
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+ it 'creates a new project registry' do
+ expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(1)
+ end
+
+ it 'sets resync attributes to true' do
+ daemon.run_once!
+
+ registry = Geo::ProjectRegistry.last
+
+ expect(registry).to have_attributes(project_id: project.id, resync_repository: true, resync_wiki: true)
+ end
+
+ it 'sets resync_wiki to false if wiki_path is nil' do
+ repository_created_event.update!(wiki_path: nil)
+
+ daemon.run_once!
+
+ registry = Geo::ProjectRegistry.last
+
+ expect(registry).to have_attributes(project_id: project.id, resync_repository: true, resync_wiki: false)
+ end
+
+ it 'performs Geo::ProjectSyncWorker' do
+ expect(Geo::ProjectSyncWorker).to receive(:perform_async)
+ .with(project.id, anything).once
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when replaying a repository updated event' do
+ let(:project) { create(:project) }
+ let(:repository_updated_event) { create(:geo_repository_updated_event, project: project) }
+ let(:event_log) { create(:geo_event_log, repository_updated_event: repository_updated_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+ it 'creates a new project registry if it does not exist' do
+ expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(1)
+ end
+
+ it 'sets resync_repository to true if event source is repository' do
+ repository_updated_event.update!(source: Geo::RepositoryUpdatedEvent::REPOSITORY)
+ registry = create(:geo_project_registry, :synced, project: repository_updated_event.project)
+
+ daemon.run_once!
+
+ expect(registry.reload.resync_repository).to be true
+ end
+
+ it 'sets resync_wiki to true if event source is wiki' do
+ repository_updated_event.update!(source: Geo::RepositoryUpdatedEvent::WIKI)
+ registry = create(:geo_project_registry, :synced, project: repository_updated_event.project)
+
+ daemon.run_once!
+
+ expect(registry.reload.resync_wiki).to be true
+ end
+
+ it 'performs Geo::ProjectSyncWorker' do
+ expect(Geo::ProjectSyncWorker).to receive(:perform_async)
+ .with(project.id, anything).once
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when replaying a repository deleted event' do
+ let(:event_log) { create(:geo_event_log, :deleted_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:repository_deleted_event) { event_log.repository_deleted_event }
+ let(:project) { repository_deleted_event.project }
+
+ it 'does not create a tracking database entry' do
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'schedules a GeoRepositoryDestroyWorker' do
+ project_id = repository_deleted_event.project_id
+ project_name = repository_deleted_event.deleted_project_name
+ project_path = repository_deleted_event.deleted_path
+
+ expect(::GeoRepositoryDestroyWorker).to receive(:perform_async)
+ .with(project_id, project_name, project_path, project.repository_storage)
+
+ daemon.run_once!
+ end
+
+ it 'removes the tracking database entry if one exists' do
+ create(:geo_project_registry, :synced, project: project)
+
+ expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(-1)
+ end
+ end
+
+ context 'when replaying a repositories changed event' do
+ let(:repositories_changed_event) { create(:geo_repositories_changed_event, geo_node: secondary) }
+ let(:event_log) { create(:geo_event_log, repositories_changed_event: repositories_changed_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+ it 'schedules a Geo::RepositoriesCleanUpWorker when event node is the current node' do
+ expect(Geo::RepositoriesCleanUpWorker).to receive(:perform_in).with(within(5.minutes).of(1.hour), secondary.id)
+
+ daemon.run_once!
+ end
+
+ it 'does not schedule a Geo::RepositoriesCleanUpWorker when event node is not the current node' do
+ stub_current_geo_node(build(:geo_node))
+
+ expect(Geo::RepositoriesCleanUpWorker).not_to receive(:perform_in)
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when node has namespace restrictions' do
+ let(:group_1) { create(:group) }
+ let(:group_2) { create(:group) }
+ let(:project) { create(:project, group: group_1) }
+ let(:repository_updated_event) { create(:geo_repository_updated_event, project: project) }
+ let(:event_log) { create(:geo_event_log, repository_updated_event: repository_updated_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+
+ before do
+ allow(Geo::ProjectSyncWorker).to receive(:perform_async)
+ end
+
+ it 'replays events for projects that belong to selected namespaces to replicate' do
+ secondary.update!(namespaces: [group_1])
+
+ expect { daemon.run_once! }.to change(Geo::ProjectRegistry, :count).by(1)
+ end
+
+ it 'does not replay events for projects that do not belong to selected namespaces to replicate' do
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [group_2])
+
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'does not replay events for projects that do not belong to selected shards to replicate' do
+ secondary.update!(selective_sync_type: 'shards', selective_sync_shards: ['broken'])
+
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+ end
+
+ context 'when processing a repository renamed event' do
+ let(:event_log) { create(:geo_event_log, :renamed_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:repository_renamed_event) { event_log.repository_renamed_event }
+
+ it 'does not create a new project registry' do
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'schedules a Geo::RenameRepositoryWorker' do
+ project_id = repository_renamed_event.project_id
+ old_path_with_namespace = repository_renamed_event.old_path_with_namespace
+ new_path_with_namespace = repository_renamed_event.new_path_with_namespace
+
+ expect(::Geo::RenameRepositoryWorker).to receive(:perform_async)
+ .with(project_id, old_path_with_namespace, new_path_with_namespace)
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when processing a hashed storage migration event' do
+ let(:event_log) { create(:geo_event_log, :hashed_storage_migration_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:hashed_storage_migrated_event) { event_log.hashed_storage_migrated_event }
+
+ it 'does not create a new project registry' do
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'schedules a Geo::HashedStorageMigrationWorker' do
+ project = hashed_storage_migrated_event.project
+ old_disk_path = hashed_storage_migrated_event.old_disk_path
+ new_disk_path = hashed_storage_migrated_event.new_disk_path
+ old_storage_version = project.storage_version
+
+ expect(::Geo::HashedStorageMigrationWorker).to receive(:perform_async)
+ .with(project.id, old_disk_path, new_disk_path, old_storage_version)
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when processing an attachment migration event to hashed storage' do
+ let(:event_log) { create(:geo_event_log, :hashed_storage_attachments_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:hashed_storage_attachments_event) { event_log.hashed_storage_attachments_event }
+
+ it 'does not create a new project registry' do
+ expect { daemon.run_once! }.not_to change(Geo::ProjectRegistry, :count)
+ end
+
+ it 'schedules a Geo::HashedStorageAttachmentsMigrationWorker' do
+ project = hashed_storage_attachments_event.project
+ old_attachments_path = hashed_storage_attachments_event.old_attachments_path
+ new_attachments_path = hashed_storage_attachments_event.new_attachments_path
+
+ expect(::Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async)
+ .with(project.id, old_attachments_path, new_attachments_path)
+
+ daemon.run_once!
+ end
+ end
+
+ context 'when replaying a LFS object deleted event' do
+ let(:event_log) { create(:geo_event_log, :lfs_object_deleted_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:lfs_object_deleted_event) { event_log.lfs_object_deleted_event }
+ let(:lfs_object) { lfs_object_deleted_event.lfs_object }
+
+ it 'does not create a tracking database entry' do
+ expect { daemon.run_once! }.not_to change(Geo::FileRegistry, :count)
+ end
+
+ it 'schedules a Geo::FileRemovalWorker' do
+ file_path = File.join(LfsObjectUploader.root, lfs_object_deleted_event.file_path)
+
+ expect(::Geo::FileRemovalWorker).to receive(:perform_async)
+ .with(file_path)
+
+ daemon.run_once!
+ end
+
+ it 'removes the tracking database entry if one exists' do
+ create(:geo_file_registry, :lfs, file_id: lfs_object.id)
+
+ expect { daemon.run_once! }.to change(Geo::FileRegistry.lfs_objects, :count).by(-1)
+ end
+ end
+
+ context 'when replaying a job artifact event' do
+ let(:event_log) { create(:geo_event_log, :job_artifact_deleted_event) }
+ let!(:event_log_state) { create(:geo_event_log_state, event_id: event_log.id - 1) }
+ let(:job_artifact_deleted_event) { event_log.job_artifact_deleted_event }
+ let(:job_artifact) { job_artifact_deleted_event.job_artifact }
+
+ context 'with a tracking database entry' do
+ before do
+ create(:geo_file_registry, :job_artifact, file_id: job_artifact.id)
+ end
+
+ context 'with a file' do
+ context 'when the delete succeeds' do
+ it 'removes the tracking database entry' do
+ expect { daemon.run_once! }.to change(Geo::FileRegistry.job_artifacts, :count).by(-1)
+ end
+
+ it 'deletes the file' do
+ expect { daemon.run_once! }.to change { File.exist?(job_artifact.file.path) }.from(true).to(false)
+ end
+ end
+
+ context 'when the delete fails' do
+ before do
+ expect(daemon).to receive(:delete_file).and_return(false)
+ end
+
+ it 'does not remove the tracking database entry' do
+ expect { daemon.run_once! }.not_to change(Geo::FileRegistry.job_artifacts, :count)
+ end
+ end
+ end
+
+ context 'without a file' do
+ before do
+ FileUtils.rm(job_artifact.file.path)
+ end
+
+ it 'removes the tracking database entry' do
+ expect { daemon.run_once! }.to change(Geo::FileRegistry.job_artifacts, :count).by(-1)
+ end
+ end
+ end
+
+ context 'without a tracking database entry' do
+ it 'does not create a tracking database entry' do
+ expect { daemon.run_once! }.not_to change(Geo::FileRegistry, :count)
+ end
+
+ it 'does not delete the file (yet, due to possible race condition)' do
+ expect { daemon.run_once! }.not_to change { File.exist?(job_artifact.file.path) }.from(true)
+ end
+ end
+ end
+ end
+
+ describe '#delete_file' do
+ context 'when the file exists' do
+ let!(:file) { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") }
+
+ context 'when the delete does not raise an exception' do
+ it 'returns true' do
+ expect(daemon.send(:delete_file, file.path)).to be_truthy
+ end
+
+ it 'does not log an error' do
+ expect(daemon).not_to receive(:logger)
+
+ daemon.send(:delete_file, file.path)
+ end
+ end
+
+ context 'when the delete raises an exception' do
+ before do
+ expect(File).to receive(:delete).and_raise('something went wrong')
+ end
+
+ it 'returns false' do
+ expect(daemon.send(:delete_file, file.path)).to be_falsey
+ end
+
+ it 'logs an error' do
+ logger = double('logger')
+ expect(daemon).to receive(:logger).and_return(logger)
+ expect(logger).to receive(:error).with('Failed to remove file', exception: 'RuntimeError', details: 'something went wrong', filename: file.path)
+
+ daemon.send(:delete_file, file.path)
+ end
+ end
+ end
+
+ context 'when the file does not exist' do
+ it 'returns false' do
+ expect(daemon.send(:delete_file, '/does/not/exist')).to be_falsey
+ end
+
+ it 'logs an error' do
+ logger = double('logger')
+ expect(daemon).to receive(:logger).and_return(logger)
+ expect(logger).to receive(:error).with('Failed to remove file', exception: 'Errno::ENOENT', details: 'No such file or directory @ unlink_internal - /does/not/exist', filename: '/does/not/exist')
+
+ daemon.send(:delete_file, '/does/not/exist')
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/models/ee/lfs_object_spec.rb b/spec/ee/spec/models/ee/lfs_object_spec.rb
index b02327b4c73..e425f5bc112 100644
--- a/spec/ee/spec/models/ee/lfs_object_spec.rb
+++ b/spec/ee/spec/models/ee/lfs_object_spec.rb
@@ -8,14 +8,14 @@ describe LfsObject do
expect(subject.local_store?).to eq true
end
- it 'returns true when file_store is equal to LfsObjectUploader::LOCAL_STORE' do
- subject.file_store = LfsObjectUploader::LOCAL_STORE
+ it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
+ subject.file_store = LfsObjectUploader::Store::LOCAL
expect(subject.local_store?).to eq true
end
- it 'returns false whe file_store is equal to LfsObjectUploader::REMOTE_STORE' do
- subject.file_store = LfsObjectUploader::REMOTE_STORE
+ it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
+ subject.file_store = LfsObjectUploader::Store::REMOTE
expect(subject.local_store?).to eq false
end
diff --git a/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb
new file mode 100644
index 00000000000..9fa618fdc47
--- /dev/null
+++ b/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -0,0 +1,50 @@
+require 'spec_helper'
+
+describe Projects::HashedStorage::MigrateAttachmentsService do
+ let(:project) { create(:project, storage_version: 1) }
+ let(:service) { described_class.new(project) }
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+ let(:old_attachments_path) { legacy_storage.disk_path }
+ let(:new_attachments_path) { hashed_storage.disk_path }
+
+ describe '#execute' do
+ set(:primary) { create(:geo_node, :primary) }
+ set(:secondary) { create(:geo_node) }
+
+ context 'on success' do
+ before do
+ TestEnv.clean_test_path
+ FileUtils.mkdir_p(File.join(FileUploader.root, old_attachments_path))
+ end
+
+ it 'returns true' do
+ expect(service.execute).to be_truthy
+ end
+
+ it 'creates a Geo::HashedStorageAttachmentsEvent' do
+ expect { service.execute }.to change(Geo::EventLog, :count).by(1)
+
+ event = Geo::EventLog.first.event
+
+ expect(event).to be_a(Geo::HashedStorageAttachmentsEvent)
+ expect(event).to have_attributes(
+ old_attachments_path: old_attachments_path,
+ new_attachments_path: new_attachments_path
+ )
+ end
+ end
+
+ context 'on failure' do
+ it 'does not create a Geo event when skipped' do
+ expect { service.execute }.not_to change { Geo::EventLog.count }
+ end
+
+ it 'does not create a Geo event on failure' do
+ expect(service).to receive(:move_folder!).and_raise(::Projects::HashedStorage::AttachmentMigrationError)
+ expect { service.execute }.to raise_error(::Projects::HashedStorage::AttachmentMigrationError)
+ expect(Geo::EventLog.count).to eq(0)
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/services/geo/file_download_service_spec.rb b/spec/ee/spec/services/geo/file_download_service_spec.rb
new file mode 100644
index 00000000000..4fb0d89dbde
--- /dev/null
+++ b/spec/ee/spec/services/geo/file_download_service_spec.rb
@@ -0,0 +1,227 @@
+require 'spec_helper'
+
+describe Geo::FileDownloadService do
+ include ::EE::GeoHelpers
+
+ set(:primary) { create(:geo_node, :primary) }
+ set(:secondary) { create(:geo_node) }
+
+ before do
+ stub_current_geo_node(secondary)
+
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
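+ # Always grant the exclusive lease so #execute proceeds in every example.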
+ end
+
+ describe '#execute' do
+ context 'user avatar' do
+ let(:user) { create(:user, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+ let(:upload) { Upload.find_by(model: user, uploader: 'AvatarUploader') }
+
+ subject(:execute!) { described_class.new(:avatar, upload.id).execute }
+
+ it 'downloads a user avatar' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ expect(Geo::FileRegistry.last.retry_count).to eq(1)
+ expect(Geo::FileRegistry.last.retry_at).to be_present
+ end
+
+ it 'registers when the download fails with some other error' do
+ stub_transfer(Gitlab::Geo::FileTransfer, nil)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'group avatar' do
+ let(:group) { create(:group, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+ let(:upload) { Upload.find_by(model: group, uploader: 'AvatarUploader') }
+
+ subject(:execute!) { described_class.new(:avatar, upload.id).execute }
+
+ it 'downloads a group avatar' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'project avatar' do
+ let(:project) { create(:project, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
+ let(:upload) { Upload.find_by(model: project, uploader: 'AvatarUploader') }
+
+ subject(:execute!) { described_class.new(:avatar, upload.id).execute }
+
+ it 'downloads a project avatar' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'with an attachment' do
+ let(:note) { create(:note, :with_attachment) }
+ let(:upload) { Upload.find_by(model: note, uploader: 'AttachmentUploader') }
+
+ subject(:execute!) { described_class.new(:attachment, upload.id).execute }
+
+ it 'downloads the attachment' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'with a snippet' do
+ let(:upload) { create(:upload, :personal_snippet_upload) }
+
+ subject(:execute!) { described_class.new(:personal_file, upload.id).execute }
+
+ it 'downloads the file' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'with file upload' do
+ let(:project) { create(:project) }
+ let(:upload) { Upload.find_by(model: project, uploader: 'FileUploader') }
+
+ subject { described_class.new(:file, upload.id) }
+
+ before do
+ FileUploader.new(project).store!(fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png'))
+ end
+
+ it 'downloads the file' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'with namespace file upload' do
+ let(:group) { create(:group) }
+ let(:upload) { Upload.find_by(model: group, uploader: 'NamespaceFileUploader') }
+
+ subject { described_class.new(:file, upload.id) }
+
+ before do
+ NamespaceFileUploader.new(group).store!(fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png'))
+ end
+
+ it 'downloads the file' do
+ stub_transfer(Gitlab::Geo::FileTransfer, 100)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::FileTransfer, -1)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+ end
+
+ context 'LFS object' do
+ let(:lfs_object) { create(:lfs_object) }
+
+ subject { described_class.new(:lfs, lfs_object.id) }
+
+ it 'downloads an LFS object' do
+ stub_transfer(Gitlab::Geo::LfsTransfer, 100)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::LfsTransfer, -1)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+
+ it 'logs a message' do
+ stub_transfer(Gitlab::Geo::LfsTransfer, 100)
+
+ expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, success: true, bytes_downloaded: 100)).and_call_original
+
+ subject.execute
+ end
+ end
+
+ context 'job artifacts' do
+ let(:job_artifact) { create(:ci_job_artifact) }
+
+ subject { described_class.new(:job_artifact, job_artifact.id) }
+
+ it 'downloads a job artifact' do
+ stub_transfer(Gitlab::Geo::JobArtifactTransfer, 100)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
+ end
+
+ it 'registers when the download fails' do
+ stub_transfer(Gitlab::Geo::JobArtifactTransfer, -1)
+
+ expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
+ end
+
+ it 'logs a message' do
+ stub_transfer(Gitlab::Geo::JobArtifactTransfer, 100)
+
+ expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, success: true, bytes_downloaded: 100)).and_call_original
+
+ subject.execute
+ end
+ end
+
+ context 'bad object type' do
+ it 'raises an error' do
+ expect { described_class.new(:bad, 1).execute }.to raise_error(NameError)
+ end
+ end
+
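+ # Stubs the transfer class so #download_from_primary reports the given
+ # byte count (-1 simulates a failed download, nil an unexpected error).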
+ def stub_transfer(kls, result)
+ instance = double("(instance of #{kls})", download_from_primary: result)
+ allow(kls).to receive(:new).and_return(instance)
+ end
+ end
+end
diff --git a/spec/ee/spec/services/geo/files_expire_service_spec.rb b/spec/ee/spec/services/geo/files_expire_service_spec.rb
new file mode 100644
index 00000000000..09b0b386ed1
--- /dev/null
+++ b/spec/ee/spec/services/geo/files_expire_service_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+# Disable transactions via :delete method because a foreign table
+# can't see changes inside a transaction of a different connection.
+describe Geo::FilesExpireService, :geo, :delete do
+ let(:project) { create(:project) }
+ let!(:old_full_path) { project.full_path }
+ subject { described_class.new(project, old_full_path) }
+
+ describe '#execute' do
+ let(:file_uploader) { build(:file_uploader, project: project) }
+ let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
+ let!(:file_registry) { create(:geo_file_registry, file_id: upload.id) }
+
+ before do
+ project.update(path: "#{project.path}_renamed")
+ end
+
+ context 'when in Geo secondary node' do
+ before do
+ allow(Gitlab::Geo).to receive(:secondary?) { true }
+ end
+
+ it 'removes the file from disk' do
+ file_path = File.join(subject.base_dir, upload.path)
+ expect(File.exist?(file_path)).to be_truthy
+
+ Sidekiq::Testing.inline! { subject.execute }
+
+ expect(File.exist?(file_path)).to be_falsey
+ end
+
+ it 'removes the file_registry associated with the upload' do
+ expect(file_registry.success).to be_truthy
+
+ subject.execute
+
+ expect { file_registry.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'when not in Geo secondary node' do
+ it 'does nothing' do
+ expect(subject).not_to receive(:schedule_file_removal)
+ expect(subject).not_to receive(:mark_for_resync!)
+
+ subject.execute
+ end
+ end
+ end
+end
diff --git a/spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb b/spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb
new file mode 100644
index 00000000000..40e06705cf5
--- /dev/null
+++ b/spec/ee/spec/services/geo/hashed_storage_attachments_migration_service_spec.rb
@@ -0,0 +1,83 @@
+require 'spec_helper'
+
+def base_path(storage)
+ File.join(FileUploader.root, storage.disk_path)
+end
+
+describe Geo::HashedStorageAttachmentsMigrationService do
+ let!(:project) { create(:project) }
+
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+ let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
+ let(:file_uploader) { build(:file_uploader, project: project) }
+ let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
+ let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
+
+ subject(:service) do
+ described_class.new(project.id,
+ old_attachments_path: legacy_storage.disk_path,
+ new_attachments_path: hashed_storage.disk_path)
+ end
+
+ describe '#execute' do
+ context 'when succeeds' do
+ it 'moves attachments to hashed storage layout' do
+ expect(File.file?(old_path)).to be_truthy
+ expect(File.file?(new_path)).to be_falsey
+ expect(File.exist?(base_path(legacy_storage))).to be_truthy
+ expect(File.exist?(base_path(hashed_storage))).to be_falsey
+ expect(FileUtils).to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)).and_call_original
+
+ service.execute
+
+ expect(File.exist?(base_path(hashed_storage))).to be_truthy
+ expect(File.exist?(base_path(legacy_storage))).to be_falsey
+ expect(File.file?(old_path)).to be_falsey
+ expect(File.file?(new_path)).to be_truthy
+ end
+ end
+
+ context 'when original folder does not exist anymore' do
+ before do
+ FileUtils.rm_rf(base_path(legacy_storage))
+ end
+
+ it 'skips moving folders and continues' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+ service.execute
+
+ expect(File.exist?(base_path(hashed_storage))).to be_falsey
+ expect(File.file?(new_path)).to be_falsey
+ end
+ end
+
+ context 'when target folder already exists' do
+ before do
+ FileUtils.mkdir_p(base_path(hashed_storage))
+ end
+
+ it 'raises AttachmentMigrationError' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+ expect { service.execute }.to raise_error(::Geo::AttachmentMigrationError)
+ end
+ end
+ end
+
+ describe '#async_execute' do
+ it 'starts the worker' do
+ expect(Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async)
+
+ service.async_execute
+ end
+
+ it 'returns job id' do
+ allow(Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async).and_return('foo')
+
+ expect(service.async_execute).to eq('foo')
+ end
+ end
+end
diff --git a/spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb b/spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb
new file mode 100644
index 00000000000..ad7cad3128a
--- /dev/null
+++ b/spec/ee/spec/workers/geo/file_download_dispatch_worker_spec.rb
@@ -0,0 +1,291 @@
+require 'spec_helper'
+
+describe Geo::FileDownloadDispatchWorker, :geo do
+ include ::EE::GeoHelpers
+
+ let(:primary) { create(:geo_node, :primary, host: 'primary-geo-node') }
+ let(:secondary) { create(:geo_node) }
+
+ before do
+ stub_current_geo_node(secondary)
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:renew).and_return(true)
+ allow_any_instance_of(described_class).to receive(:over_time?).and_return(false)
+ WebMock.stub_request(:get, /primary-geo-node/).to_return(status: 200, body: "", headers: {})
+ end
+
+ subject { described_class.new }
+
+ shared_examples '#perform' do |skip_tests|
+ before do
+ skip('FDW is not configured') if skip_tests
+ end
+
+ it 'does not schedule anything when tracking database is not configured' do
+ create(:lfs_object, :with_file)
+
+ allow(Gitlab::Geo).to receive(:geo_database_configured?) { false }
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+ subject.perform
+
+ # We need to unstub here or the DatabaseCleaner will have issues since it
+ # will appear as though the tracking DB were not available
+ allow(Gitlab::Geo).to receive(:geo_database_configured?).and_call_original
+ end
+
+ it 'does not schedule anything when node is disabled' do
+ create(:lfs_object, :with_file)
+
+ secondary.enabled = false
+ secondary.save
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+ subject.perform
+ end
+
+ context 'with LFS objects' do
+ let!(:lfs_object_local_store) { create(:lfs_object, :with_file) }
+ let!(:lfs_object_remote_store) { create(:lfs_object, :with_file) }
+
+ before do
+ stub_lfs_object_storage
+ lfs_object_remote_store.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
+ it 'filters S3-backed files' do
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).with(:lfs, lfs_object_local_store.id)
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with(:lfs, lfs_object_remote_store.id)
+
+ subject.perform
+ end
+ end
+
+ context 'with job artifacts' do
+ it 'performs Geo::FileDownloadWorker for unsynced job artifacts' do
+ artifact = create(:ci_job_artifact)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with(:job_artifact, artifact.id).once.and_return(spy)
+
+ subject.perform
+ end
+
+ it 'performs Geo::FileDownloadWorker for failed-sync job artifacts' do
+ artifact = create(:ci_job_artifact)
+
+ Geo::FileRegistry.create!(file_type: :job_artifact, file_id: artifact.id, bytes: 0, success: false)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with('job_artifact', artifact.id).once.and_return(spy)
+
+ subject.perform
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for synced job artifacts' do
+ artifact = create(:ci_job_artifact)
+
+ Geo::FileRegistry.create!(file_type: :job_artifact, file_id: artifact.id, bytes: 1234, success: true)
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+ subject.perform
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for synced job artifacts even with 0 bytes downloaded' do
+ artifact = create(:ci_job_artifact)
+
+ Geo::FileRegistry.create!(file_type: :job_artifact, file_id: artifact.id, bytes: 0, success: true)
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async)
+
+ subject.perform
+ end
+ end
+
+ # Test the case where we have:
+ #
+ # 1. A total of 10 files in the queue, and we can load a maximum of 5 and send 2 at a time.
+ # 2. We send 2, wait for 1 to finish, and then send again.
+ it 'attempts to load a new batch without pending downloads' do
+ stub_const('Geo::BaseSchedulerWorker::DB_RETRIEVE_BATCH_SIZE', 5)
+ secondary.update!(files_max_capacity: 2)
+ allow_any_instance_of(::Gitlab::Geo::Transfer).to receive(:download_from_primary).and_return(100)
+
+ avatar = fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'))
+ create_list(:lfs_object, 2, :with_file)
+ create_list(:user, 2, avatar: avatar)
+ create_list(:note, 2, :with_attachment)
+ create_list(:upload, 1, :personal_snippet_upload)
+ create_list(:ci_job_artifact, 1)
+ create(:appearance, logo: avatar, header_logo: avatar)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).exactly(10).times.and_call_original
+ # For 10 downloads, we expect four database reloads:
+ # 1. Load the first batch of 5.
+ # 2. 4 get sent out, 1 remains. This triggers another reload, which loads in the next 5.
+ # 3. Those 4 get sent out, and 1 remains.
+ # 4. Since the second reload filled the pipe with 4, we need to do a final reload to ensure
+ # zero are left.
+ expect(subject).to receive(:load_pending_resources).exactly(4).times.and_call_original
+
+ Sidekiq::Testing.inline! do
+ subject.perform
+ end
+ end
+
+ context 'with a failed file' do
+ let(:failed_registry) { create(:geo_file_registry, :lfs, file_id: 999, success: false) }
+
+ it 'does not stall backfill' do
+ unsynced = create(:lfs_object, :with_file)
+
+ stub_const('Geo::BaseSchedulerWorker::DB_RETRIEVE_BATCH_SIZE', 1)
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with(:lfs, failed_registry.file_id)
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).with(:lfs, unsynced.id)
+
+ subject.perform
+ end
+
+ it 'retries failed files' do
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).with('lfs', failed_registry.file_id)
+
+ subject.perform
+ end
+
+ it 'does not retry failed files when retry_at is tomorrow' do
+ failed_registry = create(:geo_file_registry, :lfs, file_id: 999, success: false, retry_at: Date.tomorrow)
+
+ expect(Geo::FileDownloadWorker).not_to receive(:perform_async).with('lfs', failed_registry.file_id)
+
+ subject.perform
+ end
+
+ it 'retries failed files when retry_at is in the past' do
+ failed_registry = create(:geo_file_registry, :lfs, file_id: 999, success: false, retry_at: Date.yesterday)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async).with('lfs', failed_registry.file_id)
+
+ subject.perform
+ end
+ end
+
+ context 'when node has namespace restrictions' do
+ let(:synced_group) { create(:group) }
+ let(:project_in_synced_group) { create(:project, group: synced_group) }
+ let(:unsynced_project) { create(:project) }
+
+ before do
+ allow(ProjectCacheWorker).to receive(:perform_async).and_return(true)
+
+ secondary.update!(selective_sync_type: 'namespaces', namespaces: [synced_group])
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for LFS object that does not belong to selected namespaces to replicate' do
+ lfs_object_in_synced_group = create(:lfs_objects_project, project: project_in_synced_group)
+ create(:lfs_objects_project, project: unsynced_project)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with(:lfs, lfs_object_in_synced_group.lfs_object_id).once.and_return(spy)
+
+ subject.perform
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for job artifact that does not belong to selected namespaces to replicate' do
+ create(:ci_job_artifact, project: unsynced_project)
+ job_artifact_in_synced_group = create(:ci_job_artifact, project: project_in_synced_group)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with(:job_artifact, job_artifact_in_synced_group.id).once.and_return(spy)
+
+ subject.perform
+ end
+
+ it 'does not perform Geo::FileDownloadWorker for upload objects that do not belong to selected namespaces to replicate' do
+ avatar = fixture_file_upload(Rails.root.join('spec/fixtures/dk.png'))
+ avatar_in_synced_group = create(:upload, model: synced_group, path: avatar)
+ create(:upload, model: create(:group), path: avatar)
+ avatar_in_project_in_synced_group = create(:upload, model: project_in_synced_group, path: avatar)
+ create(:upload, model: unsynced_project, path: avatar)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with('avatar', avatar_in_project_in_synced_group.id).once.and_return(spy)
+
+ expect(Geo::FileDownloadWorker).to receive(:perform_async)
+ .with('avatar', avatar_in_synced_group.id).once.and_return(spy)
+
+ subject.perform
+ end
+ end
+ end
+
+ # Disable transactions via :delete method because a foreign table
+ # can't see changes inside a transaction of a different connection.
+ describe 'when PostgreSQL FDW is available', :geo, :delete do
+ # Skip if FDW isn't activated on this database
+ it_behaves_like '#perform', Gitlab::Database.postgresql? && !Gitlab::Geo.fdw?
+ end
+
+ describe 'when PostgreSQL FDW is not enabled', :geo do
+ before do
+ allow(Gitlab::Geo).to receive(:fdw?).and_return(false)
+ end
+
+ it_behaves_like '#perform', false
+ end
+
+ describe '#take_batch' do
+ it 'returns a batch of jobs' do
+ a = [[2, :lfs], [3, :lfs]]
+ b = []
+ c = [[3, :job_artifact], [8, :job_artifact], [9, :job_artifact]]
+ expect(subject).to receive(:db_retrieve_batch_size).and_return(4)
+
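+ # take_batch interleaves the queues evenly (longer queues lead) and
+ # truncates the result to db_retrieve_batch_size.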
+ expect(subject.send(:take_batch, a, b, c)).to eq([
+ [3, :job_artifact],
+ [2, :lfs],
+ [8, :job_artifact],
+ [3, :lfs]
+ ])
+ end
+ end
+
+ describe '#interleave' do
+ # Notice ties are resolved by taking the "first" tied element
+ it 'interleaves 2 arrays' do
+ a = %w{1 2 3}
+ b = %w{A B C}
+ expect(subject.send(:interleave, a, b)).to eq(%w{1 A 2 B 3 C})
+ end
+
+ # Notice there are no ties in this call
+ it 'interleaves 2 arrays with a longer second array' do
+ a = %w{1 2}
+ b = %w{A B C}
+ expect(subject.send(:interleave, a, b)).to eq(%w{A 1 B 2 C})
+ end
+
+ it 'interleaves 2 arrays with a longer first array' do
+ a = %w{1 2 3}
+ b = %w{A B}
+ expect(subject.send(:interleave, a, b)).to eq(%w{1 A 2 B 3})
+ end
+
+ it 'interleaves 3 arrays' do
+ a = %w{1 2 3}
+ b = %w{A B C}
+ c = %w{i ii iii}
+ expect(subject.send(:interleave, a, b, c)).to eq(%w{1 A i 2 B ii 3 C iii})
+ end
+
+ it 'interleaves 3 arrays of unequal length' do
+ a = %w{1 2}
+ b = %w{A}
+ c = %w{i ii iii iiii}
+ expect(subject.send(:interleave, a, b, c)).to eq(%w{i 1 ii A iii 2 iiii})
+ end
+ end
+end
diff --git a/spec/ee/workers/object_storage_upload_worker_spec.rb b/spec/ee/workers/object_storage_upload_worker_spec.rb
index d421fdf95a9..32ddcbe9757 100644
--- a/spec/ee/workers/object_storage_upload_worker_spec.rb
+++ b/spec/ee/workers/object_storage_upload_worker_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe ObjectStorageUploadWorker do
- let(:local) { ObjectStoreUploader::LOCAL_STORE }
- let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+ let(:local) { ObjectStorage::Store::LOCAL }
+ let(:remote) { ObjectStorage::Store::REMOTE }
def perform
described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 436735e7ed3..9bb456e89ff 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -6,7 +6,7 @@ FactoryBot.define do
file_type :archive
trait :remote_store do
- file_store JobArtifactUploader::REMOTE_STORE
+ file_store JobArtifactUploader::Store::REMOTE
end
after :build do |artifact|
diff --git a/spec/factories/geo/event_log.rb b/spec/factories/geo/event_log.rb
new file mode 100644
index 00000000000..dbe2f400f97
--- /dev/null
+++ b/spec/factories/geo/event_log.rb
@@ -0,0 +1,121 @@
+FactoryBot.define do
+ factory :geo_event_log, class: Geo::EventLog do
+ trait :created_event do
+ repository_created_event factory: :geo_repository_created_event
+ end
+
+ trait :updated_event do
+ repository_updated_event factory: :geo_repository_updated_event
+ end
+
+ trait :deleted_event do
+ repository_deleted_event factory: :geo_repository_deleted_event
+ end
+
+ trait :renamed_event do
+ repository_renamed_event factory: :geo_repository_renamed_event
+ end
+
+ trait :hashed_storage_migration_event do
+ hashed_storage_migrated_event factory: :geo_hashed_storage_migrated_event
+ end
+
+ trait :hashed_storage_attachments_event do
+ hashed_storage_attachments_event factory: :geo_hashed_storage_attachments_event
+ end
+
+ trait :lfs_object_deleted_event do
+ lfs_object_deleted_event factory: :geo_lfs_object_deleted_event
+ end
+
+ trait :job_artifact_deleted_event do
+ job_artifact_deleted_event factory: :geo_job_artifact_deleted_event
+ end
+ end
+
+ factory :geo_repository_created_event, class: Geo::RepositoryCreatedEvent do
+ project
+
+ repository_storage_name { project.repository_storage }
+ repository_storage_path { project.repository_storage_path }
+ add_attribute(:repo_path) { project.disk_path }
+ project_name { project.name }
+ wiki_path { project.wiki.disk_path }
+ end
+
+ factory :geo_repository_updated_event, class: Geo::RepositoryUpdatedEvent do
+ project
+
+ source 0
+ branches_affected 0
+ tags_affected 0
+ end
+
+ factory :geo_repository_deleted_event, class: Geo::RepositoryDeletedEvent do
+ project
+
+ repository_storage_name { project.repository_storage }
+ repository_storage_path { project.repository_storage_path }
+ deleted_path { project.path_with_namespace }
+ deleted_project_name { project.name }
+ end
+
+ factory :geo_repositories_changed_event, class: Geo::RepositoriesChangedEvent do
+ geo_node
+ end
+
+ factory :geo_repository_renamed_event, class: Geo::RepositoryRenamedEvent do
+ project { create(:project, :repository) }
+
+ repository_storage_name { project.repository_storage }
+ repository_storage_path { project.repository_storage_path }
+ old_path_with_namespace { project.path_with_namespace }
+ new_path_with_namespace { project.path_with_namespace + '_new' }
+ old_wiki_path_with_namespace { project.wiki.path_with_namespace }
+ new_wiki_path_with_namespace { project.wiki.path_with_namespace + '_new' }
+ old_path { project.path }
+ new_path { project.path + '_new' }
+ end
+
+ factory :geo_hashed_storage_migrated_event, class: Geo::HashedStorageMigratedEvent do
+ project { create(:project, :repository) }
+
+ repository_storage_name { project.repository_storage }
+ repository_storage_path { project.repository_storage_path }
+ old_disk_path { project.path_with_namespace }
+ new_disk_path { project.path_with_namespace + '_new' }
+ old_wiki_disk_path { project.wiki.path_with_namespace }
+ new_wiki_disk_path { project.wiki.path_with_namespace + '_new' }
+ new_storage_version { Project::HASHED_STORAGE_FEATURES[:repository] }
+ end
+
+ factory :geo_hashed_storage_attachments_event, class: Geo::HashedStorageAttachmentsEvent do
+ project { create(:project, :repository) }
+
+ old_attachments_path { Storage::LegacyProject.new(project).disk_path }
+ new_attachments_path { Storage::HashedProject.new(project).disk_path }
+ end
+
+ factory :geo_lfs_object_deleted_event, class: Geo::LfsObjectDeletedEvent do
+ lfs_object { create(:lfs_object, :with_file) }
+
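+ # The event stores a path relative to LfsObjectUploader.root; the log
+ # cursor re-joins it with the root when removing the file.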
+ after(:build, :stub) do |event, _|
+ local_store_path = Pathname.new(LfsObjectUploader.root)
+ relative_path = Pathname.new(event.lfs_object.file.path).relative_path_from(local_store_path)
+
+ event.oid = event.lfs_object.oid
+ event.file_path = relative_path
+ end
+ end
+
+ factory :geo_job_artifact_deleted_event, class: Geo::JobArtifactDeletedEvent do
+ job_artifact { create(:ci_job_artifact, :archive) }
+
+ after(:build, :stub) do |event, _|
+ local_store_path = Pathname.new(JobArtifactUploader.root)
+ relative_path = Pathname.new(event.job_artifact.file.path).relative_path_from(local_store_path)
+
+ event.file_path = relative_path
+ end
+ end
+end
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 1512f5a0e58..8c531cf5909 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -18,7 +18,7 @@ FactoryBot.define do
end
trait :with_avatar do
- avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+ avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :access_requestable do
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 707ecbd6be5..0889c5090fb 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -122,11 +122,11 @@ FactoryBot.define do
end
trait :with_attachment do
- attachment { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") }
+ attachment { fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png") }
end
trait :with_svg_attachment do
- attachment { fixture_file_upload(Rails.root + "spec/fixtures/unsanitized.svg", "image/svg+xml") }
+ attachment { fixture_file_upload(Rails.root.join("spec/fixtures/unsanitized.svg"), "image/svg+xml") }
end
transient do
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index d0f3911f730..16d328a5bc2 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -90,7 +90,7 @@ FactoryBot.define do
end
trait :with_avatar do
- avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+ avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :broken_storage do
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index c39500faea1..9e8a55eaedb 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -1,24 +1,43 @@
FactoryBot.define do
factory :upload do
model { build(:project) }
- path { "uploads/-/system/project/avatar/avatar.jpg" }
size 100.kilobytes
uploader "AvatarUploader"
+ store ObjectStorage::Store::LOCAL
- trait :personal_snippet do
+ # we should build a mount-agnostic upload by default
+ transient do
+ mounted_as :avatar
+ secret SecureRandom.hex
+ end
+
+ # this needs to comply with RecordsUpload::Concern#upload_path
+ path { File.join("uploads/-/system", model.class.to_s.underscore, mounted_as.to_s, 'avatar.jpg') }
+
+ trait :personal_snippet_upload do
model { build(:personal_snippet) }
+ path { File.join(secret, 'myfile.jpg') }
uploader "PersonalFileUploader"
end
trait :issuable_upload do
- path { "#{SecureRandom.hex}/myfile.jpg" }
+ path { File.join(secret, 'myfile.jpg') }
uploader "FileUploader"
end
trait :namespace_upload do
- path { "#{SecureRandom.hex}/myfile.jpg" }
model { build(:group) }
+ path { File.join(secret, 'myfile.jpg') }
uploader "NamespaceFileUploader"
end
+
+ trait :attachment_upload do
+ transient do
+ mounted_as :attachment
+ end
+
+ model { build(:note) }
+ uploader "AttachmentUploader"
+ end
end
end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index e62e0b263ca..769fd656e7a 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -38,7 +38,7 @@ FactoryBot.define do
end
trait :with_avatar do
- avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
+ avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :two_factor_via_otp do
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
index 8bb9ebe0419..370c2490b97 100644
--- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
+++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
@@ -23,6 +23,27 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
+ # E.g. The installation is in use at the time of migration, and someone has
+ # just uploaded a file
+ shared_examples 'does not add files in /uploads/tmp' do
+ let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
+
+ before do
+ FileUtils.mkdir(File.dirname(tmp_file))
+ FileUtils.touch(tmp_file)
+ end
+
+ after do
+ FileUtils.rm(tmp_file)
+ end
+
+ it 'does not add files from /uploads/tmp' do
+ described_class.new.perform
+
+ expect(untracked_files_for_uploads.count).to eq(5)
+ end
+ end
+
it 'ensures the untracked_files_for_uploads table exists' do
expect do
described_class.new.perform
@@ -109,24 +130,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
- # E.g. The installation is in use at the time of migration, and someone has
- # just uploaded a file
context 'when there are files in /uploads/tmp' do
- let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
-
- before do
- FileUtils.touch(tmp_file)
- end
-
- after do
- FileUtils.rm(tmp_file)
- end
-
- it 'does not add files from /uploads/tmp' do
- described_class.new.perform
-
- expect(untracked_files_for_uploads.count).to eq(5)
- end
+ it_behaves_like 'does not add files in /uploads/tmp'
end
end
end
@@ -197,24 +202,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
- # E.g. The installation is in use at the time of migration, and someone has
- # just uploaded a file
context 'when there are files in /uploads/tmp' do
- let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
-
- before do
- FileUtils.touch(tmp_file)
- end
-
- after do
- FileUtils.rm(tmp_file)
- end
-
- it 'does not add files from /uploads/tmp' do
- described_class.new.perform
-
- expect(untracked_files_for_uploads.count).to eq(5)
- end
+ it_behaves_like 'does not add files in /uploads/tmp'
end
end
end
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index 39e3b875c49..326ed2f2ecf 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -17,7 +17,7 @@ describe Gitlab::Gfm::UploadsRewriter do
end
let(:text) do
- "Text and #{image_uploader.to_markdown} and #{zip_uploader.to_markdown}"
+ "Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}"
end
describe '#rewrite' do
diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
index 63992ea8ab8..a685521cbf0 100644
--- a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb
@@ -4,7 +4,6 @@ describe Gitlab::ImportExport::UploadsRestorer do
describe 'bundle a project Git repo' do
let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
- let(:uploads_path) { FileUploader.dynamic_path_segment(project) }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
@@ -26,9 +25,9 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
it 'copies the uploads to the project path' do
- restorer.restore
+ subject.restore
- uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+ uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
@@ -44,9 +43,9 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
it 'copies the uploads to the project path' do
- restorer.restore
+ subject.restore
- uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
+ uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
index e8948de1f3a..959779523f4 100644
--- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
@@ -30,7 +30,7 @@ describe Gitlab::ImportExport::UploadsSaver do
it 'copies the uploads to the export path' do
saver.save
- uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+ uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
@@ -52,7 +52,7 @@ describe Gitlab::ImportExport::UploadsSaver do
it 'copies the uploads to the export path' do
saver.save
- uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
+ uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
diff --git a/spec/migrations/remove_empty_fork_networks_spec.rb b/spec/migrations/remove_empty_fork_networks_spec.rb
index cf6ae5cda74..ca9086a84d0 100644
--- a/spec/migrations/remove_empty_fork_networks_spec.rb
+++ b/spec/migrations/remove_empty_fork_networks_spec.rb
@@ -12,6 +12,10 @@ describe RemoveEmptyForkNetworks, :migration do
deleted_project.destroy!
end
+ after do
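+ # Migration specs mutate the schema, so reset the cached column
+ # information before later examples run.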
+ Upload.reset_column_information
+ end
+
it 'deletes only the fork network without members' do
expect(fork_networks.count).to eq(2)
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index b3f160f3119..138b2a4935f 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -204,7 +204,7 @@ describe Namespace do
let(:parent) { create(:group, name: 'parent', path: 'parent') }
let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) }
- let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) }
+ let(:uploads_dir) { FileUploader.root }
let(:pages_dir) { File.join(TestEnv.pages_path) }
before do
diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb
index 345382ea8c7..42f3d609770 100644
--- a/spec/models/upload_spec.rb
+++ b/spec/models/upload_spec.rb
@@ -45,51 +45,6 @@ describe Upload do
end
end
- describe '.remove_path' do
- it 'removes all records at the given path' do
- described_class.create!(
- size: File.size(__FILE__),
- path: __FILE__,
- model: build_stubbed(:user),
- uploader: 'AvatarUploader'
- )
-
- expect { described_class.remove_path(__FILE__) }
- .to change { described_class.count }.from(1).to(0)
- end
- end
-
- describe '.record' do
- let(:fake_uploader) do
- double(
- file: double(size: 12_345),
- relative_path: 'foo/bar.jpg',
- model: build_stubbed(:user),
- class: 'AvatarUploader'
- )
- end
-
- it 'removes existing paths before creation' do
- expect(described_class).to receive(:remove_path)
- .with(fake_uploader.relative_path)
-
- described_class.record(fake_uploader)
- end
-
- it 'creates a new record and assigns size, path, model, and uploader' do
- upload = described_class.record(fake_uploader)
-
- aggregate_failures do
- expect(upload).to be_persisted
- expect(upload.size).to eq fake_uploader.file.size
- expect(upload.path).to eq fake_uploader.relative_path
- expect(upload.model_id).to eq fake_uploader.model.id
- expect(upload.model_type).to eq fake_uploader.model.class.to_s
- expect(upload.uploader).to eq fake_uploader.class
- end
- end
- end
-
describe '#absolute_path' do
it 'returns the path directly when already absolute' do
path = '/path/to/namespace/project/secret/file.jpg'
@@ -111,27 +66,27 @@ describe Upload do
end
end
- describe '#calculate_checksum' do
- it 'calculates the SHA256 sum' do
- upload = described_class.new(
- path: __FILE__,
- size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
- )
+ describe '#calculate_checksum!' do
+ let(:upload) do
+ described_class.new(path: __FILE__,
+ size: described_class::CHECKSUM_THRESHOLD - 1.megabyte)
+ end
+
+ it 'sets `checksum` to SHA256 sum of the file' do
expected = Digest::SHA256.file(__FILE__).hexdigest
- expect { upload.calculate_checksum }
+ expect { upload.calculate_checksum! }
.to change { upload.checksum }.from(nil).to(expected)
end
- it 'returns nil for a non-existant file' do
- upload = described_class.new(
- path: __FILE__,
- size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
- )
-
+ it 'sets `checksum` to nil for a non-existent file' do
expect(upload).to receive(:exist?).and_return(false)
- expect(upload.calculate_checksum).to be_nil
+ checksum = Digest::SHA256.file(__FILE__).hexdigest
+ upload.checksum = checksum
+
+ expect { upload.calculate_checksum! }
+ .to change { upload.checksum }.from(checksum).to(nil)
end
end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 5c6eee09285..8086b91a488 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -947,7 +947,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow to pass temp file from any path
- allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
+ allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
end
context 'when job has been erased' do
@@ -1124,7 +1124,7 @@ describe API::Runner do
# by configuring this path we allow to pass file from @tmpdir only
# but all temporary files are stored in system tmp directory
@tmpdir = Dir.mktmpdir
- allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
+ allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir)
end
after do
@@ -1153,7 +1153,7 @@ describe API::Runner do
context 'when job has artifacts' do
let(:job) { create(:ci_build) }
- let(:store) { JobArtifactUploader::LOCAL_STORE }
+ let(:store) { JobArtifactUploader::Store::LOCAL }
before do
create(:ci_job_artifact, :archive, file_store: store, job: job)
@@ -1175,7 +1175,7 @@ describe API::Runner do
end
context 'when artifacts are stored remotely' do
- let(:store) { JobArtifactUploader::REMOTE_STORE }
+ let(:store) { JobArtifactUploader::Store::REMOTE }
let!(:job) { create(:ci_build) }
it 'download artifacts' do
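Throughout these specs the flat `LOCAL_STORE`/`REMOTE_STORE` constants give way to a shared `Store` namespace. A sketch of that namespace, assuming the conventional integer values persisted in the `*_store` columns:

    module ObjectStorage
      module Store
        LOCAL  = 1 # file lives on local disk
        REMOTE = 2 # file lives in the object store (fog)
      end
    end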
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 8bfc8693981..0a8788fd57e 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -245,7 +245,7 @@ describe 'Git LFS API and storage' do
context 'when LFS uses object storage' do
let(:before_get) do
stub_lfs_object_storage
- lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect' do
@@ -975,7 +975,7 @@ describe 'Git LFS API and storage' do
end
it 'responds with status 200, location of lfs store and object details' do
- expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
+ expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
@@ -1132,7 +1132,7 @@ describe 'Git LFS API and storage' do
end
it 'with location of lfs store and object details' do
- expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
+ expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
@@ -1246,7 +1246,7 @@ describe 'Git LFS API and storage' do
end
def setup_tempfile(lfs_tmp)
- upload_path = "#{Gitlab.config.lfs.storage_path}/tmp/upload"
+ upload_path = LfsObjectUploader.workhorse_upload_path
FileUtils.mkdir_p(upload_path)
FileUtils.touch(File.join(upload_path, lfs_tmp))
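The hard-coded `tmp/upload` path strings are replaced by a single helper. Judging from the legacy-artifact expectations later in this diff (the path starts with `root` and ends with a tmp uploads directory), the helper plausibly has this shape (hypothetical; the exact tmp directory name per uploader is an assumption):

    def self.workhorse_upload_path
      File.join(root, base_dir, 'tmp/uploads')
    end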
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 53ea88332fb..dfe9adbbcdc 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -244,7 +244,7 @@ describe Issues::MoveService do
context 'issue description with uploads' do
let(:uploader) { build(:file_uploader, project: old_project) }
- let(:description) { "Text and #{uploader.to_markdown}" }
+ let(:description) { "Text and #{uploader.markdown_link}" }
include_context 'issue move executed'
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
index 50e59954f73..15699574b3a 100644
--- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -6,7 +6,7 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
- let!(:upload) { Upload.find_by(path: file_uploader.relative_path) }
+ let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let(:file_uploader) { build(:file_uploader, project: project) }
let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
@@ -58,6 +58,6 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
end
def base_path(storage)
- FileUploader.dynamic_path_builder(storage.disk_path)
+ File.join(FileUploader.root, storage.disk_path)
end
end
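The service spec now composes the base path from two public pieces instead of the removed `dynamic_path_builder`. Usage sketch (the example values are assumptions):

    base = File.join(FileUploader.root, project.disk_path)
    # FileUploader.root => the local uploads root (e.g. ".../public/uploads", assumption)
    # project.disk_path => "@hashed/ab/cd/ab..." for hashed storage,
    #                      or the full namespace path for legacy storage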
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
index 935c08221e0..7ce80c82439 100644
--- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -2,6 +2,8 @@ shared_examples 'handle uploads' do
let(:user) { create(:user) }
let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
+ let(:secret) { FileUploader.generate_secret }
+ let(:uploader_class) { FileUploader }
describe "POST #create" do
context 'when a user is not authorized to upload a file' do
@@ -65,7 +67,12 @@ shared_examples 'handle uploads' do
describe "GET #show" do
let(:show_upload) do
- get :show, params.merge(secret: "123456", filename: "image.jpg")
+ get :show, params.merge(secret: secret, filename: "rails_sample.jpg")
+ end
+
+ before do
+ expect(FileUploader).to receive(:generate_secret).and_return(secret)
+ UploadService.new(model, jpg, uploader_class).execute
end
context "when the model is public" do
@@ -75,11 +82,6 @@ shared_examples 'handle uploads' do
context "when not signed in" do
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
it "responds with status 200" do
show_upload
@@ -88,6 +90,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
@@ -102,11 +108,6 @@ shared_examples 'handle uploads' do
end
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
it "responds with status 200" do
show_upload
@@ -115,6 +116,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
@@ -131,11 +136,6 @@ shared_examples 'handle uploads' do
context "when not signed in" do
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
context "when the file is an image" do
before do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -149,6 +149,10 @@ shared_examples 'handle uploads' do
end
context "when the file is not an image" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
+ end
+
it "redirects to the sign in page" do
show_upload
@@ -158,6 +162,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "redirects to the sign in page" do
show_upload
@@ -177,11 +185,6 @@ shared_examples 'handle uploads' do
end
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
it "responds with status 200" do
show_upload
@@ -190,6 +193,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
@@ -200,11 +207,6 @@ shared_examples 'handle uploads' do
context "when the user doesn't have access to the model" do
context "when the file exists" do
- before do
- allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
- allow(jpg).to receive(:exists?).and_return(true)
- end
-
context "when the file is an image" do
before do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
@@ -218,6 +220,10 @@ shared_examples 'handle uploads' do
end
context "when the file is not an image" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
@@ -227,6 +233,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
+ before do
+ allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
+ end
+
it "responds with status 404" do
show_upload
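These shared examples are parameterised over `model`, `params`, `secret` and `uploader_class`; a controller spec includes them roughly like this (hypothetical consumer, names assumed):

    describe Projects::UploadsController do
      let(:model) { create(:project, :public) }
      let(:params) do
        { namespace_id: model.namespace.to_param, project_id: model.to_param }
      end

      it_behaves_like 'handle uploads'
    end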
diff --git a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
new file mode 100644
index 00000000000..0022b2f803f
--- /dev/null
+++ b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
@@ -0,0 +1,126 @@
+shared_context 'with storage' do |store, **stub_params|
+ before do
+ subject.object_store = store
+ end
+end
+
+shared_examples "migrates" do |to_store:, from_store: nil|
+ let(:to) { to_store }
+ let(:from) { from_store || subject.object_store }
+
+ def migrate(to)
+ subject.migrate!(to)
+ end
+
+ def checksum
+ Digest::SHA256.hexdigest(subject.read)
+ end
+
+ before do
+ migrate(from)
+ end
+
+ it 'does nothing when migrating to the current store' do
+ expect { migrate(from) }.not_to change { subject.object_store }.from(from)
+ end
+
+ it 'migrates to the specified store' do
+ from_checksum = checksum
+
+ expect { migrate(to) }.to change { subject.object_store }.from(from).to(to)
+ expect(checksum).to eq(from_checksum)
+ end
+
+ it 'removes the original file after the migration' do
+ original_file = subject.file.path
+ migrate(to)
+
+ expect(File.exist?(original_file)).to be_falsey
+ end
+
+ context 'when migration is unsuccessful' do
+ shared_examples "handles gracefully" do |error:|
+ it 'does not update the object_store' do
+ expect { migrate(to) }.to raise_error(error)
+ expect(subject.object_store).to eq(from)
+ end
+
+ it 'does not delete the original file' do
+ expect { migrate(to) }.to raise_error(error)
+ expect(subject.exists?).to be_truthy
+ end
+ end
+
+ context 'when the store is not supported' do
+ let(:to) { -1 } # not a valid store
+
+ include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError
+ end
+
+ context 'upon a fog failure' do
+ before do
+ storage_class = subject.send(:storage_for, to).class
+ expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.")
+ end
+
+ include_examples "handles gracefully", error: "Store failure."
+ end
+
+ context 'upon a database failure' do
+ before do
+ expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.")
+ end
+
+ include_examples "handles gracefully", error: "ActiveRecord failure."
+ end
+ end
+end
+
+shared_examples "matches the method pattern" do |method|
+ let(:target) { subject }
+ let(:args) { nil }
+ let(:pattern) { patterns[method] }
+
+ it do
+ return skip "No pattern provided, skipping." unless pattern
+
+ expect(target.method(method).call(*args)).to match(pattern)
+ end
+end
+
+shared_examples "builds correct paths" do |**patterns|
+ let(:patterns) { patterns }
+
+ before do
+ allow(subject).to receive(:filename).and_return('<filename>')
+ end
+
+ describe "#store_dir" do
+ it_behaves_like "matches the method pattern", :store_dir
+ end
+
+ describe "#cache_dir" do
+ it_behaves_like "matches the method pattern", :cache_dir
+ end
+
+ describe "#work_dir" do
+ it_behaves_like "matches the method pattern", :work_dir
+ end
+
+ describe "#upload_path" do
+ it_behaves_like "matches the method pattern", :upload_path
+ end
+
+ describe ".absolute_path" do
+ it_behaves_like "matches the method pattern", :absolute_path do
+ let(:target) { subject.class }
+ let(:args) { [upload] }
+ end
+ end
+
+ describe ".base_dir" do
+ it_behaves_like "matches the method pattern", :base_dir do
+ let(:target) { subject.class }
+ end
+ end
+end
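Note the double indirection in "builds correct paths": the `**patterns` hash captured by the shared example is re-exposed through `let(:patterns)`, so "matches the method pattern" can look up its own method name. Under those assumptions, a call like the one sketched below reduces to a single `match` assertion per provided key:

    # it_behaves_like 'builds correct paths', store_dir: %r[foo/]
    # effectively asserts, for each key that was passed:
    expect(target.method(:store_dir).call(*args)).to match(%r[foo/])
    # methods whose key is absent are skipped rather than failed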
diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb
index 4f469648d5c..93477e513f2 100644
--- a/spec/support/stub_object_storage.rb
+++ b/spec/support/stub_object_storage.rb
@@ -30,4 +30,11 @@ module StubConfiguration
remote_directory: 'lfs-objects',
**params)
end
+
+ def stub_uploads_object_storage(uploader = described_class, **params)
+ stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
+ uploader: uploader,
+ remote_directory: 'uploads',
+ **params)
+ end
end
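The new helper follows the existing artifacts/LFS stubs and defaults the uploader to `described_class`, so most uploader specs can call it with no arguments; overrides such as `enabled:` and `licensed:` are forwarded, as the artifact stubs elsewhere in this diff suggest:

    before do
      stub_uploads_object_storage                      # uploader defaults to described_class
    end

    stub_uploads_object_storage(FileUploader, enabled: false)  # explicit form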
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index 664698fcbaf..3b79d769e02 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -239,7 +239,7 @@ module TestEnv
end
def artifacts_path
- Gitlab.config.artifacts.path
+ Gitlab.config.artifacts.storage_path
end
# When no cached assets exist, manually hit the root path to create them
diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb
index d05eda08201..5752078d2a0 100644
--- a/spec/support/track_untracked_uploads_helpers.rb
+++ b/spec/support/track_untracked_uploads_helpers.rb
@@ -1,6 +1,6 @@
module TrackUntrackedUploadsHelpers
def uploaded_file
- fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
+ fixture_path = Rails.root.join('spec/fixtures/rails_sample.jpg')
fixture_file_upload(fixture_path)
end
diff --git a/spec/tasks/gitlab/artifacts_rake_spec.rb b/spec/tasks/gitlab/artifacts_rake_spec.rb
index a30823b8875..570c7fa7503 100644
--- a/spec/tasks/gitlab/artifacts_rake_spec.rb
+++ b/spec/tasks/gitlab/artifacts_rake_spec.rb
@@ -18,7 +18,7 @@ describe 'gitlab:artifacts namespace rake task' do
let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
context 'when local storage is used' do
- let(:store) { ObjectStoreUploader::LOCAL_STORE }
+ let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
@@ -27,8 +27,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
- expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
- expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
@@ -38,8 +38,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
- expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
- expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
@@ -47,8 +47,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "fails to migrate to remote storage" do
subject
- expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
- expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
@@ -56,13 +56,13 @@ describe 'gitlab:artifacts namespace rake task' do
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
- let(:store) { ObjectStoreUploader::REMOTE_STORE }
+ let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
- expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
- expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
@@ -72,7 +72,7 @@ describe 'gitlab:artifacts namespace rake task' do
let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
context 'when local storage is used' do
- let(:store) { ObjectStoreUploader::LOCAL_STORE }
+ let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
@@ -81,7 +81,7 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
- expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
@@ -91,7 +91,7 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
- expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
@@ -99,19 +99,19 @@ describe 'gitlab:artifacts namespace rake task' do
it "fails to migrate to remote storage" do
subject
- expect(artifact.reload.file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
- let(:store) { ObjectStoreUploader::REMOTE_STORE }
+ let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
- expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
diff --git a/spec/tasks/gitlab/lfs_rake_spec.rb b/spec/tasks/gitlab/lfs_rake_spec.rb
index faed24f2010..f1b677bd6ee 100644
--- a/spec/tasks/gitlab/lfs_rake_spec.rb
+++ b/spec/tasks/gitlab/lfs_rake_spec.rb
@@ -6,8 +6,8 @@ describe 'gitlab:lfs namespace rake task' do
end
describe 'migrate' do
- let(:local) { ObjectStoreUploader::LOCAL_STORE }
- let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+ let(:local) { ObjectStorage::Store::LOCAL }
+ let(:remote) { ObjectStorage::Store::REMOTE }
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
def lfs_migrate
diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb
index 04ee6e9bfad..70618f6bc19 100644
--- a/spec/uploaders/attachment_uploader_spec.rb
+++ b/spec/uploaders/attachment_uploader_spec.rb
@@ -1,28 +1,37 @@
require 'spec_helper'
describe AttachmentUploader do
- let(:uploader) { described_class.new(build_stubbed(:user)) }
+ let(:note) { create(:note, :with_attachment) }
+ let(:uploader) { note.attachment }
+ let(:upload) { create(:upload, :attachment_upload, model: uploader.model) }
- describe "#store_dir" do
- it "stores in the system dir" do
- expect(uploader.store_dir).to start_with("uploads/-/system/user")
- end
+ subject { uploader }
- it "uses the old path when using object storage" do
- expect(described_class).to receive(:file_storage?).and_return(false)
- expect(uploader.store_dir).to start_with("uploads/user")
- end
- end
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/note/attachment/],
+ upload_path: %r[uploads/-/system/note/attachment/],
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
- describe '#move_to_cache' do
- it 'is true' do
- expect(uploader.move_to_cache).to eq(true)
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[note/attachment/],
+ upload_path: %r[note/attachment/]
end
- describe '#move_to_store' do
- it 'is true' do
- expect(uploader.move_to_store).to eq(true)
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index 1dc574699d8..6f4dbae26ab 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -1,28 +1,40 @@
require 'spec_helper'
describe AvatarUploader do
- let(:uploader) { described_class.new(build_stubbed(:user)) }
+ let(:model) { build_stubbed(:user) }
+ let(:uploader) { described_class.new(model, :avatar) }
+ let(:upload) { create(:upload, model: model) }
- describe "#store_dir" do
- it "stores in the system dir" do
- expect(uploader.store_dir).to start_with("uploads/-/system/user")
- end
+ subject { uploader }
- it "uses the old path when using object storage" do
- expect(described_class).to receive(:file_storage?).and_return(false)
- expect(uploader.store_dir).to start_with("uploads/user")
- end
- end
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/user/avatar/],
+ upload_path: %r[uploads/-/system/user/avatar/],
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
- describe '#move_to_cache' do
- it 'is false' do
- expect(uploader.move_to_cache).to eq(false)
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[user/avatar/],
+ upload_path: %r[user/avatar/]
end
- describe '#move_to_store' do
- it 'is false' do
- expect(uploader.move_to_store).to eq(false)
+ context "with a file" do
+ let(:project) { create(:project, :with_avatar) }
+ let(:uploader) { project.avatar }
+ let(:upload) { uploader.upload }
+
+ before do
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index 0cf462e9553..bc024cd307c 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -3,13 +3,13 @@ require 'spec_helper'
describe FileMover do
let(:filename) { 'banana_sample.gif' }
let(:file) { fixture_file_upload(Rails.root.join('spec', 'fixtures', filename)) }
+ let(:temp_file_path) { File.join('uploads/-/system/temp', 'secret55', filename) }
+
let(:temp_description) do
- 'test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) same ![banana_sample]'\
- '(/uploads/-/system/temp/secret55/banana_sample.gif)'
+ "test ![banana_sample](/#{temp_file_path}) "\
+ "same ![banana_sample](/#{temp_file_path}) "
end
- let(:temp_file_path) { File.join('secret55', filename).to_s }
- let(:file_path) { File.join('uploads', '-', 'system', 'personal_snippet', snippet.id.to_s, 'secret55', filename).to_s }
-
+ let(:file_path) { File.join('uploads/-/system/personal_snippet', snippet.id.to_s, 'secret55', filename) }
let(:snippet) { create(:personal_snippet, description: temp_description) }
subject { described_class.new(file_path, snippet).execute }
@@ -28,8 +28,8 @@ describe FileMover do
expect(snippet.reload.description)
.to eq(
- "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"\
- " same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"
+ "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "\
+ "same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "
)
end
@@ -50,8 +50,8 @@ describe FileMover do
expect(snippet.reload.description)
.to eq(
- "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"\
- " same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"
+ "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "\
+ "same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "
)
end
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index fd195d6f9b8..b92d52727c1 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -1,118 +1,78 @@
require 'spec_helper'
describe FileUploader do
- let(:uploader) { described_class.new(build_stubbed(:project)) }
+ let(:group) { create(:group, name: 'awesome') }
+ let(:project) { create(:project, namespace: group, name: 'project') }
+ let(:uploader) { described_class.new(project) }
+ let(:upload) { double(model: project, path: 'secret/foo.jpg') }
- context 'legacy storage' do
- let(:project) { build_stubbed(:project) }
-
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
-
- dynamic_segment = project.full_path
-
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
- end
- end
+ subject { uploader }
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
-
- expect(uploader.store_dir).to include(project.full_path)
- expect(uploader.store_dir).not_to include("system")
- end
- end
+ shared_examples 'builds correct legacy storage paths' do
+ include_examples 'builds correct paths',
+ store_dir: %r{awesome/project/\h+},
+ absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
end
- context 'hashed storage' do
+ shared_examples 'uses hashed storage' do
context 'when rolled out attachments' do
- let(:project) { build_stubbed(:project, :hashed) }
-
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
-
- dynamic_segment = project.disk_path
-
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
- end
+ before do
+ allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
end
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
+ let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') }
- expect(uploader.store_dir).to include(project.disk_path)
- expect(uploader.store_dir).not_to include("system")
- end
- end
+ it_behaves_like 'builds correct paths',
+ store_dir: %r{ca/fe/fe/ed/\h+},
+ absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
end
context 'when only repositories are rolled out' do
- let(:project) { build_stubbed(:project, storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
+ let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: project, path: 'secret/foo.jpg')
+ it_behaves_like 'builds correct legacy storage paths'
+ end
+ end
- dynamic_segment = project.full_path
+ context 'legacy storage' do
+ it_behaves_like 'builds correct legacy storage paths'
+ include_examples 'uses hashed storage'
+ end
- expect(described_class.absolute_path(upload))
- .to end_with("#{dynamic_segment}/secret/foo.jpg")
- end
- end
+ context 'object store is remote' do
+ before do
+ stub_uploads_object_storage
+ end
- describe "#store_dir" do
- it "stores in the namespace path" do
- uploader = described_class.new(project)
+ include_context 'with storage', described_class::Store::REMOTE
- expect(uploader.store_dir).to include(project.full_path)
- expect(uploader.store_dir).not_to include("system")
- end
- end
- end
+ it_behaves_like 'builds correct legacy storage paths'
+ include_examples 'uses hashed storage'
end
describe 'initialize' do
- it 'generates a secret if none is provided' do
- expect(SecureRandom).to receive(:hex).and_return('secret')
-
- uploader = described_class.new(double)
-
- expect(uploader.secret).to eq 'secret'
- end
+ let(:uploader) { described_class.new(double, 'secret') }
it 'accepts a secret parameter' do
- expect(SecureRandom).not_to receive(:hex)
-
- uploader = described_class.new(double, 'secret')
-
- expect(uploader.secret).to eq 'secret'
+ expect(described_class).not_to receive(:generate_secret)
+ expect(uploader.secret).to eq('secret')
end
end
- describe '#move_to_cache' do
- it 'is true' do
- expect(uploader.move_to_cache).to eq(true)
+ describe '#secret' do
+ it 'generates a secret if none is provided' do
+ expect(described_class).to receive(:generate_secret).and_return('secret')
+ expect(uploader.secret).to eq('secret')
end
end
- describe '#move_to_store' do
- it 'is true' do
- expect(uploader.move_to_store).to eq(true)
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))
+ stub_uploads_object_storage
end
- end
-
- describe '#relative_path' do
- it 'removes the leading dynamic path segment' do
- fixture = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
- uploader.store!(fixture_file_upload(fixture))
- expect(uploader.relative_path).to match(/\A\h{32}\/rails_sample.jpg\z/)
- end
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
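Taken together, the two shared-example groups pin down the dynamic path segment: `disk_path` once hashed attachments are rolled out, `full_path` otherwise. A hedged reconstruction (method and predicate names are assumptions):

    def dynamic_segment
      if project.hashed_storage?(:attachments)
        project.disk_path   # e.g. "ca/fe/fe/ed" in the stubbed spec
      else
        project.full_path   # e.g. "awesome/project"
      end
    end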
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index decea35c86d..fda70a8441b 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -1,46 +1,26 @@
require 'spec_helper'
describe JobArtifactUploader do
- let(:store) { described_class::LOCAL_STORE }
+ let(:store) { described_class::Store::LOCAL }
let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) }
- let(:local_path) { Gitlab.config.artifacts.path }
- describe '#store_dir' do
- subject { uploader.store_dir }
+ subject { uploader }
- let(:path) { "#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.project_id}/#{job_artifact.id}" }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z],
+ cache_dir: %r[artifacts/tmp/cache],
+ work_dir: %r[artifacts/tmp/work]
- context 'when using local storage' do
- it { is_expected.to start_with(local_path) }
- it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
- it { is_expected.to end_with(path) }
- end
-
- context 'when using remote storage' do
- let(:store) { described_class::REMOTE_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
- it { is_expected.to end_with(path) }
+ context "object store is REMOTE" do
+ before do
+ stub_artifacts_object_storage
end
- end
-
- describe '#cache_dir' do
- subject { uploader.cache_dir }
-
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/cache') }
- end
- describe '#work_dir' do
- subject { uploader.work_dir }
+ include_context 'with storage', described_class::Store::REMOTE
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/work') }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
end
context 'file is stored in valid local_path' do
@@ -55,7 +35,7 @@ describe JobArtifactUploader do
subject { uploader.file.path }
- it { is_expected.to start_with(local_path) }
+ it { is_expected.to start_with("#{uploader.root}/#{uploader.class.base_dir}") }
it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
it { is_expected.to include("/#{job_artifact.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
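The `store_dir` regexp encodes a content-addressed layout; spelled out, a hypothetical reconstruction of the segment it matches (`disk_hash` is an assumed name for the SHA256 digest):

    # <sha256[0,2]>/<sha256[2,2]>/<sha256>/<YYYY_MM_DD>/<project_id>/<artifact_id>
    def dynamic_segment
      date = model.created_at.utc.strftime('%Y_%m_%d')
      File.join(disk_hash[0, 2], disk_hash[2, 2], disk_hash,
                date, model.project_id.to_s, model.id.to_s)
    end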
diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index 7b316072f47..eeb6fd90c9d 100644
--- a/spec/uploaders/legacy_artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -1,51 +1,35 @@
require 'rails_helper'
describe LegacyArtifactUploader do
- let(:store) { described_class::LOCAL_STORE }
+ let(:store) { described_class::Store::LOCAL }
let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
- let(:local_path) { Gitlab.config.artifacts.path }
+ let(:local_path) { described_class.root }
- describe '.local_store_path' do
- subject { described_class.local_store_path }
+ subject { uploader }
- it "delegate to artifacts path" do
- expect(Gitlab.config.artifacts).to receive(:path)
-
- subject
- end
- end
-
- describe '.artifacts_upload_path' do
- subject { described_class.artifacts_upload_path }
+ # TODO: move to Workhorse::UploadPath
+ describe '.workhorse_upload_path' do
+ subject { described_class.workhorse_upload_path }
it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('tmp/uploads/') }
+ it { is_expected.to end_with('tmp/uploads') }
end
- describe '#store_dir' do
- subject { uploader.store_dir }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z],
+ cache_dir: %r[artifacts/tmp/cache],
+ work_dir: %r[artifacts/tmp/work]
- let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
-
- context 'when using local storage' do
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with(path) }
+ context 'object store is remote' do
+ before do
+ stub_artifacts_object_storage
end
- end
- describe '#cache_dir' do
- subject { uploader.cache_dir }
+ include_context 'with storage', described_class::Store::REMOTE
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/cache') }
- end
-
- describe '#work_dir' do
- subject { uploader.work_dir }
-
- it { is_expected.to start_with(local_path) }
- it { is_expected.to end_with('/tmp/work') }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z]
end
describe '#filename' do
@@ -70,7 +54,7 @@ describe LegacyArtifactUploader do
subject { uploader.file.path }
- it { is_expected.to start_with(local_path) }
+ it { is_expected.to start_with("#{uploader.root}") }
it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") }
it { is_expected.to include("/#{job.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 9b8e2835ebc..2e4bd008afe 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -5,37 +5,22 @@ describe LfsObjectUploader do
let(:uploader) { described_class.new(lfs_object, :file) }
let(:path) { Gitlab.config.lfs.storage_path }
- describe '#move_to_cache' do
- it 'is true' do
- expect(uploader.move_to_cache).to eq(true)
- end
- end
-
- describe '#move_to_store' do
- it 'is true' do
- expect(uploader.move_to_store).to eq(true)
- end
- end
+ subject { uploader }
- describe '#store_dir' do
- subject { uploader.store_dir }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}],
+ cache_dir: %r[/lfs-objects/tmp/cache],
+ work_dir: %r[/lfs-objects/tmp/work]
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with("#{lfs_object.oid[0, 2]}/#{lfs_object.oid[2, 2]}") }
- end
-
- describe '#cache_dir' do
- subject { uploader.cache_dir }
-
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with('/tmp/cache') }
- end
+ context "object store is REMOTE" do
+ before do
+ stub_lfs_object_storage
+ end
- describe '#work_dir' do
- subject { uploader.work_dir }
+ include_context 'with storage', described_class::Store::REMOTE
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with('/tmp/work') }
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}]
end
describe 'migration to object storage' do
@@ -73,7 +58,7 @@ describe LfsObjectUploader do
end
describe 'remote file' do
- let(:remote) { described_class::REMOTE_STORE }
+ let(:remote) { described_class::Store::REMOTE }
let(:lfs_object) { create(:lfs_object, file_store: remote) }
context 'with object storage enabled' do
@@ -103,7 +88,7 @@ describe LfsObjectUploader do
end
def store_file(lfs_object)
- lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
+ lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png")
lfs_object.save!
end
end
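LFS objects keep their two-level OID sharding (`store_dir` matching `%r[\h{2}/\h{2}]`). A sketch of the segment construction, assuming the OID-based layout the old spec asserted explicitly:

    def store_dir  # hypothetical
      File.join(self.class.base_dir, model.oid[0, 2], model.oid[2, 2])
    end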
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index c6c4500c179..2f2c27127fc 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -1,21 +1,39 @@
require 'spec_helper'
+IDENTIFIER = %r{\h+/\S+}
+
describe NamespaceFileUploader do
let(:group) { build_stubbed(:group) }
let(:uploader) { described_class.new(group) }
+ let(:upload) { create(:upload, :namespace_upload, model: group) }
+
+ subject { uploader }
- describe "#store_dir" do
- it "stores in the namespace id directory" do
- expect(uploader.store_dir).to include(group.id.to_s)
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/namespace/\d+],
+ upload_path: IDENTIFIER,
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
+
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
- end
- describe ".absolute_path" do
- it "stores in thecorrect directory" do
- upload_record = create(:upload, :namespace_upload, model: group)
+ include_context 'with storage', described_class::Store::REMOTE
- expect(described_class.absolute_path(upload_record))
- .to include("-/system/namespace/#{group.id}")
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[namespace/\d+/\h+],
+ upload_path: IDENTIFIER
+ end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
new file mode 100644
index 00000000000..e01ad9af1dc
--- /dev/null
+++ b/spec/uploaders/object_storage_spec.rb
@@ -0,0 +1,350 @@
+require 'rails_helper'
+require 'carrierwave/storage/fog'
+
+class Implementation < GitlabUploader
+ include ObjectStorage::Concern
+ include ::RecordsUploads::Concern
+ prepend ::ObjectStorage::Extension::RecordsUploads
+
+ storage_options Gitlab.config.uploads
+
+ private
+
+ # user/:id
+ def dynamic_segment
+ File.join(model.class.to_s.underscore, model.id.to_s)
+ end
+end
+
+describe ObjectStorage do
+ let(:uploader_class) { Implementation }
+ let(:object) { build_stubbed(:user) }
+ let(:uploader) { uploader_class.new(object, :file) }
+
+ before do
+ allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
+ end
+
+ describe '#object_store=' do
+ it "reload the local storage" do
+ uploader.object_store = described_class::Store::LOCAL
+ expect(uploader.file_storage?).to be_truthy
+ end
+
+ it "reload the REMOTE storage" do
+ uploader.object_store = described_class::Store::REMOTE
+ expect(uploader.file_storage?).to be_falsey
+ end
+ end
+
+ context 'object_store is Store::LOCAL' do
+ before do
+ uploader.object_store = described_class::Store::LOCAL
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (base_dir, dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("uploads/-/system/user/")
+ end
+ end
+ end
+
+ context 'object_store is Store::REMOTE' do
+ before do
+ uploader.object_store = described_class::Store::REMOTE
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("user/")
+ end
+ end
+ end
+
+ describe '#object_store' do
+ it "delegates to <mount>_store on model" do
+ expect(object).to receive(:file_store)
+
+ uploader.object_store
+ end
+
+ context 'when store is null' do
+ before do
+ expect(object).to receive(:file_store).and_return(nil)
+ end
+
+ it "returns Store::LOCAL" do
+ expect(uploader.object_store).to eq(described_class::Store::LOCAL)
+ end
+ end
+
+ context 'when value is set' do
+ before do
+ expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
+ end
+
+ it "returns the given value" do
+ expect(uploader.object_store).to eq(described_class::Store::REMOTE)
+ end
+ end
+ end
+
+ describe '#file_cache_storage?' do
+ context 'when file storage is used' do
+ before do
+ uploader_class.cache_storage(:file)
+ end
+
+ it { expect(uploader).to be_file_cache_storage }
+ end
+
+ context 'when is remote storage' do
+ before do
+ uploader_class.cache_storage(:fog)
+ end
+
+ it { expect(uploader).not_to be_file_cache_storage }
+ end
+ end
+
+ # this means the model shall include
+ # include RecordsUpload::Concern
+ # prepend ObjectStorage::Extension::RecordsUploads
+ # the object_store persistence is delegated to the `Upload` model.
+ #
+ context 'when persist_object_store? is false' do
+ let(:object) { create(:project, :with_avatar) }
+ let(:uploader) { object.avatar }
+
+ it { expect(object).to be_a(Avatarable) }
+ it { expect(uploader.persist_object_store?).to be_falsey }
+
+ describe 'delegates the object_store logic to the `Upload` model' do
+ it 'sets @upload to the found `upload`' do
+ expect(uploader.upload).to eq(uploader.upload)
+ end
+
+ it 'sets @object_store to the `Upload` value' do
+ expect(uploader.object_store).to eq(uploader.upload.store)
+ end
+ end
+ end
+
+ # this means the model holds an <mounted_as>_store attribute directly
+ # and does not delegate the object_store persistence to the `Upload` model.
+ #
+ context 'persist_object_store? is true' do
+ context 'when using JobArtifactsUploader' do
+ let(:store) { described_class::Store::LOCAL }
+ let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
+ let(:uploader) { object.file }
+
+ context 'checking described_class' do
+ it "uploader include described_class::Concern" do
+ expect(uploader).to be_a(described_class::Concern)
+ end
+ end
+
+ describe '#use_file' do
+ context 'when file is stored locally' do
+ it "calls a regular path" do
+ expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache])
+ end
+ end
+
+ context 'when file is stored remotely' do
+ let(:store) { described_class::Store::REMOTE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "calls a cache path" do
+ expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache])
+ end
+ end
+ end
+
+ describe '#migrate!' do
+ subject { uploader.migrate!(new_store) }
+
+ shared_examples "updates the underlying <mounted>_store" do
+ it do
+ subject
+
+ expect(object.file_store).to eq(new_store)
+ end
+ end
+
+ context 'when using the same storage' do
+ let(:new_store) { store }
+
+ it "to not migrate the storage" do
+ subject
+
+ expect(uploader).not_to receive(:store!)
+ expect(uploader.object_store).to eq(store)
+ end
+ end
+
+ context 'when migrating to local storage' do
+ let(:store) { described_class::Store::REMOTE }
+ let(:new_store) { described_class::Store::LOCAL }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "local file does not exist" do
+ expect(File.exist?(uploader.path)).to eq(false)
+ end
+
+ it "remote file exist" do
+ expect(uploader.file.exists?).to be_truthy
+ end
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ expect(File.exist?(uploader.path)).to eq(true)
+ end
+ end
+
+ context 'when migrating to remote storage' do
+ let(:new_store) { described_class::Store::REMOTE }
+ let!(:current_path) { uploader.path }
+
+ it "file does exist" do
+ expect(File.exist?(current_path)).to eq(true)
+ end
+
+ context 'when storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it "to raise an error" do
+ expect { subject }.to raise_error(/Object Storage is not enabled/)
+ end
+ end
+
+ context 'when storage is unlicensed' do
+ before do
+ stub_artifacts_object_storage(licensed: false)
+ end
+
+ it "raises an error" do
+ expect { subject }.to raise_error(/Object Storage feature is missing/)
+ end
+ end
+
+ context 'when credentials are set' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ end
+
+ it "does delete original file" do
+ subject
+
+ expect(File.exist?(current_path)).to eq(false)
+ end
+
+ context 'when subject save fails' do
+ before do
+ expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
+ end
+
+ it "original file is not removed" do
+ expect { subject }.to raise_error(/exception/)
+
+ expect(File.exist?(current_path)).to eq(true)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '#fog_directory' do
+ let(:remote_directory) { 'directory' }
+
+ before do
+ uploader_class.storage_options double(object_store: double(remote_directory: remote_directory))
+ end
+
+ subject { uploader.fog_directory }
+
+ it { is_expected.to eq(remote_directory) }
+ end
+
+ describe '#fog_credentials' do
+ let(:connection) { Settingslogic.new("provider" => "AWS") }
+
+ before do
+ uploader_class.storage_options double(object_store: double(connection: connection))
+ end
+
+ subject { uploader.fog_credentials }
+
+ it { is_expected.to eq(provider: 'AWS') }
+ end
+
+ describe '#fog_public' do
+ subject { uploader.fog_public }
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '#verify_license!' do
+ subject { uploader.verify_license!(nil) }
+
+ context 'when using local storage' do
+ before do
+ expect(object).to receive(:file_store) { described_class::Store::LOCAL }
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when using remote storage' do
+ before do
+ uploader_class.storage_options double(object_store: double(enabled: true))
+ expect(object).to receive(:file_store) { described_class::Store::REMOTE }
+ end
+
+ context 'feature is not available' do
+ before do
+ expect(License).to receive(:feature_available?).with(:object_storage).and_return(false)
+ end
+
+ it "does raise an error" do
+ expect { subject }.to raise_error(/Object Storage feature is missing/)
+ end
+ end
+
+ context 'feature is available' do
+ before do
+ expect(License).to receive(:feature_available?).with(:object_storage).and_return(true)
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+ end
+end
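The two persistence modes exercised above differ only in where the store flag lives. A condensed sketch of the rule, hedged and with names taken from the specs:

    if uploader.persist_object_store?
      # the model carries <mounted_as>_store itself, e.g. ci_job_artifacts.file_store
      model.file_store = new_store
    else
      # RecordsUploads-backed uploaders keep the flag on the Upload row instead
      uploader.upload.store = new_store
    end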
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
deleted file mode 100644
index 2f52867bb91..00000000000
--- a/spec/uploaders/object_store_uploader_spec.rb
+++ /dev/null
@@ -1,315 +0,0 @@
-require 'rails_helper'
-require 'carrierwave/storage/fog'
-
-describe ObjectStoreUploader do
- let(:uploader_class) { Class.new(described_class) }
- let(:object) { double }
- let(:uploader) { uploader_class.new(object, :file) }
-
- before do
- allow(object.class).to receive(:uploader_option).with(:file, :mount_on) { nil }
- end
-
- describe '#object_store' do
- it "calls artifacts_file_store on object" do
- expect(object).to receive(:file_store)
-
- uploader.object_store
- end
-
- context 'when store is null' do
- before do
- expect(object).to receive(:file_store).twice.and_return(nil)
- end
-
- it "returns LOCAL_STORE" do
- expect(uploader.real_object_store).to be_nil
- expect(uploader.object_store).to eq(described_class::LOCAL_STORE)
- end
- end
-
- context 'when value is set' do
- before do
- expect(object).to receive(:file_store).twice.and_return(described_class::REMOTE_STORE)
- end
-
- it "returns given value" do
- expect(uploader.real_object_store).not_to be_nil
- expect(uploader.object_store).to eq(described_class::REMOTE_STORE)
- end
- end
- end
-
- describe '#object_store=' do
- it "calls artifacts_file_store= on object" do
- expect(object).to receive(:file_store=).with(described_class::REMOTE_STORE)
-
- uploader.object_store = described_class::REMOTE_STORE
- end
- end
-
- describe '#file_storage?' do
- context 'when file storage is used' do
- before do
- expect(object).to receive(:file_store).and_return(described_class::LOCAL_STORE)
- end
-
- it { expect(uploader).to be_file_storage }
- end
-
- context 'when is remote storage' do
- before do
- uploader_class.storage_options double(
- object_store: double(enabled: true))
- expect(object).to receive(:file_store).and_return(described_class::REMOTE_STORE)
- end
-
- it { expect(uploader).not_to be_file_storage }
- end
- end
-
- describe '#file_cache_storage?' do
- context 'when file storage is used' do
- before do
- uploader_class.cache_storage(:file)
- end
-
- it { expect(uploader).to be_file_cache_storage }
- end
-
- context 'when is remote storage' do
- before do
- uploader_class.cache_storage(:fog)
- end
-
- it { expect(uploader).not_to be_file_cache_storage }
- end
- end
-
- context 'when using JobArtifactsUploader' do
- let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
- let(:uploader) { artifact.file }
-
- context 'checking described_class' do
- let(:store) { described_class::LOCAL_STORE }
-
- it "uploader is of a described_class" do
- expect(uploader).to be_a(described_class)
- end
-
- it 'moves files locally' do
- expect(uploader.move_to_store).to be(true)
- expect(uploader.move_to_cache).to be(true)
- end
- end
-
- context 'when store is null' do
- let(:store) { nil }
-
- it "sets the store to LOCAL_STORE" do
- expect(artifact.file_store).to eq(described_class::LOCAL_STORE)
- end
- end
-
- describe '#use_file' do
- context 'when file is stored locally' do
- let(:store) { described_class::LOCAL_STORE }
-
- it "calls a regular path" do
- expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
- end
- end
-
- context 'when file is stored remotely' do
- let(:store) { described_class::REMOTE_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it "calls a cache path" do
- expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
- end
- end
- end
-
- describe '#migrate!' do
- let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
- let(:uploader) { artifact.file }
- let(:store) { described_class::LOCAL_STORE }
-
- subject { uploader.migrate!(new_store) }
-
- context 'when using the same storage' do
- let(:new_store) { store }
-
- it "to not migrate the storage" do
- subject
-
- expect(uploader.object_store).to eq(store)
- end
- end
-
- context 'when migrating to local storage' do
- let(:store) { described_class::REMOTE_STORE }
- let(:new_store) { described_class::LOCAL_STORE }
-
- before do
- stub_artifacts_object_storage
- end
-
- it "local file does not exist" do
- expect(File.exist?(uploader.path)).to eq(false)
- end
-
- it "does migrate the file" do
- subject
-
- expect(uploader.object_store).to eq(new_store)
- expect(File.exist?(uploader.path)).to eq(true)
- end
- end
-
- context 'when migrating to remote storage' do
- let(:new_store) { described_class::REMOTE_STORE }
- let!(:current_path) { uploader.path }
-
- it "file does exist" do
- expect(File.exist?(current_path)).to eq(true)
- end
-
- context 'when storage is disabled' do
- before do
- stub_artifacts_object_storage(enabled: false)
- end
-
- it "to raise an error" do
- expect { subject }.to raise_error(/Object Storage is not enabled/)
- end
- end
-
- context 'when storage is unlicensed' do
- before do
- stub_artifacts_object_storage(licensed: false)
- end
-
- it "raises an error" do
- expect { subject }.to raise_error(/Object Storage feature is missing/)
- end
- end
-
- context 'when credentials are set' do
- before do
- stub_artifacts_object_storage
- end
-
- it "does migrate the file" do
- subject
-
- expect(uploader.object_store).to eq(new_store)
- expect(File.exist?(current_path)).to eq(false)
- end
-
- it "does delete original file" do
- subject
-
- expect(File.exist?(current_path)).to eq(false)
- end
-
- context 'when subject save fails' do
- before do
- expect(artifact).to receive(:save!).and_raise(RuntimeError, "exception")
- end
-
- it "does catch an error" do
- expect { subject }.to raise_error(/exception/)
- end
-
- it "original file is not removed" do
- begin
- subject
- rescue
- end
-
- expect(File.exist?(current_path)).to eq(true)
- end
- end
- end
- end
- end
- end
-
- describe '#fog_directory' do
- let(:remote_directory) { 'directory' }
-
- before do
- uploader_class.storage_options double(
- object_store: double(remote_directory: remote_directory))
- end
-
- subject { uploader.fog_directory }
-
- it { is_expected.to eq(remote_directory) }
- end
-
- describe '#fog_credentials' do
- let(:connection) { 'connection' }
-
- before do
- uploader_class.storage_options double(
- object_store: double(connection: connection))
- end
-
- subject { uploader.fog_credentials }
-
- it { is_expected.to eq(connection) }
- end
-
- describe '#fog_public' do
- subject { uploader.fog_public }
-
- it { is_expected.to eq(false) }
- end
-
- describe '#verify_license!' do
- subject { uploader.verify_license!(nil) }
-
- context 'when using local storage' do
- before do
- expect(object).to receive(:file_store) { described_class::LOCAL_STORE }
- end
-
- it "does not raise an error" do
- expect { subject }.not_to raise_error
- end
- end
-
- context 'when using remote storage' do
- before do
- uploader_class.storage_options double(
- object_store: double(enabled: true))
- expect(object).to receive(:file_store) { described_class::REMOTE_STORE }
- end
-
- context 'feature is not available' do
- before do
- expect(License).to receive(:feature_available?).with(:object_storage) { false }
- end
-
- it "does raise an error" do
- expect { subject }.to raise_error(/Object Storage feature is missing/)
- end
- end
-
- context 'feature is available' do
- before do
- expect(License).to receive(:feature_available?).with(:object_storage) { true }
- end
-
- it "does not raise an error" do
- expect { subject }.not_to raise_error
- end
- end
- end
- end
-end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index cbafa9f478d..ef5a70f668b 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -1,25 +1,40 @@
require 'spec_helper'
+IDENTIFIER = %r{\h+/\S+}
+
describe PersonalFileUploader do
- let(:uploader) { described_class.new(build_stubbed(:project)) }
- let(:snippet) { create(:personal_snippet) }
+ let(:model) { create(:personal_snippet) }
+ let(:uploader) { described_class.new(model) }
+ let(:upload) { create(:upload, :personal_snippet_upload) }
- describe '.absolute_path' do
- it 'returns the correct absolute path by building it dynamically' do
- upload = double(model: snippet, path: 'secret/foo.jpg')
+ subject { uploader }
- dynamic_segment = "personal_snippet/#{snippet.id}"
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[uploads/-/system/personal_snippet/\d+],
+ upload_path: IDENTIFIER,
+ absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
- expect(described_class.absolute_path(upload)).to end_with("/-/system/#{dynamic_segment}/secret/foo.jpg")
+ # EE-specific
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[\d+/\h+],
+ upload_path: IDENTIFIER
end
describe '#to_h' do
- it 'returns the hass' do
- uploader = described_class.new(snippet, 'secret')
+ before do
+ subject.instance_variable_set(:@secret, 'secret')
+ end
+ it 'is correct' do
allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name'))
- expected_url = "/uploads/-/system/personal_snippet/#{snippet.id}/secret/file_name"
+ expected_url = "/uploads/-/system/personal_snippet/#{model.id}/secret/file_name"
expect(uploader.to_h).to eq(
alt: 'file_name',
@@ -28,4 +43,14 @@ describe PersonalFileUploader do
)
end
end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 7ef7fb7d758..9a3e5d83e01 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -3,16 +3,16 @@ require 'rails_helper'
describe RecordsUploads do
let!(:uploader) do
class RecordsUploadsExampleUploader < GitlabUploader
- include RecordsUploads
+ include RecordsUploads::Concern
storage :file
- def model
- FactoryBot.build_stubbed(:user)
+ def dynamic_segment
+ 'co/fe/ee'
end
end
- RecordsUploadsExampleUploader.new
+ RecordsUploadsExampleUploader.new(build_stubbed(:user))
end
def upload_fixture(filename)
@@ -20,48 +20,55 @@ describe RecordsUploads do
end
describe 'callbacks' do
- it 'calls `record_upload` after `store`' do
+ let(:upload) { create(:upload) }
+
+ before do
+ uploader.upload = upload
+ end
+
+ it '#record_upload after `store`' do
expect(uploader).to receive(:record_upload).once
uploader.store!(upload_fixture('doc_sample.txt'))
end
- it 'calls `destroy_upload` after `remove`' do
- expect(uploader).to receive(:destroy_upload).once
-
+ it '#destroy_upload after `remove`' do
uploader.store!(upload_fixture('doc_sample.txt'))
+ expect(uploader).to receive(:destroy_upload).once
uploader.remove!
end
end
describe '#record_upload callback' do
- it 'returns early when not using file storage' do
- allow(uploader).to receive(:file_storage?).and_return(false)
- expect(Upload).not_to receive(:record)
-
- uploader.store!(upload_fixture('rails_sample.jpg'))
+ it 'creates an Upload record after store' do
+ expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.to change { Upload.count }.by(1)
end
- it "returns early when the file doesn't exist" do
- allow(uploader).to receive(:file).and_return(double(exists?: false))
- expect(Upload).not_to receive(:record)
-
+ it 'creates a new record and assigns size, path, model, and uploader' do
uploader.store!(upload_fixture('rails_sample.jpg'))
+
+ upload = uploader.upload
+ aggregate_failures do
+ expect(upload).to be_persisted
+ expect(upload.size).to eq uploader.file.size
+ expect(upload.path).to eq uploader.upload_path
+ expect(upload.model_id).to eq uploader.model.id
+ expect(upload.model_type).to eq uploader.model.class.to_s
+ expect(upload.uploader).to eq uploader.class.to_s
+ end
end
- it 'creates an Upload record after store' do
- expect(Upload).to receive(:record)
- .with(uploader)
+ it "does not create an Upload record when the file doesn't exist" do
+ allow(uploader).to receive(:file).and_return(double(exists?: false))
- uploader.store!(upload_fixture('rails_sample.jpg'))
+ expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
end
it 'does not create an Upload record if model is missing' do
- expect_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
- expect(Upload).not_to receive(:record).with(uploader)
+ allow_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
- uploader.store!(upload_fixture('rails_sample.jpg'))
+ expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
end
it 'destroys Upload records at the same path before recording' do
@@ -72,29 +79,15 @@ describe RecordsUploads do
uploader: uploader.class.to_s
)
+ uploader.upload = existing
uploader.store!(upload_fixture('rails_sample.jpg'))
expect { existing.reload }.to raise_error(ActiveRecord::RecordNotFound)
- expect(Upload.count).to eq 1
+ expect(Upload.count).to eq(1)
end
end
describe '#destroy_upload callback' do
- it 'returns early when not using file storage' do
- uploader.store!(upload_fixture('rails_sample.jpg'))
-
- allow(uploader).to receive(:file_storage?).and_return(false)
- expect(Upload).not_to receive(:remove_path)
-
- uploader.remove!
- end
-
- it 'returns early when file is nil' do
- expect(Upload).not_to receive(:remove_path)
-
- uploader.remove!
- end
-
it 'destroys Upload records at the same path after removal' do
uploader.store!(upload_fixture('rails_sample.jpg'))
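Taken together, the reworked examples pin down the contract of RecordsUploads::Concern: an Upload row is created after store with size, path, model, and uploader; stale rows at the same path are destroyed before recording; and nothing is recorded when the model or the file is missing. A sketch consistent with those expectations — the callback wiring uses CarrierWave's after hooks, and everything else is illustrative rather than the shipped concern:

    module RecordsUploads
      module Concern
        extend ActiveSupport::Concern

        attr_accessor :upload

        included do
          after :store,  :record_upload
          after :remove, :destroy_upload
        end

        # Persist an Upload row for the stored file, replacing any stale
        # record that points at the same path.
        def record_upload(_tempfile = nil)
          return unless model
          return unless file && file.exists?

          Upload.transaction do
            Upload.where(path: upload_path).delete_all
            self.upload = Upload.create!(
              path: upload_path,
              size: file.size,
              model: model,
              uploader: self.class.to_s
            )
          end
        end

        # Drop any Upload rows pointing at the removed file's path.
        def destroy_upload(_file = nil)
          Upload.where(path: upload_path).delete_all
          self.upload = nil
        end
      end
    end

Wrapping the delete-then-create in a transaction is what keeps the 'destroys Upload records at the same path before recording' example deterministic.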
diff --git a/spec/workers/upload_checksum_worker_spec.rb b/spec/workers/upload_checksum_worker_spec.rb
index 911360da66c..9e50ce15871 100644
--- a/spec/workers/upload_checksum_worker_spec.rb
+++ b/spec/workers/upload_checksum_worker_spec.rb
@@ -2,18 +2,31 @@ require 'rails_helper'
describe UploadChecksumWorker do
describe '#perform' do
- it 'rescues ActiveRecord::RecordNotFound' do
- expect { described_class.new.perform(999_999) }.not_to raise_error
+ subject { described_class.new }
+
+ context 'without a valid record' do
+ it 'rescues ActiveRecord::RecordNotFound' do
+ expect { subject.perform(999_999) }.not_to raise_error
+ end
end
- it 'calls calculate_checksum_without_delay and save!' do
- upload = spy
- expect(Upload).to receive(:find).with(999_999).and_return(upload)
+ context 'with a valid record' do
+ let(:upload) { create(:user, :with_avatar).avatar.upload }
+
+ before do
+ expect(Upload).to receive(:find).and_return(upload)
+ allow(upload).to receive(:foreground_checksumable?).and_return(false)
+ end
- described_class.new.perform(999_999)
+ it 'calls calculate_checksum!' do
+ expect(upload).to receive(:calculate_checksum!)
+ subject.perform(upload.id)
+ end
- expect(upload).to have_received(:calculate_checksum)
- expect(upload).to have_received(:save!)
+ it 'calls save!' do
+ expect(upload).to receive(:save!)
+ subject.perform(upload.id)
+ end
end
end
end
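For reference, a shape of UploadChecksumWorker#perform consistent with the expectations above — calculate_checksum!, save!, and the rescued ActiveRecord::RecordNotFound all come straight from the spec, while the logging line is an illustrative assumption:

    class UploadChecksumWorker
      include Sidekiq::Worker

      def perform(upload_id)
        upload = Upload.find(upload_id)
        upload.calculate_checksum!
        upload.save!
      rescue ActiveRecord::RecordNotFound
        # Swallow missing records so retries don't pile up on deleted uploads.
        Rails.logger.error("UploadChecksumWorker: Upload #{upload_id} not found, skipping")
      end
    end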