author     GitLab Bot <gitlab-bot@gitlab.com>  2020-10-21 07:08:36 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-10-21 07:08:36 +0000
commit     48aff82709769b098321c738f3444b9bdaa694c6 (patch)
tree       e00c7c43e2d9b603a5a6af576b1685e400410dee /lib/backup
parent     879f5329ee916a948223f8f43d77fba4da6cd028 (diff)
download   gitlab-ce-48aff82709769b098321c738f3444b9bdaa694c6.tar.gz

Add latest changes from gitlab-org/gitlab@13-5-stable-ee (tag: v13.5.0-rc42)
Diffstat (limited to 'lib/backup')
-rw-r--r--  lib/backup/artifacts.rb    |   4
-rw-r--r--  lib/backup/builds.rb       |   4
-rw-r--r--  lib/backup/lfs.rb          |   4
-rw-r--r--  lib/backup/pages.rb        |   4
-rw-r--r--  lib/backup/registry.rb     |   4
-rw-r--r--  lib/backup/repositories.rb | 310
-rw-r--r--  lib/backup/repository.rb   | 265
-rw-r--r--  lib/backup/uploads.rb      |   4
8 files changed, 316 insertions(+), 283 deletions(-)
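The change below retires the project-only Backup::Repository class in favor of Backup::Repositories, which also covers wiki, design, and snippet repositories. As a rough orientation before the diff itself, here is a minimal usage sketch based only on the public interface visible in this change (Backup::Repositories.new(progress), #dump, #restore); the rake-task wiring that actually drives the class is not part of this diff, and the concurrency values are arbitrary examples.

# Illustrative sketch only, not part of the change set.
progress = $stdout
repositories = Backup::Repositories.new(progress)
repositories.dump(max_concurrency: 4, max_storage_concurrency: 2)  # example values
repositories.restore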
diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb
index c2266f0bad6..6a45baa60ec 100644
--- a/lib/backup/artifacts.rb
+++ b/lib/backup/artifacts.rb
@@ -1,9 +1,7 @@
 # frozen_string_literal: true
 
-require 'backup/files'
-
 module Backup
-  class Artifacts < Files
+  class Artifacts < Backup::Files
     attr_reader :progress
 
     def initialize(progress)
diff --git a/lib/backup/builds.rb b/lib/backup/builds.rb
index 5e795a449de..9c3b7165de7 100644
--- a/lib/backup/builds.rb
+++ b/lib/backup/builds.rb
@@ -1,9 +1,7 @@
 # frozen_string_literal: true
 
-require 'backup/files'
-
 module Backup
-  class Builds < Files
+  class Builds < Backup::Files
     attr_reader :progress
 
     def initialize(progress)
diff --git a/lib/backup/lfs.rb b/lib/backup/lfs.rb
index 0dfe56e214f..514d52d7f65 100644
--- a/lib/backup/lfs.rb
+++ b/lib/backup/lfs.rb
@@ -1,9 +1,7 @@
 # frozen_string_literal: true
 
-require 'backup/files'
-
 module Backup
-  class Lfs < Files
+  class Lfs < Backup::Files
     attr_reader :progress
 
     def initialize(progress)
diff --git a/lib/backup/pages.rb b/lib/backup/pages.rb
index d7aab33d7cb..ae293073ba2 100644
--- a/lib/backup/pages.rb
+++ b/lib/backup/pages.rb
@@ -1,9 +1,7 @@
 # frozen_string_literal: true
 
-require 'backup/files'
-
 module Backup
-  class Pages < Files
+  class Pages < Backup::Files
     attr_reader :progress
 
     def initialize(progress)
diff --git a/lib/backup/registry.rb b/lib/backup/registry.rb
index d16ed2facf1..9645a07dfb8 100644
--- a/lib/backup/registry.rb
+++ b/lib/backup/registry.rb
@@ -1,9 +1,7 @@
 # frozen_string_literal: true
 
-require 'backup/files'
-
 module Backup
-  class Registry < Files
+  class Registry < Backup::Files
     attr_reader :progress
 
     def initialize(progress)
diff --git a/lib/backup/repositories.rb b/lib/backup/repositories.rb
new file mode 100644
index 00000000000..4248a86dc7c
--- /dev/null
+++ b/lib/backup/repositories.rb
@@ -0,0 +1,310 @@
+# frozen_string_literal: true
+
+require 'yaml'
+
+module Backup
+ class Repositories
+ attr_reader :progress
+
+ def initialize(progress)
+ @progress = progress
+ end
+
+ def dump(max_concurrency:, max_storage_concurrency:)
+ prepare
+
+ if max_concurrency <= 1 && max_storage_concurrency <= 1
+ return dump_consecutive
+ end
+
+ check_valid_storages!
+
+ semaphore = Concurrent::Semaphore.new(max_concurrency)
+ errors = Queue.new
+
+ threads = Gitlab.config.repositories.storages.keys.map do |storage|
+ Thread.new do
+ Rails.application.executor.wrap do
+ dump_storage(storage, semaphore, max_storage_concurrency: max_storage_concurrency)
+ rescue => e
+ errors << e
+ end
+ end
+ end
+
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ threads.each(&:join)
+ end
+
+ raise errors.pop unless errors.empty?
+ end
+
+ def restore
+ Project.find_each(batch_size: 1000) do |project|
+ restore_repository(project, Gitlab::GlRepository::PROJECT)
+ restore_repository(project, Gitlab::GlRepository::WIKI)
+ restore_repository(project, Gitlab::GlRepository::DESIGN)
+ end
+
+ invalid_ids = Snippet.find_each(batch_size: 1000)
+ .map { |snippet| restore_snippet_repository(snippet) }
+ .compact
+
+ cleanup_snippets_without_repositories(invalid_ids)
+
+ restore_object_pools
+ end
+
+ private
+
+ def check_valid_storages!
+ [ProjectRepository, SnippetRepository].each do |klass|
+ if klass.excluding_repository_storage(Gitlab.config.repositories.storages.keys).exists?
+ raise Error, "repositories.storages in gitlab.yml does not include all storages used by #{klass}"
+ end
+ end
+ end
+
+ def backup_repos_path
+ @backup_repos_path ||= File.join(Gitlab.config.backup.path, 'repositories')
+ end
+
+ def prepare
+ FileUtils.rm_rf(backup_repos_path)
+ FileUtils.mkdir_p(Gitlab.config.backup.path)
+ FileUtils.mkdir(backup_repos_path, mode: 0700)
+ end
+
+ def dump_consecutive
+ dump_consecutive_projects
+ dump_consecutive_snippets
+ end
+
+ def dump_consecutive_projects
+ project_relation.find_each(batch_size: 1000) do |project|
+ dump_project(project)
+ end
+ end
+
+ def dump_consecutive_snippets
+ Snippet.find_each(batch_size: 1000) { |snippet| dump_snippet(snippet) }
+ end
+
+ def dump_storage(storage, semaphore, max_storage_concurrency:)
+ errors = Queue.new
+ queue = InterlockSizedQueue.new(1)
+
+ threads = Array.new(max_storage_concurrency) do
+ Thread.new do
+ Rails.application.executor.wrap do
+ while container = queue.pop
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ semaphore.acquire
+ end
+
+ begin
+ case container
+ when Project
+ dump_project(container)
+ when Snippet
+ dump_snippet(container)
+ end
+ rescue => e
+ errors << e
+ break
+ ensure
+ semaphore.release
+ end
+ end
+ end
+ end
+ end
+
+ enqueue_records_for_storage(storage, queue, errors)
+
+ raise errors.pop unless errors.empty?
+ ensure
+ queue.close
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ threads.each(&:join)
+ end
+ end
+
+ def dump_project(project)
+ backup_repository(project, Gitlab::GlRepository::PROJECT)
+ backup_repository(project, Gitlab::GlRepository::WIKI)
+ backup_repository(project, Gitlab::GlRepository::DESIGN)
+ end
+
+ def dump_snippet(snippet)
+ backup_repository(snippet, Gitlab::GlRepository::SNIPPET)
+ end
+
+ def enqueue_records_for_storage(storage, queue, errors)
+ records_to_enqueue(storage).each do |relation|
+ relation.find_each(batch_size: 100) do |project|
+ break unless errors.empty?
+
+ queue.push(project)
+ end
+ end
+ end
+
+ def records_to_enqueue(storage)
+ [projects_in_storage(storage), snippets_in_storage(storage)]
+ end
+
+ def projects_in_storage(storage)
+ project_relation.id_in(ProjectRepository.for_repository_storage(storage).select(:project_id))
+ end
+
+ def project_relation
+ Project.includes(:route, :group, namespace: :owner)
+ end
+
+ def snippets_in_storage(storage)
+ Snippet.id_in(SnippetRepository.for_repository_storage(storage).select(:snippet_id))
+ end
+
+ def backup_repository(container, type)
+ BackupRestore.new(
+ progress,
+ type.repository_for(container),
+ backup_repos_path
+ ).backup
+ end
+
+ def restore_repository(container, type)
+ BackupRestore.new(
+ progress,
+ type.repository_for(container),
+ backup_repos_path
+ ).restore(always_create: type.project?)
+ end
+
+ def restore_object_pools
+ PoolRepository.includes(:source_project).find_each do |pool|
+ progress.puts " - Object pool #{pool.disk_path}..."
+
+ pool.source_project ||= pool.member_projects.first.root_of_fork_network
+ pool.state = 'none'
+ pool.save
+
+ pool.schedule
+ end
+ end
+
+ def restore_snippet_repository(snippet)
+ restore_repository(snippet, Gitlab::GlRepository::SNIPPET)
+
+ response = Snippets::RepositoryValidationService.new(nil, snippet).execute
+
+ if response.error?
+ snippet.repository.remove
+
+ progress.puts("Snippet #{snippet.full_path} can't be restored: #{response.message}")
+
+ snippet.id
+ else
+ nil
+ end
+ end
+
+ # Snippets without a repository should be removed because they failed to import
+ # due to having invalid repositories
+ def cleanup_snippets_without_repositories(ids)
+ Snippet.id_in(ids).delete_all
+ end
+
+ class BackupRestore
+ attr_accessor :progress, :repository, :backup_repos_path
+
+ def initialize(progress, repository, backup_repos_path)
+ @progress = progress
+ @repository = repository
+ @backup_repos_path = backup_repos_path
+ end
+
+ def backup
+ progress.puts " * #{display_repo_path} ... "
+
+ if repository.empty?
+ progress.puts " * #{display_repo_path} ... " + "[SKIPPED]".color(:cyan)
+ return
+ end
+
+ FileUtils.mkdir_p(repository_backup_path)
+
+ repository.bundle_to_disk(path_to_bundle)
+ repository.gitaly_repository_client.backup_custom_hooks(custom_hooks_tar)
+
+ progress.puts " * #{display_repo_path} ... " + "[DONE]".color(:green)
+
+ rescue => e
+ progress.puts "[Failed] backing up #{display_repo_path}".color(:red)
+ progress.puts "Error #{e}".color(:red)
+ end
+
+ def restore(always_create: false)
+ progress.puts " * #{display_repo_path} ... "
+
+ repository.remove rescue nil
+
+ if File.exist?(path_to_bundle)
+ repository.create_from_bundle(path_to_bundle)
+ restore_custom_hooks
+ elsif always_create
+ repository.create_repository
+ end
+
+ progress.puts " * #{display_repo_path} ... " + "[DONE]".color(:green)
+
+ rescue => e
+ progress.puts "[Failed] restoring #{display_repo_path}".color(:red)
+ progress.puts "Error #{e}".color(:red)
+ end
+
+ private
+
+ def display_repo_path
+ "#{repository.full_path} (#{repository.disk_path})"
+ end
+
+ def repository_backup_path
+ @repository_backup_path ||= File.join(backup_repos_path, repository.disk_path)
+ end
+
+ def path_to_bundle
+ @path_to_bundle ||= File.join(backup_repos_path, repository.disk_path + '.bundle')
+ end
+
+ def restore_custom_hooks
+ return unless File.exist?(custom_hooks_tar)
+
+ repository.gitaly_repository_client.restore_custom_hooks(custom_hooks_tar)
+ end
+
+ def custom_hooks_tar
+ File.join(repository_backup_path, "custom_hooks.tar")
+ end
+ end
+
+ class InterlockSizedQueue < SizedQueue
+ extend ::Gitlab::Utils::Override
+
+ override :pop
+ def pop(*)
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ super
+ end
+ end
+
+ override :push
+ def push(*)
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ super
+ end
+ end
+ end
+ end
+end
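For readers skimming the new file above: dump spawns one thread per configured repository storage, and each storage thread (dump_storage) runs up to max_storage_concurrency workers that pull projects and snippets off a one-slot queue, while a shared Concurrent::Semaphore caps the total number of concurrent backups at max_concurrency. The sketch below is a simplified, standalone illustration of that producer/consumer pattern (plain Ruby plus the concurrent-ruby gem); it is not GitLab code, and it omits the Rails autoload interlock that InterlockSizedQueue wraps around pop/push.

# Simplified, standalone illustration only (not GitLab code).
require 'concurrent'

semaphore = Concurrent::Semaphore.new(4)  # global cap, like max_concurrency
queue     = SizedQueue.new(1)             # one-slot hand-off, as in dump_storage
errors    = Queue.new

workers = Array.new(2) do                 # per-storage workers, like max_storage_concurrency
  Thread.new do
    while (job = queue.pop)               # pop returns nil once the queue is closed and drained
      semaphore.acquire
      begin
        job.call                          # stands in for dump_project / dump_snippet
      rescue => e
        errors << e
        break
      ensure
        semaphore.release
      end
    end
  end
end

5.times { |i| queue.push(-> { puts "backing up repository #{i}" }) }
queue.close
workers.each(&:join)
raise errors.pop unless errors.empty?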
diff --git a/lib/backup/repository.rb b/lib/backup/repository.rb
deleted file mode 100644
index eb0b230904e..00000000000
--- a/lib/backup/repository.rb
+++ /dev/null
@@ -1,265 +0,0 @@
-# frozen_string_literal: true
-
-require 'yaml'
-
-module Backup
- class Repository
- attr_reader :progress
-
- def initialize(progress)
- @progress = progress
- end
-
- def dump(max_concurrency:, max_storage_concurrency:)
- prepare
-
- if max_concurrency <= 1 && max_storage_concurrency <= 1
- return dump_consecutive
- end
-
- if Project.excluding_repository_storage(Gitlab.config.repositories.storages.keys).exists?
- raise Error, 'repositories.storages in gitlab.yml is misconfigured'
- end
-
- semaphore = Concurrent::Semaphore.new(max_concurrency)
- errors = Queue.new
-
- threads = Gitlab.config.repositories.storages.keys.map do |storage|
- Thread.new do
- Rails.application.executor.wrap do
- dump_storage(storage, semaphore, max_storage_concurrency: max_storage_concurrency)
- rescue => e
- errors << e
- end
- end
- end
-
- ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
- threads.each(&:join)
- end
-
- raise errors.pop unless errors.empty?
- end
-
- def backup_project(project)
- path_to_project_bundle = path_to_bundle(project)
- Gitlab::GitalyClient::RepositoryService.new(project.repository)
- .create_bundle(path_to_project_bundle)
-
- backup_custom_hooks(project)
- rescue => e
- progress_warn(project, e, 'Failed to backup repo')
- end
-
- def backup_custom_hooks(project)
- FileUtils.mkdir_p(project_backup_path(project))
-
- custom_hooks_path = custom_hooks_tar(project)
- Gitlab::GitalyClient::RepositoryService.new(project.repository)
- .backup_custom_hooks(custom_hooks_path)
- end
-
- def restore_custom_hooks(project)
- return unless Dir.exist?(project_backup_path(project))
- return if Dir.glob("#{project_backup_path(project)}/custom_hooks*").none?
-
- custom_hooks_path = custom_hooks_tar(project)
- Gitlab::GitalyClient::RepositoryService.new(project.repository)
- .restore_custom_hooks(custom_hooks_path)
- end
-
- def restore
- Project.find_each(batch_size: 1000) do |project|
- progress.print " * #{project.full_path} ... "
-
- restore_repo_success =
- begin
- try_restore_repository(project)
- rescue => err
- progress.puts "Error: #{err}".color(:red)
- false
- end
-
- if restore_repo_success
- progress.puts "[DONE]".color(:green)
- else
- progress.puts "[Failed] restoring #{project.full_path} repository".color(:red)
- end
-
- wiki = ProjectWiki.new(project)
- wiki.repository.remove rescue nil
- path_to_wiki_bundle = path_to_bundle(wiki)
-
- if File.exist?(path_to_wiki_bundle)
- progress.print " * #{wiki.full_path} ... "
- begin
- wiki.repository.create_from_bundle(path_to_wiki_bundle)
- restore_custom_hooks(wiki)
-
- progress.puts "[DONE]".color(:green)
- rescue => e
- progress.puts "[Failed] restoring #{wiki.full_path} wiki".color(:red)
- progress.puts "Error #{e}".color(:red)
- end
- end
- end
-
- restore_object_pools
- end
-
- protected
-
- def try_restore_repository(project)
- path_to_project_bundle = path_to_bundle(project)
- project.repository.remove rescue nil
-
- if File.exist?(path_to_project_bundle)
- project.repository.create_from_bundle(path_to_project_bundle)
- restore_custom_hooks(project)
- else
- project.repository.create_repository
- end
-
- true
- end
-
- def path_to_bundle(project)
- File.join(backup_repos_path, project.disk_path + '.bundle')
- end
-
- def project_backup_path(project)
- File.join(backup_repos_path, project.disk_path)
- end
-
- def custom_hooks_tar(project)
- File.join(project_backup_path(project), "custom_hooks.tar")
- end
-
- def backup_repos_path
- File.join(Gitlab.config.backup.path, 'repositories')
- end
-
- def prepare
- FileUtils.rm_rf(backup_repos_path)
- FileUtils.mkdir_p(Gitlab.config.backup.path)
- FileUtils.mkdir(backup_repos_path, mode: 0700)
- end
-
- private
-
- def dump_consecutive
- Project.includes(:route, :group, namespace: :owner).find_each(batch_size: 1000) do |project|
- dump_project(project)
- end
- end
-
- def dump_storage(storage, semaphore, max_storage_concurrency:)
- errors = Queue.new
- queue = InterlockSizedQueue.new(1)
-
- threads = Array.new(max_storage_concurrency) do
- Thread.new do
- Rails.application.executor.wrap do
- while project = queue.pop
- ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
- semaphore.acquire
- end
-
- begin
- dump_project(project)
- rescue => e
- errors << e
- break
- ensure
- semaphore.release
- end
- end
- end
- end
- end
-
- Project.for_repository_storage(storage).includes(:route, :group, namespace: :owner).find_each(batch_size: 100) do |project|
- break unless errors.empty?
-
- queue.push(project)
- end
-
- raise errors.pop unless errors.empty?
- ensure
- queue.close
- ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
- threads.each(&:join)
- end
- end
-
- def dump_project(project)
- progress.puts " * #{display_repo_path(project)} ... "
-
- if project.hashed_storage?(:repository)
- FileUtils.mkdir_p(File.dirname(File.join(backup_repos_path, project.disk_path)))
- else
- FileUtils.mkdir_p(File.join(backup_repos_path, project.namespace.full_path)) if project.namespace
- end
-
- if !empty_repo?(project)
- backup_project(project)
- progress.puts " * #{display_repo_path(project)} ... " + "[DONE]".color(:green)
- else
- progress.puts " * #{display_repo_path(project)} ... " + "[SKIPPED]".color(:cyan)
- end
-
- wiki = ProjectWiki.new(project)
-
- if !empty_repo?(wiki)
- backup_project(wiki)
- progress.puts " * #{display_repo_path(project)} ... " + "[DONE] Wiki".color(:green)
- else
- progress.puts " * #{display_repo_path(project)} ... " + "[SKIPPED] Wiki".color(:cyan)
- end
- end
-
- def progress_warn(project, cmd, output)
- progress.puts "[WARNING] Executing #{cmd}".color(:orange)
- progress.puts "Ignoring error on #{display_repo_path(project)} - #{output}".color(:orange)
- end
-
- def empty_repo?(project_or_wiki)
- project_or_wiki.repository.expire_emptiness_caches
- project_or_wiki.repository.empty?
- end
-
- def display_repo_path(project)
- project.hashed_storage?(:repository) ? "#{project.full_path} (#{project.disk_path})" : project.full_path
- end
-
- def restore_object_pools
- PoolRepository.includes(:source_project).find_each do |pool|
- progress.puts " - Object pool #{pool.disk_path}..."
-
- pool.source_project ||= pool.member_projects.first.root_of_fork_network
- pool.state = 'none'
- pool.save
-
- pool.schedule
- end
- end
-
- class InterlockSizedQueue < SizedQueue
- extend ::Gitlab::Utils::Override
-
- override :pop
- def pop(*)
- ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
- super
- end
- end
-
- override :push
- def push(*)
- ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
- super
- end
- end
- end
- end
-end
diff --git a/lib/backup/uploads.rb b/lib/backup/uploads.rb
index b6a62bc3f29..9665624f71b 100644
--- a/lib/backup/uploads.rb
+++ b/lib/backup/uploads.rb
@@ -1,9 +1,7 @@
 # frozen_string_literal: true
 
-require 'backup/files'
-
 module Backup
-  class Uploads < Files
+  class Uploads < Backup::Files
     attr_reader :progress
 
     def initialize(progress)