author     Jacob Vosmaer <contact@jacobvosmaer.nl>  2014-09-29 15:02:39 +0200
committer  Jacob Vosmaer <contact@jacobvosmaer.nl>  2014-09-29 15:03:08 +0200
commit     e1f6fa6f18afa8461cd82a56250222c2972c7785 (patch)
tree       0ab90fe685c4054f8752dd7c5b196a39b9fa8d42
parent     0e8e9e5d77478294288d34474e580916418ddd91 (diff)
download   gitlab-ce-e1f6fa6f18afa8461cd82a56250222c2972c7785.tar.gz
Add backup upload support to the rake task
-rw-r--r--  CHANGELOG                            1
-rw-r--r--  Gemfile                              4
-rw-r--r--  config/gitlab.yml.example            8
-rw-r--r--  config/initializers/1_settings.rb    5
-rw-r--r--  doc/raketasks/backup_restore.md     99
-rw-r--r--  lib/backup/manager.rb               26
6 files changed, 139 insertions, 4 deletions
diff --git a/CHANGELOG b/CHANGELOG
index 3006ff4049d..d58864442b3 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -5,6 +5,7 @@ v 7.4.0
   - Refactor test coverage tools usage. Use SIMPLECOV=true to generate it locally
   - Increase unicorn timeout to 60 seconds
   - Sort search autocomplete projects by stars count so most popular go first
+  - Support for backup uploads to remote storage

 v 7.3.1
   - Fix ref parsing in Gitlab::GitAccess
diff --git a/Gemfile b/Gemfile
index 9686bf9610f..d4cca8c23c6 100644
--- a/Gemfile
+++ b/Gemfile
@@ -70,8 +70,8 @@ gem "carrierwave"
 gem 'dropzonejs-rails'

 # for aws storage
-gem "fog", "~> 1.14", group: :aws
-gem "unf", group: :aws
+gem "fog", "~> 1.14"
+gem "unf"

 # Authorization
 gem "six"
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index 3092ebf3450..ad1db4b4f43 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -212,6 +212,14 @@ production: &base
   backup:
     path: "tmp/backups" # Relative paths are relative to Rails.root (default: tmp/backups/)
     # keep_time: 604800 # default: 0 (forever) (in seconds)
+    # upload:
+    #   # Fog storage connection settings, see http://fog.io/storage/ .
+    #   connection:
+    #     provider: AWS
+    #     aws_access_key_id: AKIAKIAKI
+    #     aws_secret_access_key: 'secret123'
+    #   # The remote 'directory' to store your backups. For S3, this would be the bucket name.
+    #   remote_directory: 'my.s3.bucket'

   ## GitLab Shell settings
   gitlab_shell:
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 5b7e69fbc6a..caf4ef25cdd 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -129,6 +129,11 @@ Settings.gitlab_shell['ssh_path_prefix'] ||= Settings.send(:build_gitlab_shell_s
 Settings['backup'] ||= Settingslogic.new({})
 Settings.backup['keep_time'] ||= 0
 Settings.backup['path'] = File.expand_path(Settings.backup['path'] || "tmp/backups/", Rails.root)
+Settings.backup['upload'] ||= Settingslogic.new({'remote_directory' => nil, 'connection' => nil})
+# Convert upload connection settings to use symbol keys, to make Fog happy
+if Settings.backup['upload']['connection']
+  Settings.backup['upload']['connection'] = Hash[Settings.backup['upload']['connection'].map { |k, v| [k.to_sym, v] }]
+end

 #
 # Git
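
The initializer hunk above converts the `upload.connection` hash to symbol keys because Fog expects symbol-keyed connection options. A minimal standalone sketch of that conversion (placeholder values, not part of the commit):

```ruby
# Sketch: the same string-key to symbol-key conversion, in isolation.
# Values are placeholders; real ones come from gitlab.yml.
connection = {
  'provider'              => 'AWS',
  'aws_access_key_id'     => 'AKIAKIAKI',
  'aws_secret_access_key' => 'secret123'
}
connection = Hash[connection.map { |k, v| [k.to_sym, v] }]
# => { provider: 'AWS', aws_access_key_id: 'AKIAKIAKI', aws_secret_access_key: 'secret123' }
```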
diff --git a/doc/raketasks/backup_restore.md b/doc/raketasks/backup_restore.md
index beb2c33a67b..06ec88c3d58 100644
--- a/doc/raketasks/backup_restore.md
+++ b/doc/raketasks/backup_restore.md
@@ -46,6 +46,105 @@ Deleting tmp directories...[DONE]
Deleting old backups... [SKIPPING]
```
+## Upload backups to remote (cloud) storage
+
+Starting with GitLab 7.4, you can let the backup script upload the '.tar' file
+it creates to cloud storage using [Fog](http://fog.io/storage/). The examples
+below use Amazon S3 for storage, but Fog also supports other storage
+backends; see http://fog.io/storage/.
+
+For omnibus packages:
+
+```ruby
+gitlab_rails['backup_upload_connection'] = {
+  'provider' => 'AWS',
+  'aws_access_key_id' => 'AKIAKIAKI',
+  'aws_secret_access_key' => 'secret123'
+}
+gitlab_rails['backup_upload_remote_directory'] = 'my.s3.bucket'
+```
+
+For installations from source:
+
+```yaml
+  backup:
+    # snip
+    upload:
+      # Fog storage connection settings, see http://fog.io/storage/ .
+      connection:
+        provider: AWS
+        aws_access_key_id: AKIAKIAKI
+        aws_secret_access_key: 'secret123'
+      # The remote 'directory' to store your backups. For S3, this would be the bucket name.
+      remote_directory: 'my.s3.bucket'
+```
+
+If you are uploading your backups to S3, you will probably want to create a new
+IAM user with restricted access rights. To give the upload user access only for
+uploading backups, create the following three policies, replacing `my.s3.bucket`
+with the name of your bucket:
+
+```json
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Sid": "Stmt1411994999",
+      "Effect": "Allow",
+      "Action": [
+        "s3:AbortMultipartUpload",
+        "s3:GetBucketAcl",
+        "s3:GetBucketLocation",
+        "s3:GetObject",
+        "s3:GetObjectAcl",
+        "s3:ListMultipartUploadParts",
+        "s3:PutObject",
+        "s3:PutObjectAcl"
+      ],
+      "Resource": [
+        "arn:aws:s3:::my.s3.bucket/*"
+      ]
+    }
+  ]
+}
+```
+
+```json
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Sid": "Stmt1411995081",
+      "Effect": "Allow",
+      "Action": [
+        "s3:ListAllMyBuckets",
+        "s3:GetBucketLocation"
+      ],
+      "Resource": [
+        "*"
+      ]
+    }
+  ]
+}
+```
+
+```json
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Sid": "Stmt1411995608",
+      "Effect": "Allow",
+      "Action": [
+        "s3:ListBucket"
+      ],
+      "Resource": [
+        "arn:aws:s3:::my.s3.bucket"
+      ]
+    }
+  ]
+}
+```
+
## Storing configuration files
Please be informed that a backup does not store your configuration files.
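
As a quick way to verify the connection settings and IAM permissions documented above, the same Fog calls the rake task relies on can be run from a Rails console. This is a sketch only, assuming the example bucket name and credentials used in this document:

```ruby
# Sketch: check that the configured bucket is reachable with the backup
# upload credentials (example values; substitute your own).
require 'fog'

connection = Fog::Storage.new(
  provider:              'AWS',
  aws_access_key_id:     'AKIAKIAKI',
  aws_secret_access_key: 'secret123'
)

directory = connection.directories.get('my.s3.bucket')
puts directory ? 'bucket reachable' : 'bucket not found'
```

If the bucket is reachable, the backup rake task should be able to upload the '.tar' file with the restricted IAM user.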
diff --git a/lib/backup/manager.rb b/lib/backup/manager.rb
index 28e323fe30d..908f10a4138 100644
--- a/lib/backup/manager.rb
+++ b/lib/backup/manager.rb
@@ -9,6 +9,7 @@ module Backup
       s[:backup_created_at] = Time.now
       s[:gitlab_version] = Gitlab::VERSION
       s[:tar_version] = tar_version
+      tar_file = "#{s[:backup_created_at].to_i}_gitlab_backup.tar"

       Dir.chdir(Gitlab.config.backup.path)
@@ -17,8 +18,29 @@ module Backup
       end

       # create archive
-      print "Creating backup archive: #{s[:backup_created_at].to_i}_gitlab_backup.tar ... "
-      if Kernel.system('tar', '-cf', "#{s[:backup_created_at].to_i}_gitlab_backup.tar", *BACKUP_CONTENTS)
+      print "Creating backup archive: #{tar_file} ... "
+      if Kernel.system('tar', '-cf', tar_file, *BACKUP_CONTENTS)
+        puts "done".green
+      else
+        puts "failed".red
+      end
+
+      upload(tar_file)
+    end
+
+    def upload(tar_file)
+      remote_directory = Gitlab.config.backup.upload.remote_directory
+      print "Uploading backup archive to remote storage #{remote_directory} ... "
+
+      connection_settings = Gitlab.config.backup.upload.connection
+      if connection_settings.blank?
+        puts "skipped".yellow
+        return
+      end
+
+      connection = ::Fog::Storage.new(connection_settings)
+      directory = connection.directories.get(remote_directory)
+      if directory.files.create(key: tar_file, body: File.open(tar_file), public: false)
         puts "done".green
       else
         puts "failed".red