path: root/lib/backup/database.rb
blob: 4abdfa18de9d8f454e1eba4c5ec658eb34116c28
require 'yaml'
require 'open3'

module Backup
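  # Dumps the GitLab CI database to a gzipped SQL file under the configured
  # backup path, and restores it from that file, using the connection
  # settings in config/database.yml.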
  class Database
    # The final set of CI tables, as they were just prior to the integration
    # of GitLab CI into GitLab.
    TABLES = %w{
      ci_application_settings ci_builds ci_commits ci_events ci_jobs ci_projects
      ci_runner_projects ci_runners ci_services ci_tags ci_taggings ci_trigger_requests
      ci_triggers ci_variables ci_web_hooks
    }

    attr_reader :config, :db_dir

    def initialize
      @config = YAML.load_file(File.join(Rails.root, 'config', 'database.yml'))[Rails.env]
      @db_dir = File.join(GitlabCi.config.backup.path, 'db')
      FileUtils.mkdir_p(@db_dir) unless Dir.exist?(@db_dir)
    end

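    # Streams the output of mysqldump/pg_dump through `gzip -1` into
    # db_file_name. Pass mysql_to_postgresql = true to also convert a MySQL
    # dump into a PostgreSQL-compatible dump.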
    def dump(mysql_to_postgresql=false)
      FileUtils.rm_f(db_file_name)
      compress_rd, compress_wr = IO.pipe
      compress_pid = spawn(*%W(gzip -1 -c), in: compress_rd, out: [db_file_name, 'w', 0600])
      compress_rd.close

      dump_pid = case config["adapter"]
      when /^mysql/ then
        $progress.print "Dumping MySQL database #{config['database']} ... "
        args = mysql_args
        args << '--compatible=postgresql' if mysql_to_postgresql
        spawn('mysqldump', *args, config['database'], *TABLES, out: compress_wr)
      when "postgresql" then
        $progress.print "Dumping PostgreSQL database #{config['database']} ... "
        pg_env
        spawn('pg_dump', '--no-owner', '--clean', *TABLES.map { |t| "--table=#{t}" }, config['database'], out: compress_wr)
      end
      compress_wr.close

      # Reap both child processes before checking their exit statuses, so a
      # failure in the first does not short-circuit waiting on the second.
      success = [compress_pid, dump_pid].map { |pid| Process.waitpid(pid); $?.success? }.all?

      report_success(success)
      abort 'Backup failed' unless success

      if mysql_to_postgresql && config["adapter"] =~ /^mysql/
        convert_to_postgresql
      end
    end

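    # Converts the gzipped MySQL dump into a PostgreSQL-compatible dump by
    # piping it through the bundled db_converter.py script, then splices the
    # generated 'DROP INDEX' statements into the final file.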
    def convert_to_postgresql
      mysql_dump_gz = db_file_name + '.mysql'
      psql_dump_gz = db_file_name + '.psql'
      drop_indexes_sql = File.join(db_dir, 'drop_indexes.sql')

      File.rename(db_file_name, mysql_dump_gz)

      $progress.print "Converting MySQL database dump to Postgres ... "
      statuses = Open3.pipeline(
        %W(gzip -cd #{mysql_dump_gz}),
        %W(python lib/support/mysql-postgresql-converter/db_converter.py - - #{drop_indexes_sql}),
        %W(gzip -1 -c),
        out: [psql_dump_gz, 'w', 0600]
      )

      unless statuses.compact.all?(&:success?)
        abort "mysql-to-postgresql-converter failed"
      end
      $progress.puts '[DONE]'.green
      FileUtils.rm_f(mysql_dump_gz) # save disk space during conversion

      $progress.print "Splicing in 'DROP INDEX' statements ... "
      statuses = Open3.pipeline(
        %W(lib/support/mysql-postgresql-converter/splice_drop_indexes #{psql_dump_gz} #{drop_indexes_sql}),
        %W(gzip -1 -c),
        out: [db_file_name, 'w', 0600]
      )
      unless statuses.compact.all?(&:success?)
        abort "Failed to splice in 'DROP INDEX' statements"
      end

      $progress.puts '[DONE]'.green
    ensure
      FileUtils.rm_f([mysql_dump_gz, psql_dump_gz, drop_indexes_sql])
    end

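    # Feeds the gzipped dump through `gzip -cd` into the mysql or psql client,
    # depending on the configured adapter.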
    def restore
      decompress_rd, decompress_wr = IO.pipe
      decompress_pid = spawn(*%W(gzip -cd), out: decompress_wr, in: db_file_name)
      decompress_wr.close

      restore_pid = case config["adapter"]
      when /^mysql/ then
        $progress.print "Restoring MySQL database #{config['database']} ... "
        spawn('mysql', *mysql_args, config['database'], in: decompress_rd)
      when "postgresql" then
        $progress.print "Restoring PostgreSQL database #{config['database']} ... "
        pg_env
        spawn('psql', config['database'], in: decompress_rd)
      end
      decompress_rd.close

      # Reap both child processes before checking their exit statuses.
      success = [decompress_pid, restore_pid].map { |pid| Process.waitpid(pid); $?.success? }.all?

      report_success(success)
      abort 'Restore failed' unless success
    end

    protected

    def db_file_name
      File.join(db_dir, 'database.sql.gz')
    end

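    # Maps the keys present in the database.yml entry (host, port, socket,
    # username, encoding, password) onto mysql/mysqldump command-line options.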
    def mysql_args
      args = {
        'host'      => '--host',
        'port'      => '--port',
        'socket'    => '--socket',
        'username'  => '--user',
        'encoding'  => '--default-character-set',
        'password'  => '--password'
      }
      args.map { |opt, arg| "#{arg}=#{config[opt]}" if config[opt] }.compact
    end

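    # Exports the connection settings as PG* environment variables so that
    # pg_dump and psql pick them up without additional command-line flags.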
    def pg_env
      ENV['PGUSER']     = config["username"] if config["username"]
      ENV['PGHOST']     = config["host"] if config["host"]
      ENV['PGPORT']     = config["port"].to_s if config["port"]
      ENV['PGPASSWORD'] = config["password"].to_s if config["password"]
    end

    def report_success(success)
      if success
        $progress.puts '[DONE]'.green
      else
        $progress.puts '[FAILED]'.red
      end
    end
  end
end
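
# A minimal usage sketch (assuming this class is driven from a backup rake
# task, which is outside this file):
#
#   db = Backup::Database.new
#   db.dump        # or db.dump(true) to convert a MySQL dump for PostgreSQL
#   db.restore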