db/post_migrate/20171128214150_schedule_populate_merge_request_metrics_with_events_data.rb
# frozen_string_literal: true
# rubocop:disable GitlabSecurity/SqlInjection

class SchedulePopulateMergeRequestMetricsWithEventsData < ActiveRecord::Migration
  DOWNTIME = false
  BATCH_SIZE = 10_000
  MIGRATION = 'PopulateMergeRequestMetricsWithEventsData'

  disable_ddl_transaction!

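  # Isolated model so the migration does not depend on the application's
  # MergeRequest class; EachBatch provides #each_batch for id-ordered batching.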
  class MergeRequest < ActiveRecord::Base
    self.table_name = 'merge_requests'

    include ::EachBatch
  end

  def up
    say 'Scheduling `PopulateMergeRequestMetricsWithEventsData` jobs'
    # This will update around 4_000_000 records in batches of 10_000 merge
    # requests (one batch scheduled every 10 minutes) and should take around
    # 66 hours to complete. Production PostgreSQL can apparently vacuum
    # 10k-20k dead tuples per minute, and at maximum each of these jobs
    # should UPDATE 20k records.
    #
    # More information about the updates in `PopulateMergeRequestMetricsWithEventsData` class.
    #
    MergeRequest.all.each_batch(of: BATCH_SIZE) do |relation, index|
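      # Lowest and highest merge request id in this batch, passed to the job as its range.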
      range = relation.pluck('MIN(id)', 'MAX(id)').first

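      # each_batch yields 1-based batch indexes, so jobs are staggered 10 minutes apart.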
      BackgroundMigrationWorker.perform_in(index * 10.minutes, MIGRATION, range)
    end
  end

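  # Reverting only clears the columns the background migration populates;
  # it does not cancel any jobs that are still queued.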
  def down
    execute "update merge_request_metrics set latest_closed_at = null"
    execute "update merge_request_metrics set latest_closed_by_id = null"
    execute "update merge_request_metrics set merged_by_id = null"
  end
end
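
For context (not part of the migration file): under GitLab's background migration convention, BackgroundMigrationWorker resolves the MIGRATION string under the Gitlab::BackgroundMigration namespace and calls #perform with the scheduled arguments, here the batch's [min_id, max_id]. A minimal sketch of running a single batch inline from a Rails console, assuming that convention and a hypothetical id range, would be:

# Sketch only; the id range below is illustrative, not taken from production data.
migration = Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsData.new
migration.perform(1, 10_000)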