path: root/app/services/ci/job_artifacts/destroy_batch_service.rb
# frozen_string_literal: true

module Ci
  module JobArtifacts
    class DestroyBatchService
      include BaseServiceUtility
      include ::Gitlab::Utils::StrongMemoize

      # Danger: Private - should only be called by Ci:: services that pass in a batch of job artifacts.
      # Not intended for use outside the Ci:: namespace.

      # Adds the passed batch of job artifacts to the `ci_deleted_objects` table
      # for asynchronous destruction of the objects in Object Storage via the `Ci::DeleteObjectsService`
      # and then deletes the batch of related `ci_job_artifacts` records.
      # Params:
      # +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
      # +pick_up_at+:: When to pick up the files for deletion
      # Returns:
      # +Hash+:: A hash with status, destroyed_artifacts_count and statistics_updates keys
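      #
      # Illustrative usage only (not part of the original file); assumes `artifacts` is an
      # ActiveRecord relation of Ci::JobArtifact records scoped by a caller inside the
      # Ci:: namespace:
      #
      #   result = Ci::JobArtifacts::DestroyBatchService
      #     .new(artifacts, pick_up_at: 1.hour.from_now)
      #     .execute
      #
      #   result[:status]                    # => :success
      #   result[:destroyed_artifacts_count] # => number of ci_job_artifacts rows deleted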
      def initialize(job_artifacts, pick_up_at: nil, skip_projects_on_refresh: false)
        @job_artifacts = job_artifacts.with_destroy_preloads.to_a
        @pick_up_at = pick_up_at
        @skip_projects_on_refresh = skip_projects_on_refresh
      end

      # rubocop: disable CodeReuse/ActiveRecord
      def execute(update_stats: true)
        if @skip_projects_on_refresh
          exclude_artifacts_undergoing_stats_refresh
        else
          track_artifacts_undergoing_stats_refresh
        end

        exclude_trace_artifacts

        return success(destroyed_artifacts_count: 0, statistics_updates: {}) if @job_artifacts.empty?

        destroy_related_records(@job_artifacts)

        destroy_around_hook(@job_artifacts) do
          Ci::DeletedObject.transaction do
            Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
            Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
          end
        end

        after_batch_destroy_hook(@job_artifacts)

        update_project_statistics! if update_stats

        increment_monitoring_statistics(artifacts_count, artifacts_bytes)

        Gitlab::Ci::Artifacts::Logger.log_deleted(@job_artifacts, 'Ci::JobArtifacts::DestroyBatchService#execute')

        success(destroyed_artifacts_count: artifacts_count, statistics_updates: statistics_updates_per_project)
      end
      # rubocop: enable CodeReuse/ActiveRecord

      private

      # Overridden in EE
      # :nocov:
      def destroy_around_hook(artifacts)
        yield
      end
      # :nocov:

      # Overridden in EE
      def destroy_related_records(artifacts); end

      # Overridden in EE
      def after_batch_destroy_hook(artifacts); end

      # using ! here since this can't be called inside a transaction
      def update_project_statistics!
        statistics_updates_per_project.each do |project, increments|
          ProjectStatistics.bulk_increment_statistic(project, Ci::JobArtifact.project_statistics_name, increments)
        end
      end

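      # Groups the size decrements by project. The returned hash has the
      # following (illustrative) shape:
      #
      #   { #<Project id: 1> => [#<Gitlab::Counters::Increment amount: -1024, ref: 42>, ...] }
      #
      # where each increment carries the negated artifact size and the artifact id as its ref.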
      def statistics_updates_per_project
        strong_memoize(:statistics_updates_per_project) do
          result = Hash.new { |updates, project| updates[project] = [] }

          @job_artifacts.each_with_object(result) do |job_artifact, updates|
            increment = Gitlab::Counters::Increment.new(amount: -job_artifact.size.to_i, ref: job_artifact.id)
            updates[job_artifact.project] << increment
          end
        end
      end

      def increment_monitoring_statistics(size, bytes)
        metrics.increment_destroyed_artifacts_count(size)
        metrics.increment_destroyed_artifacts_bytes(bytes)
      end

      def metrics
        @metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
      end

      def artifacts_count
        strong_memoize(:artifacts_count) do
          @job_artifacts.count
        end
      end

      def artifacts_bytes
        strong_memoize(:artifacts_bytes) do
          @job_artifacts.sum { |artifact| artifact.try(:size) || 0 }
        end
      end

      # Traces should never be destroyed.
      def exclude_trace_artifacts
        _trace_artifacts, @job_artifacts = @job_artifacts.partition(&:trace?)
      end

      def track_artifacts_undergoing_stats_refresh
        project_ids = @job_artifacts.find_all do |artifact|
          artifact.project.refreshing_build_artifacts_size?
        end.map(&:project_id).uniq

        project_ids.each do |project_id|
          Gitlab::ProjectStatsRefreshConflictsLogger.warn_artifact_deletion_during_stats_refresh(
            method: 'Ci::JobArtifacts::DestroyBatchService#execute',
            project_id: project_id
          )
        end
      end

      def exclude_artifacts_undergoing_stats_refresh
        project_ids = Set.new

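        # reject! keeps artifacts whose project is not refreshing its build
        # artifact sizes: for refreshing projects the block records the
        # project_id and returns the (truthy) Set, so those artifacts are
        # dropped from the batch and skipped for deletion.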
        @job_artifacts.reject! do |artifact|
          next unless artifact.project.refreshing_build_artifacts_size?

          project_ids << artifact.project_id
        end

        if project_ids.any?
          Gitlab::ProjectStatsRefreshConflictsLogger.warn_skipped_artifact_deletion_during_stats_refresh(
            method: 'Ci::JobArtifacts::DestroyBatchService#execute',
            project_ids: project_ids
          )
        end
      end
    end
  end
end

Ci::JobArtifacts::DestroyBatchService.prepend_mod_with('Ci::JobArtifacts::DestroyBatchService')