Diffstat (limited to 'app/services/ci/destroy_expired_job_artifacts_service.rb')
-rw-r--r--  app/services/ci/destroy_expired_job_artifacts_service.rb  49
1 file changed, 8 insertions(+), 41 deletions(-)
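
In short, the change strips the in-service destruction logic and leaves a thin loop that delegates each batch to Ci::JobArtifactsDestroyBatchService. Assembled from the context and added lines in the hunks below (BATCH_SIZE, LOOP_LIMIT and loop_timeout? are unchanged context, not part of this diff), the post-change shape of the two methods is roughly:

# Sketch of the post-change code path, assembled from the hunks below.
# BATCH_SIZE, LOOP_LIMIT, LOOP_TIMEOUT and loop_timeout? are unchanged context.
def destroy_job_artifacts_with_slow_iteration(start_at)
  Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
    # For performance reasons, join with ci_pipelines after the batch is queried (MR 47496).
    artifacts = relation.unlocked

    service_response = destroy_batch_async(artifacts)
    @removed_artifacts_count += service_response[:destroyed_artifacts_count]

    break if loop_timeout?(start_at)
    break if index >= LOOP_LIMIT
  end
end

def destroy_batch_async(artifacts)
  Ci::JobArtifactsDestroyBatchService.new(artifacts).execute
end
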
diff --git a/app/services/ci/destroy_expired_job_artifacts_service.rb b/app/services/ci/destroy_expired_job_artifacts_service.rb
index 7d8a3c17abe..d91cfb3cc82 100644
--- a/app/services/ci/destroy_expired_job_artifacts_service.rb
+++ b/app/services/ci/destroy_expired_job_artifacts_service.rb
@@ -4,7 +4,6 @@ module Ci
   class DestroyExpiredJobArtifactsService
     include ::Gitlab::ExclusiveLeaseHelpers
     include ::Gitlab::LoopHelpers
-    include ::Gitlab::Utils::StrongMemoize
 
     BATCH_SIZE = 100
     LOOP_TIMEOUT = 5.minutes
@@ -34,50 +33,20 @@ module Ci
 
     def destroy_job_artifacts_with_slow_iteration(start_at)
       Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
-        artifacts = relation.unlocked.with_destroy_preloads.to_a
-        parallel_destroy_batch(artifacts) if artifacts.any?
+        # For performance reasons, join with ci_pipelines after the batch is queried.
+        # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
+        artifacts = relation.unlocked
+
+        service_response = destroy_batch_async(artifacts)
+        @removed_artifacts_count += service_response[:destroyed_artifacts_count]
 
         break if loop_timeout?(start_at)
         break if index >= LOOP_LIMIT
       end
     end
 
-    def parallel_destroy_batch(job_artifacts)
-      Ci::DeletedObject.transaction do
-        Ci::DeletedObject.bulk_import(job_artifacts)
-        Ci::JobArtifact.id_in(job_artifacts.map(&:id)).delete_all
-        destroy_related_records_for(job_artifacts)
-      end
-
-      # This is executed outside of the transaction because it depends on Redis
-      update_project_statistics_for(job_artifacts)
-      increment_monitoring_statistics(job_artifacts.size)
-    end
-
-    # This method is implemented in EE and it must do only database work
-    def destroy_related_records_for(job_artifacts); end
-
-    def update_project_statistics_for(job_artifacts)
-      artifacts_by_project = job_artifacts.group_by(&:project)
-      artifacts_by_project.each do |project, artifacts|
-        delta = -artifacts.sum { |artifact| artifact.size.to_i }
-        ProjectStatistics.increment_statistic(
-          project, Ci::JobArtifact.project_statistics_name, delta)
-      end
-    end
-
-    def increment_monitoring_statistics(size)
-      destroyed_artifacts_counter.increment({}, size)
-      @removed_artifacts_count += size
-    end
-
-    def destroyed_artifacts_counter
-      strong_memoize(:destroyed_artifacts_counter) do
-        name = :destroyed_job_artifacts_count_total
-        comment = 'Counter of destroyed expired job artifacts'
-
-        ::Gitlab::Metrics.counter(name, comment)
-      end
+    def destroy_batch_async(artifacts)
+      Ci::JobArtifactsDestroyBatchService.new(artifacts).execute
     end
 
     def loop_timeout?(start_at)
@@ -85,5 +54,3 @@ module Ci
     end
   end
 end
-
-Ci::DestroyExpiredJobArtifactsService.prepend_if_ee('EE::Ci::DestroyExpiredJobArtifactsService')
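
The 41 deleted lines are consolidated behind the new Ci::JobArtifactsDestroyBatchService, which this diff only references. The sketch below is a hypothetical reconstruction that simply regroups the removed methods (the Ci::DeletedObject transaction, the EE-only related-records hook, and the project statistics update) and returns the :destroyed_artifacts_count key read at the call site; the real class introduced in the merge request may differ, and the Prometheus counter logic is omitted here for brevity. With destroyed_artifacts_counter gone from this file, nothing here calls strong_memoize any more, which is presumably why the first hunk also drops the StrongMemoize include.

# Hypothetical sketch only: Ci::JobArtifactsDestroyBatchService is referenced
# but not defined in this diff. The body regroups the deleted methods above;
# method names not present in the hunks are illustrative.
module Ci
  class JobArtifactsDestroyBatchService
    def initialize(job_artifacts)
      # with_destroy_preloads/to_a are taken from the removed caller code.
      @job_artifacts = job_artifacts.with_destroy_preloads.to_a
    end

    def execute
      return { destroyed_artifacts_count: 0 } if @job_artifacts.empty?

      Ci::DeletedObject.transaction do
        Ci::DeletedObject.bulk_import(@job_artifacts)
        Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
        destroy_related_records(@job_artifacts) # EE hook; database work only
      end

      # Executed outside the transaction because it depends on Redis.
      update_project_statistics

      # The caller reads :destroyed_artifacts_count from the response.
      { destroyed_artifacts_count: @job_artifacts.size }
    end

    private

    # Assumed EE override point, mirroring the removed destroy_related_records_for.
    def destroy_related_records(artifacts); end

    def update_project_statistics
      @job_artifacts.group_by(&:project).each do |project, artifacts|
        delta = -artifacts.sum { |artifact| artifact.size.to_i }
        ProjectStatistics.increment_statistic(
          project, Ci::JobArtifact.project_statistics_name, delta)
      end
    end
  end
end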