Diffstat (limited to 'app/services/ci')
-rw-r--r--  app/services/ci/archive_trace_service.rb | 2
-rw-r--r--  app/services/ci/build_report_result_service.rb | 29
-rw-r--r--  app/services/ci/create_downstream_pipeline_service.rb | 15
-rw-r--r--  app/services/ci/create_job_artifacts_service.rb | 9
-rw-r--r--  app/services/ci/create_pipeline_service.rb | 5
-rw-r--r--  app/services/ci/daily_build_group_report_result_service.rb | 2
-rw-r--r--  app/services/ci/delete_objects_service.rb | 62
-rw-r--r--  app/services/ci/destroy_expired_job_artifacts_service.rb | 7
-rw-r--r--  app/services/ci/expire_pipeline_cache_service.rb | 13
-rw-r--r--  app/services/ci/list_config_variables_service.rb | 13
-rw-r--r--  app/services/ci/pipelines/create_artifact_service.rb | 1
-rw-r--r--  app/services/ci/play_bridge_service.rb | 14
-rw-r--r--  app/services/ci/play_build_service.rb | 4
-rw-r--r--  app/services/ci/play_manual_stage_service.rb | 8
-rw-r--r--  app/services/ci/process_pipeline_service.rb | 4
-rw-r--r--  app/services/ci/retry_build_service.rb | 2
-rw-r--r--  app/services/ci/update_build_queue_service.rb | 4
-rw-r--r--  app/services/ci/update_build_state_service.rb | 142
18 files changed, 260 insertions(+), 76 deletions(-)
diff --git a/app/services/ci/archive_trace_service.rb b/app/services/ci/archive_trace_service.rb
index 073ef465e13..9b2c7788897 100644
--- a/app/services/ci/archive_trace_service.rb
+++ b/app/services/ci/archive_trace_service.rb
@@ -27,7 +27,7 @@ module Ci
rescue => e
# Tracks this error with application logs, Sentry, and Prometheus.
# If `archive!` keeps failing for over a week, that could incur data loss.
- # (See more https://docs.gitlab.com/ee/administration/job_traces.html#new-live-trace-architecture)
+ # (See more https://docs.gitlab.com/ee/administration/job_logs.html#new-incremental-logging-architecture)
# In order to avoid interrupting the system, we do not raise an exception here.
archive_error(e, job, worker_name)
end
diff --git a/app/services/ci/build_report_result_service.rb b/app/services/ci/build_report_result_service.rb
index ca66ad8249d..76ecf428f11 100644
--- a/app/services/ci/build_report_result_service.rb
+++ b/app/services/ci/build_report_result_service.rb
@@ -2,12 +2,20 @@
module Ci
class BuildReportResultService
+ include Gitlab::Utils::UsageData
+
+ EVENT_NAME = 'i_testing_test_case_parsed'
+
def execute(build)
return unless build.has_test_reports?
+ test_suite = generate_test_suite_report(build)
+
+ track_test_cases(build, test_suite)
+
build.report_results.create!(
project_id: build.project_id,
- data: tests_params(build)
+ data: tests_params(test_suite)
)
end
@@ -17,9 +25,7 @@ module Ci
build.collect_test_reports!(Gitlab::Ci::Reports::TestReports.new)
end
- def tests_params(build)
- test_suite = generate_test_suite_report(build)
-
+ def tests_params(test_suite)
{
tests: {
name: test_suite.name,
@@ -31,5 +37,20 @@ module Ci
}
}
end
+
+ def track_test_cases(build, test_suite)
+ return if Feature.disabled?(:track_unique_test_cases_parsed, build.project)
+
+ track_usage_event(EVENT_NAME, test_case_hashes(build, test_suite))
+ end
+
+ def test_case_hashes(build, test_suite)
+ [].tap do |hashes|
+ test_suite.each_test_case do |test_case|
+ key = "#{build.project_id}-#{test_case.key}"
+ hashes << Digest::SHA256.hexdigest(key)
+ end
+ end
+ end
end
end
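
This change starts counting unique parsed test cases for usage analytics: every test case key is namespaced with the project ID and hashed with SHA-256 before being handed to `track_usage_event`, behind the `track_unique_test_cases_parsed` feature flag. A minimal sketch of the hashing step in isolation (the keys below are made-up placeholders, not real report data):

    require 'digest'

    # Hypothetical test case keys as they might appear in a parsed JUnit report.
    test_case_keys = [
      'rspec.spec/models/user_spec.rb.validates presence of email',
      'rspec.spec/models/user_spec.rb.validates uniqueness of email'
    ]
    project_id = 42

    # Each key is namespaced by project before hashing, so identical test names
    # in different projects still count as distinct unique events.
    hashes = test_case_keys.map { |key| Digest::SHA256.hexdigest("#{project_id}-#{key}") }
    # => two 64-character hex digests
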
diff --git a/app/services/ci/create_downstream_pipeline_service.rb b/app/services/ci/create_downstream_pipeline_service.rb
index 0394cfb6119..86d0cf079fc 100644
--- a/app/services/ci/create_downstream_pipeline_service.rb
+++ b/app/services/ci/create_downstream_pipeline_service.rb
@@ -33,7 +33,7 @@ module Ci
pipeline_params.fetch(:target_revision))
downstream_pipeline = service.execute(
- pipeline_params.fetch(:source), pipeline_params[:execute_params]) do |pipeline|
+ pipeline_params.fetch(:source), **pipeline_params[:execute_params]) do |pipeline|
pipeline.variables.build(@bridge.downstream_variables)
end
@@ -77,16 +77,9 @@ module Ci
# TODO: Remove this condition in favour of model validation
# https://gitlab.com/gitlab-org/gitlab/issues/38338
- if ::Gitlab::Ci::Features.child_of_child_pipeline_enabled?(project)
- if has_max_descendants_depth?
- @bridge.drop!(:reached_max_descendant_pipelines_depth)
- return false
- end
- else
- if @bridge.triggers_child_pipeline? && @bridge.pipeline.parent_pipeline.present?
- @bridge.drop!(:bridge_pipeline_is_child_pipeline)
- return false
- end
+ if has_max_descendants_depth?
+ @bridge.drop!(:reached_max_descendant_pipelines_depth)
+ return false
end
unless can_create_downstream_pipeline?(target_ref)
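
The `**pipeline_params[:execute_params]` change is part of the Ruby 2.7+ keyword-argument separation: an options hash is no longer implicitly converted to keyword arguments, so it has to be splatted explicitly. A self-contained illustration (method and option names here are illustrative, not GitLab APIs):

    def run(source, dry_run: false)
      [source, dry_run]
    end

    opts = { dry_run: true }

    run(:push, **opts)   # => [:push, true]  (hash explicitly splatted into keywords)
    # run(:push, opts)   # raises ArgumentError on Ruby 3.x: the hash stays positional
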
diff --git a/app/services/ci/create_job_artifacts_service.rb b/app/services/ci/create_job_artifacts_service.rb
index 1fe65898d55..5efb3805bf7 100644
--- a/app/services/ci/create_job_artifacts_service.rb
+++ b/app/services/ci/create_job_artifacts_service.rb
@@ -52,24 +52,15 @@ module Ci
attr_reader :job, :project
def validate_requirements(artifact_type:, filesize:)
- return forbidden_type_error(artifact_type) if forbidden_type?(artifact_type)
return too_large_error if too_large?(artifact_type, filesize)
success
end
- def forbidden_type?(type)
- lsif?(type) && !code_navigation_enabled?
- end
-
def too_large?(type, size)
size > max_size(type) if size
end
- def code_navigation_enabled?
- Feature.enabled?(:code_navigation, project, default_enabled: true)
- end
-
def lsif?(type)
type == LSIF_ARTIFACT_TYPE
end
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 70ad18e80eb..e7ede98fea4 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -70,7 +70,7 @@ module Ci
push_options: params[:push_options] || {},
chat_data: params[:chat_data],
bridge: bridge,
- **extra_options(options))
+ **extra_options(**options))
# Ensure we never persist the pipeline when dry_run: true
@pipeline.readonly! if command.dry_run?
@@ -82,8 +82,7 @@ module Ci
schedule_head_pipeline_update if pipeline.persisted?
# If pipeline is not persisted, try to recover IID
- pipeline.reset_project_iid unless pipeline.persisted? ||
- Feature.disabled?(:ci_pipeline_rewind_iid, project, default_enabled: true)
+ pipeline.reset_project_iid unless pipeline.persisted?
pipeline
end
diff --git a/app/services/ci/daily_build_group_report_result_service.rb b/app/services/ci/daily_build_group_report_result_service.rb
index 6cdf3c88f8c..c32fc27c274 100644
--- a/app/services/ci/daily_build_group_report_result_service.rb
+++ b/app/services/ci/daily_build_group_report_result_service.rb
@@ -3,8 +3,6 @@
module Ci
class DailyBuildGroupReportResultService
def execute(pipeline)
- return unless Feature.enabled?(:ci_daily_code_coverage, pipeline.project, default_enabled: true)
-
DailyBuildGroupReportResult.upsert_reports(coverage_reports(pipeline))
end
diff --git a/app/services/ci/delete_objects_service.rb b/app/services/ci/delete_objects_service.rb
new file mode 100644
index 00000000000..bac99abadc9
--- /dev/null
+++ b/app/services/ci/delete_objects_service.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Ci
+ class DeleteObjectsService
+ TransactionInProgressError = Class.new(StandardError)
+ TRANSACTION_MESSAGE = "can't perform network calls inside a database transaction"
+ BATCH_SIZE = 100
+ RETRY_IN = 10.minutes
+
+ def execute
+ objects = load_next_batch
+ destroy_everything(objects)
+ end
+
+ def remaining_batches_count(max_batch_count:)
+ Ci::DeletedObject
+ .ready_for_destruction(max_batch_count * BATCH_SIZE)
+ .size
+ .fdiv(BATCH_SIZE)
+ .ceil
+ end
+
+ private
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def load_next_batch
+ # `find_by_sql` performs a write in this case and we need to wrap it in
+ # a transaction to stick to the primary database.
+ Ci::DeletedObject.transaction do
+ Ci::DeletedObject.find_by_sql([
+ next_batch_sql, new_pick_up_at: RETRY_IN.from_now
+ ])
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def next_batch_sql
+ <<~SQL.squish
+ UPDATE "ci_deleted_objects"
+ SET "pick_up_at" = :new_pick_up_at
+ WHERE "ci_deleted_objects"."id" IN (#{locked_object_ids_sql})
+ RETURNING *
+ SQL
+ end
+
+ def locked_object_ids_sql
+ Ci::DeletedObject.lock_for_destruction(BATCH_SIZE).to_sql
+ end
+
+ def destroy_everything(objects)
+ raise TransactionInProgressError, TRANSACTION_MESSAGE if transaction_open?
+ return unless objects.any?
+
+ deleted = objects.select(&:delete_file_from_storage)
+ Ci::DeletedObject.id_in(deleted.map(&:id)).delete_all
+ end
+
+ def transaction_open?
+ Ci::DeletedObject.connection.transaction_open?
+ end
+ end
+end
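
`load_next_batch` claims a batch by pushing `pick_up_at` ten minutes into the future and returning the updated rows in a single statement, so objects whose file deletion fails simply become eligible again after `RETRY_IN`. `remaining_batches_count` then turns a row count into a batch count by dividing by `BATCH_SIZE` and rounding up; a quick check of that arithmetic:

    BATCH_SIZE = 100

    # Number of batches needed to drain a given number of deletable rows,
    # rounding up so a partial batch still counts as one.
    [250, 100, 20, 0].map { |count| count.fdiv(BATCH_SIZE).ceil }
    # => [3, 1, 1, 0]
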
diff --git a/app/services/ci/destroy_expired_job_artifacts_service.rb b/app/services/ci/destroy_expired_job_artifacts_service.rb
index ca6e60f819a..438b5c7496d 100644
--- a/app/services/ci/destroy_expired_job_artifacts_service.rb
+++ b/app/services/ci/destroy_expired_job_artifacts_service.rb
@@ -39,6 +39,13 @@ module Ci
return false if artifacts.empty?
artifacts.each(&:destroy!)
+ run_after_destroy(artifacts)
+
+ true # This is required because of the design of `loop_until` method.
end
+
+ def run_after_destroy(artifacts); end
end
end
+
+Ci::DestroyExpiredJobArtifactsService.prepend_if_ee('EE::Ci::DestroyExpiredJobArtifactsService')
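
`run_after_destroy` is a no-op hook in CE that the EE module loaded via `prepend_if_ee` can override, and the explicit `true` return keeps the caller's batching loop going. A plain-Ruby sketch of that prepend pattern (class and module names are illustrative, and the EE behaviour is stubbed out):

    class DestroyService
      def execute(records)
        records.each(&:destroy!)
        run_after_destroy(records)

        true # a truthy return keeps the caller's batching loop going
      end

      private

      # No-op hook in CE; an EE module prepended onto the class overrides it.
      def run_after_destroy(records); end
    end

    module EEDestroyService
      def run_after_destroy(records)
        puts "EE-only bookkeeping for #{records.size} records (illustrative)"
        super
      end
    end

    DestroyService.prepend(EEDestroyService)
    DestroyService.new.execute([]) # => true
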
diff --git a/app/services/ci/expire_pipeline_cache_service.rb b/app/services/ci/expire_pipeline_cache_service.rb
index 32abd1a7626..8343e0f8cd0 100644
--- a/app/services/ci/expire_pipeline_cache_service.rb
+++ b/app/services/ci/expire_pipeline_cache_service.rb
@@ -32,11 +32,18 @@ module Ci
Gitlab::Routing.url_helpers.project_new_merge_request_path(project, format: :json)
end
+ def pipelines_project_merge_request_path(merge_request)
+ Gitlab::Routing.url_helpers.pipelines_project_merge_request_path(merge_request.target_project, merge_request, format: :json)
+ end
+
+ def merge_request_widget_path(merge_request)
+ Gitlab::Routing.url_helpers.cached_widget_project_json_merge_request_path(merge_request.project, merge_request, format: :json)
+ end
+
def each_pipelines_merge_request_path(pipeline)
pipeline.all_merge_requests.each do |merge_request|
- path = Gitlab::Routing.url_helpers.pipelines_project_merge_request_path(merge_request.target_project, merge_request, format: :json)
-
- yield(path)
+ yield(pipelines_project_merge_request_path(merge_request))
+ yield(merge_request_widget_path(merge_request))
end
end
diff --git a/app/services/ci/list_config_variables_service.rb b/app/services/ci/list_config_variables_service.rb
new file mode 100644
index 00000000000..b5dc192b512
--- /dev/null
+++ b/app/services/ci/list_config_variables_service.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Ci
+ class ListConfigVariablesService < ::BaseService
+ def execute(sha)
+ config = project.ci_config_for(sha)
+ return {} unless config
+
+ result = Gitlab::Ci::YamlProcessor.new(config).execute
+ result.valid? ? result.variables_with_data : {}
+ end
+ end
+end
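
The new service resolves the CI config at the given SHA and returns its variables, falling back to an empty hash when the config is missing or invalid. Assuming the usual `BaseService.new(project, user)` calling convention, a usage sketch might look like:

    # Hypothetical caller; actual call sites are not part of this diff.
    variables = Ci::ListConfigVariablesService
      .new(project, current_user)
      .execute(project.default_branch)

    # An invalid or missing .gitlab-ci.yml yields {}, so callers can treat the
    # result uniformly, e.g. variables.fetch('DEPLOY_ENV', {})
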
diff --git a/app/services/ci/pipelines/create_artifact_service.rb b/app/services/ci/pipelines/create_artifact_service.rb
index b7d334e436d..bfaf317241a 100644
--- a/app/services/ci/pipelines/create_artifact_service.rb
+++ b/app/services/ci/pipelines/create_artifact_service.rb
@@ -3,7 +3,6 @@ module Ci
module Pipelines
class CreateArtifactService
def execute(pipeline)
- return unless ::Gitlab::Ci::Features.coverage_report_view?(pipeline.project)
return unless pipeline.can_generate_coverage_reports?
return if pipeline.has_coverage_reports?
diff --git a/app/services/ci/play_bridge_service.rb b/app/services/ci/play_bridge_service.rb
new file mode 100644
index 00000000000..70c4a8e6136
--- /dev/null
+++ b/app/services/ci/play_bridge_service.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Ci
+ class PlayBridgeService < ::BaseService
+ def execute(bridge)
+ raise Gitlab::Access::AccessDeniedError unless can?(current_user, :play_job, bridge)
+
+ bridge.tap do |bridge|
+ bridge.user = current_user
+ bridge.enqueue!
+ end
+ end
+ end
+end
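
`PlayBridgeService` mirrors `PlayBuildService`: guard on the `:play_job` ability, assign the user, enqueue, and return the record itself via `tap`. A stripped-down sketch of that shape with the permission check stubbed (GitLab's policy layer is not reproduced here):

    def play(job, user, allowed:)
      raise 'access denied' unless allowed

      # tap returns the job itself, so callers get the enqueued record back.
      job.tap do |j|
        j[:user]  = user
        j[:state] = :pending # stands in for bridge.enqueue!
      end
    end

    play({ name: 'trigger-downstream' }, 'alice', allowed: true)
    # => {:name=>"trigger-downstream", :user=>"alice", :state=>:pending}
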
diff --git a/app/services/ci/play_build_service.rb b/app/services/ci/play_build_service.rb
index 9f922ffde81..6adeca624a8 100644
--- a/app/services/ci/play_build_service.rb
+++ b/app/services/ci/play_build_service.rb
@@ -3,9 +3,7 @@
module Ci
class PlayBuildService < ::BaseService
def execute(build, job_variables_attributes = nil)
- unless can?(current_user, :update_build, build)
- raise Gitlab::Access::AccessDeniedError
- end
+ raise Gitlab::Access::AccessDeniedError unless can?(current_user, :play_job, build)
# Try to enqueue the build, otherwise create a duplicate.
#
diff --git a/app/services/ci/play_manual_stage_service.rb b/app/services/ci/play_manual_stage_service.rb
index 2497fc52e6b..c6fa7803e52 100644
--- a/app/services/ci/play_manual_stage_service.rb
+++ b/app/services/ci/play_manual_stage_service.rb
@@ -9,12 +9,12 @@ module Ci
end
def execute(stage)
- stage.builds.manual.each do |build|
- next unless build.playable?
+ stage.processables.manual.each do |processable|
+ next unless processable.playable?
- build.play(current_user)
+ processable.play(current_user)
rescue Gitlab::Access::AccessDeniedError
- logger.error(message: 'Unable to play manual action', build_id: build.id)
+ logger.error(message: 'Unable to play manual action', processable_id: processable.id)
end
end
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index 18bae26613f..e511e26adfe 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -31,14 +31,14 @@ module Ci
# rubocop: disable CodeReuse/ActiveRecord
def update_retried
# find the latest builds for each name
- latest_statuses = pipeline.statuses.latest
+ latest_statuses = pipeline.latest_statuses
.group(:name)
.having('count(*) > 1')
.pluck(Arel.sql('MAX(id)'), 'name')
# mark builds that are retried
if latest_statuses.any?
- pipeline.statuses.latest
+ pipeline.latest_statuses
.where(name: latest_statuses.map(&:second))
.where.not(id: latest_statuses.map(&:first))
.update_all(retried: true)
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index 6b2e6c245f3..f397ada0696 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -58,7 +58,7 @@ module Ci
build = project.builds.new(attributes)
build.assign_attributes(::Gitlab::Ci::Pipeline::Seed::Build.environment_attributes_for(build))
build.retried = false
- BulkInsertableAssociations.with_bulk_insert(enabled: ::Gitlab::Ci::Features.bulk_insert_on_create?(project)) do
+ BulkInsertableAssociations.with_bulk_insert do
build.save!
end
build
diff --git a/app/services/ci/update_build_queue_service.rb b/app/services/ci/update_build_queue_service.rb
index 31c7178c9e7..241eba733ea 100644
--- a/app/services/ci/update_build_queue_service.rb
+++ b/app/services/ci/update_build_queue_service.rb
@@ -9,9 +9,7 @@ module Ci
private
def tick_for(build, runners)
- if Feature.enabled?(:ci_update_queues_for_online_runners, build.project, default_enabled: true)
- runners = runners.with_recent_runner_queue
- end
+ runners = runners.with_recent_runner_queue
runners.each do |runner|
runner.pick_build!(build)
diff --git a/app/services/ci/update_build_state_service.rb b/app/services/ci/update_build_state_service.rb
index 61e4c77c1e5..22a27906700 100644
--- a/app/services/ci/update_build_state_service.rb
+++ b/app/services/ci/update_build_state_service.rb
@@ -2,7 +2,11 @@
module Ci
class UpdateBuildStateService
- Result = Struct.new(:status, keyword_init: true)
+ include ::Gitlab::Utils::StrongMemoize
+ include ::Gitlab::ExclusiveLeaseHelpers
+
+ Result = Struct.new(:status, :backoff, keyword_init: true)
+ InvalidTraceError = Class.new(StandardError)
ACCEPT_TIMEOUT = 5.minutes.freeze
@@ -17,43 +21,77 @@ module Ci
def execute
overwrite_trace! if has_trace?
- if accept_request?
- accept_build_state!
- else
- check_migration_state
- update_build_state!
+ unless accept_available?
+ return update_build_state!
+ end
+
+ ensure_pending_state!
+
+ in_build_trace_lock do
+ process_build_state!
end
end
private
- def accept_build_state!
- if Time.current - ensure_pending_state.created_at > ACCEPT_TIMEOUT
- metrics.increment_trace_operation(operation: :discarded)
+ def overwrite_trace!
+ metrics.increment_trace_operation(operation: :overwrite)
- return update_build_state!
+ build.trace.set(params[:trace]) if Gitlab::Ci::Features.trace_overwrite?
+ end
+
+ def ensure_pending_state!
+ pending_state.created_at
+ end
+
+ def process_build_state!
+ if live_chunks_pending?
+ if pending_state_outdated?
+ discard_build_trace!
+ update_build_state!
+ else
+ accept_build_state!
+ end
+ else
+ validate_build_trace!
+ update_build_state!
end
+ end
+ def accept_build_state!
build.trace_chunks.live.find_each do |chunk|
chunk.schedule_to_persist!
end
metrics.increment_trace_operation(operation: :accepted)
- Result.new(status: 202)
+ ::Gitlab::Ci::Runner::Backoff.new(pending_state.created_at).then do |backoff|
+ Result.new(status: 202, backoff: backoff.to_seconds)
+ end
end
- def overwrite_trace!
- metrics.increment_trace_operation(operation: :overwrite)
+ def validate_build_trace!
+ return unless has_chunks?
- build.trace.set(params[:trace]) if Gitlab::Ci::Features.trace_overwrite?
- end
+ unless live_chunks_pending?
+ metrics.increment_trace_operation(operation: :finalized)
+ metrics.observe_migration_duration(pending_state_seconds)
+ end
- def check_migration_state
- return unless accept_available?
+ ::Gitlab::Ci::Trace::Checksum.new(build).then do |checksum|
+ unless checksum.valid?
+ metrics.increment_trace_operation(operation: :invalid)
- if has_chunks? && !live_chunks_pending?
- metrics.increment_trace_operation(operation: :finalized)
+ next unless log_invalid_chunks?
+
+ ::Gitlab::ErrorTracking.log_exception(InvalidTraceError.new,
+ project_path: build.project.full_path,
+ build_id: build.id,
+ state_crc32: checksum.state_crc32,
+ chunks_crc32: checksum.chunks_crc32,
+ chunks_count: checksum.chunks_count
+ )
+ end
end
end
@@ -76,12 +114,32 @@ module Ci
end
end
+ def discard_build_trace!
+ metrics.increment_trace_operation(operation: :discarded)
+ end
+
def accept_available?
!build_running? && has_checksum? && chunks_migration_enabled?
end
- def accept_request?
- accept_available? && live_chunks_pending?
+ def live_chunks_pending?
+ build.trace_chunks.live.any?
+ end
+
+ def has_chunks?
+ build.trace_chunks.any?
+ end
+
+ def pending_state_outdated?
+ pending_state_duration > ACCEPT_TIMEOUT
+ end
+
+ def pending_state_duration
+ Time.current - pending_state.created_at
+ end
+
+ def pending_state_seconds
+ pending_state_duration.seconds
end
def build_state
@@ -96,18 +154,14 @@ module Ci
params.dig(:checksum).present?
end
- def has_chunks?
- build.trace_chunks.any?
- end
-
- def live_chunks_pending?
- build.trace_chunks.live.any?
- end
-
def build_running?
build_state == 'running'
end
+ def pending_state
+ strong_memoize(:pending_state) { ensure_pending_state }
+ end
+
def ensure_pending_state
Ci::BuildPendingState.create_or_find_by!(
build_id: build.id,
@@ -121,8 +175,38 @@ module Ci
build.pending_state
end
+ ##
+ # This method is releasing an exclusive lock on a build trace the moment we
+ # conclude that build status has been written and the build state update
+ # has been committed to the database.
+ #
+ # Because a build state machine schedules a bunch of workers to run after
+ # build status transition to complete, we do not want to keep the lease
+ # until all the workers are scheduled because it opens a possibility of
+ # race conditions happening.
+ #
+ # Instead of keeping the lease until the transition is fully done and
+ # workers are scheduled, we immediately release the lock after the database
+ # commit happens.
+ #
+ def in_build_trace_lock(&block)
+ build.trace.lock do |_, lease| # rubocop:disable CodeReuse/ActiveRecord
+ build.run_on_status_commit { lease.cancel }
+
+ yield
+ end
+ rescue ::Gitlab::Ci::Trace::LockedError
+ metrics.increment_trace_operation(operation: :locked)
+
+ accept_build_state!
+ end
+
def chunks_migration_enabled?
::Gitlab::Ci::Features.accept_trace?(build.project)
end
+
+ def log_invalid_chunks?
+ ::Gitlab::Ci::Features.log_invalid_trace_chunks?(build.project)
+ end
end
end
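
The accept path now reports a `backoff` alongside the 202 status, derived from the age of the pending state, so the runner can space out retries while live trace chunks are persisted. The backoff algorithm inside `Gitlab::Ci::Runner::Backoff` is not shown in this diff; the sketch below only illustrates how a caller might consume the result:

    Result = Struct.new(:status, :backoff, keyword_init: true)

    # Illustrative consumer of the service result (not the actual runner/API code).
    def handle(result)
      case result.status
      when 202
        # Trace chunks are still being migrated: retry later, honoring the
        # suggested backoff derived from the pending state's age.
        sleep(result.backoff)
        :retry
      when 200
        :accepted
      else
        :error
      end
    end

    handle(Result.new(status: 202, backoff: 1)) # => :retry (after ~1s)
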