Diffstat (limited to 'app/services')
-rw-r--r--  app/services/base_project_service.rb | 10
-rw-r--r--  app/services/boards/issues/list_service.rb | 2
-rw-r--r--  app/services/bulk_import_service.rb | 70
-rw-r--r--  app/services/bulk_imports/create_service.rb | 83
-rw-r--r--  app/services/bulk_imports/file_export_service.rb | 42
-rw-r--r--  app/services/bulk_imports/get_importable_data_service.rb | 45
-rw-r--r--  app/services/bulk_imports/relation_export_service.rb | 57
-rw-r--r--  app/services/bulk_imports/tree_export_service.rb | 43
-rw-r--r--  app/services/bulk_imports/uploads_export_service.rb | 49
-rw-r--r--  app/services/ci/archive_trace_service.rb | 11
-rw-r--r--  app/services/ci/destroy_pipeline_service.rb | 3
-rw-r--r--  app/services/ci/pipelines/add_job_service.rb | 10
-rw-r--r--  app/services/ci/pipelines/hook_service.rb | 34
-rw-r--r--  app/services/ci/process_pipeline_service.rb | 37
-rw-r--r--  app/services/ci/queue/build_queue_service.rb | 2
-rw-r--r--  app/services/ci/register_job_service.rb | 57
-rw-r--r--  app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb | 2
-rw-r--r--  app/services/ci/retry_build_service.rb | 9
-rw-r--r--  app/services/ci/retry_pipeline_service.rb | 9
-rw-r--r--  app/services/ci/stuck_builds/drop_pending_service.rb (renamed from app/services/ci/stuck_builds/drop_service.rb) | 24
-rw-r--r--  app/services/ci/stuck_builds/drop_running_service.rb | 31
-rw-r--r--  app/services/ci/stuck_builds/drop_scheduled_service.rb | 23
-rw-r--r--  app/services/ci/update_build_state_service.rb | 2
-rw-r--r--  app/services/ci/update_pending_build_service.rb | 2
-rw-r--r--  app/services/clusters/agent_tokens/create_service.rb | 31
-rw-r--r--  app/services/clusters/agents/create_service.rb | 29
-rw-r--r--  app/services/clusters/agents/delete_service.rb | 23
-rw-r--r--  app/services/clusters/agents/refresh_authorization_service.rb | 2
-rw-r--r--  app/services/concerns/rate_limited_service.rb | 93
-rw-r--r--  app/services/container_expiration_policies/cleanup_service.rb | 6
-rw-r--r--  app/services/customer_relations/contacts/base_service.rb | 17
-rw-r--r--  app/services/customer_relations/contacts/create_service.rb | 41
-rw-r--r--  app/services/customer_relations/contacts/update_service.rb | 24
-rw-r--r--  app/services/customer_relations/organizations/base_service.rb | 2
-rw-r--r--  app/services/customer_relations/organizations/create_service.rb | 4
-rw-r--r--  app/services/dependency_proxy/auth_token_service.rb | 12
-rw-r--r--  app/services/dependency_proxy/find_or_create_blob_service.rb | 4
-rw-r--r--  app/services/dependency_proxy/find_or_create_manifest_service.rb | 10
-rw-r--r--  app/services/dependency_proxy/group_settings/update_service.rb | 37
-rw-r--r--  app/services/deployments/older_deployments_drop_service.rb | 12
-rw-r--r--  app/services/error_tracking/list_issues_service.rb | 9
-rw-r--r--  app/services/feature_flags/base_service.rb | 8
-rw-r--r--  app/services/feature_flags/create_service.rb | 2
-rw-r--r--  app/services/feature_flags/destroy_service.rb | 2
-rw-r--r--  app/services/feature_flags/hook_service.rb | 28
-rw-r--r--  app/services/feature_flags/update_service.rb | 24
-rw-r--r--  app/services/groups/transfer_service.rb | 14
-rw-r--r--  app/services/import/validate_remote_git_endpoint_service.rb | 76
-rw-r--r--  app/services/issuable/import_csv/base_service.rb | 9
-rw-r--r--  app/services/issues/clone_service.rb | 22
-rw-r--r--  app/services/issues/close_service.rb | 19
-rw-r--r--  app/services/issues/create_service.rb | 4
-rw-r--r--  app/services/issues/move_service.rb | 22
-rw-r--r--  app/services/issues/relative_position_rebalancing_service.rb | 6
-rw-r--r--  app/services/issues/reopen_service.rb | 15
-rw-r--r--  app/services/merge_requests/mergeability/check_base_service.rb | 36
-rw-r--r--  app/services/merge_requests/mergeability/check_ci_status_service.rb | 22
-rw-r--r--  app/services/merge_requests/mergeability/run_checks_service.rb | 54
-rw-r--r--  app/services/merge_requests/update_service.rb | 2
-rw-r--r--  app/services/metrics/dashboard/annotations/create_service.rb | 6
-rw-r--r--  app/services/metrics/dashboard/annotations/delete_service.rb | 4
-rw-r--r--  app/services/metrics/users_starred_dashboards/create_service.rb | 4
-rw-r--r--  app/services/packages/composer/create_package_service.rb | 4
-rw-r--r--  app/services/projects/after_rename_service.rb | 8
-rw-r--r--  app/services/projects/container_repository/cache_tags_created_at_service.rb | 70
-rw-r--r--  app/services/projects/container_repository/cleanup_tags_service.rb | 176
-rw-r--r--  app/services/projects/create_service.rb | 6
-rw-r--r--  app/services/projects/destroy_service.rb | 24
-rw-r--r--  app/services/projects/group_links/update_service.rb | 22
-rw-r--r--  app/services/projects/import_service.rb | 23
-rw-r--r--  app/services/projects/overwrite_project_service.rb | 4
-rw-r--r--  app/services/projects/participants_service.rb | 13
-rw-r--r--  app/services/projects/transfer_service.rb | 45
-rw-r--r--  app/services/projects/update_pages_service.rb | 10
-rw-r--r--  app/services/projects/update_service.rb | 33
-rw-r--r--  app/services/search/global_service.rb | 2
-rw-r--r--  app/services/security/ci_configuration/base_create_service.rb | 6
-rw-r--r--  app/services/security/ci_configuration/sast_create_service.rb | 17
-rw-r--r--  app/services/service_ping/submit_service.rb | 2
-rw-r--r--  app/services/terraform/remote_state_handler.rb | 4
-rw-r--r--  app/services/user_project_access_changed_service.rb | 2
-rw-r--r--  app/services/users/update_service.rb | 22
-rw-r--r--  app/services/users/upsert_credit_card_validation_service.rb | 17
83 files changed, 1476 insertions, 426 deletions
diff --git a/app/services/base_project_service.rb b/app/services/base_project_service.rb
index fb466e61673..1bf4a235a79 100644
--- a/app/services/base_project_service.rb
+++ b/app/services/base_project_service.rb
@@ -2,6 +2,8 @@
# Base class, scoped by project
class BaseProjectService < ::BaseContainerService
+ include ::Gitlab::Utils::StrongMemoize
+
attr_accessor :project
def initialize(project:, current_user: nil, params: {})
@@ -11,4 +13,12 @@ class BaseProjectService < ::BaseContainerService
end
delegate :repository, to: :project
+
+ private
+
+ def project_group
+ strong_memoize(:project_group) do
+ project.group
+ end
+ end
end
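
Example usage (sketch): a hypothetical subclass showing how the memoized project_group helper can be reused; the class name and payload are illustrative.

# Hypothetical subclass; not part of the change above.
class ExampleProjectGroupService < BaseProjectService
  def execute
    # project_group is strong-memoized, so repeated calls read project.group once.
    return ServiceResponse.error(message: 'Project has no group') unless project_group

    ServiceResponse.success(payload: { group_id: project_group.id })
  end
end

ExampleProjectGroupService.new(project: project, current_user: user).execute
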
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index 9a3e3bc3bdb..6021d634f86 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -22,7 +22,7 @@ module Boards
def order(items)
return items.order_closed_date_desc if list&.closed?
- items.order_by_position_and_priority(with_cte: params[:search].present?)
+ items.order_by_relative_position
end
def finder
diff --git a/app/services/bulk_import_service.rb b/app/services/bulk_import_service.rb
deleted file mode 100644
index 4e13e967dbd..00000000000
--- a/app/services/bulk_import_service.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-# frozen_string_literal: true
-
-# Entry point of the BulkImport feature.
-# This service receives a Gitlab Instance connection params
-# and a list of groups to be imported.
-#
-# Process topography:
-#
-# sync | async
-# |
-# User +--> P1 +----> Pn +---+
-# | ^ | Enqueue new job
-# | +-----+
-#
-# P1 (sync)
-#
-# - Create a BulkImport record
-# - Create a BulkImport::Entity for each group to be imported
-# - Enqueue a BulkImportWorker job (P2) to import the given groups (entities)
-#
-# Pn (async)
-#
-# - For each group to be imported (BulkImport::Entity.with_status(:created))
-# - Import the group data
-# - Create entities for each subgroup of the imported group
-# - Enqueue a BulkImportService job (Pn) to import the new entities (subgroups)
-#
-class BulkImportService
- attr_reader :current_user, :params, :credentials
-
- def initialize(current_user, params, credentials)
- @current_user = current_user
- @params = params
- @credentials = credentials
- end
-
- def execute
- bulk_import = create_bulk_import
-
- BulkImportWorker.perform_async(bulk_import.id)
-
- ServiceResponse.success(payload: bulk_import)
- rescue ActiveRecord::RecordInvalid => e
- ServiceResponse.error(
- message: e.message,
- http_status: :unprocessable_entity
- )
- end
-
- private
-
- def create_bulk_import
- BulkImport.transaction do
- bulk_import = BulkImport.create!(user: current_user, source_type: 'gitlab')
- bulk_import.create_configuration!(credentials.slice(:url, :access_token))
-
- params.each do |entity|
- BulkImports::Entity.create!(
- bulk_import: bulk_import,
- source_type: entity[:source_type],
- source_full_path: entity[:source_full_path],
- destination_name: entity[:destination_name],
- destination_namespace: entity[:destination_namespace]
- )
- end
-
- bulk_import
- end
- end
-end
diff --git a/app/services/bulk_imports/create_service.rb b/app/services/bulk_imports/create_service.rb
new file mode 100644
index 00000000000..c1becbb5609
--- /dev/null
+++ b/app/services/bulk_imports/create_service.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+# Entry point of the BulkImport feature.
+# This service receives a Gitlab Instance connection params
+# and a list of groups to be imported.
+#
+# Process topography:
+#
+# sync | async
+# |
+# User +--> P1 +----> Pn +---+
+# | ^ | Enqueue new job
+# | +-----+
+#
+# P1 (sync)
+#
+# - Create a BulkImport record
+# - Create a BulkImport::Entity for each group to be imported
+# - Enqueue a BulkImportWorker job (P2) to import the given groups (entities)
+#
+# Pn (async)
+#
+# - For each group to be imported (BulkImport::Entity.with_status(:created))
+# - Import the group data
+# - Create entities for each subgroup of the imported group
+# - Enqueue a BulkImports::CreateService job (Pn) to import the new entities (subgroups)
+#
+module BulkImports
+ class CreateService
+ attr_reader :current_user, :params, :credentials
+
+ def initialize(current_user, params, credentials)
+ @current_user = current_user
+ @params = params
+ @credentials = credentials
+ end
+
+ def execute
+ bulk_import = create_bulk_import
+
+ BulkImportWorker.perform_async(bulk_import.id)
+
+ ServiceResponse.success(payload: bulk_import)
+ rescue ActiveRecord::RecordInvalid => e
+ ServiceResponse.error(
+ message: e.message,
+ http_status: :unprocessable_entity
+ )
+ end
+
+ private
+
+ def create_bulk_import
+ BulkImport.transaction do
+ bulk_import = BulkImport.create!(
+ user: current_user,
+ source_type: 'gitlab',
+ source_version: client.instance_version
+ )
+ bulk_import.create_configuration!(credentials.slice(:url, :access_token))
+
+ params.each do |entity|
+ BulkImports::Entity.create!(
+ bulk_import: bulk_import,
+ source_type: entity[:source_type],
+ source_full_path: entity[:source_full_path],
+ destination_name: entity[:destination_name],
+ destination_namespace: entity[:destination_namespace]
+ )
+ end
+
+ bulk_import
+ end
+ end
+
+ def client
+ @client ||= BulkImports::Clients::HTTP.new(
+ url: @credentials[:url],
+ token: @credentials[:access_token]
+ )
+ end
+ end
+end
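
Example usage (sketch): invoking the renamed service with a single group entity; all values below are placeholders, and the ServiceResponse carries either the BulkImport record or the validation error.

entities = [{
  source_type: 'group_entity',
  source_full_path: 'source-group',
  destination_name: 'imported-group',
  destination_namespace: 'destination-namespace'
}]
credentials = { url: 'https://source.example.com', access_token: 'token' }

result = BulkImports::CreateService.new(current_user, entities, credentials).execute
result.success? ? result.payload : result.message
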
diff --git a/app/services/bulk_imports/file_export_service.rb b/app/services/bulk_imports/file_export_service.rb
new file mode 100644
index 00000000000..a7e0f998666
--- /dev/null
+++ b/app/services/bulk_imports/file_export_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class FileExportService
+ include Gitlab::ImportExport::CommandLineUtil
+
+ def initialize(portable, export_path, relation)
+ @portable = portable
+ @export_path = export_path
+ @relation = relation
+ end
+
+ def execute
+ export_service.execute
+
+ archive_exported_data
+ end
+
+ def exported_filename
+ "#{relation}.tar"
+ end
+
+ private
+
+ attr_reader :export_path, :portable, :relation
+
+ def export_service
+ case relation
+ when FileTransfer::ProjectConfig::UPLOADS_RELATION
+ UploadsExportService.new(portable, export_path)
+ else
+ raise BulkImports::Error, 'Unsupported relation export type'
+ end
+ end
+
+ def archive_exported_data
+ archive_file = File.join(export_path, exported_filename)
+
+ tar_cf(archive: archive_file, dir: export_path)
+ end
+ end
+end
diff --git a/app/services/bulk_imports/get_importable_data_service.rb b/app/services/bulk_imports/get_importable_data_service.rb
new file mode 100644
index 00000000000..07e0b3976a1
--- /dev/null
+++ b/app/services/bulk_imports/get_importable_data_service.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class GetImportableDataService
+ def initialize(params, query_params, credentials)
+ @params = params
+ @query_params = query_params
+ @credentials = credentials
+ end
+
+ def execute
+ {
+ version_validation: version_validation,
+ response: importables
+ }
+ end
+
+ private
+
+ def importables
+ client.get('groups', @query_params)
+ end
+
+ def version_validation
+ {
+ features: {
+ project_migration: {
+ available: client.compatible_for_project_migration?,
+ min_version: BulkImport.min_gl_version_for_project_migration.to_s
+ },
+ source_instance_version: client.instance_version.to_s
+ }
+ }
+ end
+
+ def client
+ @client ||= BulkImports::Clients::HTTP.new(
+ url: @credentials[:url],
+ token: @credentials[:access_token],
+ per_page: @params[:per_page],
+ page: @params[:page]
+ )
+ end
+ end
+end
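
Example usage (sketch): fetching importable groups together with version metadata; the pagination and query values are placeholders.

data = BulkImports::GetImportableDataService.new(
  { page: 1, per_page: 20 },
  { top_level_only: true },
  { url: 'https://source.example.com', access_token: 'token' }
).execute

data[:response]           # groups returned by the source instance
data[:version_validation] # project migration availability vs. the source version
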
diff --git a/app/services/bulk_imports/relation_export_service.rb b/app/services/bulk_imports/relation_export_service.rb
index 055f9cafd10..4718b3914b2 100644
--- a/app/services/bulk_imports/relation_export_service.rb
+++ b/app/services/bulk_imports/relation_export_service.rb
@@ -9,20 +9,23 @@ module BulkImports
@portable = portable
@relation = relation
@jid = jid
+ @config = FileTransfer.config_for(portable)
end
def execute
find_or_create_export! do |export|
remove_existing_export_file!(export)
- serialize_relation_to_file(export.relation_definition)
+ export_service.execute
compress_exported_relation
upload_compressed_file(export)
end
+ ensure
+ FileUtils.remove_entry(config.export_path)
end
private
- attr_reader :user, :portable, :relation, :jid
+ attr_reader :user, :portable, :relation, :jid, :config
def find_or_create_export!
validate_user_permissions!
@@ -55,52 +58,28 @@ module BulkImports
upload.save!
end
- def serialize_relation_to_file(relation_definition)
- serializer.serialize_relation(relation_definition)
- end
-
- def compress_exported_relation
- gzip(dir: export_path, filename: ndjson_filename)
+ def export_service
+ @export_service ||= if config.tree_relation?(relation)
+ TreeExportService.new(portable, config.export_path, relation)
+ elsif config.file_relation?(relation)
+ FileExportService.new(portable, config.export_path, relation)
+ else
+ raise BulkImports::Error, 'Unsupported export relation'
+ end
end
def upload_compressed_file(export)
- compressed_filename = File.join(export_path, "#{ndjson_filename}.gz")
+ compressed_file = File.join(config.export_path, "#{export_service.exported_filename}.gz")
+
upload = ExportUpload.find_or_initialize_by(export_id: export.id) # rubocop: disable CodeReuse/ActiveRecord
- File.open(compressed_filename) { |file| upload.export_file = file }
+ File.open(compressed_file) { |file| upload.export_file = file }
upload.save!
end
- def config
- @config ||= FileTransfer.config_for(portable)
- end
-
- def export_path
- @export_path ||= config.export_path
- end
-
- def portable_tree
- @portable_tree ||= config.portable_tree
- end
-
- # rubocop: disable CodeReuse/Serializer
- def serializer
- @serializer ||= ::Gitlab::ImportExport::Json::StreamingSerializer.new(
- portable,
- portable_tree,
- json_writer,
- exportable_path: ''
- )
- end
- # rubocop: enable CodeReuse/Serializer
-
- def json_writer
- @json_writer ||= ::Gitlab::ImportExport::Json::NdjsonWriter.new(export_path)
- end
-
- def ndjson_filename
- @ndjson_filename ||= "#{relation}.ndjson"
+ def compress_exported_relation
+ gzip(dir: config.export_path, filename: export_service.exported_filename)
end
end
end
diff --git a/app/services/bulk_imports/tree_export_service.rb b/app/services/bulk_imports/tree_export_service.rb
new file mode 100644
index 00000000000..b8e7ac4574b
--- /dev/null
+++ b/app/services/bulk_imports/tree_export_service.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class TreeExportService
+ def initialize(portable, export_path, relation)
+ @portable = portable
+ @export_path = export_path
+ @relation = relation
+ @config = FileTransfer.config_for(portable)
+ end
+
+ def execute
+ relation_definition = config.tree_relation_definition_for(relation)
+
+ raise BulkImports::Error, 'Unsupported relation export type' unless relation_definition
+
+ serializer.serialize_relation(relation_definition)
+ end
+
+ def exported_filename
+ "#{relation}.ndjson"
+ end
+
+ private
+
+ attr_reader :export_path, :portable, :relation, :config
+
+ # rubocop: disable CodeReuse/Serializer
+ def serializer
+ ::Gitlab::ImportExport::Json::StreamingSerializer.new(
+ portable,
+ config.portable_tree,
+ json_writer,
+ exportable_path: ''
+ )
+ end
+ # rubocop: enable CodeReuse/Serializer
+
+ def json_writer
+ ::Gitlab::ImportExport::Json::NdjsonWriter.new(export_path)
+ end
+ end
+end
diff --git a/app/services/bulk_imports/uploads_export_service.rb b/app/services/bulk_imports/uploads_export_service.rb
new file mode 100644
index 00000000000..32cc48c152c
--- /dev/null
+++ b/app/services/bulk_imports/uploads_export_service.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class UploadsExportService
+ include Gitlab::ImportExport::CommandLineUtil
+
+ BATCH_SIZE = 100
+
+ def initialize(portable, export_path)
+ @portable = portable
+ @export_path = export_path
+ end
+
+ def execute
+ portable.uploads.find_each(batch_size: BATCH_SIZE) do |upload| # rubocop: disable CodeReuse/ActiveRecord
+ uploader = upload.retrieve_uploader
+
+ next unless upload.exist?
+ next unless uploader.file
+
+ subdir_path = export_subdir_path(upload)
+ mkdir_p(subdir_path)
+ download_or_copy_upload(uploader, File.join(subdir_path, uploader.filename))
+ rescue Errno::ENAMETOOLONG => e
+ # Do not fail entire export process if downloaded file has filename that exceeds 255 characters.
+ # Ignore raised exception, skip such upload, log the error and keep going with the export instead.
+ Gitlab::ErrorTracking.log_exception(e, portable_id: portable.id, portable_class: portable.class.name, upload_id: upload.id)
+ end
+ end
+
+ private
+
+ attr_reader :portable, :export_path
+
+ def export_subdir_path(upload)
+ subdir = if upload.path == avatar_path
+ 'avatar'
+ else
+ upload.try(:secret).to_s
+ end
+
+ File.join(export_path, subdir)
+ end
+
+ def avatar_path
+ @avatar_path ||= portable.avatar&.upload&.path
+ end
+ end
+end
diff --git a/app/services/ci/archive_trace_service.rb b/app/services/ci/archive_trace_service.rb
index 995b58c6882..17cac38ace2 100644
--- a/app/services/ci/archive_trace_service.rb
+++ b/app/services/ci/archive_trace_service.rb
@@ -3,10 +3,15 @@
module Ci
class ArchiveTraceService
def execute(job, worker_name:)
+ unless job.trace.archival_attempts_available?
+ Sidekiq.logger.warn(class: worker_name, message: 'The job is out of archival attempts.', job_id: job.id)
+
+ job.trace.attempt_archive_cleanup!
+ return
+ end
+
unless job.trace.can_attempt_archival_now?
- Sidekiq.logger.warn(class: worker_name,
- message: job.trace.archival_attempts_message,
- job_id: job.id)
+ Sidekiq.logger.warn(class: worker_name, message: 'The job can not be archived right now.', job_id: job.id)
return
end
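
Example usage (sketch): how a caller might invoke the service, assuming `build` is a Ci::Build and that the service takes no constructor arguments; the worker name string is illustrative.

Ci::ArchiveTraceService.new.execute(build, worker_name: 'ArchiveTraceWorker')
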
diff --git a/app/services/ci/destroy_pipeline_service.rb b/app/services/ci/destroy_pipeline_service.rb
index dd5c8e0379f..476c7523d60 100644
--- a/app/services/ci/destroy_pipeline_service.rb
+++ b/app/services/ci/destroy_pipeline_service.rb
@@ -9,6 +9,9 @@ module Ci
pipeline.cancel_running if pipeline.cancelable?
+ # Ci::Pipeline#destroy triggers `use_fast_destroy :job_artifacts` and
+ # ci_builds has ON DELETE CASCADE to ci_pipelines. The pipeline, the builds,
+ # job and pipeline artifacts all get destroyed here.
pipeline.reset.destroy!
ServiceResponse.success(message: 'Pipeline not found')
diff --git a/app/services/ci/pipelines/add_job_service.rb b/app/services/ci/pipelines/add_job_service.rb
index 53536b6fdf9..703bb22fb5d 100644
--- a/app/services/ci/pipelines/add_job_service.rb
+++ b/app/services/ci/pipelines/add_job_service.rb
@@ -16,15 +16,7 @@ module Ci
def execute!(job, &block)
assign_pipeline_attributes(job)
- if Feature.enabled?(:ci_pipeline_add_job_with_lock, pipeline.project, default_enabled: :yaml)
- in_lock("ci:pipelines:#{pipeline.id}:add-job", ttl: LOCK_TIMEOUT, sleep_sec: LOCK_SLEEP, retries: LOCK_RETRIES) do
- Ci::Pipeline.transaction do
- yield(job)
-
- job.update_older_statuses_retried!
- end
- end
- else
+ in_lock("ci:pipelines:#{pipeline.id}:add-job", ttl: LOCK_TIMEOUT, sleep_sec: LOCK_SLEEP, retries: LOCK_RETRIES) do
Ci::Pipeline.transaction do
yield(job)
diff --git a/app/services/ci/pipelines/hook_service.rb b/app/services/ci/pipelines/hook_service.rb
new file mode 100644
index 00000000000..629ed7e1ebd
--- /dev/null
+++ b/app/services/ci/pipelines/hook_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Ci
+ module Pipelines
+ class HookService
+ include Gitlab::Utils::StrongMemoize
+
+ HOOK_NAME = :pipeline_hooks
+
+ def initialize(pipeline)
+ @pipeline = pipeline
+ end
+
+ def execute
+ project.execute_hooks(hook_data, HOOK_NAME) if project.has_active_hooks?(HOOK_NAME)
+ project.execute_integrations(hook_data, HOOK_NAME) if project.has_active_integrations?(HOOK_NAME)
+ end
+
+ private
+
+ attr_reader :pipeline
+
+ def project
+ @project ||= pipeline.project
+ end
+
+ def hook_data
+ strong_memoize(:hook_data) do
+ Gitlab::DataBuilder::Pipeline.build(pipeline)
+ end
+ end
+ end
+ end
+end
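
Example usage (sketch): firing pipeline webhooks and integrations for a persisted Ci::Pipeline; hooks and integrations only run when the project has active pipeline_hooks subscribers.

Ci::Pipelines::HookService.new(pipeline).execute
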
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index fb26d5d3356..664915c5e2f 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -11,8 +11,6 @@ module Ci
def execute
increment_processing_counter
- update_retried
-
Ci::PipelineProcessing::AtomicProcessingService
.new(pipeline)
.execute
@@ -24,41 +22,6 @@ module Ci
private
- # This method is for compatibility and data consistency and should be removed with 9.3 version of GitLab
- # This replicates what is db/post_migrate/20170416103934_upate_retried_for_ci_build.rb
- # and ensures that functionality will not be broken before migration is run
- # this updates only when there are data that needs to be updated, there are two groups with no retried flag
- # rubocop: disable CodeReuse/ActiveRecord
- def update_retried
- return if Feature.enabled?(:ci_remove_update_retried_from_process_pipeline, pipeline.project, default_enabled: :yaml)
-
- # find the latest builds for each name
- latest_statuses = pipeline.latest_statuses
- .group(:name)
- .having('count(*) > 1')
- .pluck(Arel.sql('MAX(id)'), 'name')
-
- # mark builds that are retried
- if latest_statuses.any?
- updated_count = pipeline.latest_statuses
- .where(name: latest_statuses.map(&:second))
- .where.not(id: latest_statuses.map(&:first))
- .update_all(retried: true)
-
- # This counter is temporary. It will be used to check whether if we still use this method or not
- # after setting correct value of `GenericCommitStatus#retried`.
- # More info: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/50465#note_491657115
- if updated_count > 0
- Gitlab::AppJsonLogger.info(event: 'update_retried_is_used',
- project_id: pipeline.project.id,
- pipeline_id: pipeline.id)
-
- metrics.legacy_update_jobs_counter.increment
- end
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
def increment_processing_counter
metrics.pipeline_processing_events_counter.increment
end
diff --git a/app/services/ci/queue/build_queue_service.rb b/app/services/ci/queue/build_queue_service.rb
index 3276c427923..3c886cb023f 100644
--- a/app/services/ci/queue/build_queue_service.rb
+++ b/app/services/ci/queue/build_queue_service.rb
@@ -90,7 +90,7 @@ module Ci
def runner_projects_relation
if ::Feature.enabled?(:ci_pending_builds_project_runners_decoupling, runner, default_enabled: :yaml)
- runner.runner_projects.select(:project_id)
+ runner.runner_projects.select('"ci_runner_projects"."project_id"::bigint')
else
runner.projects.without_deleted.with_builds_enabled
end
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index c46ddd22558..67ef4f10709 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -22,7 +22,8 @@ module Ci
end
def execute(params = {})
- db_all_caught_up = ::Gitlab::Database::LoadBalancing::Sticking.all_caught_up?(:runner, runner.id)
+ db_all_caught_up =
+ ::Ci::Runner.sticking.all_caught_up?(:runner, runner.id)
@metrics.increment_queue_operation(:queue_attempt)
@@ -103,42 +104,40 @@ module Ci
# rubocop: disable CodeReuse/ActiveRecord
def each_build(params, &blk)
- ::Gitlab::Database.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/339429') do
- queue = ::Ci::Queue::BuildQueueService.new(runner)
-
- builds = begin
- if runner.instance_type?
- queue.builds_for_shared_runner
- elsif runner.group_type?
- queue.builds_for_group_runner
- else
- queue.builds_for_project_runner
- end
- end
+ queue = ::Ci::Queue::BuildQueueService.new(runner)
- if runner.ref_protected?
- builds = queue.builds_for_protected_runner(builds)
+ builds = begin
+ if runner.instance_type?
+ queue.builds_for_shared_runner
+ elsif runner.group_type?
+ queue.builds_for_group_runner
+ else
+ queue.builds_for_project_runner
end
+ end
- # pick builds that does not have other tags than runner's one
- builds = queue.builds_matching_tag_ids(builds, runner.tags.ids)
+ if runner.ref_protected?
+ builds = queue.builds_for_protected_runner(builds)
+ end
- # pick builds that have at least one tag
- unless runner.run_untagged?
- builds = queue.builds_with_any_tags(builds)
- end
+ # pick builds that does not have other tags than runner's one
+ builds = queue.builds_matching_tag_ids(builds, runner.tags.ids)
- # pick builds that older than specified age
- if params.key?(:job_age)
- builds = queue.builds_queued_before(builds, params[:job_age].seconds.ago)
- end
+ # pick builds that have at least one tag
+ unless runner.run_untagged?
+ builds = queue.builds_with_any_tags(builds)
+ end
- build_ids = retrieve_queue(-> { queue.execute(builds) })
+ # pick builds that older than specified age
+ if params.key?(:job_age)
+ builds = queue.builds_queued_before(builds, params[:job_age].seconds.ago)
+ end
- @metrics.observe_queue_size(-> { build_ids.size }, @runner.runner_type)
+ build_ids = retrieve_queue(-> { queue.execute(builds) })
- build_ids.each { |build_id| yield Ci::Build.find(build_id) }
- end
+ @metrics.observe_queue_size(-> { build_ids.size }, @runner.runner_type)
+
+ build_ids.each { |build_id| yield Ci::Build.find(build_id) }
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
index 1d329fe7b53..dfd97498fc8 100644
--- a/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
+++ b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
@@ -9,7 +9,7 @@ module Ci
free_resources = resource_group.resources.free.count
- resource_group.processables.waiting_for_resource.take(free_resources).each do |processable|
+ resource_group.upcoming_processables.take(free_resources).each do |processable|
processable.enqueue_waiting_for_resource
end
end
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index 08520c9514c..07cfbb9ce3c 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -17,7 +17,7 @@ module Ci
def execute(build)
build.ensure_scheduling_type!
- reprocess!(build).tap do |new_build|
+ clone!(build).tap do |new_build|
check_assignable_runners!(new_build)
next if new_build.failed?
@@ -31,7 +31,12 @@ module Ci
end
# rubocop: disable CodeReuse/ActiveRecord
- def reprocess!(build)
+ def clone!(build)
+ # Cloning a build requires a strict type check to ensure
+ # the attributes being used for the clone are taken straight
+ # from the model and not overridden by other abstractions.
+ raise TypeError unless build.instance_of?(Ci::Build)
+
check_access!(build)
new_build = clone_build(build)
diff --git a/app/services/ci/retry_pipeline_service.rb b/app/services/ci/retry_pipeline_service.rb
index 02ee40d2cf6..9ad46ca7585 100644
--- a/app/services/ci/retry_pipeline_service.rb
+++ b/app/services/ci/retry_pipeline_service.rb
@@ -9,20 +9,15 @@ module Ci
raise Gitlab::Access::AccessDeniedError
end
- needs = Set.new
-
pipeline.ensure_scheduling_type!
builds_relation(pipeline).find_each do |build|
next unless can_be_retried?(build)
- Ci::RetryBuildService.new(project, current_user)
- .reprocess!(build)
-
- needs += build.needs.map(&:name)
+ Ci::RetryBuildService.new(project, current_user).clone!(build)
end
- pipeline.builds.latest.skipped.find_each do |skipped|
+ pipeline.processables.latest.skipped.find_each do |skipped|
retry_optimistic_lock(skipped, name: 'ci_retry_pipeline') { |build| build.process(current_user) }
end
diff --git a/app/services/ci/stuck_builds/drop_service.rb b/app/services/ci/stuck_builds/drop_pending_service.rb
index 3fee9a94381..4653e701973 100644
--- a/app/services/ci/stuck_builds/drop_service.rb
+++ b/app/services/ci/stuck_builds/drop_pending_service.rb
@@ -2,27 +2,21 @@
module Ci
module StuckBuilds
- class DropService
+ class DropPendingService
include DropHelpers
- BUILD_RUNNING_OUTDATED_TIMEOUT = 1.hour
BUILD_PENDING_OUTDATED_TIMEOUT = 1.day
- BUILD_SCHEDULED_OUTDATED_TIMEOUT = 1.hour
BUILD_PENDING_STUCK_TIMEOUT = 1.hour
BUILD_LOOKBACK = 5.days
def execute
- Gitlab::AppLogger.info "#{self.class}: Cleaning stuck builds"
-
- drop(running_timed_out_builds, failure_reason: :stuck_or_timeout_failure)
+ Gitlab::AppLogger.info "#{self.class}: Cleaning pending timed-out builds"
drop(
pending_builds(BUILD_PENDING_OUTDATED_TIMEOUT.ago),
failure_reason: :stuck_or_timeout_failure
)
- drop(scheduled_timed_out_builds, failure_reason: :stale_schedule)
-
drop_stuck(
pending_builds(BUILD_PENDING_STUCK_TIMEOUT.ago),
failure_reason: :stuck_or_timeout_failure
@@ -43,20 +37,6 @@ module Ci
end
end
# rubocop: enable CodeReuse/ActiveRecord
-
- def scheduled_timed_out_builds
- Ci::Build.where(status: :scheduled).where( # rubocop: disable CodeReuse/ActiveRecord
- 'ci_builds.scheduled_at IS NOT NULL AND ci_builds.scheduled_at < ?',
- BUILD_SCHEDULED_OUTDATED_TIMEOUT.ago
- )
- end
-
- def running_timed_out_builds
- Ci::Build.running.where( # rubocop: disable CodeReuse/ActiveRecord
- 'ci_builds.updated_at < ?',
- BUILD_RUNNING_OUTDATED_TIMEOUT.ago
- )
- end
end
end
end
diff --git a/app/services/ci/stuck_builds/drop_running_service.rb b/app/services/ci/stuck_builds/drop_running_service.rb
new file mode 100644
index 00000000000..a79224cc231
--- /dev/null
+++ b/app/services/ci/stuck_builds/drop_running_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Ci
+ module StuckBuilds
+ class DropRunningService
+ include DropHelpers
+
+ BUILD_RUNNING_OUTDATED_TIMEOUT = 1.hour
+
+ def execute
+ Gitlab::AppLogger.info "#{self.class}: Cleaning running, timed-out builds"
+
+ drop(running_timed_out_builds, failure_reason: :stuck_or_timeout_failure)
+ end
+
+ private
+
+ def running_timed_out_builds
+ if Feature.enabled?(:ci_new_query_for_running_stuck_jobs, default_enabled: :yaml)
+ Ci::Build
+ .running
+ .created_at_before(BUILD_RUNNING_OUTDATED_TIMEOUT.ago)
+ .updated_at_before(BUILD_RUNNING_OUTDATED_TIMEOUT.ago)
+ .order(created_at: :asc, project_id: :asc) # rubocop:disable CodeReuse/ActiveRecord
+ else
+ Ci::Build.running.updated_at_before(BUILD_RUNNING_OUTDATED_TIMEOUT.ago)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/ci/stuck_builds/drop_scheduled_service.rb b/app/services/ci/stuck_builds/drop_scheduled_service.rb
new file mode 100644
index 00000000000..d4f4252c2c0
--- /dev/null
+++ b/app/services/ci/stuck_builds/drop_scheduled_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Ci
+ module StuckBuilds
+ class DropScheduledService
+ include DropHelpers
+
+ BUILD_SCHEDULED_OUTDATED_TIMEOUT = 1.hour
+
+ def execute
+ Gitlab::AppLogger.info "#{self.class}: Cleaning scheduled, timed-out builds"
+
+ drop(scheduled_timed_out_builds, failure_reason: :stale_schedule)
+ end
+
+ private
+
+ def scheduled_timed_out_builds
+ Ci::Build.scheduled.scheduled_at_before(BUILD_SCHEDULED_OUTDATED_TIMEOUT.ago)
+ end
+ end
+ end
+end
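
Example wiring (sketch): the three narrower services can now be scheduled independently; the loop below is illustrative rather than the actual worker code, and it assumes each service is constructed without arguments.

[
  Ci::StuckBuilds::DropPendingService,
  Ci::StuckBuilds::DropRunningService,
  Ci::StuckBuilds::DropScheduledService
].each { |service_class| service_class.new.execute }
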
diff --git a/app/services/ci/update_build_state_service.rb b/app/services/ci/update_build_state_service.rb
index abd50d2f110..3b403f92486 100644
--- a/app/services/ci/update_build_state_service.rb
+++ b/app/services/ci/update_build_state_service.rb
@@ -73,9 +73,11 @@ module Ci
::Gitlab::Ci::Trace::Checksum.new(build).then do |checksum|
unless checksum.valid?
metrics.increment_trace_operation(operation: :invalid)
+ metrics.increment_error_counter(type: :chunks_invalid_checksum)
if checksum.corrupted?
metrics.increment_trace_operation(operation: :corrupted)
+ metrics.increment_error_counter(type: :chunks_invalid_size)
end
next unless log_invalid_chunks?
diff --git a/app/services/ci/update_pending_build_service.rb b/app/services/ci/update_pending_build_service.rb
index dcba06e60bf..d546dbcfe3d 100644
--- a/app/services/ci/update_pending_build_service.rb
+++ b/app/services/ci/update_pending_build_service.rb
@@ -2,7 +2,7 @@
module Ci
class UpdatePendingBuildService
- VALID_PARAMS = %i[instance_runners_enabled].freeze
+ VALID_PARAMS = %i[instance_runners_enabled namespace_id namespace_traversal_ids].freeze
InvalidParamsError = Class.new(StandardError)
InvalidModelError = Class.new(StandardError)
diff --git a/app/services/clusters/agent_tokens/create_service.rb b/app/services/clusters/agent_tokens/create_service.rb
new file mode 100644
index 00000000000..ae2617f510b
--- /dev/null
+++ b/app/services/clusters/agent_tokens/create_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Clusters
+ module AgentTokens
+ class CreateService < ::BaseContainerService
+ ALLOWED_PARAMS = %i[agent_id description name].freeze
+
+ def execute
+ return error_no_permissions unless current_user.can?(:create_cluster, container)
+
+ token = ::Clusters::AgentToken.new(filtered_params.merge(created_by_user: current_user))
+
+ if token.save
+ ServiceResponse.success(payload: { secret: token.token, token: token })
+ else
+ ServiceResponse.error(message: token.errors.full_messages)
+ end
+ end
+
+ private
+
+ def error_no_permissions
+ ServiceResponse.error(message: s_('ClusterAgent|User has insufficient permissions to create a token for this project'))
+ end
+
+ def filtered_params
+ params.slice(*ALLOWED_PARAMS)
+ end
+ end
+ end
+end
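
Example usage (sketch): creating a token for an existing agent, assuming `project`, `user` and `agent` are in scope and the container-service keyword arguments apply; the description is a placeholder.

response = Clusters::AgentTokens::CreateService.new(
  container: project,
  current_user: user,
  params: { agent_id: agent.id, description: 'Token for the staging cluster' }
).execute

response.payload[:secret] if response.success? # plaintext token, only returned here
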
diff --git a/app/services/clusters/agents/create_service.rb b/app/services/clusters/agents/create_service.rb
new file mode 100644
index 00000000000..568f168d63b
--- /dev/null
+++ b/app/services/clusters/agents/create_service.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Agents
+ class CreateService < BaseService
+ def execute(name:)
+ return error_no_permissions unless cluster_agent_permissions?
+
+ agent = ::Clusters::Agent.new(name: name, project: project, created_by_user: current_user)
+
+ if agent.save
+ success.merge(cluster_agent: agent)
+ else
+ error(agent.errors.full_messages)
+ end
+ end
+
+ private
+
+ def cluster_agent_permissions?
+ current_user.can?(:admin_pipeline, project) && current_user.can?(:create_cluster, project)
+ end
+
+ def error_no_permissions
+ error(s_('ClusterAgent|You have insufficient permissions to create a cluster agent for this project'))
+ end
+ end
+ end
+end
diff --git a/app/services/clusters/agents/delete_service.rb b/app/services/clusters/agents/delete_service.rb
new file mode 100644
index 00000000000..2132dffa606
--- /dev/null
+++ b/app/services/clusters/agents/delete_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Agents
+ class DeleteService < ::BaseContainerService
+ def execute(cluster_agent)
+ return error_no_permissions unless current_user.can?(:admin_cluster, cluster_agent)
+
+ if cluster_agent.destroy
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: cluster_agent.errors.full_messages)
+ end
+ end
+
+ private
+
+ def error_no_permissions
+ ServiceResponse.error(message: s_('ClusterAgent|You have insufficient permissions to delete this cluster agent'))
+ end
+ end
+ end
+end
diff --git a/app/services/clusters/agents/refresh_authorization_service.rb b/app/services/clusters/agents/refresh_authorization_service.rb
index a9e3340dbf5..7f401eef720 100644
--- a/app/services/clusters/agents/refresh_authorization_service.rb
+++ b/app/services/clusters/agents/refresh_authorization_service.rb
@@ -99,7 +99,7 @@ module Clusters
end
def group_root_ancestor?
- root_ancestor.group?
+ root_ancestor.group_namespace?
end
end
end
diff --git a/app/services/concerns/rate_limited_service.rb b/app/services/concerns/rate_limited_service.rb
new file mode 100644
index 00000000000..87cba7814fe
--- /dev/null
+++ b/app/services/concerns/rate_limited_service.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module RateLimitedService
+ extend ActiveSupport::Concern
+
+ RateLimitedNotSetupError = Class.new(StandardError)
+
+ class RateLimitedError < StandardError
+ def initialize(key:, rate_limiter:)
+ @key = key
+ @rate_limiter = rate_limiter
+ end
+
+ def headers
+ # TODO: This will be fleshed out in https://gitlab.com/gitlab-org/gitlab/-/issues/342370
+ {}
+ end
+
+ def log_request(request, current_user)
+ rate_limiter.class.log_request(request, "#{key}_request_limit".to_sym, current_user)
+ end
+
+ private
+
+ attr_reader :key, :rate_limiter
+ end
+
+ class RateLimiterScopedAndKeyed
+ attr_reader :key, :opts, :rate_limiter_klass
+
+ def initialize(key:, opts:, rate_limiter_klass:)
+ @key = key
+ @opts = opts
+ @rate_limiter_klass = rate_limiter_klass
+ end
+
+ def rate_limit!(service)
+ evaluated_scope = evaluated_scope_for(service)
+ return if feature_flag_disabled?(evaluated_scope[:project])
+
+ rate_limiter = new_rate_limiter(evaluated_scope)
+ if rate_limiter.throttled?
+ raise RateLimitedError.new(key: key, rate_limiter: rate_limiter), _('This endpoint has been requested too many times. Try again later.')
+ end
+ end
+
+ private
+
+ def users_allowlist
+ @users_allowlist ||= opts[:users_allowlist] ? opts[:users_allowlist].call : []
+ end
+
+ def evaluated_scope_for(service)
+ opts[:scope].each_with_object({}) do |var, all|
+ all[var] = service.public_send(var) # rubocop: disable GitlabSecurity/PublicSend
+ end
+ end
+
+ def feature_flag_disabled?(project)
+ Feature.disabled?("rate_limited_service_#{key}", project, default_enabled: :yaml)
+ end
+
+ def new_rate_limiter(evaluated_scope)
+ rate_limiter_klass.new(key, **opts.merge(scope: evaluated_scope.values, users_allowlist: users_allowlist))
+ end
+ end
+
+ prepended do
+ attr_accessor :rate_limiter_bypassed
+ cattr_accessor :rate_limiter_scoped_and_keyed
+
+ def self.rate_limit(key:, opts:, rate_limiter_klass: ::Gitlab::ApplicationRateLimiter)
+ self.rate_limiter_scoped_and_keyed = RateLimiterScopedAndKeyed.new(key: key,
+ opts: opts,
+ rate_limiter_klass: rate_limiter_klass)
+ end
+ end
+
+ def execute_without_rate_limiting(*args, **kwargs)
+ self.rate_limiter_bypassed = true
+ execute(*args, **kwargs)
+ ensure
+ self.rate_limiter_bypassed = false
+ end
+
+ def execute(*args, **kwargs)
+ raise RateLimitedNotSetupError if rate_limiter_scoped_and_keyed.nil?
+
+ rate_limiter_scoped_and_keyed.rate_limit!(self) unless rate_limiter_bypassed
+
+ super
+ end
+end
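
Example consumer (sketch): a hypothetical service class opting into rate limiting; the class name, key and scope are illustrative, and the concrete limits are defined by Gitlab::ApplicationRateLimiter.

class ExampleRateLimitedCreateService < BaseProjectService
  prepend RateLimitedService

  rate_limit key: :example_create, opts: { scope: [:project, :current_user] }

  def execute
    ServiceResponse.success
  end
end

service = ExampleRateLimitedCreateService.new(project: project, current_user: user)
service.execute                        # raises RateLimitedError when throttled
service.execute_without_rate_limiting  # explicit bypass, e.g. from background workers
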
diff --git a/app/services/container_expiration_policies/cleanup_service.rb b/app/services/container_expiration_policies/cleanup_service.rb
index cd988cdc5fe..0da5e552c48 100644
--- a/app/services/container_expiration_policies/cleanup_service.rb
+++ b/app/services/container_expiration_policies/cleanup_service.rb
@@ -4,7 +4,7 @@ module ContainerExpirationPolicies
class CleanupService
attr_reader :repository
- SERVICE_RESULT_FIELDS = %i[original_size before_truncate_size after_truncate_size before_delete_size deleted_size].freeze
+ SERVICE_RESULT_FIELDS = %i[original_size before_truncate_size after_truncate_size before_delete_size deleted_size cached_tags_count].freeze
def initialize(repository)
@repository = repository
@@ -24,8 +24,8 @@ module ContainerExpirationPolicies
begin
service_result = Projects::ContainerRepository::CleanupTagsService
- .new(project, nil, policy_params.merge('container_expiration_policy' => true))
- .execute(repository)
+ .new(repository, nil, policy_params.merge('container_expiration_policy' => true))
+ .execute
rescue StandardError
repository.cleanup_unfinished!
diff --git a/app/services/customer_relations/contacts/base_service.rb b/app/services/customer_relations/contacts/base_service.rb
new file mode 100644
index 00000000000..89f6f2c3f1f
--- /dev/null
+++ b/app/services/customer_relations/contacts/base_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module CustomerRelations
+ module Contacts
+ class BaseService < ::BaseGroupService
+ private
+
+ def allowed?
+ current_user&.can?(:admin_contact, group)
+ end
+
+ def error(message)
+ ServiceResponse.error(message: Array(message))
+ end
+ end
+ end
+end
diff --git a/app/services/customer_relations/contacts/create_service.rb b/app/services/customer_relations/contacts/create_service.rb
new file mode 100644
index 00000000000..7ff8b731e0d
--- /dev/null
+++ b/app/services/customer_relations/contacts/create_service.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module CustomerRelations
+ module Contacts
+ class CreateService < BaseService
+ def execute
+ return error_no_permissions unless allowed?
+ return error_organization_invalid unless organization_valid?
+
+ contact = Contact.create(params.merge(group_id: group.id))
+
+ return error_creating(contact) unless contact.persisted?
+
+ ServiceResponse.success(payload: contact)
+ end
+
+ private
+
+ def organization_valid?
+ return true unless params[:organization_id]
+
+ organization = Organization.find(params[:organization_id])
+ organization.group_id == group.id
+ rescue ActiveRecord::RecordNotFound
+ false
+ end
+
+ def error_organization_invalid
+ error('The specified organization was not found or does not belong to this group')
+ end
+
+ def error_no_permissions
+ error('You have insufficient permissions to create a contact for this group')
+ end
+
+ def error_creating(contact)
+ error(contact&.errors&.full_messages || 'Failed to create contact')
+ end
+ end
+ end
+end
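
Example usage (sketch): creating a contact for a group, assuming `group` and `user` are in scope and the group-service keyword arguments apply; the attribute values are placeholders.

response = CustomerRelations::Contacts::CreateService.new(
  group: group,
  current_user: user,
  params: { first_name: 'Ada', last_name: 'Lovelace', email: 'ada@example.com' }
).execute

response.success? ? response.payload : response.message
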
diff --git a/app/services/customer_relations/contacts/update_service.rb b/app/services/customer_relations/contacts/update_service.rb
new file mode 100644
index 00000000000..473a80be262
--- /dev/null
+++ b/app/services/customer_relations/contacts/update_service.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module CustomerRelations
+ module Contacts
+ class UpdateService < BaseService
+ def execute(contact)
+ return error_no_permissions unless allowed?
+ return error_updating(contact) unless contact.update(params)
+
+ ServiceResponse.success(payload: contact)
+ end
+
+ private
+
+ def error_no_permissions
+ error('You have insufficient permissions to update a contact for this group')
+ end
+
+ def error_updating(contact)
+ error(contact&.errors&.full_messages || 'Failed to update contact')
+ end
+ end
+ end
+end
diff --git a/app/services/customer_relations/organizations/base_service.rb b/app/services/customer_relations/organizations/base_service.rb
index 63261534b37..8f8480d697c 100644
--- a/app/services/customer_relations/organizations/base_service.rb
+++ b/app/services/customer_relations/organizations/base_service.rb
@@ -10,7 +10,7 @@ module CustomerRelations
end
def error(message)
- ServiceResponse.error(message: message)
+ ServiceResponse.error(message: Array(message))
end
end
end
diff --git a/app/services/customer_relations/organizations/create_service.rb b/app/services/customer_relations/organizations/create_service.rb
index 9c223796eaf..aad1b7e2ca4 100644
--- a/app/services/customer_relations/organizations/create_service.rb
+++ b/app/services/customer_relations/organizations/create_service.rb
@@ -7,9 +7,7 @@ module CustomerRelations
def execute
return error_no_permissions unless allowed?
- params[:group_id] = group.id
-
- organization = Organization.create(params)
+ organization = Organization.create(params.merge(group_id: group.id))
return error_creating(organization) unless organization.persisted?
diff --git a/app/services/dependency_proxy/auth_token_service.rb b/app/services/dependency_proxy/auth_token_service.rb
index 16279ed12b0..c6c9eb534bb 100644
--- a/app/services/dependency_proxy/auth_token_service.rb
+++ b/app/services/dependency_proxy/auth_token_service.rb
@@ -12,10 +12,16 @@ module DependencyProxy
JSONWebToken::HMACToken.decode(token, ::Auth::DependencyProxyAuthenticationService.secret).first
end
- class << self
- def decoded_token_payload(token)
- self.new(token).execute
+ def self.user_or_deploy_token_from_jwt(raw_jwt)
+ token_payload = self.new(raw_jwt).execute
+
+ if token_payload['user_id']
+ User.find(token_payload['user_id'])
+ elsif token_payload['deploy_token']
+ DeployToken.active.find_by_token(token_payload['deploy_token'])
end
+ rescue JWT::DecodeError, JWT::ExpiredSignature, JWT::ImmatureSignature
+ nil
end
end
end
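
Example usage (sketch): resolving the requester from the raw JWT carried by a dependency proxy request; returns a User, a DeployToken, or nil when the token is invalid or expired.

requester = DependencyProxy::AuthTokenService.user_or_deploy_token_from_jwt(raw_jwt)
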
diff --git a/app/services/dependency_proxy/find_or_create_blob_service.rb b/app/services/dependency_proxy/find_or_create_blob_service.rb
index f3dbf31dcdb..0a6db6e3d34 100644
--- a/app/services/dependency_proxy/find_or_create_blob_service.rb
+++ b/app/services/dependency_proxy/find_or_create_blob_service.rb
@@ -12,7 +12,7 @@ module DependencyProxy
def execute
from_cache = true
file_name = @blob_sha.sub('sha256:', '') + '.gz'
- blob = @group.dependency_proxy_blobs.find_or_build(file_name)
+ blob = @group.dependency_proxy_blobs.active.find_or_build(file_name)
unless blob.persisted?
from_cache = false
@@ -30,6 +30,8 @@ module DependencyProxy
blob.save!
end
+ # Technical debt: change to read_at https://gitlab.com/gitlab-org/gitlab/-/issues/341536
+ blob.touch if from_cache
success(blob: blob, from_cache: from_cache)
end
diff --git a/app/services/dependency_proxy/find_or_create_manifest_service.rb b/app/services/dependency_proxy/find_or_create_manifest_service.rb
index 0eb990ab7f8..1976d4d47f4 100644
--- a/app/services/dependency_proxy/find_or_create_manifest_service.rb
+++ b/app/services/dependency_proxy/find_or_create_manifest_service.rb
@@ -13,11 +13,16 @@ module DependencyProxy
def execute
@manifest = @group.dependency_proxy_manifests
+ .active
.find_or_initialize_by_file_name_or_digest(file_name: @file_name, digest: @tag)
head_result = DependencyProxy::HeadManifestService.new(@image, @tag, @token).execute
- return success(manifest: @manifest, from_cache: true) if cached_manifest_matches?(head_result)
+ if cached_manifest_matches?(head_result)
+ @manifest.touch
+
+ return success(manifest: @manifest, from_cache: true)
+ end
pull_new_manifest
respond(from_cache: false)
@@ -46,6 +51,9 @@ module DependencyProxy
def respond(from_cache: true)
if @manifest.persisted?
+ # Technical debt: change to read_at https://gitlab.com/gitlab-org/gitlab/-/issues/341536
+ @manifest.touch if from_cache
+
success(manifest: @manifest, from_cache: from_cache)
else
error('Failed to download the manifest from the external registry', 503)
diff --git a/app/services/dependency_proxy/group_settings/update_service.rb b/app/services/dependency_proxy/group_settings/update_service.rb
new file mode 100644
index 00000000000..ba43452def3
--- /dev/null
+++ b/app/services/dependency_proxy/group_settings/update_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module DependencyProxy
+ module GroupSettings
+ class UpdateService < BaseContainerService
+ ALLOWED_ATTRIBUTES = %i[enabled].freeze
+
+ def execute
+ return ServiceResponse.error(message: 'Access Denied', http_status: 403) unless allowed?
+ return ServiceResponse.error(message: 'Dependency proxy setting not found', http_status: 404) unless dependency_proxy_setting
+
+ if dependency_proxy_setting.update(dependency_proxy_setting_params)
+ ServiceResponse.success(payload: { dependency_proxy_setting: dependency_proxy_setting })
+ else
+ ServiceResponse.error(
+ message: dependency_proxy_setting.errors.full_messages.to_sentence || 'Bad request',
+ http_status: 400
+ )
+ end
+ end
+
+ private
+
+ def dependency_proxy_setting
+ container.dependency_proxy_setting
+ end
+
+ def allowed?
+ Ability.allowed?(current_user, :admin_dependency_proxy, container)
+ end
+
+ def dependency_proxy_setting_params
+ params.slice(*ALLOWED_ATTRIBUTES)
+ end
+ end
+ end
+end
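
Example usage (sketch): toggling the dependency proxy for a group, assuming `group` and `user` are in scope.

DependencyProxy::GroupSettings::UpdateService
  .new(container: group, current_user: user, params: { enabled: false })
  .execute
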
diff --git a/app/services/deployments/older_deployments_drop_service.rb b/app/services/deployments/older_deployments_drop_service.rb
index 100d1267848..504b55b99ac 100644
--- a/app/services/deployments/older_deployments_drop_service.rb
+++ b/app/services/deployments/older_deployments_drop_service.rb
@@ -11,23 +11,23 @@ module Deployments
def execute
return unless @deployment&.running?
- older_deployments.find_each do |older_deployment|
- Gitlab::OptimisticLocking.retry_lock(older_deployment.deployable, name: 'older_deployments_drop') do |deployable|
- deployable.drop(:forward_deployment_failure)
+ older_deployments_builds.each do |build|
+ Gitlab::OptimisticLocking.retry_lock(build, name: 'older_deployments_drop') do |build|
+ build.drop(:forward_deployment_failure)
end
rescue StandardError => e
- Gitlab::ErrorTracking.track_exception(e, subject_id: @deployment.id, deployment_id: older_deployment.id)
+ Gitlab::ErrorTracking.track_exception(e, subject_id: @deployment.id, build_id: build.id)
end
end
private
- def older_deployments
+ def older_deployments_builds
@deployment
.environment
.active_deployments
.older_than(@deployment)
- .with_deployable
+ .builds
end
end
end
diff --git a/app/services/error_tracking/list_issues_service.rb b/app/services/error_tracking/list_issues_service.rb
index 86c7791e759..1979816b88d 100644
--- a/app/services/error_tracking/list_issues_service.rb
+++ b/app/services/error_tracking/list_issues_service.rb
@@ -76,16 +76,21 @@ module ErrorTracking
filter_opts = {
status: opts[:issue_status],
sort: opts[:sort],
- limit: opts[:limit]
+ limit: opts[:limit],
+ cursor: opts[:cursor]
}
errors = ErrorTracking::ErrorsFinder.new(current_user, project, filter_opts).execute
+ pagination = {}
+ pagination[:next] = { cursor: errors.cursor_for_next_page } if errors.has_next_page?
+ pagination[:previous] = { cursor: errors.cursor_for_previous_page } if errors.has_previous_page?
+
# We use the same response format as project_error_tracking_setting
# method below for compatibility with existing code.
{
issues: errors.map(&:to_sentry_error),
- pagination: {}
+ pagination: pagination
}
else
project_error_tracking_setting.list_sentry_issues(**opts)
diff --git a/app/services/feature_flags/base_service.rb b/app/services/feature_flags/base_service.rb
index 9ae9ab4de63..ca0b6b89199 100644
--- a/app/services/feature_flags/base_service.rb
+++ b/app/services/feature_flags/base_service.rb
@@ -7,6 +7,8 @@ module FeatureFlags
AUDITABLE_ATTRIBUTES = %w(name description active).freeze
def success(**args)
+ audit_event = args.fetch(:audit_event) { audit_event(args[:feature_flag]) }
+ save_audit_event(audit_event)
sync_to_jira(args[:feature_flag])
super
end
@@ -66,5 +68,11 @@ module FeatureFlags
feature_flag_by_name.scopes.find_by_environment_scope(params[:environment_scope])
end
end
+
+ private
+
+ def audit_message(feature_flag)
+ raise NotImplementedError, "This method should be overridden by subclasses"
+ end
end
end
diff --git a/app/services/feature_flags/create_service.rb b/app/services/feature_flags/create_service.rb
index 65f8f8e33f6..ebbe71f39c7 100644
--- a/app/services/feature_flags/create_service.rb
+++ b/app/services/feature_flags/create_service.rb
@@ -10,8 +10,6 @@ module FeatureFlags
feature_flag = project.operations_feature_flags.new(params)
if feature_flag.save
- save_audit_event(audit_event(feature_flag))
-
success(feature_flag: feature_flag)
else
error(feature_flag.errors.full_messages, 400)
diff --git a/app/services/feature_flags/destroy_service.rb b/app/services/feature_flags/destroy_service.rb
index 986fe004db6..817a80940c0 100644
--- a/app/services/feature_flags/destroy_service.rb
+++ b/app/services/feature_flags/destroy_service.rb
@@ -13,8 +13,6 @@ module FeatureFlags
ApplicationRecord.transaction do
if feature_flag.destroy
- save_audit_event(audit_event(feature_flag))
-
success(feature_flag: feature_flag)
else
error(feature_flag.errors.full_messages)
diff --git a/app/services/feature_flags/hook_service.rb b/app/services/feature_flags/hook_service.rb
new file mode 100644
index 00000000000..6f77a70bd09
--- /dev/null
+++ b/app/services/feature_flags/hook_service.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module FeatureFlags
+ class HookService
+ HOOK_NAME = :feature_flag_hooks
+
+ def initialize(feature_flag, current_user)
+ @feature_flag = feature_flag
+ @current_user = current_user
+ end
+
+ def execute
+ project.execute_hooks(hook_data, HOOK_NAME)
+ end
+
+ private
+
+ attr_reader :feature_flag, :current_user
+
+ def project
+ @project ||= feature_flag.project
+ end
+
+ def hook_data
+ Gitlab::DataBuilder::FeatureFlag.build(feature_flag, current_user)
+ end
+ end
+end
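
Example usage (sketch): the update service schedules this after commit; called directly it fires the project's feature_flag_hooks with the built payload.

FeatureFlags::HookService.new(feature_flag, current_user).execute
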
diff --git a/app/services/feature_flags/update_service.rb b/app/services/feature_flags/update_service.rb
index ccfd1b57d44..bcfd2c15189 100644
--- a/app/services/feature_flags/update_service.rb
+++ b/app/services/feature_flags/update_service.rb
@@ -7,6 +7,11 @@ module FeatureFlags
'parameters' => 'parameters'
}.freeze
+ def success(**args)
+ execute_hooks_after_commit(args[:feature_flag])
+ super
+ end
+
def execute(feature_flag)
return error('Access Denied', 403) unless can_update?(feature_flag)
return error('Not Found', 404) unless valid_user_list_ids?(feature_flag, user_list_ids(params))
@@ -20,16 +25,11 @@ module FeatureFlags
end
end
+ # We generate the audit event before the feature flag is saved as #changed_strategies_messages depends on the strategies' states before save
audit_event = audit_event(feature_flag)
- if feature_flag.active_changed?
- feature_flag.execute_hooks(current_user)
- end
-
if feature_flag.save
- save_audit_event(audit_event)
-
- success(feature_flag: feature_flag)
+ success(feature_flag: feature_flag, audit_event: audit_event)
else
error(feature_flag.errors.full_messages, :bad_request)
end
@@ -38,6 +38,16 @@ module FeatureFlags
private
+ def execute_hooks_after_commit(feature_flag)
+ return unless feature_flag.active_previously_changed?
+
+ # The `current_user` method (defined in `BaseService`) is not available within the `run_after_commit` block
+ user = current_user
+ feature_flag.run_after_commit do
+ HookService.new(feature_flag, user).execute
+ end
+ end
+
def audit_message(feature_flag)
changes = changed_attributes_messages(feature_flag)
changes += changed_strategies_messages(feature_flag)
diff --git a/app/services/groups/transfer_service.rb b/app/services/groups/transfer_service.rb
index b7eae06b963..774f81b0a5e 100644
--- a/app/services/groups/transfer_service.rb
+++ b/app/services/groups/transfer_service.rb
@@ -29,6 +29,7 @@ module Groups
update_group_attributes
ensure_ownership
update_integrations
+ update_pending_builds!
end
post_update_hooks(@updated_project_ids)
@@ -139,6 +140,10 @@ module Groups
# these records again.
@updated_project_ids = projects_to_update.pluck(:id)
+ Namespaces::ProjectNamespace
+ .where(id: projects_to_update.select(:project_namespace_id))
+ .update_all(visibility_level: @new_parent_group.visibility_level)
+
projects_to_update
.update_all(visibility_level: @new_parent_group.visibility_level)
end
@@ -217,6 +222,15 @@ module Groups
PropagateIntegrationWorker.perform_async(integration.id)
end
end
+
+ def update_pending_builds!
+ update_params = {
+ namespace_traversal_ids: group.traversal_ids,
+ namespace_id: group.id
+ }
+
+ ::Ci::UpdatePendingBuildService.new(group, update_params).execute
+ end
end
end
diff --git a/app/services/import/validate_remote_git_endpoint_service.rb b/app/services/import/validate_remote_git_endpoint_service.rb
new file mode 100644
index 00000000000..afccb5373a9
--- /dev/null
+++ b/app/services/import/validate_remote_git_endpoint_service.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+module Import
+ class ValidateRemoteGitEndpointService
+ # Validates if the remote endpoint is a valid GIT repository
+ # Only smart protocol is supported
+ # Validation rules are taken from https://git-scm.com/docs/http-protocol#_smart_clients
+
+ GIT_SERVICE_NAME = "git-upload-pack"
+ GIT_EXPECTED_FIRST_PACKET_LINE = "# service=#{GIT_SERVICE_NAME}"
+ GIT_BODY_MESSAGE_REGEXP = /^[0-9a-f]{4}#{GIT_EXPECTED_FIRST_PACKET_LINE}/.freeze
+ # https://github.com/git/git/blob/master/Documentation/technical/protocol-common.txt#L56-L59
+ GIT_PROTOCOL_PKT_LEN = 4
+ GIT_MINIMUM_RESPONSE_LENGTH = GIT_PROTOCOL_PKT_LEN + GIT_EXPECTED_FIRST_PACKET_LINE.length
+ EXPECTED_CONTENT_TYPE = "application/x-#{GIT_SERVICE_NAME}-advertisement"
+
+ def initialize(params)
+ @params = params
+ end
+
+ def execute
+ uri = Gitlab::Utils.parse_url(@params[:url])
+
+ return ServiceResponse.error(message: "#{@params[:url]} is not a valid URL") unless uri
+
+ uri.fragment = nil
+ url = Gitlab::Utils.append_path(uri.to_s, "/info/refs?service=#{GIT_SERVICE_NAME}")
+
+ response_body = ''
+ result = nil
+ Gitlab::HTTP.try_get(url, stream_body: true, follow_redirects: false, basic_auth: auth) do |fragment|
+ response_body += fragment
+ next if response_body.length < GIT_MINIMUM_RESPONSE_LENGTH
+
+ result = if status_code_is_valid(fragment) && content_type_is_valid(fragment) && response_body_is_valid(response_body)
+ :success
+ else
+ :error
+ end
+
+ # We are interested only in the first chunks of the response,
+ # so we use stream_body: true and break once we have received enough of the body
+ break
+ end
+
+ if result == :success
+ ServiceResponse.success
+ else
+ ServiceResponse.error(message: "#{uri} is not a valid HTTP Git repository")
+ end
+ end
+
+ private
+
+ def auth
+ unless @params[:user].to_s.blank?
+ {
+ username: @params[:user],
+ password: @params[:password]
+ }
+ end
+ end
+
+ def status_code_is_valid(fragment)
+ fragment.http_response.code == '200'
+ end
+
+ def content_type_is_valid(fragment)
+ fragment.http_response['content-type'] == EXPECTED_CONTENT_TYPE
+ end
+
+ def response_body_is_valid(response_body)
+ response_body.match?(GIT_BODY_MESSAGE_REGEXP)
+ end
+ end
+end
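
For reference, the first packet of a smart-HTTP ref advertisement is a pkt-line: a 4-hex-digit length prefix (which counts itself) followed by `# service=git-upload-pack`. A minimal standalone check in the same spirit as the regexp above; the constant names here are local to this sketch.

GIT_SERVICE_NAME = "git-upload-pack"
FIRST_PACKET     = "# service=#{GIT_SERVICE_NAME}"
BODY_REGEXP      = /\A[0-9a-f]{4}#{Regexp.escape(FIRST_PACKET)}/.freeze

def valid_advertisement?(body)
  body.match?(BODY_REGEXP)
end

# The pkt-len prefix counts its own 4 bytes plus the payload (including the newline).
payload = "#{FIRST_PACKET}\n"
sample  = format("%04x", payload.bytesize + 4) + payload

puts valid_advertisement?(sample)              # => true ("001e# service=git-upload-pack")
puts valid_advertisement?("<html>nope</html>") # => false
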
diff --git a/app/services/issuable/import_csv/base_service.rb b/app/services/issuable/import_csv/base_service.rb
index 4a6b7540ded..4a2078a4e60 100644
--- a/app/services/issuable/import_csv/base_service.rb
+++ b/app/services/issuable/import_csv/base_service.rb
@@ -71,7 +71,14 @@ module Issuable
# NOTE: CSV imports are performed by workers, so we do not have a request context in order
# to create a SpamParams object to pass to the issuable create service.
spam_params = nil
- create_issuable_class.new(project: @project, current_user: @user, params: attributes, spam_params: spam_params).execute
+ create_service = create_issuable_class.new(project: @project, current_user: @user, params: attributes, spam_params: spam_params)
+
+ # For now, if create_issuable_class prepends RateLimitedService, bypass rate limiting
+ if create_issuable_class < RateLimitedService
+ create_service.execute_without_rate_limiting
+ else
+ create_service.execute
+ end
end
def email_results_to_user
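
A standalone illustration of the `create_issuable_class < RateLimitedService` ancestry check above: `Class#<` returns true when the module has been included or prepended into the class, and nil when the two are unrelated. Module and class names here are made up.

module RateLimiting; end

class PlainService; end

class LimitedService
  prepend RateLimiting
end

p LimitedService < RateLimiting # => true (module is in the ancestor chain)
p PlainService < RateLimiting   # => nil  (no relationship at all)
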
diff --git a/app/services/issues/clone_service.rb b/app/services/issues/clone_service.rb
index cb42334fe32..c675f957cd7 100644
--- a/app/services/issues/clone_service.rb
+++ b/app/services/issues/clone_service.rb
@@ -8,13 +8,7 @@ module Issues
@target_project = target_project
@with_notes = with_notes
- unless issue.can_clone?(current_user, target_project)
- raise CloneError, s_('CloneIssue|Cannot clone issue due to insufficient permissions!')
- end
-
- if target_project.pending_delete?
- raise CloneError, s_('CloneIssue|Cannot clone issue to target project as it is pending deletion.')
- end
+ verify_can_clone_issue!(issue, target_project)
super(issue, target_project)
@@ -30,6 +24,20 @@ module Issues
attr_reader :target_project
attr_reader :with_notes
+ def verify_can_clone_issue!(issue, target_project)
+ unless issue.supports_move_and_clone?
+ raise CloneError, s_('CloneIssue|Cannot clone issues of \'%{issue_type}\' type.') % { issue_type: issue.issue_type }
+ end
+
+ unless issue.can_clone?(current_user, target_project)
+ raise CloneError, s_('CloneIssue|Cannot clone issue due to insufficient permissions!')
+ end
+
+ if target_project.pending_delete?
+ raise CloneError, s_('CloneIssue|Cannot clone issue to target project as it is pending deletion.')
+ end
+ end
+
def update_new_entity
# we don't call `super` because we want to be able to decide whether or not to copy all comments over.
update_new_entity_description
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index ea64239dd99..ac846c769a3 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -3,8 +3,8 @@
module Issues
class CloseService < Issues::BaseService
# Closes the supplied issue if the current user is able to do so.
- def execute(issue, commit: nil, notifications: true, system_note: true)
- return issue unless can?(current_user, :update_issue, issue) || issue.is_a?(ExternalIssue)
+ def execute(issue, commit: nil, notifications: true, system_note: true, skip_authorization: false)
+ return issue unless can_close?(issue, skip_authorization: skip_authorization)
close_issue(issue,
closed_via: commit,
@@ -24,7 +24,7 @@ module Issues
return issue
end
- if project.issues_enabled? && issue.close(current_user)
+ if perform_close(issue)
event_service.close_issue(issue, current_user)
create_note(issue, closed_via) if system_note
@@ -51,6 +51,15 @@ module Issues
private
+ # Overridden on EE
+ def perform_close(issue)
+ issue.close(current_user)
+ end
+
+ def can_close?(issue, skip_authorization: false)
+ skip_authorization || can?(current_user, :update_issue, issue) || issue.is_a?(ExternalIssue)
+ end
+
def perform_incident_management_actions(issue)
resolve_alert(issue)
end
@@ -82,11 +91,11 @@ module Issues
end
end
- def store_first_mentioned_in_commit_at(issue, merge_request)
+ def store_first_mentioned_in_commit_at(issue, merge_request, max_commit_lookup: 100)
metrics = issue.metrics
return if metrics.nil? || metrics.first_mentioned_in_commit_at
- first_commit_timestamp = merge_request.commits(limit: 1).first.try(:authored_date)
+ first_commit_timestamp = merge_request.commits(limit: max_commit_lookup).last.try(:authored_date)
return unless first_commit_timestamp
metrics.update!(first_mentioned_in_commit_at: first_commit_timestamp)
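
For context on the `.last` change above: assuming the newest-first ordering the surrounding code implies, widening the lookup window and taking the last entry yields the earliest commit within that window rather than the most recent one. A made-up illustration:

require 'time'

# Commits listed newest-first, as a widened lookup window would return them.
commits_newest_first = [
  { sha: 'c3', authored_date: Time.parse('2021-09-03') },
  { sha: 'c2', authored_date: Time.parse('2021-09-02') },
  { sha: 'c1', authored_date: Time.parse('2021-09-01') }
]

max_commit_lookup = 100
window = commits_newest_first.first(max_commit_lookup)

puts window.first[:authored_date] # newest commit (what limit: 1 used to return)
puts window.last[:authored_date]  # earliest commit within the window (what .last now picks)
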
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index b15b3e49c9a..fcedd1c1c8d 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -3,6 +3,10 @@
module Issues
class CreateService < Issues::BaseService
include ResolveDiscussions
+ prepend RateLimitedService
+
+ rate_limit key: :issues_create,
+ opts: { scope: [:project, :current_user], users_allowlist: -> { [User.support_bot.username] } }
# NOTE: For Issues::CreateService, we require the spam_params and do not default it to nil, because
# spam_checking is likely to be necessary. However, if there is not a request available in scope
diff --git a/app/services/issues/move_service.rb b/app/services/issues/move_service.rb
index ff78221c941..4418b4eb2bf 100644
--- a/app/services/issues/move_service.rb
+++ b/app/services/issues/move_service.rb
@@ -7,13 +7,7 @@ module Issues
def execute(issue, target_project)
@target_project = target_project
- unless issue.can_move?(current_user, @target_project)
- raise MoveError, s_('MoveIssue|Cannot move issue due to insufficient permissions!')
- end
-
- if @project == @target_project
- raise MoveError, s_('MoveIssue|Cannot move issue to project it originates from!')
- end
+ verify_can_move_issue!(issue, target_project)
super
@@ -32,6 +26,20 @@ module Issues
attr_reader :target_project
+ def verify_can_move_issue!(issue, target_project)
+ unless issue.supports_move_and_clone?
+ raise MoveError, s_('MoveIssue|Cannot move issues of \'%{issue_type}\' type.') % { issue_type: issue.issue_type }
+ end
+
+ unless issue.can_move?(current_user, @target_project)
+ raise MoveError, s_('MoveIssue|Cannot move issue due to insufficient permissions!')
+ end
+
+ if @project == @target_project
+ raise MoveError, s_('MoveIssue|Cannot move issue to project it originates from!')
+ end
+ end
+
def update_service_desk_sent_notifications
return unless original_entity.from_service_desk?
diff --git a/app/services/issues/relative_position_rebalancing_service.rb b/app/services/issues/relative_position_rebalancing_service.rb
index 7d199f99a24..23bb409f3cd 100644
--- a/app/services/issues/relative_position_rebalancing_service.rb
+++ b/app/services/issues/relative_position_rebalancing_service.rb
@@ -82,7 +82,7 @@ module Issues
collection.each do |project|
caching.cache_current_project_id(project.id)
index += 1
- scope = Issue.in_projects(project).reorder(custom_reorder).select(:id, :relative_position)
+ scope = Issue.in_projects(project).order_by_relative_position.with_non_null_relative_position.select(:id, :relative_position)
with_retry(PREFETCH_ISSUES_BATCH_SIZE, 100) do |batch_size|
Gitlab::Pagination::Keyset::Iterator.new(scope: scope).each_batch(of: batch_size) do |batch|
@@ -166,10 +166,6 @@ module Issues
@start_position ||= (RelativePositioning::START_POSITION - (gaps / 2) * gap_size).to_i
end
- def custom_reorder
- ::Gitlab::Pagination::Keyset::Order.build([Issue.column_order_relative_position, Issue.column_order_id_asc])
- end
-
def with_retry(initial_batch_size, exit_batch_size)
retries = 0
batch_size = initial_batch_size
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index 977b924ed72..4abd1dfbf4e 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -2,10 +2,10 @@
module Issues
class ReopenService < Issues::BaseService
- def execute(issue)
- return issue unless can?(current_user, :reopen_issue, issue)
+ def execute(issue, skip_authorization: false)
+ return issue unless can_reopen?(issue, skip_authorization: skip_authorization)
- if issue.reopen
+ if perform_reopen(issue)
event_service.reopen_issue(issue, current_user)
create_note(issue, 'reopened')
notification_service.async.reopen_issue(issue, current_user)
@@ -22,6 +22,15 @@ module Issues
private
+ # Overridden on EE
+ def perform_reopen(issue)
+ issue.reopen
+ end
+
+ def can_reopen?(issue, skip_authorization: false)
+ skip_authorization || can?(current_user, :reopen_issue, issue)
+ end
+
def perform_incident_management_actions(issue)
end
diff --git a/app/services/merge_requests/mergeability/check_base_service.rb b/app/services/merge_requests/mergeability/check_base_service.rb
new file mode 100644
index 00000000000..d5ddcb4b828
--- /dev/null
+++ b/app/services/merge_requests/mergeability/check_base_service.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+module MergeRequests
+ module Mergeability
+ class CheckBaseService
+ attr_reader :merge_request, :params
+
+ def initialize(merge_request:, params:)
+ @merge_request = merge_request
+ @params = params
+ end
+
+ def skip?
+ raise NotImplementedError
+ end
+
+ # When this method returns true, the check must also implement #cache_key
+ def cacheable?
+ raise NotImplementedError
+ end
+
+ def cache_key
+ raise NotImplementedError
+ end
+
+ private
+
+ def success(*args)
+ Gitlab::MergeRequests::Mergeability::CheckResult.success(*args)
+ end
+
+ def failure(*args)
+ Gitlab::MergeRequests::Mergeability::CheckResult.failed(*args)
+ end
+ end
+ end
+end
diff --git a/app/services/merge_requests/mergeability/check_ci_status_service.rb b/app/services/merge_requests/mergeability/check_ci_status_service.rb
new file mode 100644
index 00000000000..c0ef5ba1c30
--- /dev/null
+++ b/app/services/merge_requests/mergeability/check_ci_status_service.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+module MergeRequests
+ module Mergeability
+ class CheckCiStatusService < CheckBaseService
+ def execute
+ if merge_request.mergeable_ci_state?
+ success
+ else
+ failure
+ end
+ end
+
+ def skip?
+ params[:skip_ci_check].present?
+ end
+
+ def cacheable?
+ false
+ end
+ end
+ end
+end
diff --git a/app/services/merge_requests/mergeability/run_checks_service.rb b/app/services/merge_requests/mergeability/run_checks_service.rb
new file mode 100644
index 00000000000..c1d65fb65cc
--- /dev/null
+++ b/app/services/merge_requests/mergeability/run_checks_service.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+module MergeRequests
+ module Mergeability
+ class RunChecksService
+ include Gitlab::Utils::StrongMemoize
+
+ # We want to have the cheapest checks first in the list,
+ # so that we can fail fast before running the more expensive ones
+ CHECKS = [
+ CheckCiStatusService
+ ].freeze
+
+ def initialize(merge_request:, params:)
+ @merge_request = merge_request
+ @params = params
+ end
+
+ def execute
+ CHECKS.each_with_object([]) do |check_class, results|
+ check = check_class.new(merge_request: merge_request, params: params)
+
+ next if check.skip?
+
+ check_result = run_check(check)
+ results << check_result
+
+ break results if check_result.failed?
+ end
+ end
+
+ private
+
+ attr_reader :merge_request, :params
+
+ def run_check(check)
+ return check.execute unless Feature.enabled?(:mergeability_caching, merge_request.project, default_enabled: :yaml)
+ return check.execute unless check.cacheable?
+
+ cached_result = results.read(merge_check: check)
+ return cached_result if cached_result.respond_to?(:status)
+
+ check.execute.tap do |result|
+ results.write(merge_check: check, result_hash: result.to_hash)
+ end
+ end
+
+ def results
+ strong_memoize(:results) do
+ Gitlab::MergeRequests::Mergeability::ResultsStore.new(merge_request: merge_request)
+ end
+ end
+ end
+ end
+end
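
A standalone sketch of the fail-fast loop in RunChecksService above. The check classes and the CheckResult struct are invented for illustration; the mechanism is each_with_object accumulating results and `break` returning the partial list as soon as a check fails.

CheckResult = Struct.new(:name, :status) do
  def failed?
    status == :failed
  end
end

class CheapCheck
  def skip?
    false
  end

  def execute
    CheckResult.new(:cheap, :success)
  end
end

class ExpensiveCheck
  def skip?
    false
  end

  def execute
    CheckResult.new(:expensive, :failed)
  end
end

CHECKS = [CheapCheck, ExpensiveCheck].freeze

results = CHECKS.each_with_object([]) do |check_class, acc|
  check = check_class.new
  next if check.skip?

  result = check.execute
  acc << result
  break acc if result.failed? # short-circuit: later checks never run
end

p results.map(&:name) # => [:cheap, :expensive]
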
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index af041de5596..c5395138902 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -248,7 +248,7 @@ module MergeRequests
def merge_from_quick_action(merge_request)
last_diff_sha = params.delete(:merge)
- MergeRequests::MergeOrchestrationService
+ ::MergeRequests::MergeOrchestrationService
.new(project, current_user, { sha: last_diff_sha })
.execute(merge_request)
end
diff --git a/app/services/metrics/dashboard/annotations/create_service.rb b/app/services/metrics/dashboard/annotations/create_service.rb
index 54f4e96378c..b86fa82a5e8 100644
--- a/app/services/metrics/dashboard/annotations/create_service.rb
+++ b/app/services/metrics/dashboard/annotations/create_service.rb
@@ -30,7 +30,7 @@ module Metrics
options[:environment] = environment
success(options)
else
- error(s_('Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected environment'))
+ error(s_('MetricsDashboardAnnotation|You are not authorized to create annotation for selected environment'))
end
end
@@ -39,7 +39,7 @@ module Metrics
options[:cluster] = cluster
success(options)
else
- error(s_('Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected cluster'))
+ error(s_('MetricsDashboardAnnotation|You are not authorized to create annotation for selected cluster'))
end
end
@@ -51,7 +51,7 @@ module Metrics
success(options)
rescue Gitlab::Template::Finders::RepoTemplateFinder::FileNotFoundError
- error(s_('Metrics::Dashboard::Annotation|Dashboard with requested path can not be found'))
+ error(s_('MetricsDashboardAnnotation|Dashboard with requested path can not be found'))
end
def create(options)
diff --git a/app/services/metrics/dashboard/annotations/delete_service.rb b/app/services/metrics/dashboard/annotations/delete_service.rb
index 3efe6924a9b..3cb22f8d3da 100644
--- a/app/services/metrics/dashboard/annotations/delete_service.rb
+++ b/app/services/metrics/dashboard/annotations/delete_service.rb
@@ -27,7 +27,7 @@ module Metrics
if Ability.allowed?(user, :delete_metrics_dashboard_annotation, annotation)
success
else
- error(s_('Metrics::Dashboard::Annotation|You are not authorized to delete this annotation'))
+ error(s_('MetricsDashboardAnnotation|You are not authorized to delete this annotation'))
end
end
@@ -35,7 +35,7 @@ module Metrics
if annotation.destroy
success
else
- error(s_('Metrics::Dashboard::Annotation|Annotation has not been deleted'))
+ error(s_('MetricsDashboardAnnotation|Annotation has not been deleted'))
end
end
end
diff --git a/app/services/metrics/users_starred_dashboards/create_service.rb b/app/services/metrics/users_starred_dashboards/create_service.rb
index 9642df87861..0d028f120d3 100644
--- a/app/services/metrics/users_starred_dashboards/create_service.rb
+++ b/app/services/metrics/users_starred_dashboards/create_service.rb
@@ -35,7 +35,7 @@ module Metrics
if Ability.allowed?(user, :create_metrics_user_starred_dashboard, project)
success(user: user, project: project)
else
- error(s_('Metrics::UsersStarredDashboards|You are not authorized to add star to this dashboard'))
+ error(s_('MetricsUsersStarredDashboards|You are not authorized to add star to this dashboard'))
end
end
@@ -44,7 +44,7 @@ module Metrics
options[:dashboard_path] = dashboard_path
success(options)
else
- error(s_('Metrics::UsersStarredDashboards|Dashboard with requested path can not be found'))
+ error(s_('MetricsUsersStarredDashboards|Dashboard with requested path can not be found'))
end
end
diff --git a/app/services/packages/composer/create_package_service.rb b/app/services/packages/composer/create_package_service.rb
index 8215a3385a4..0f5429f667e 100644
--- a/app/services/packages/composer/create_package_service.rb
+++ b/app/services/packages/composer/create_package_service.rb
@@ -17,10 +17,6 @@ module Packages
})
end
- unless Feature.enabled?(:remove_composer_v1_cache_code, project)
- ::Packages::Composer::CacheUpdateWorker.perform_async(created_package.project_id, created_package.name, nil)
- end
-
created_package
end
diff --git a/app/services/projects/after_rename_service.rb b/app/services/projects/after_rename_service.rb
index 953b386b754..a3d54bc6b58 100644
--- a/app/services/projects/after_rename_service.rb
+++ b/app/services/projects/after_rename_service.rb
@@ -12,6 +12,8 @@ module Projects
#
# Projects::AfterRenameService.new(project).execute
class AfterRenameService
+ include BaseServiceUtility
+
# @return [String] The Project being renamed.
attr_reader :project
@@ -78,7 +80,7 @@ module Projects
def execute_system_hooks
project.old_path_with_namespace = full_path_before
- SystemHooksService.new.execute_hooks_for(project, :rename)
+ system_hook_service.execute_hooks_for(project, :rename)
end
def update_repository_configuration
@@ -110,7 +112,7 @@ module Projects
end
def log_completion
- Gitlab::AppLogger.info(
+ log_info(
"Project #{project.id} has been renamed from " \
"#{full_path_before} to #{full_path_after}"
)
@@ -140,7 +142,7 @@ module Projects
def rename_failed!
error = "Repository #{full_path_before} could not be renamed to #{full_path_after}"
- Gitlab::AppLogger.error(error)
+ log_error(error)
raise RenameFailedError, error
end
diff --git a/app/services/projects/container_repository/cache_tags_created_at_service.rb b/app/services/projects/container_repository/cache_tags_created_at_service.rb
new file mode 100644
index 00000000000..3a5346d7a23
--- /dev/null
+++ b/app/services/projects/container_repository/cache_tags_created_at_service.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+module Projects
+ module ContainerRepository
+ class CacheTagsCreatedAtService
+ def initialize(container_repository)
+ @container_repository = container_repository
+ @cached_tag_names = Set.new
+ end
+
+ def populate(tags)
+ return if tags.empty?
+
+ # This will load all tags in one Redis roundtrip.
+ # The maximum number of tags is configurable and is set to 200 by default:
+ # https://gitlab.com/gitlab-org/gitlab/blob/master/doc/user/packages/container_registry/index.md#set-cleanup-limits-to-conserve-resources
+ keys = tags.map(&method(:cache_key))
+ cached_tags_count = 0
+
+ ::Gitlab::Redis::Cache.with do |redis|
+ tags.zip(redis.mget(keys)).each do |tag, created_at|
+ next unless created_at
+
+ tag.created_at = DateTime.rfc3339(created_at)
+ @cached_tag_names << tag.name
+ cached_tags_count += 1
+ end
+ end
+
+ cached_tags_count
+ end
+
+ def insert(tags, max_ttl_in_seconds)
+ return unless max_ttl_in_seconds
+ return if tags.empty?
+
+ # tags with nil created_at are not cacheable
+ # tags already cached don't need to be cached again
+ cacheable_tags = tags.select do |tag|
+ tag.created_at.present? && !tag.name.in?(@cached_tag_names)
+ end
+
+ return if cacheable_tags.empty?
+
+ now = Time.zone.now
+
+ ::Gitlab::Redis::Cache.with do |redis|
+ # we use a pipeline instead of a MSET because each tag has
+ # a specific ttl
+ redis.pipelined do
+ cacheable_tags.each do |tag|
+ created_at = tag.created_at
+ # ttl is the max_ttl_in_seconds reduced by the number
+ # of seconds that the tag has already existed
+ ttl = max_ttl_in_seconds - (now - created_at).seconds
+ ttl = ttl.to_i
+ redis.set(cache_key(tag), created_at.rfc3339, ex: ttl) if ttl > 0
+ end
+ end
+ end
+ end
+
+ private
+
+ def cache_key(tag)
+ "container_repository:{#{@container_repository.id}}:tag:#{tag.name}:created_at"
+ end
+ end
+ end
+end
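
A rough standalone sketch of the Redis access pattern used by CacheTagsCreatedAtService above, assuming a Redis reachable on localhost and the redis-rb 5.x pipelining API; the key names are made up. Reads are a single MGET round trip, while writes go through a pipeline because each key carries its own remaining TTL, which MSET cannot express.

require 'redis'
require 'time'

redis = Redis.new # assumes a local Redis instance

# Read side: one MGET round trip for all tag keys.
keys = %w[tag:a:created_at tag:b:created_at]
p redis.mget(keys) # => [nil, nil] on an empty instance

# Write side: a pipeline, because each key gets its own remaining TTL.
max_ttl = 90 * 24 * 60 * 60 # e.g. 90 days, in seconds
now     = Time.now

timestamps = { 'tag:a:created_at' => now - 3_600, 'tag:c:created_at' => now - 60 }

redis.pipelined do |pipe|
  timestamps.each do |key, created_at|
    ttl = max_ttl - (now - created_at).to_i # reduce the TTL by the tag's current age
    pipe.set(key, created_at.iso8601, ex: ttl) if ttl > 0
  end
end
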
diff --git a/app/services/projects/container_repository/cleanup_tags_service.rb b/app/services/projects/container_repository/cleanup_tags_service.rb
index 793d2fec033..3a60de0f1ee 100644
--- a/app/services/projects/container_repository/cleanup_tags_service.rb
+++ b/app/services/projects/container_repository/cleanup_tags_service.rb
@@ -2,116 +2,152 @@
module Projects
module ContainerRepository
- class CleanupTagsService < BaseService
- def execute(container_repository)
+ class CleanupTagsService
+ include BaseServiceUtility
+ include ::Gitlab::Utils::StrongMemoize
+
+ def initialize(container_repository, user = nil, params = {})
+ @container_repository = container_repository
+ @current_user = user
+ @params = params.dup
+
+ @project = container_repository.project
+ @tags = container_repository.tags
+ tags_size = @tags.size
+ @counts = {
+ original_size: tags_size,
+ cached_tags_count: 0
+ }
+ end
+
+ def execute
return error('access denied') unless can_destroy?
return error('invalid regex') unless valid_regex?
- tags = container_repository.tags
- original_size = tags.size
+ filter_out_latest
+ filter_by_name
- tags = without_latest(tags)
- tags = filter_by_name(tags)
+ truncate
+ populate_from_cache
- before_truncate_size = tags.size
- tags = truncate(tags)
- after_truncate_size = tags.size
+ filter_keep_n
+ filter_by_older_than
- tags = filter_keep_n(tags)
- tags = filter_by_older_than(tags)
-
- delete_tags(container_repository, tags).tap do |result|
- result[:original_size] = original_size
- result[:before_truncate_size] = before_truncate_size
- result[:after_truncate_size] = after_truncate_size
- result[:before_delete_size] = tags.size
+ delete_tags.merge(@counts).tap do |result|
+ result[:before_delete_size] = @tags.size
result[:deleted_size] = result[:deleted]&.size
- result[:status] = :error if before_truncate_size != after_truncate_size
+ result[:status] = :error if @counts[:before_truncate_size] != @counts[:after_truncate_size]
end
end
private
- def delete_tags(container_repository, tags)
- return success(deleted: []) unless tags.any?
-
- tag_names = tags.map(&:name)
+ def delete_tags
+ return success(deleted: []) unless @tags.any?
service = Projects::ContainerRepository::DeleteTagsService.new(
- container_repository.project,
- current_user,
- tags: tag_names,
- container_expiration_policy: params['container_expiration_policy']
+ @project,
+ @current_user,
+ tags: @tags.map(&:name),
+ container_expiration_policy: container_expiration_policy
)
- service.execute(container_repository)
+ service.execute(@container_repository)
end
- def without_latest(tags)
- tags.reject(&:latest?)
+ def filter_out_latest
+ @tags.reject!(&:latest?)
end
- def order_by_date(tags)
+ def order_by_date
now = DateTime.current
- tags.sort_by { |tag| tag.created_at || now }.reverse
+ @tags.sort_by! { |tag| tag.created_at || now }
+ .reverse!
end
- def filter_by_name(tags)
- regex_delete = ::Gitlab::UntrustedRegexp.new("\\A#{params['name_regex_delete'] || params['name_regex']}\\z")
- regex_retain = ::Gitlab::UntrustedRegexp.new("\\A#{params['name_regex_keep']}\\z")
+ def filter_by_name
+ regex_delete = ::Gitlab::UntrustedRegexp.new("\\A#{name_regex_delete || name_regex}\\z")
+ regex_retain = ::Gitlab::UntrustedRegexp.new("\\A#{name_regex_keep}\\z")
- tags.select do |tag|
+ @tags.select! do |tag|
# regex_retain will override any overlapping matches by regex_delete
regex_delete.match?(tag.name) && !regex_retain.match?(tag.name)
end
end
- def filter_keep_n(tags)
- return tags unless params['keep_n']
+ def filter_keep_n
+ return unless keep_n
- tags = order_by_date(tags)
- tags.drop(keep_n)
+ order_by_date
+ cache_tags(@tags.first(keep_n_as_integer))
+ @tags = @tags.drop(keep_n_as_integer)
end
- def filter_by_older_than(tags)
- return tags unless params['older_than']
+ def filter_by_older_than
+ return unless older_than
- older_than = ChronicDuration.parse(params['older_than']).seconds.ago
+ older_than_timestamp = older_than_in_seconds.ago
- tags.select do |tag|
- tag.created_at && tag.created_at < older_than
+ @tags, tags_to_keep = @tags.partition do |tag|
+ tag.created_at && tag.created_at < older_than_timestamp
end
+
+ cache_tags(tags_to_keep)
end
def can_destroy?
- return true if params['container_expiration_policy']
+ return true if container_expiration_policy
- can?(current_user, :destroy_container_image, project)
+ can?(@current_user, :destroy_container_image, @project)
end
def valid_regex?
%w(name_regex_delete name_regex name_regex_keep).each do |param_name|
- regex = params[param_name]
+ regex = @params[param_name]
::Gitlab::UntrustedRegexp.new(regex) unless regex.blank?
end
true
rescue RegexpError => e
- ::Gitlab::ErrorTracking.log_exception(e, project_id: project.id)
+ ::Gitlab::ErrorTracking.log_exception(e, project_id: @project.id)
false
end
- def truncate(tags)
- return tags unless throttling_enabled?
- return tags if max_list_size == 0
+ def truncate
+ @counts[:before_truncate_size] = @tags.size
+ @counts[:after_truncate_size] = @tags.size
+
+ return unless throttling_enabled?
+ return if max_list_size == 0
# truncate the list to make sure that after the #filter_keep_n
# execution, the resulting list will be at most max_list_size
- truncated_size = max_list_size + keep_n
+ truncated_size = max_list_size + keep_n_as_integer
- return tags if tags.size <= truncated_size
+ return if @tags.size <= truncated_size
+
+ @tags = @tags.sample(truncated_size)
+ @counts[:after_truncate_size] = @tags.size
+ end
+
+ def populate_from_cache
+ @counts[:cached_tags_count] = cache.populate(@tags) if caching_enabled?
+ end
+
+ def cache_tags(tags)
+ cache.insert(tags, older_than_in_seconds) if caching_enabled?
+ end
+
+ def cache
+ strong_memoize(:cache) do
+ ::Projects::ContainerRepository::CacheTagsCreatedAtService.new(@container_repository)
+ end
+ end
- tags.sample(truncated_size)
+ def caching_enabled?
+ container_expiration_policy &&
+ older_than.present? &&
+ Feature.enabled?(:container_registry_expiration_policies_caching, @project)
end
def throttling_enabled?
@@ -123,7 +159,37 @@ module Projects
end
def keep_n
- params['keep_n'].to_i
+ @params['keep_n']
+ end
+
+ def keep_n_as_integer
+ keep_n.to_i
+ end
+
+ def older_than_in_seconds
+ strong_memoize(:older_than_in_seconds) do
+ ChronicDuration.parse(older_than).seconds
+ end
+ end
+
+ def older_than
+ @params['older_than']
+ end
+
+ def name_regex_delete
+ @params['name_regex_delete']
+ end
+
+ def name_regex
+ @params['name_regex']
+ end
+
+ def name_regex_keep
+ @params['name_regex_keep']
+ end
+
+ def container_expiration_policy
+ @params['container_expiration_policy']
end
end
end
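
A small standalone illustration of the partition step introduced in filter_by_older_than above: tags older than the cutoff stay in the deletion list, the rest (including tags with an unknown created_at) are kept and become cache candidates. The tag names and ages are made up.

Tag = Struct.new(:name, :created_at)

tags = [
  Tag.new('v1', Time.now - 120 * 24 * 3_600), # ~4 months old
  Tag.new('v2', Time.now - 5 * 24 * 3_600),   # 5 days old
  Tag.new('v3', nil)                          # unknown creation time, never old enough
]

older_than_timestamp = Time.now - 90 * 24 * 3_600 # 90-day cutoff

to_delete, to_keep = tags.partition do |tag|
  tag.created_at && tag.created_at < older_than_timestamp
end

p to_delete.map(&:name) # => ["v1"]
p to_keep.map(&:name)   # => ["v2", "v3"]
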
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index e717491b19d..1536f0a22b8 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -8,6 +8,7 @@ module Projects
@current_user = user
@params = params.dup
@skip_wiki = @params.delete(:skip_wiki)
+ @initialize_with_sast = Gitlab::Utils.to_boolean(@params.delete(:initialize_with_sast))
@initialize_with_readme = Gitlab::Utils.to_boolean(@params.delete(:initialize_with_readme))
@import_data = @params.delete(:import_data)
@relations_block = @params.delete(:relations_block)
@@ -118,6 +119,7 @@ module Projects
Projects::PostCreationWorker.perform_async(@project.id)
create_readme if @initialize_with_readme
+ create_sast_commit if @initialize_with_sast
end
# Add an authorization for the current user authorizations inline
@@ -160,6 +162,10 @@ module Projects
Files::CreateService.new(@project, current_user, commit_attrs).execute
end
+ def create_sast_commit
+ ::Security::CiConfiguration::SastCreateService.new(@project, current_user, {}, commit_on_default: true).execute
+ end
+
def readme_content
@readme_template.presence || experiment(:new_project_readme_content, namespace: @project.namespace).run_with(@project)
end
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index afa8de04fca..27f813f4661 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -5,6 +5,7 @@ module Projects
include Gitlab::ShellAdapter
DestroyError = Class.new(StandardError)
+ BATCH_SIZE = 100
def async_execute
project.update_attribute(:pending_delete, true)
@@ -119,6 +120,12 @@ module Projects
destroy_web_hooks!
destroy_project_bots!
+ if ::Feature.enabled?(:ci_optimize_project_records_destruction, project, default_enabled: :yaml) &&
+ Feature.enabled?(:abort_deleted_project_pipelines, default_enabled: :yaml)
+
+ destroy_ci_records!
+ end
+
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
# This ensures we delete records in batches.
@@ -133,6 +140,23 @@ module Projects
log_info("Attempting to destroy #{project.full_path} (#{project.id})")
end
+ def destroy_ci_records!
+ project.all_pipelines.find_each(batch_size: BATCH_SIZE) do |pipeline| # rubocop: disable CodeReuse/ActiveRecord
+ # Destroy artifacts, then builds, then pipelines
+ # All builds have already been dropped by Ci::AbortPipelinesService,
+ # so no Ci::Build-instantiating cancellations happen here.
+ # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71342#note_691523196
+
+ ::Ci::DestroyPipelineService.new(project, current_user).execute(pipeline)
+ end
+
+ deleted_count = project.commit_statuses.delete_all
+
+ if deleted_count > 0
+ Gitlab::AppLogger.info "Projects::DestroyService - Project #{project.id} - #{deleted_count} leftover commit statuses"
+ end
+ end
+
# The project can have multiple webhooks with hundreds of thousands of web_hook_logs.
# By default, they are removed with "DELETE CASCADE" option defined via foreign_key.
# But such queries can exceed the statement_timeout limit and fail to delete the project.
diff --git a/app/services/projects/group_links/update_service.rb b/app/services/projects/group_links/update_service.rb
index 475ab17f1a1..a836b96cac3 100644
--- a/app/services/projects/group_links/update_service.rb
+++ b/app/services/projects/group_links/update_service.rb
@@ -20,19 +20,15 @@ module Projects
attr_reader :group_link
def refresh_authorizations
- if Feature.enabled?(:specialized_worker_for_project_share_update_auth_recalculation)
- AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project.id)
-
- # Until we compare the inconsistency rates of the new specialized worker and
- # the old approach, we still run AuthorizedProjectsWorker
- # but with some delay and lower urgency as a safety net.
- group_link.group.refresh_members_authorized_projects(
- blocking: false,
- priority: UserProjectAccessChangedService::LOW_PRIORITY
- )
- else
- group_link.group.refresh_members_authorized_projects
- end
+ AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project.id)
+
+ # Until we compare the inconsistency rates of the new specialized worker and
+ # the old approach, we still run AuthorizedProjectsWorker
+ # but with some delay and lower urgency as a safety net.
+ group_link.group.refresh_members_authorized_projects(
+ blocking: false,
+ priority: UserProjectAccessChangedService::LOW_PRIORITY
+ )
end
def requires_authorization_refresh?(params)
diff --git a/app/services/projects/import_service.rb b/app/services/projects/import_service.rb
index b5288aad6f0..4979af6dfe1 100644
--- a/app/services/projects/import_service.rb
+++ b/app/services/projects/import_service.rb
@@ -16,6 +16,8 @@ module Projects
end
def execute
+ track_start_import
+
add_repository_to_project
download_lfs_objects
@@ -25,16 +27,17 @@ module Projects
after_execute_hook
success
- rescue Gitlab::UrlBlocker::BlockedUrlError => e
- Gitlab::ErrorTracking.track_exception(e, project_path: project.full_path, importer: project.import_type)
+ rescue Gitlab::UrlBlocker::BlockedUrlError, StandardError => e
+ Gitlab::Import::ImportFailureService.track(
+ project_id: project.id,
+ error_source: self.class.name,
+ exception: e,
+ metrics: true
+ )
- error(s_("ImportProjects|Error importing repository %{project_safe_import_url} into %{project_full_path} - %{message}") % { project_safe_import_url: project.safe_import_url, project_full_path: project.full_path, message: e.message })
- rescue StandardError => e
message = Projects::ImportErrorFilter.filter_message(e.message)
-
- Gitlab::ErrorTracking.track_exception(e, project_path: project.full_path, importer: project.import_type)
-
- error(s_("ImportProjects|Error importing repository %{project_safe_import_url} into %{project_full_path} - %{message}") % { project_safe_import_url: project.safe_import_url, project_full_path: project.full_path, message: message })
+ error(s_("ImportProjects|Error importing repository %{project_safe_import_url} into %{project_full_path} - %{message}") %
+ { project_safe_import_url: project.safe_import_url, project_full_path: project.full_path, message: message })
end
protected
@@ -54,6 +57,10 @@ module Projects
# Defined in EE::Projects::ImportService
end
+ def track_start_import
+ has_importer? && importer_class.try(:track_start_import, project)
+ end
+
def add_repository_to_project
if project.external_import? && !unknown_url?
begin
diff --git a/app/services/projects/overwrite_project_service.rb b/app/services/projects/overwrite_project_service.rb
index f35370c427f..2612001eb95 100644
--- a/app/services/projects/overwrite_project_service.rb
+++ b/app/services/projects/overwrite_project_service.rb
@@ -3,7 +3,7 @@
module Projects
class OverwriteProjectService < BaseService
def execute(source_project)
- return unless source_project && source_project.namespace == @project.namespace
+ return unless source_project && source_project.namespace_id == @project.namespace_id
start_time = ::Gitlab::Metrics::System.monotonic_time
@@ -40,7 +40,7 @@ module Projects
duration = ::Gitlab::Metrics::System.monotonic_time - start_time
Gitlab::AppJsonLogger.info(class: self.class.name,
- namespace_id: source_project.namespace.id,
+ namespace_id: source_project.namespace_id,
project_id: source_project.id,
duration_s: duration.to_f,
error: exception.class.name)
diff --git a/app/services/projects/participants_service.rb b/app/services/projects/participants_service.rb
index 228115d72b8..1616a8a4062 100644
--- a/app/services/projects/participants_service.rb
+++ b/app/services/projects/participants_service.rb
@@ -36,14 +36,17 @@ module Projects
private
def project_members_through_invited_groups
- groups_with_ancestors_ids = Gitlab::ObjectHierarchy
- .new(visible_groups)
- .base_and_ancestors
- .pluck_primary_key
+ groups_with_ancestors = if ::Feature.enabled?(:linear_participants_service_ancestor_scopes, current_user, default_enabled: :yaml)
+ visible_groups.self_and_ancestors
+ else
+ Gitlab::ObjectHierarchy
+ .new(visible_groups)
+ .base_and_ancestors
+ end
GroupMember
.active_without_invites_and_requests
- .with_source_id(groups_with_ancestors_ids)
+ .with_source_id(groups_with_ancestors.pluck_primary_key)
end
def visible_groups
diff --git a/app/services/projects/transfer_service.rb b/app/services/projects/transfer_service.rb
index 27376173f07..a69e6488ebc 100644
--- a/app/services/projects/transfer_service.rb
+++ b/app/services/projects/transfer_service.rb
@@ -81,7 +81,7 @@ module Projects
# Apply changes to the project
update_namespace_and_visibility(@new_namespace)
- update_shared_runners_settings
+ project.reconcile_shared_runners_setting!
project.save!
# Notifications
@@ -104,6 +104,8 @@ module Projects
update_repository_configuration(@new_path)
execute_system_hooks
+
+ update_pending_builds!
end
post_update_hooks(project)
@@ -154,19 +156,15 @@ module Projects
user_ids = @old_namespace.user_ids_for_project_authorizations |
@new_namespace.user_ids_for_project_authorizations
- if Feature.enabled?(:specialized_worker_for_project_transfer_auth_recalculation)
- AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project.id)
-
- # Until we compare the inconsistency rates of the new specialized worker and
- # the old approach, we still run AuthorizedProjectsWorker
- # but with some delay and lower urgency as a safety net.
- UserProjectAccessChangedService.new(user_ids).execute(
- blocking: false,
- priority: UserProjectAccessChangedService::LOW_PRIORITY
- )
- else
- UserProjectAccessChangedService.new(user_ids).execute
- end
+ AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project.id)
+
+ # Until we compare the inconsistency rates of the new specialized worker and
+ # the old approach, we still run AuthorizedProjectsWorker
+ # but with some delay and lower urgency as a safety net.
+ UserProjectAccessChangedService.new(user_ids).execute(
+ blocking: false,
+ priority: UserProjectAccessChangedService::LOW_PRIORITY
+ )
end
def rollback_side_effects
@@ -189,7 +187,7 @@ module Projects
end
def execute_system_hooks
- SystemHooksService.new.execute_hooks_for(project, :transfer)
+ system_hook_service.execute_hooks_for(project, :transfer)
end
def move_project_folders(project)
@@ -241,18 +239,19 @@ module Projects
"#{new_path}#{::Gitlab::GlRepository::DESIGN.path_suffix}"
end
- def update_shared_runners_settings
- # If a project is being transferred to another group it means it can already
- # have shared runners enabled but we need to check whether the new group allows that.
- if project.group && project.group.shared_runners_setting == 'disabled_and_unoverridable'
- project.shared_runners_enabled = false
- end
- end
-
def update_integrations
project.integrations.with_default_settings.delete_all
Integration.create_from_active_default_integrations(project, :project_id)
end
+
+ def update_pending_builds!
+ update_params = {
+ namespace_id: new_namespace.id,
+ namespace_traversal_ids: new_namespace.traversal_ids
+ }
+
+ ::Ci::UpdatePendingBuildService.new(project, update_params).execute
+ end
end
end
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index dc75fe1014a..0000e713cb4 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -136,13 +136,11 @@ module Projects
def validate_outdated_sha!
return if latest?
- if Feature.enabled?(:pages_smart_check_outdated_sha, project, default_enabled: :yaml)
- # use pipeline_id in case the build is retried
- last_deployed_pipeline_id = project.pages_metadatum&.pages_deployment&.ci_build&.pipeline_id
+ # use pipeline_id in case the build is retried
+ last_deployed_pipeline_id = project.pages_metadatum&.pages_deployment&.ci_build&.pipeline_id
- return unless last_deployed_pipeline_id
- return if last_deployed_pipeline_id <= build.pipeline_id
- end
+ return unless last_deployed_pipeline_id
+ return if last_deployed_pipeline_id <= build.pipeline_id
raise InvalidStateError, 'build SHA is outdated for this ref'
end
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index b87564fcaef..a32e80af4b1 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -105,7 +105,7 @@ module Projects
end
update_pages_config if changing_pages_related_config?
- update_pending_builds if shared_runners_toggled?
+ update_pending_builds if runners_settings_toggled?
end
def after_rename_service(project)
@@ -181,13 +181,36 @@ module Projects
end
def update_pending_builds
- update_params = { instance_runners_enabled: project.shared_runners_enabled }
+ update_params = {
+ instance_runners_enabled: project.shared_runners_enabled?,
+ namespace_traversal_ids: group_runner_traversal_ids
+ }
- ::Ci::UpdatePendingBuildService.new(project, update_params).execute
+ ::Ci::UpdatePendingBuildService
+ .new(project, update_params)
+ .execute
end
- def shared_runners_toggled?
- project.previous_changes.include?('shared_runners_enabled')
+ def shared_runners_settings_toggled?
+ project.previous_changes.include?(:shared_runners_enabled)
+ end
+
+ def group_runners_settings_toggled?
+ return false unless project.ci_cd_settings.present?
+
+ project.ci_cd_settings.previous_changes.include?(:group_runners_enabled)
+ end
+
+ def runners_settings_toggled?
+ shared_runners_settings_toggled? || group_runners_settings_toggled?
+ end
+
+ def group_runner_traversal_ids
+ if project.group_runners_enabled?
+ project.namespace.traversal_ids
+ else
+ []
+ end
end
end
end
diff --git a/app/services/search/global_service.rb b/app/services/search/global_service.rb
index 33faf2d6698..cee59360b4b 100644
--- a/app/services/search/global_service.rb
+++ b/app/services/search/global_service.rb
@@ -24,7 +24,7 @@ module Search
# rubocop: disable CodeReuse/ActiveRecord
def projects
- @projects ||= ProjectsFinder.new(params: { non_archived: true }, current_user: current_user).execute.preload(:topics, :taggings)
+ @projects ||= ProjectsFinder.new(params: { non_archived: true }, current_user: current_user).execute.preload(:topics, :project_topics)
end
def allowed_scopes
diff --git a/app/services/security/ci_configuration/base_create_service.rb b/app/services/security/ci_configuration/base_create_service.rb
index adb45244adb..ea77cd98ba3 100644
--- a/app/services/security/ci_configuration/base_create_service.rb
+++ b/app/services/security/ci_configuration/base_create_service.rb
@@ -25,7 +25,7 @@ module Security
rescue Gitlab::Git::PreReceiveError => e
ServiceResponse.error(message: e.message)
rescue StandardError
- project.repository.rm_branch(current_user, branch_name) if project.repository.branch_exists?(branch_name)
+ remove_branch_on_exception
raise
end
@@ -50,6 +50,10 @@ module Security
Gitlab::Routing.url_helpers.project_new_merge_request_url(project, merge_request: merge_request_params)
end
+ def remove_branch_on_exception
+ project.repository.rm_branch(current_user, branch_name) if project.repository.branch_exists?(branch_name)
+ end
+
def track_event(attributes_for_commit)
action = attributes_for_commit[:actions].first
diff --git a/app/services/security/ci_configuration/sast_create_service.rb b/app/services/security/ci_configuration/sast_create_service.rb
index f495cac18f8..47e01847b17 100644
--- a/app/services/security/ci_configuration/sast_create_service.rb
+++ b/app/services/security/ci_configuration/sast_create_service.rb
@@ -5,15 +5,28 @@ module Security
class SastCreateService < ::Security::CiConfiguration::BaseCreateService
attr_reader :params
- def initialize(project, current_user, params)
+ def initialize(project, current_user, params, commit_on_default: false)
super(project, current_user)
@params = params
+
+ @commit_on_default = commit_on_default
+ @branch_name = project.default_branch if @commit_on_default
end
private
+ def remove_branch_on_exception
+ super unless @commit_on_default
+ end
+
def action
- Security::CiConfiguration::SastBuildAction.new(project.auto_devops_enabled?, params, existing_gitlab_ci_content).generate
+ existing_content = begin
+ existing_gitlab_ci_content # this can fail on the very first commit
+ rescue StandardError
+ nil
+ end
+
+ Security::CiConfiguration::SastBuildAction.new(project.auto_devops_enabled?, params, existing_content).generate
end
def next_branch
diff --git a/app/services/service_ping/submit_service.rb b/app/services/service_ping/submit_service.rb
index 3417ce4f583..63e01603d47 100644
--- a/app/services/service_ping/submit_service.rb
+++ b/app/services/service_ping/submit_service.rb
@@ -78,7 +78,7 @@ module ServicePing
def store_metrics(response)
metrics = response['conv_index'] || response['dev_ops_score'] # leaving dev_ops_score here, as the response data comes from the gitlab-version-com
- return unless metrics.present?
+ return unless metrics.except('usage_data_id').present?
DevOpsReport::Metric.create!(
metrics.slice(*METRICS)
diff --git a/app/services/terraform/remote_state_handler.rb b/app/services/terraform/remote_state_handler.rb
index e9a13cee764..f13477b8b34 100644
--- a/app/services/terraform/remote_state_handler.rb
+++ b/app/services/terraform/remote_state_handler.rb
@@ -2,8 +2,6 @@
module Terraform
class RemoteStateHandler < BaseService
- include Gitlab::OptimisticLocking
-
StateLockedError = Class.new(StandardError)
UnauthorizedError = Class.new(StandardError)
@@ -60,7 +58,7 @@ module Terraform
private
def retrieve_with_lock(find_only: false)
- create_or_find!(find_only: find_only).tap { |state| retry_optimistic_lock(state, name: 'terraform_remote_state_handler_retrieve') { |state| yield state } }
+ create_or_find!(find_only: find_only).tap { |state| state.with_lock { yield state } }
end
def create_or_find!(find_only:)
diff --git a/app/services/user_project_access_changed_service.rb b/app/services/user_project_access_changed_service.rb
index 5f48f410bf7..5bba986f4ad 100644
--- a/app/services/user_project_access_changed_service.rb
+++ b/app/services/user_project_access_changed_service.rb
@@ -30,7 +30,7 @@ class UserProjectAccessChangedService
end
end
- ::Gitlab::Database::LoadBalancing::Sticking.bulk_stick(:user, @user_ids)
+ ::User.sticking.bulk_stick(:user, @user_ids)
result
end
diff --git a/app/services/users/update_service.rb b/app/services/users/update_service.rb
index 23c67231a29..c3df9b153a1 100644
--- a/app/services/users/update_service.rb
+++ b/app/services/users/update_service.rb
@@ -5,15 +5,18 @@ module Users
include NewUserNotifier
attr_reader :user, :identity_params
+ ATTRS_REQUIRING_PASSWORD_CHECK = %w[email].freeze
+
def initialize(current_user, params = {})
@current_user = current_user
+ @validation_password = params.delete(:validation_password)
@user = params.delete(:user)
@status_params = params.delete(:status)
@identity_params = params.slice(*identity_attributes)
@params = params.dup
end
- def execute(validate: true, &block)
+ def execute(validate: true, check_password: false, &block)
yield(@user) if block_given?
user_exists = @user.persisted?
@@ -21,6 +24,11 @@ module Users
discard_read_only_attributes
assign_attributes
+
+ if check_password && require_password_check? && !@user.valid_password?(@validation_password)
+ return error(s_("Profiles|Invalid password"))
+ end
+
assign_identity
build_canonical_email
@@ -32,8 +40,8 @@ module Users
end
end
- def execute!(*args, &block)
- result = execute(*args, &block)
+ def execute!(*args, **kargs, &block)
+ result = execute(*args, **kargs, &block)
raise ActiveRecord::RecordInvalid, @user unless result[:status] == :success
@@ -42,6 +50,14 @@ module Users
private
+ def require_password_check?
+ return false unless @user.persisted?
+ return false if @user.password_automatically_set?
+
+ changes = @user.changed
+ ATTRS_REQUIRING_PASSWORD_CHECK.any? { |param| changes.include?(param) }
+ end
+
def build_canonical_email
return unless @user.email_changed?
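
A plain-Ruby sketch of the password-check rule added above, with the ActiveRecord pieces replaced by explicit arguments; the function shape is invented for illustration.

ATTRS_REQUIRING_PASSWORD_CHECK = %w[email].freeze

def require_password_check?(changed_attributes, persisted:, password_automatically_set:)
  return false unless persisted              # new records never need the check
  return false if password_automatically_set # e.g. accounts created via OAuth

  ATTRS_REQUIRING_PASSWORD_CHECK.any? { |attr| changed_attributes.include?(attr) }
end

p require_password_check?(%w[email], persisted: true, password_automatically_set: false) # => true
p require_password_check?(%w[name], persisted: true, password_automatically_set: false)  # => false
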
diff --git a/app/services/users/upsert_credit_card_validation_service.rb b/app/services/users/upsert_credit_card_validation_service.rb
index 70a96b3ec6b..86b5b923418 100644
--- a/app/services/users/upsert_credit_card_validation_service.rb
+++ b/app/services/users/upsert_credit_card_validation_service.rb
@@ -7,6 +7,14 @@ module Users
end
def execute
+ @params = {
+ user_id: params.fetch(:user_id),
+ credit_card_validated_at: params.fetch(:credit_card_validated_at),
+ expiration_date: get_expiration_date(params),
+ last_digits: Integer(params.fetch(:credit_card_mask_number), 10),
+ holder_name: params.fetch(:credit_card_holder_name)
+ }
+
::Users::CreditCardValidation.upsert(@params)
ServiceResponse.success(message: 'CreditCardValidation was set')
@@ -16,5 +24,14 @@ module Users
Gitlab::ErrorTracking.track_exception(e, params: @params, class: self.class.to_s)
ServiceResponse.error(message: "Could not set CreditCardValidation: #{e.message}")
end
+
+ private
+
+ def get_expiration_date(params)
+ year = params.fetch(:credit_card_expiration_year)
+ month = params.fetch(:credit_card_expiration_month)
+
+ Date.new(year, month, -1) # last day of the month
+ end
end
end
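
A quick standalone check of the `Date.new(year, month, -1)` idiom used in get_expiration_date above: a negative day counts back from the end of the month, so -1 always yields the month's last day.

require 'date'

puts Date.new(2021, 2, -1)  # => 2021-02-28
puts Date.new(2020, 2, -1)  # => 2020-02-29 (leap year)
puts Date.new(2021, 12, -1) # => 2021-12-31
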