author     GitLab Bot <gitlab-bot@gitlab.com>  2021-06-16 18:25:58 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2021-06-16 18:25:58 +0000
commit     a5f4bba440d7f9ea47046a0a561d49adf0a1e6d4 (patch)
tree       fb69158581673816a8cd895f9d352dcb3c678b1e /app/services
parent     d16b2e8639e99961de6ddc93909f3bb5c1445ba1 (diff)
download   gitlab-ce-a5f4bba440d7f9ea47046a0a561d49adf0a1e6d4.tar.gz

Add latest changes from gitlab-org/gitlab@14-0-stable-ee (tag: v14.0.0-rc42)
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/authorized_project_update/periodic_recalculate_service.rb | 7
-rw-r--r--  app/services/authorized_project_update/project_group_link_create_service.rb | 2
-rw-r--r--  app/services/authorized_project_update/project_recalculate_service.rb | 80
-rw-r--r--  app/services/authorized_project_update/recalculate_for_user_range_service.rb | 20
-rw-r--r--  app/services/base_container_service.rb | 8
-rw-r--r--  app/services/base_count_service.rb | 4
-rw-r--r--  app/services/boards/base_item_move_service.rb | 9
-rw-r--r--  app/services/boards/base_items_list_service.rb | 12
-rw-r--r--  app/services/boards/issues/list_service.rb | 4
-rw-r--r--  app/services/branches/create_service.rb | 8
-rw-r--r--  app/services/bulk_imports/file_decompression_service.rb | 58
-rw-r--r--  app/services/bulk_imports/file_download_service.rb | 102
-rw-r--r--  app/services/bulk_imports/relation_export_service.rb | 4
-rw-r--r--  app/services/ci/create_downstream_pipeline_service.rb | 12
-rw-r--r--  app/services/ci/create_pipeline_service.rb | 1
-rw-r--r--  app/services/ci/job_artifacts/create_service.rb | 5
-rw-r--r--  app/services/ci/pipeline_creation/start_pipeline_service.rb | 19
-rw-r--r--  app/services/ci/pipeline_schedules/calculate_next_run_service.rb | 59
-rw-r--r--  app/services/ci/play_bridge_service.rb | 10
-rw-r--r--  app/services/ci/play_build_service.rb | 18
-rw-r--r--  app/services/ci/register_job_service.rb | 64
-rw-r--r--  app/services/ci/retry_build_service.rb | 13
-rw-r--r--  app/services/ci/update_build_queue_service.rb | 100
-rw-r--r--  app/services/ci/update_build_state_service.rb | 22
-rw-r--r--  app/services/clusters/applications/base_service.rb | 18
-rw-r--r--  app/services/clusters/applications/schedule_update_service.rb | 7
-rw-r--r--  app/services/clusters/cleanup/app_service.rb | 33
-rw-r--r--  app/services/clusters/cleanup/project_namespace_service.rb | 2
-rw-r--r--  app/services/clusters/cleanup/service_account_service.rb | 2
-rw-r--r--  app/services/clusters/gcp/finalize_creation_service.rb | 12
-rw-r--r--  app/services/clusters/parse_cluster_applications_artifact_service.rb | 94
-rw-r--r--  app/services/commits/change_service.rb | 5
-rw-r--r--  app/services/container_expiration_policies/cleanup_service.rb | 1
-rw-r--r--  app/services/deployments/update_environment_service.rb | 16
-rw-r--r--  app/services/design_management/copy_design_collection/copy_service.rb | 10
-rw-r--r--  app/services/design_management/design_service.rb | 2
-rw-r--r--  app/services/discussions/resolve_service.rb | 7
-rw-r--r--  app/services/feature_flags/disable_service.rb | 46
-rw-r--r--  app/services/feature_flags/enable_service.rb | 93
-rw-r--r--  app/services/groups/create_service.rb | 2
-rw-r--r--  app/services/groups/group_links/create_service.rb | 34
-rw-r--r--  app/services/groups/participants_service.rb | 4
-rw-r--r--  app/services/groups/transfer_service.rb | 3
-rw-r--r--  app/services/import_export_clean_up_service.rb | 36
-rw-r--r--  app/services/issuable_base_service.rb | 25
-rw-r--r--  app/services/issue_rebalancing_service.rb | 11
-rw-r--r--  app/services/issues/base_service.rb | 34
-rw-r--r--  app/services/issues/close_service.rb | 13
-rw-r--r--  app/services/issues/create_service.rb | 31
-rw-r--r--  app/services/issues/update_service.rb | 22
-rw-r--r--  app/services/issues/zoom_link_service.rb | 4
-rw-r--r--  app/services/jira_import/users_importer.rb | 15
-rw-r--r--  app/services/mattermost/create_team_service.rb | 4
-rw-r--r--  app/services/members/create_service.rb | 22
-rw-r--r--  app/services/merge_requests/base_service.rb | 33
-rw-r--r--  app/services/merge_requests/handle_assignees_change_service.rb | 2
-rw-r--r--  app/services/merge_requests/refresh_service.rb | 3
-rw-r--r--  app/services/merge_requests/update_assignees_service.rb | 11
-rw-r--r--  app/services/merge_requests/update_service.rb | 25
-rw-r--r--  app/services/namespace_settings/update_service.rb | 10
-rw-r--r--  app/services/namespaces/in_product_marketing_emails_service.rb | 45
-rw-r--r--  app/services/notification_recipients/builder/base.rb | 45
-rw-r--r--  app/services/packages/debian/create_distribution_service.rb | 21
-rw-r--r--  app/services/packages/debian/destroy_distribution_service.rb | 33
-rw-r--r--  app/services/packages/debian/generate_distribution_service.rb | 53
-rw-r--r--  app/services/packages/debian/process_changes_service.rb | 2
-rw-r--r--  app/services/packages/debian/update_distribution_service.rb | 10
-rw-r--r--  app/services/packages/helm/extract_file_metadata_service.rb | 53
-rw-r--r--  app/services/packages/nuget/metadata_extraction_service.rb | 27
-rw-r--r--  app/services/pages/delete_service.rb | 9
-rw-r--r--  app/services/projects/create_service.rb | 37
-rw-r--r--  app/services/projects/destroy_service.rb | 2
-rw-r--r--  app/services/projects/group_links/create_service.rb | 26
-rw-r--r--  app/services/projects/group_links/destroy_service.rb | 20
-rw-r--r--  app/services/projects/prometheus/alerts/notify_service.rb | 4
-rw-r--r--  app/services/projects/update_service.rb | 9
-rw-r--r--  app/services/prometheus/create_default_alerts_service.rb | 7
-rw-r--r--  app/services/resource_access_tokens/create_service.rb | 2
-rw-r--r--  app/services/search/project_service.rb | 4
-rw-r--r--  app/services/search_service.rb | 4
-rw-r--r--  app/services/security/ci_configuration/sast_parser_service.rb | 20
-rw-r--r--  app/services/snippets/base_service.rb | 6
-rw-r--r--  app/services/snippets/create_service.rb | 2
-rw-r--r--  app/services/snippets/update_service.rb | 2
-rw-r--r--  app/services/todo_service.rb | 2
-rw-r--r--  app/services/user_project_access_changed_service.rb | 23
-rw-r--r--  app/services/users/activity_service.rb | 4
-rw-r--r--  app/services/users/authorized_build_service.rb | 18
-rw-r--r--  app/services/users/authorized_create_service.rb | 14
-rw-r--r--  app/services/users/build_service.rb | 187
-rw-r--r--  app/services/users/create_service.rb | 9
-rw-r--r--  app/services/users/refresh_authorized_projects_service.rb | 9
-rw-r--r--  app/services/users/registrations_build_service.rb | 7
-rw-r--r--  app/services/users/update_assigned_open_issue_count_service.rb | 33
-rw-r--r--  app/services/web_hook_service.rb | 51
-rw-r--r--  app/services/web_hooks/log_execution_service.rb | 35
96 files changed, 1292 insertions, 876 deletions
diff --git a/app/services/authorized_project_update/periodic_recalculate_service.rb b/app/services/authorized_project_update/periodic_recalculate_service.rb
index 662d10c648b..16dc76eb4cf 100644
--- a/app/services/authorized_project_update/periodic_recalculate_service.rb
+++ b/app/services/authorized_project_update/periodic_recalculate_service.rb
@@ -9,7 +9,12 @@ module AuthorizedProjectUpdate
# Using this approach (instead of eg. User.each_batch) keeps the arguments
# the same for AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker
# even if the user list changes, so we can deduplicate these jobs.
- (1..User.maximum(:id)).each_slice(BATCH_SIZE).with_index do |batch, index|
+
+ # Since UserRefreshOverUserRangeWorker has set data_consistency to delayed,
+ # a job enqueued without a delay could fail because the replica might not have caught up with the primary yet.
+ # To prevent this, we start the index from `1` instead of `0` so as to ensure that
+ # no UserRefreshOverUserRangeWorker job is enqueued without a delay.
+ (1..User.maximum(:id)).each_slice(BATCH_SIZE).with_index(1) do |batch, index|
delay = DELAY_INTERVAL * index
AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker.perform_in(delay, *batch.minmax)
end
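
A rough sketch of why the index now starts at 1 (the constant values below are illustrative placeholders, not the service's real values): the first batch already gets one full DELAY_INTERVAL instead of a zero delay.

    # Illustrative values only; the real BATCH_SIZE and DELAY_INTERVAL are defined in the service.
    BATCH_SIZE = 3
    DELAY_INTERVAL = 30 # seconds

    (1..10).each_slice(BATCH_SIZE).with_index(1) do |batch, index|
      delay = DELAY_INTERVAL * index
      # with_index(1) means the first batch is scheduled 30s out rather than immediately,
      # giving the delayed (replica-backed) worker time to catch up.
      puts "perform_in(#{delay}, #{batch.min}, #{batch.max})"
    end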
diff --git a/app/services/authorized_project_update/project_group_link_create_service.rb b/app/services/authorized_project_update/project_group_link_create_service.rb
index 090b22a7820..e9e7c56d7c7 100644
--- a/app/services/authorized_project_update/project_group_link_create_service.rb
+++ b/app/services/authorized_project_update/project_group_link_create_service.rb
@@ -49,7 +49,7 @@ module AuthorizedProjectUpdate
def access_level(membership_access_level)
return membership_access_level unless group_access
- # access level must not be higher than the max access level set when
+ # access level (role) must not be higher than the max access level (role) set when
# creating the project share
[membership_access_level, group_access].min
end
diff --git a/app/services/authorized_project_update/project_recalculate_service.rb b/app/services/authorized_project_update/project_recalculate_service.rb
new file mode 100644
index 00000000000..cbb8efaf99f
--- /dev/null
+++ b/app/services/authorized_project_update/project_recalculate_service.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+module AuthorizedProjectUpdate
+ class ProjectRecalculateService
+ # Service for refreshing all the authorizations to a particular project.
+ include Gitlab::Utils::StrongMemoize
+ BATCH_SIZE = 1000
+
+ def initialize(project)
+ @project = project
+ end
+
+ def execute
+ refresh_authorizations if needs_refresh?
+ ServiceResponse.success
+ end
+
+ private
+
+ attr_reader :project
+
+ def needs_refresh?
+ user_ids_to_remove.any? ||
+ authorizations_to_create.any?
+ end
+
+ def current_authorizations
+ strong_memoize(:current_authorizations) do
+ project.project_authorizations
+ .pluck(:user_id, :access_level) # rubocop: disable CodeReuse/ActiveRecord
+ end
+ end
+
+ def fresh_authorizations
+ strong_memoize(:fresh_authorizations) do
+ result = []
+
+ Projects::Members::EffectiveAccessLevelFinder.new(project)
+ .execute
+ .each_batch(of: BATCH_SIZE, column: :user_id) do |member_batch|
+ result += member_batch.pluck(:user_id, 'MAX(access_level)') # rubocop: disable CodeReuse/ActiveRecord
+ end
+
+ result
+ end
+ end
+
+ def user_ids_to_remove
+ strong_memoize(:user_ids_to_remove) do
+ (current_authorizations - fresh_authorizations)
+ .map {|user_id, _| user_id }
+ end
+ end
+
+ def authorizations_to_create
+ strong_memoize(:authorizations_to_create) do
+ (fresh_authorizations - current_authorizations).map do |user_id, access_level|
+ {
+ user_id: user_id,
+ access_level: access_level,
+ project_id: project.id
+ }
+ end
+ end
+ end
+
+ def refresh_authorizations
+ ProjectAuthorization.transaction do
+ if user_ids_to_remove.any?
+ ProjectAuthorization.where(project_id: project.id, user_id: user_ids_to_remove) # rubocop: disable CodeReuse/ActiveRecord
+ .delete_all
+ end
+
+ if authorizations_to_create.any?
+ ProjectAuthorization.insert_all(authorizations_to_create)
+ end
+ end
+ end
+ end
+end
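
The refresh is essentially a set difference over [user_id, access_level] pairs. A minimal sketch with made-up data (not the service's API) shows why a changed access level produces both a delete and an insert:

    current = [[1, 30], [2, 40]]           # rows already in project_authorizations
    fresh   = [[1, 30], [2, 50], [3, 20]]  # recomputed effective access levels

    user_ids_to_remove       = (current - fresh).map { |user_id, _| user_id }  # => [2]
    authorizations_to_create = (fresh - current)                               # => [[2, 50], [3, 20]]

    # The service itself is invoked per project:
    #   AuthorizedProjectUpdate::ProjectRecalculateService.new(project).execute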
diff --git a/app/services/authorized_project_update/recalculate_for_user_range_service.rb b/app/services/authorized_project_update/recalculate_for_user_range_service.rb
deleted file mode 100644
index f300c45f019..00000000000
--- a/app/services/authorized_project_update/recalculate_for_user_range_service.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-module AuthorizedProjectUpdate
- class RecalculateForUserRangeService
- def initialize(start_user_id, end_user_id)
- @start_user_id = start_user_id
- @end_user_id = end_user_id
- end
-
- def execute
- User.where(id: start_user_id..end_user_id).select(:id).find_each do |user| # rubocop: disable CodeReuse/ActiveRecord
- Users::RefreshAuthorizedProjectsService.new(user, source: self.class.name).execute
- end
- end
-
- private
-
- attr_reader :start_user_id, :end_user_id
- end
-end
diff --git a/app/services/base_container_service.rb b/app/services/base_container_service.rb
index ee15763ce65..8492b3ce92c 100644
--- a/app/services/base_container_service.rb
+++ b/app/services/base_container_service.rb
@@ -18,4 +18,12 @@ class BaseContainerService
@current_user = current_user
@params = params.dup
end
+
+ def project_container?
+ container.is_a?(::Project)
+ end
+
+ def group_container?
+ container.is_a?(::Group)
+ end
end
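
These predicates let subclasses branch on the container type; a hypothetical subclass for illustration:

    class ExampleCountService < BaseContainerService
      def execute
        if project_container?
          # container is a ::Project
        elsif group_container?
          # container is a ::Group
        end
      end
    end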
diff --git a/app/services/base_count_service.rb b/app/services/base_count_service.rb
index c316c488148..ff1949ce4dd 100644
--- a/app/services/base_count_service.rb
+++ b/app/services/base_count_service.rb
@@ -27,7 +27,7 @@ class BaseCountService
end
def delete_cache
- Rails.cache.delete(cache_key)
+ ::Gitlab::Cache.delete(cache_key)
end
def raw?
@@ -49,4 +49,4 @@ class BaseCountService
end
end
-BaseCountService.prepend_mod_with('BaseCountService')
+BaseCountService.prepend_mod
diff --git a/app/services/boards/base_item_move_service.rb b/app/services/boards/base_item_move_service.rb
index 28fb1e43043..dfd0002cbc9 100644
--- a/app/services/boards/base_item_move_service.rb
+++ b/app/services/boards/base_item_move_service.rb
@@ -23,14 +23,15 @@ module Boards
end
reposition_ids = move_between_ids(params)
- if reposition_ids
- attrs[:move_between_ids] = reposition_ids
- attrs.merge!(reposition_parent)
- end
+ attrs.merge!(reposition_params(reposition_ids)) if reposition_ids
attrs
end
+ def reposition_params(reposition_ids)
+ reposition_parent.merge(move_between_ids: reposition_ids)
+ end
+
def move_single_issuable(issuable, issuable_modification_params)
ability_name = :"admin_#{issuable.to_ability_name}"
return unless can?(current_user, ability_name, issuable)
diff --git a/app/services/boards/base_items_list_service.rb b/app/services/boards/base_items_list_service.rb
index cbc7a332cbe..a3e24844587 100644
--- a/app/services/boards/base_items_list_service.rb
+++ b/app/services/boards/base_items_list_service.rb
@@ -6,9 +6,9 @@ module Boards
include ActiveRecord::ConnectionAdapters::Quoting
def execute
- return items.order_closed_date_desc if list&.closed?
+ items = init_collection
- ordered_items
+ order(items)
end
# rubocop: disable CodeReuse/ActiveRecord
@@ -17,7 +17,7 @@ module Boards
keys = metadata_fields.keys
# TODO: eliminate need for SQL literal fragment
columns = Arel.sql(metadata_fields.values_at(*keys).join(', '))
- results = item_model.where(id: items.select(issuables[:id])).pluck(columns)
+ results = item_model.where(id: init_collection.select(issuables[:id])).pluck(columns)
Hash[keys.zip(results.flatten)]
end
@@ -29,7 +29,7 @@ module Boards
{ size: 'COUNT(*)' }
end
- def ordered_items
+ def order(items)
raise NotImplementedError
end
@@ -47,8 +47,8 @@ module Boards
# We memoize the query here since the finder methods we use are quite complex. This does not memoize the result of the query.
# rubocop: disable CodeReuse/ActiveRecord
- def items
- strong_memoize(:items) do
+ def init_collection
+ strong_memoize(:init_collection) do
filter(finder.execute).reorder(nil)
end
end
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index 6284e454561..0e95bf7a434 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -11,7 +11,9 @@ module Boards
private
- def ordered_items
+ def order(items)
+ return items.order_closed_date_desc if list&.closed?
+
items.order_by_position_and_priority(with_cte: params[:search].present?)
end
diff --git a/app/services/branches/create_service.rb b/app/services/branches/create_service.rb
index 8684da701db..848e6aaa65a 100644
--- a/app/services/branches/create_service.rb
+++ b/app/services/branches/create_service.rb
@@ -2,8 +2,8 @@
module Branches
class CreateService < BaseService
- def execute(branch_name, ref, create_master_if_empty: true)
- create_master_branch if create_master_if_empty && project.empty_repo?
+ def execute(branch_name, ref, create_default_branch_if_empty: true)
+ create_default_branch if create_default_branch_if_empty && project.empty_repo?
result = ::Branches::ValidateNewService.new(project).execute(branch_name)
@@ -27,13 +27,13 @@ module Branches
private
- def create_master_branch
+ def create_default_branch
project.repository.create_file(
current_user,
'/README.md',
'',
message: 'Add README.md',
- branch_name: 'master'
+ branch_name: project.default_branch_or_main
)
end
end
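
Call sites use the renamed keyword accordingly; a hedged usage sketch (project and user are assumed to be in scope, and BaseService is assumed to take them as positional arguments):

    result = Branches::CreateService.new(project, user)
               .execute('feature-x', 'main', create_default_branch_if_empty: true)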
diff --git a/app/services/bulk_imports/file_decompression_service.rb b/app/services/bulk_imports/file_decompression_service.rb
new file mode 100644
index 00000000000..fe9017377ec
--- /dev/null
+++ b/app/services/bulk_imports/file_decompression_service.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class FileDecompressionService
+ include Gitlab::ImportExport::CommandLineUtil
+
+ ServiceError = Class.new(StandardError)
+
+ def initialize(dir:, filename:)
+ @dir = dir
+ @filename = filename
+ @filepath = File.join(@dir, @filename)
+ @decompressed_filename = File.basename(@filename, '.gz')
+ @decompressed_filepath = File.join(@dir, @decompressed_filename)
+ end
+
+ def execute
+ validate_dir
+ validate_decompressed_file_size if Feature.enabled?(:validate_import_decompressed_archive_size, default_enabled: :yaml)
+ validate_symlink(filepath)
+
+ decompress_file
+
+ validate_symlink(decompressed_filepath)
+
+ filepath
+ rescue StandardError => e
+ File.delete(filepath) if File.exist?(filepath)
+ File.delete(decompressed_filepath) if File.exist?(decompressed_filepath)
+
+ raise e
+ end
+
+ private
+
+ attr_reader :dir, :filename, :filepath, :decompressed_filename, :decompressed_filepath
+
+ def validate_dir
+ raise(ServiceError, 'Invalid target directory') unless dir.start_with?(Dir.tmpdir)
+ end
+
+ def validate_decompressed_file_size
+ raise(ServiceError, 'File decompression error') unless size_validator.valid?
+ end
+
+ def validate_symlink(filepath)
+ raise(ServiceError, 'Invalid file') if File.lstat(filepath).symlink?
+ end
+
+ def decompress_file
+ gunzip(dir: dir, filename: filename)
+ end
+
+ def size_validator
+ @size_validator ||= Gitlab::ImportExport::DecompressedArchiveSizeValidator.new(archive_path: filepath)
+ end
+ end
+end
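
A hypothetical invocation (the filename is made up; the target directory must live under Dir.tmpdir, which is exactly what Dir.mktmpdir provides):

    require 'tmpdir'

    BulkImports::FileDecompressionService
      .new(dir: Dir.mktmpdir, filename: 'labels.ndjson.gz')
      .execute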
diff --git a/app/services/bulk_imports/file_download_service.rb b/app/services/bulk_imports/file_download_service.rb
new file mode 100644
index 00000000000..c5a1241e0a4
--- /dev/null
+++ b/app/services/bulk_imports/file_download_service.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+module BulkImports
+ class FileDownloadService
+ FILE_SIZE_LIMIT = 5.gigabytes
+ ALLOWED_CONTENT_TYPES = %w(application/gzip application/octet-stream).freeze
+
+ ServiceError = Class.new(StandardError)
+
+ def initialize(configuration:, relative_url:, dir:, filename:)
+ @configuration = configuration
+ @relative_url = relative_url
+ @filename = filename
+ @dir = dir
+ @filepath = File.join(@dir, @filename)
+ end
+
+ def execute
+ validate_dir
+ validate_url
+ validate_content_type
+ validate_content_length
+
+ download_file
+
+ validate_symlink
+
+ filepath
+ end
+
+ private
+
+ attr_reader :configuration, :relative_url, :dir, :filename, :filepath
+
+ def download_file
+ File.open(filepath, 'wb') do |file|
+ bytes_downloaded = 0
+
+ http_client.stream(relative_url) do |chunk|
+ bytes_downloaded += chunk.size
+
+ raise(ServiceError, 'Invalid downloaded file') if bytes_downloaded > FILE_SIZE_LIMIT
+ raise(ServiceError, "File download error #{chunk.code}") unless chunk.code == 200
+
+ file.write(chunk)
+ end
+ end
+ rescue StandardError => e
+ File.delete(filepath) if File.exist?(filepath)
+
+ raise e
+ end
+
+ def http_client
+ @http_client ||= BulkImports::Clients::HTTP.new(
+ uri: configuration.url,
+ token: configuration.access_token
+ )
+ end
+
+ def allow_local_requests?
+ ::Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
+ end
+
+ def headers
+ @headers ||= http_client.head(relative_url).headers
+ end
+
+ def validate_dir
+ raise(ServiceError, 'Invalid target directory') unless dir.start_with?(Dir.tmpdir)
+ end
+
+ def validate_symlink
+ if File.lstat(filepath).symlink?
+ File.delete(filepath)
+
+ raise(ServiceError, 'Invalid downloaded file')
+ end
+ end
+
+ def validate_url
+ ::Gitlab::UrlBlocker.validate!(
+ http_client.resource_url(relative_url),
+ allow_localhost: allow_local_requests?,
+ allow_local_network: allow_local_requests?,
+ schemes: %w(http https)
+ )
+ end
+
+ def validate_content_length
+ content_size = headers['content-length']
+
+ raise(ServiceError, 'Invalid content length') if content_size.blank? || content_size.to_i > FILE_SIZE_LIMIT
+ end
+
+ def validate_content_type
+ content_type = headers['content-type']
+
+ raise(ServiceError, 'Invalid content type') if content_type.blank? || ALLOWED_CONTENT_TYPES.exclude?(content_type)
+ end
+ end
+end
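
A hypothetical invocation, assuming a BulkImports configuration object (providing url and access_token) comes from elsewhere in the import pipeline; the relative URL and filename are made up:

    require 'tmpdir'

    filepath = BulkImports::FileDownloadService.new(
      configuration: configuration,
      relative_url: '/export_relations/download?relation=labels',
      dir: Dir.mktmpdir,
      filename: 'labels.ndjson.gz'
    ).execute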
diff --git a/app/services/bulk_imports/relation_export_service.rb b/app/services/bulk_imports/relation_export_service.rb
index 53952a33b5f..055f9cafd10 100644
--- a/app/services/bulk_imports/relation_export_service.rb
+++ b/app/services/bulk_imports/relation_export_service.rb
@@ -86,7 +86,7 @@ module BulkImports
# rubocop: disable CodeReuse/Serializer
def serializer
- @serializer ||= ::Gitlab::ImportExport::JSON::StreamingSerializer.new(
+ @serializer ||= ::Gitlab::ImportExport::Json::StreamingSerializer.new(
portable,
portable_tree,
json_writer,
@@ -96,7 +96,7 @@ module BulkImports
# rubocop: enable CodeReuse/Serializer
def json_writer
- @json_writer ||= ::Gitlab::ImportExport::JSON::NdjsonWriter.new(export_path)
+ @json_writer ||= ::Gitlab::ImportExport::Json::NdjsonWriter.new(export_path)
end
def ndjson_filename
diff --git a/app/services/ci/create_downstream_pipeline_service.rb b/app/services/ci/create_downstream_pipeline_service.rb
index 64a99e404c6..1eff76c2e5d 100644
--- a/app/services/ci/create_downstream_pipeline_service.rb
+++ b/app/services/ci/create_downstream_pipeline_service.rb
@@ -19,13 +19,14 @@ module Ci
DuplicateDownstreamPipelineError.new,
bridge_id: @bridge.id, project_id: @bridge.project_id
)
- return
+
+ return error('Already has a downstream pipeline')
end
pipeline_params = @bridge.downstream_pipeline_params
target_ref = pipeline_params.dig(:target_revision, :ref)
- return unless ensure_preconditions!(target_ref)
+ return error('Pre-conditions not met') unless ensure_preconditions!(target_ref)
service = ::Ci::CreatePipelineService.new(
pipeline_params.fetch(:project),
@@ -119,8 +120,11 @@ module Ci
return false if @bridge.triggers_child_pipeline?
if Feature.enabled?(:ci_drop_cyclical_triggered_pipelines, @bridge.project, default_enabled: :yaml)
- checksums = @bridge.pipeline.base_and_ancestors.map { |pipeline| config_checksum(pipeline) }
- checksums.uniq.length != checksums.length
+ pipeline_checksums = @bridge.pipeline.base_and_ancestors.filter_map do |pipeline|
+ config_checksum(pipeline) unless pipeline.child?
+ end
+
+ pipeline_checksums.uniq.length != pipeline_checksums.length
end
end
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index fd333e24860..c039f31aafc 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -13,6 +13,7 @@ module Ci
Gitlab::Ci::Pipeline::Chain::Validate::SecurityOrchestrationPolicy,
Gitlab::Ci::Pipeline::Chain::Config::Content,
Gitlab::Ci::Pipeline::Chain::Config::Process,
+ Gitlab::Ci::Pipeline::Chain::Validate::AfterConfig,
Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs,
Gitlab::Ci::Pipeline::Chain::Skip,
Gitlab::Ci::Pipeline::Chain::SeedBlock,
diff --git a/app/services/ci/job_artifacts/create_service.rb b/app/services/ci/job_artifacts/create_service.rb
index a22ac87f660..9fc7c3b4d40 100644
--- a/app/services/ci/job_artifacts/create_service.rb
+++ b/app/services/ci/job_artifacts/create_service.rb
@@ -115,7 +115,6 @@ module Ci
case artifact.file_type
when 'dotenv' then parse_dotenv_artifact(artifact)
- when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
else success
end
end
@@ -165,10 +164,6 @@ module Ci
def parse_dotenv_artifact(artifact)
Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
end
-
- def parse_cluster_applications_artifact(artifact)
- Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
- end
end
end
end
diff --git a/app/services/ci/pipeline_creation/start_pipeline_service.rb b/app/services/ci/pipeline_creation/start_pipeline_service.rb
new file mode 100644
index 00000000000..27c12caaa0a
--- /dev/null
+++ b/app/services/ci/pipeline_creation/start_pipeline_service.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Ci
+ module PipelineCreation
+ class StartPipelineService
+ attr_reader :pipeline
+
+ def initialize(pipeline)
+ @pipeline = pipeline
+ end
+
+ def execute
+ Ci::ProcessPipelineService.new(pipeline).execute
+ end
+ end
+ end
+end
+
+::Ci::PipelineCreation::StartPipelineService.prepend_mod_with('Ci::PipelineCreation::StartPipelineService')
diff --git a/app/services/ci/pipeline_schedules/calculate_next_run_service.rb b/app/services/ci/pipeline_schedules/calculate_next_run_service.rb
new file mode 100644
index 00000000000..9978b2d4775
--- /dev/null
+++ b/app/services/ci/pipeline_schedules/calculate_next_run_service.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module Ci
+ module PipelineSchedules
+ class CalculateNextRunService < BaseService
+ include Gitlab::Utils::StrongMemoize
+
+ def execute(schedule, fallback_method:)
+ @schedule = schedule
+
+ return fallback_method.call unless ::Feature.enabled?(:ci_daily_limit_for_pipeline_schedules, project, default_enabled: :yaml)
+ return fallback_method.call unless plan_cron&.cron_valid?
+
+ now = Time.zone.now
+
+ schedule_next_run = schedule_cron.next_time_from(now)
+ return schedule_next_run if worker_cron.match?(schedule_next_run) && plan_cron.match?(schedule_next_run)
+
+ plan_next_run = plan_cron.next_time_from(now)
+ return plan_next_run if worker_cron.match?(plan_next_run)
+
+ worker_next_run = worker_cron.next_time_from(now)
+ return worker_next_run if plan_cron.match?(worker_next_run)
+
+ worker_cron.next_time_from(plan_next_run)
+ end
+
+ private
+
+ def schedule_cron
+ strong_memoize(:schedule_cron) do
+ Gitlab::Ci::CronParser.new(@schedule.cron, @schedule.cron_timezone)
+ end
+ end
+
+ def worker_cron
+ strong_memoize(:worker_cron) do
+ Gitlab::Ci::CronParser.new(worker_cron_expression, Time.zone.name)
+ end
+ end
+
+ def plan_cron
+ strong_memoize(:plan_cron) do
+ daily_limit = @schedule.daily_limit
+
+ next unless daily_limit
+
+ every_x_minutes = (1.day.in_minutes / daily_limit).to_i
+
+ Gitlab::Ci::CronParser.parse_natural("every #{every_x_minutes} minutes", Time.zone.name)
+ end
+ end
+
+ def worker_cron_expression
+ Settings.cron_jobs['pipeline_schedule_worker']['cron']
+ end
+ end
+ end
+end
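
The plan cron is derived from the daily limit; for example, a limit of 24 runs per day translates into an "every 60 minutes" cron (numbers chosen purely for illustration):

    daily_limit = 24
    every_x_minutes = (1.day.in_minutes / daily_limit).to_i  # => 1440 / 24 = 60
    # plan_cron then becomes Gitlab::Ci::CronParser.parse_natural("every 60 minutes", Time.zone.name),
    # and the returned run time must also line up with the pipeline_schedule_worker cron.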
diff --git a/app/services/ci/play_bridge_service.rb b/app/services/ci/play_bridge_service.rb
index c5b19a3963a..2f0bcece9e3 100644
--- a/app/services/ci/play_bridge_service.rb
+++ b/app/services/ci/play_bridge_service.rb
@@ -3,7 +3,7 @@
module Ci
class PlayBridgeService < ::BaseService
def execute(bridge)
- raise Gitlab::Access::AccessDeniedError unless can?(current_user, :play_job, bridge)
+ check_access!(bridge)
bridge.tap do |bridge|
bridge.user = current_user
@@ -14,5 +14,13 @@ module Ci
AfterRequeueJobService.new(project, current_user).execute(bridge)
end
end
+
+ private
+
+ def check_access!(bridge)
+ raise Gitlab::Access::AccessDeniedError unless can?(current_user, :play_job, bridge)
+ end
end
end
+
+Ci::PlayBridgeService.prepend_mod_with('Ci::PlayBridgeService')
diff --git a/app/services/ci/play_build_service.rb b/app/services/ci/play_build_service.rb
index 4953b1ea5fc..073c1a2d0e0 100644
--- a/app/services/ci/play_build_service.rb
+++ b/app/services/ci/play_build_service.rb
@@ -3,11 +3,7 @@
module Ci
class PlayBuildService < ::BaseService
def execute(build, job_variables_attributes = nil)
- raise Gitlab::Access::AccessDeniedError unless can?(current_user, :play_job, build)
-
- if job_variables_attributes.present? && !can?(current_user, :set_pipeline_variables, project)
- raise Gitlab::Access::AccessDeniedError
- end
+ check_access!(build, job_variables_attributes)
# Try to enqueue the build, otherwise create a duplicate.
#
@@ -23,5 +19,17 @@ module Ci
Ci::Build.retry(build, current_user)
end
end
+
+ private
+
+ def check_access!(build, job_variables_attributes)
+ raise Gitlab::Access::AccessDeniedError unless can?(current_user, :play_job, build)
+
+ if job_variables_attributes.present? && !can?(current_user, :set_pipeline_variables, project)
+ raise Gitlab::Access::AccessDeniedError
+ end
+ end
end
end
+
+Ci::PlayBuildService.prepend_mod_with('Ci::PlayBuildService')
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index 461647ffccc..6280bf4c986 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -22,11 +22,27 @@ module Ci
end
def execute(params = {})
+ db_all_caught_up = ::Gitlab::Database::LoadBalancing::Sticking.all_caught_up?(:runner, runner.id)
+
@metrics.increment_queue_operation(:queue_attempt)
- @metrics.observe_queue_time(:process, @runner.runner_type) do
+ result = @metrics.observe_queue_time(:process, @runner.runner_type) do
process_queue(params)
end
+
+ # Since we execute this query against a replica, it might lead to a false positive:
+ # we might receive the response "hi, we don't have any more builds for you" even though
+ # that is not true. If our DB replica is not up to date with the moment the runner event was
+ # generated, there might still be CI builds waiting to be picked up. Instead we should tell the runner:
+ # "Hi, we don't have any more builds now, but not everything is right anyway, so try again".
+ # The runner will retry, again against a replica, and will again check whether replication lag has caught up.
+ if !db_all_caught_up && !result.build
+ metrics.increment_queue_operation(:queue_replication_lag)
+
+ ::Ci::RegisterJobService::Result.new(nil, false) # rubocop:disable Cop/AvoidReturnFromBlocks
+ else
+ result
+ end
end
private
@@ -109,25 +125,23 @@ module Ci
builds = builds.queued_before(params[:job_age].seconds.ago)
end
- if Feature.enabled?(:ci_register_job_service_one_by_one, runner, default_enabled: true)
- build_ids = retrieve_queue(-> { builds.pluck(:id) })
-
- @metrics.observe_queue_size(-> { build_ids.size }, @runner.runner_type)
+ build_ids = retrieve_queue(-> { builds.pluck(:id) })
- build_ids.each do |build_id|
- yield Ci::Build.find(build_id)
- end
- else
- builds_array = retrieve_queue(-> { builds.to_a })
-
- @metrics.observe_queue_size(-> { builds_array.size }, @runner.runner_type)
+ @metrics.observe_queue_size(-> { build_ids.size }, @runner.runner_type)
- builds_array.each(&blk)
+ build_ids.each do |build_id|
+ yield Ci::Build.find(build_id)
end
end
# rubocop: enable CodeReuse/ActiveRecord
def retrieve_queue(queue_query_proc)
+ ##
+ # We want to reset a load balancing session to discard the side
+ # effects of writes that could have happened prior to this moment.
+ #
+ ::Gitlab::Database::LoadBalancing::Session.clear_session
+
@metrics.observe_queue_time(:retrieve, @runner.runner_type) do
queue_query_proc.call
end
@@ -182,13 +196,7 @@ module Ci
end
def max_queue_depth
- @max_queue_depth ||= begin
- if Feature.enabled?(:gitlab_ci_builds_queue_limit, runner, default_enabled: true)
- MAX_QUEUE_DEPTH
- else
- ::Gitlab::Database::MAX_INT_VALUE
- end
- end
+ MAX_QUEUE_DEPTH
end
# Force variables evaluation to occur now
@@ -271,15 +279,11 @@ module Ci
.order(Arel.sql('COALESCE(project_builds.running_builds, 0) ASC'), 'ci_builds.id ASC')
end
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def builds_for_project_runner
new_builds.where(project: runner.projects.without_deleted.with_builds_enabled).order('id ASC')
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def builds_for_group_runner
# Workaround for weird Rails bug, that makes `runner.groups.to_sql` to return `runner_id = NULL`
groups = ::Group.joins(:runner_namespaces).merge(runner.runner_namespaces)
@@ -291,17 +295,23 @@ module Ci
.without_deleted
new_builds.where(project: projects).order('id ASC')
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def running_builds_for_shared_runners
Ci::Build.running.where(runner: Ci::Runner.instance_type)
.group(:project_id).select(:project_id, 'count(*) AS running_builds')
end
+
+ def all_builds
+ if Feature.enabled?(:ci_pending_builds_queue_join, runner, default_enabled: :yaml)
+ Ci::Build.joins(:queuing_entry)
+ else
+ Ci::Build.all
+ end
+ end
# rubocop: enable CodeReuse/ActiveRecord
def new_builds
- builds = Ci::Build.pending.unstarted
+ builds = all_builds.pending.unstarted
builds = builds.ref_protected if runner.ref_protected?
builds
end
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index e03f2ae3d52..ea76771b80a 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -34,15 +34,9 @@ module Ci
attributes[:user] = current_user
Ci::Build.transaction do
- # mark all other builds of that name as retried
- build.pipeline.builds.latest
- .where(name: build.name)
- .update_all(retried: true, processed: true)
-
- create_build!(attributes).tap do
- # mark existing object as retried/processed without a reload
- build.retried = true
- build.processed = true
+ create_build!(attributes).tap do |new_build|
+ new_build.update_older_statuses_retried!
+ build.reset # refresh the data to get new values of `retried` and `processed`.
end
end
end
@@ -59,7 +53,6 @@ module Ci
def create_build!(attributes)
build = project.builds.new(attributes)
build.assign_attributes(::Gitlab::Ci::Pipeline::Seed::Build.environment_attributes_for(build))
- build.retried = false
BulkInsertableAssociations.with_bulk_insert do
build.save!
end
diff --git a/app/services/ci/update_build_queue_service.rb b/app/services/ci/update_build_queue_service.rb
index cf629b879b3..eea09e9ac67 100644
--- a/app/services/ci/update_build_queue_service.rb
+++ b/app/services/ci/update_build_queue_service.rb
@@ -2,13 +2,103 @@
module Ci
class UpdateBuildQueueService
- def execute(build, metrics = ::Gitlab::Ci::Queue::Metrics)
- tick_for(build, build.project.all_runners, metrics)
+ InvalidQueueTransition = Class.new(StandardError)
+
+ attr_reader :metrics
+
+ def initialize(metrics = ::Gitlab::Ci::Queue::Metrics)
+ @metrics = metrics
+ end
+
+ ##
+ # Add a build to the pending builds queue
+ #
+ def push(build, transition)
+ return unless maintain_pending_builds_queue?(build)
+
+ raise InvalidQueueTransition unless transition.to == 'pending'
+
+ transition.within_transaction do
+ result = build.create_queuing_entry!
+
+ unless result.empty?
+ metrics.increment_queue_operation(:build_queue_push)
+
+ result.rows.dig(0, 0)
+ end
+ end
+ end
+
+ ##
+ # Remove a build from the pending builds queue
+ #
+ def pop(build, transition)
+ return unless maintain_pending_builds_queue?(build)
+
+ raise InvalidQueueTransition unless transition.from == 'pending'
+
+ transition.within_transaction do
+ removed = build.all_queuing_entries.delete_all
+
+ if removed > 0
+ metrics.increment_queue_operation(:build_queue_pop)
+
+ build.id
+ end
+ end
+ end
+
+ ##
+ # Add shared runner build tracking entry (used for queuing).
+ #
+ def track(build, transition)
+ return unless Feature.enabled?(:ci_track_shared_runner_builds, build.project, default_enabled: :yaml)
+ return unless build.shared_runner_build?
+
+ raise InvalidQueueTransition unless transition.to == 'running'
+
+ transition.within_transaction do
+ result = ::Ci::RunningBuild.upsert_shared_runner_build!(build)
+
+ unless result.empty?
+ metrics.increment_queue_operation(:shared_runner_build_new)
+
+ result.rows.dig(0, 0)
+ end
+ end
+ end
+
+ ##
+ # Remove a runtime build tracking entry for a shared runner build (used for
+ # queuing).
+ #
+ def untrack(build, transition)
+ return unless Feature.enabled?(:ci_untrack_shared_runner_builds, build.project, default_enabled: :yaml)
+ return unless build.shared_runner_build?
+
+ raise InvalidQueueTransition unless transition.from == 'running'
+
+ transition.within_transaction do
+ removed = build.all_runtime_metadata.delete_all
+
+ if removed > 0
+ metrics.increment_queue_operation(:shared_runner_build_done)
+
+ build.id
+ end
+ end
+ end
+
+ ##
+ # Unblock runner associated with given project / build
+ #
+ def tick(build)
+ tick_for(build, build.project.all_available_runners)
end
private
- def tick_for(build, runners, metrics)
+ def tick_for(build, runners)
runners = runners.with_recent_runner_queue
runners = runners.with_tags if Feature.enabled?(:ci_preload_runner_tags, default_enabled: :yaml)
@@ -20,5 +110,9 @@ module Ci
runner.pick_build!(build)
end
end
+
+ def maintain_pending_builds_queue?(build)
+ Feature.enabled?(:ci_pending_builds_queue_maintain, build.project, default_enabled: :yaml)
+ end
end
end
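
push/pop (and track/untrack) are intended to be driven by the build's state-machine transitions; the sketch below is an assumption about the wiring, using the state_machines after_transition hook, not the actual Ci::Build code:

    # Hypothetical wiring inside the Ci::Build state machine.
    after_transition any => :pending do |build, transition|
      Ci::UpdateBuildQueueService.new.push(build, transition)
    end

    after_transition pending: any do |build, transition|
      Ci::UpdateBuildQueueService.new.pop(build, transition)
    end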
diff --git a/app/services/ci/update_build_state_service.rb b/app/services/ci/update_build_state_service.rb
index 874f4bf459a..abd50d2f110 100644
--- a/app/services/ci/update_build_state_service.rb
+++ b/app/services/ci/update_build_state_service.rb
@@ -19,8 +19,6 @@ module Ci
end
def execute
- overwrite_trace! if has_trace?
-
unless accept_available?
return update_build_state!
end
@@ -34,12 +32,6 @@ module Ci
private
- def overwrite_trace!
- metrics.increment_trace_operation(operation: :overwrite)
-
- build.trace.set(params[:trace]) if Gitlab::Ci::Features.trace_overwrite?
- end
-
def ensure_pending_state!
pending_state.created_at
end
@@ -151,10 +143,6 @@ module Ci
params.dig(:state).to_s
end
- def has_trace?
- params.dig(:trace).present?
- end
-
def has_checksum?
trace_checksum.present?
end
@@ -181,7 +169,7 @@ module Ci
state: params.fetch(:state),
trace_checksum: trace_checksum,
trace_bytesize: trace_bytesize,
- failure_reason: params.dig(:failure_reason)
+ failure_reason: failure_reason
)
unless build_state.present?
@@ -191,6 +179,14 @@ module Ci
build_state || build.pending_state
end
+ def failure_reason
+ reason = params.dig(:failure_reason)
+
+ return unless reason
+
+ Ci::BuildPendingState.failure_reasons.fetch(reason.to_s, 'unknown_failure')
+ end
+
##
# This method is releasing an exclusive lock on a build trace the moment we
# conclude that build status has been written and the build state update
diff --git a/app/services/clusters/applications/base_service.rb b/app/services/clusters/applications/base_service.rb
index 39a2d6bf758..c6f22cfa04c 100644
--- a/app/services/clusters/applications/base_service.rb
+++ b/app/services/clusters/applications/base_service.rb
@@ -5,8 +5,6 @@ module Clusters
class BaseService
InvalidApplicationError = Class.new(StandardError)
- FLUENTD_KNOWN_ATTRS = %i[host protocol port waf_log_enabled cilium_log_enabled].freeze
-
attr_reader :cluster, :current_user, :params
def initialize(cluster, user, params = {})
@@ -29,16 +27,6 @@ module Clusters
application.stack = params[:stack]
end
- if application.has_attribute?(:modsecurity_enabled)
- application.modsecurity_enabled = params[:modsecurity_enabled] || false
- end
-
- if application.has_attribute?(:modsecurity_mode)
- application.modsecurity_mode = params[:modsecurity_mode] || 0
- end
-
- apply_fluentd_related_attributes(application)
-
if application.respond_to?(:oauth_application)
application.oauth_application = create_oauth_application(application, request)
end
@@ -103,12 +91,6 @@ module Clusters
::Applications::CreateService.new(current_user, oauth_application_params).execute(request)
end
-
- def apply_fluentd_related_attributes(application)
- FLUENTD_KNOWN_ATTRS.each do |attr|
- application[attr] = params[attr] if application.has_attribute?(attr)
- end
- end
end
end
end
diff --git a/app/services/clusters/applications/schedule_update_service.rb b/app/services/clusters/applications/schedule_update_service.rb
index 4f130f76b87..4fabf1d809e 100644
--- a/app/services/clusters/applications/schedule_update_service.rb
+++ b/app/services/clusters/applications/schedule_update_service.rb
@@ -1,5 +1,6 @@
# frozen_string_literal: true
+# DEPRECATED: To be removed as part of https://gitlab.com/groups/gitlab-org/-/epics/5877
module Clusters
module Applications
class ScheduleUpdateService
@@ -7,14 +8,14 @@ module Clusters
attr_accessor :application, :project
- def initialize(application, project)
- @application = application
+ def initialize(cluster_prometheus_adapter, project)
+ @application = cluster_prometheus_adapter&.cluster&.application_prometheus
@project = project
end
def execute
return unless application
- return unless application.managed_prometheus?
+ return if application.externally_installed?
if recently_scheduled?
worker_class.perform_in(BACKOFF_DELAY, application.name, application.id, project.id, Time.current)
diff --git a/app/services/clusters/cleanup/app_service.rb b/app/services/clusters/cleanup/app_service.rb
deleted file mode 100644
index a7e29c78ea0..00000000000
--- a/app/services/clusters/cleanup/app_service.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- module Cleanup
- class AppService < Clusters::Cleanup::BaseService
- def execute
- persisted_applications = @cluster.persisted_applications
-
- persisted_applications.each do |app|
- next unless app.available?
- next unless app.can_uninstall?
-
- log_event(:uninstalling_app, application: app.class.application_name)
- uninstall_app_async(app)
- end
-
- # Keep calling the worker untill all dependencies are uninstalled
- return schedule_next_execution(Clusters::Cleanup::AppWorker) if persisted_applications.any?
-
- log_event(:schedule_remove_project_namespaces)
- cluster.continue_cleanup!
- end
-
- private
-
- def uninstall_app_async(application)
- application.make_scheduled!
-
- Clusters::Applications::UninstallWorker.perform_async(application.name, application.id)
- end
- end
- end
-end
diff --git a/app/services/clusters/cleanup/project_namespace_service.rb b/app/services/clusters/cleanup/project_namespace_service.rb
index 7621be565ff..16254041306 100644
--- a/app/services/clusters/cleanup/project_namespace_service.rb
+++ b/app/services/clusters/cleanup/project_namespace_service.rb
@@ -2,7 +2,7 @@
module Clusters
module Cleanup
- class ProjectNamespaceService < BaseService
+ class ProjectNamespaceService < ::Clusters::Cleanup::BaseService
KUBERNETES_NAMESPACE_BATCH_SIZE = 100
def execute
diff --git a/app/services/clusters/cleanup/service_account_service.rb b/app/services/clusters/cleanup/service_account_service.rb
index d60bd76d388..baac9e4a9e7 100644
--- a/app/services/clusters/cleanup/service_account_service.rb
+++ b/app/services/clusters/cleanup/service_account_service.rb
@@ -2,7 +2,7 @@
module Clusters
module Cleanup
- class ServiceAccountService < BaseService
+ class ServiceAccountService < ::Clusters::Cleanup::BaseService
def execute
delete_gitlab_service_account
diff --git a/app/services/clusters/gcp/finalize_creation_service.rb b/app/services/clusters/gcp/finalize_creation_service.rb
index 0aff1bcc8b9..73d6fc4dc8f 100644
--- a/app/services/clusters/gcp/finalize_creation_service.rb
+++ b/app/services/clusters/gcp/finalize_creation_service.rb
@@ -43,8 +43,6 @@ module Clusters
cluster.build_platform_kubernetes(
api_url: 'https://' + gke_cluster.endpoint,
ca_cert: Base64.decode64(gke_cluster.master_auth.cluster_ca_certificate),
- username: gke_cluster.master_auth.username,
- password: gke_cluster.master_auth.password,
authorization_type: authorization_type,
token: request_kubernetes_token)
end
@@ -75,18 +73,16 @@ module Clusters
def kube_client
@kube_client ||= build_kube_client!(
'https://' + gke_cluster.endpoint,
- Base64.decode64(gke_cluster.master_auth.cluster_ca_certificate),
- gke_cluster.master_auth.username,
- gke_cluster.master_auth.password
+ Base64.decode64(gke_cluster.master_auth.cluster_ca_certificate)
)
end
- def build_kube_client!(api_url, ca_pem, username, password)
- raise "Incomplete settings" unless api_url && username && password
+ def build_kube_client!(api_url, ca_pem)
+ raise "Incomplete settings" unless api_url
Gitlab::Kubernetes::KubeClient.new(
api_url,
- auth_options: { username: username, password: password },
+ auth_options: { bearer_token: provider.access_token },
ssl_options: kubeclient_ssl_options(ca_pem),
http_proxy_uri: ENV['http_proxy']
)
diff --git a/app/services/clusters/parse_cluster_applications_artifact_service.rb b/app/services/clusters/parse_cluster_applications_artifact_service.rb
deleted file mode 100644
index b9b2953b6bd..00000000000
--- a/app/services/clusters/parse_cluster_applications_artifact_service.rb
+++ /dev/null
@@ -1,94 +0,0 @@
-# frozen_string_literal: true
-
-module Clusters
- class ParseClusterApplicationsArtifactService < ::BaseService
- include Gitlab::Utils::StrongMemoize
-
- MAX_ACCEPTABLE_ARTIFACT_SIZE = 5.kilobytes
- RELEASE_NAMES = %w[cilium].freeze
-
- def initialize(job, current_user)
- @job = job
-
- super(job.project, current_user)
- end
-
- def execute(artifact)
- raise ArgumentError, 'Artifact is not cluster_applications file type' unless artifact&.cluster_applications?
-
- return error(too_big_error_message, :bad_request) unless artifact.file.size < MAX_ACCEPTABLE_ARTIFACT_SIZE
- return error(no_deployment_message, :bad_request) unless job.deployment
- return error(no_deployment_cluster_message, :bad_request) unless cluster
-
- parse!(artifact)
-
- success
- rescue Gitlab::Kubernetes::Helm::Parsers::ListV2::ParserError, ActiveRecord::RecordInvalid => error
- Gitlab::ErrorTracking.track_exception(error, job_id: artifact.job_id)
- error(error.message, :bad_request)
- end
-
- private
-
- attr_reader :job
-
- def cluster
- strong_memoize(:cluster) do
- deployment_cluster = job.deployment&.cluster
-
- deployment_cluster if Ability.allowed?(current_user, :admin_cluster, deployment_cluster)
- end
- end
-
- def parse!(artifact)
- releases = []
-
- artifact.each_blob do |blob|
- next if blob.empty?
-
- releases.concat(Gitlab::Kubernetes::Helm::Parsers::ListV2.new(blob).releases)
- end
-
- update_cluster_application_statuses!(releases)
- end
-
- def update_cluster_application_statuses!(releases)
- release_by_name = releases.index_by { |release| release['Name'] }
-
- Clusters::Cluster.transaction do
- RELEASE_NAMES.each do |release_name|
- application_class = Clusters::Cluster::APPLICATIONS[release_name]
- application = cluster.find_or_build_application(application_class)
-
- release = release_by_name[release_name]
-
- if release
- case release['Status']
- when 'DEPLOYED'
- application.make_externally_installed!
- when 'FAILED'
- application.make_errored!(s_('ClusterIntegration|Helm release failed to install'))
- end
- else
- # missing, so by definition, we consider this uninstalled
- application.make_externally_uninstalled! if application.persisted?
- end
- end
- end
- end
-
- def too_big_error_message
- human_size = ActiveSupport::NumberHelper.number_to_human_size(MAX_ACCEPTABLE_ARTIFACT_SIZE)
-
- s_('ClusterIntegration|Cluster_applications artifact too big. Maximum allowable size: %{human_size}') % { human_size: human_size }
- end
-
- def no_deployment_message
- s_('ClusterIntegration|No deployment found for this job')
- end
-
- def no_deployment_cluster_message
- s_('ClusterIntegration|No deployment cluster found for this job')
- end
- end
-end
diff --git a/app/services/commits/change_service.rb b/app/services/commits/change_service.rb
index edb9f04ccd7..dc7f84ab807 100644
--- a/app/services/commits/change_service.rb
+++ b/app/services/commits/change_service.rb
@@ -6,6 +6,7 @@ module Commits
super
@commit = params[:commit]
+ @message = params[:message]
end
private
@@ -14,7 +15,9 @@ module Commits
raise NotImplementedError unless repository.respond_to?(action)
# rubocop:disable GitlabSecurity/PublicSend
- message = @commit.public_send(:"#{action}_message", current_user)
+ message =
+ @message || @commit.public_send(:"#{action}_message", current_user)
+
repository.public_send(
action,
current_user,
diff --git a/app/services/container_expiration_policies/cleanup_service.rb b/app/services/container_expiration_policies/cleanup_service.rb
index 38a3fc231c6..cd988cdc5fe 100644
--- a/app/services/container_expiration_policies/cleanup_service.rb
+++ b/app/services/container_expiration_policies/cleanup_service.rb
@@ -49,7 +49,6 @@ module ContainerExpirationPolicies
private
def schedule_next_run_if_needed
- return unless Feature.enabled?(:container_registry_expiration_policies_loopless)
return if policy.next_run_at.future?
repos_before_next_run = ::ContainerRepository.for_project_id(policy.project_id)
diff --git a/app/services/deployments/update_environment_service.rb b/app/services/deployments/update_environment_service.rb
index 9e862d6fa52..6f85779c285 100644
--- a/app/services/deployments/update_environment_service.rb
+++ b/app/services/deployments/update_environment_service.rb
@@ -9,6 +9,8 @@ module Deployments
delegate :variables, to: :deployable
delegate :options, to: :deployable, allow_nil: true
+ EnvironmentUpdateFailure = Class.new(StandardError)
+
def initialize(deployment)
@deployment = deployment
@deployable = deployment.deployable
@@ -31,8 +33,18 @@ module Deployments
renew_deployment_tier
environment.fire_state_event(action)
- if environment.save && !environment.stopped?
- deployment.update_merge_request_metrics!
+ if environment.save
+ deployment.update_merge_request_metrics! unless environment.stopped?
+ else
+ # If there is a validation error on the environment update, such as
+ # a malformed external URL, the error message is recorded for debugging purposes.
+ # We should surface the error message to users so they can take action.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/21182.
+ Gitlab::ErrorTracking.track_exception(
+ EnvironmentUpdateFailure.new,
+ project_id: deployment.project_id,
+ environment_id: environment.id,
+ reason: environment.errors.full_messages.to_sentence)
end
end
end
diff --git a/app/services/design_management/copy_design_collection/copy_service.rb b/app/services/design_management/copy_design_collection/copy_service.rb
index 496103f9e58..b40f6a81174 100644
--- a/app/services/design_management/copy_design_collection/copy_service.rb
+++ b/app/services/design_management/copy_design_collection/copy_service.rb
@@ -86,7 +86,7 @@ module DesignManagement
def with_temporary_branch(&block)
target_repository.create_if_not_exists
- create_master_branch! if target_repository.empty?
+ create_default_branch! if target_repository.empty?
create_temporary_branch!
yield
@@ -95,9 +95,9 @@ module DesignManagement
end
# A project that does not have any designs will have a blank design
- # repository. To create a temporary branch from `master` we need
- # create `master` first by adding a file to it.
- def create_master_branch!
+ # repository. To create a temporary branch from default branch we need to
+ # create default branch first by adding a file to it.
+ def create_default_branch!
target_repository.create_file(
git_user,
".CopyDesignCollectionService_#{Time.now.to_i}",
@@ -121,7 +121,7 @@ module DesignManagement
target_repository.rm_branch(git_user, temporary_branch)
end
- # Merge the temporary branch containing the commits to `master`
+ # Merge the temporary branch containing the commits to default branch
# and update the state of the target_design_collection.
def finalize!
source_sha = shas.last
diff --git a/app/services/design_management/design_service.rb b/app/services/design_management/design_service.rb
index 5aa2a2f73bc..f337a9dc8e0 100644
--- a/app/services/design_management/design_service.rb
+++ b/app/services/design_management/design_service.rb
@@ -13,7 +13,7 @@ module DesignManagement
attr_reader :issue
def target_branch
- repository.root_ref || "master"
+ repository.root_ref || Gitlab::DefaultBranch.value(object: project)
end
def collection
diff --git a/app/services/discussions/resolve_service.rb b/app/services/discussions/resolve_service.rb
index 3b733023eae..baf14aa8a03 100644
--- a/app/services/discussions/resolve_service.rb
+++ b/app/services/discussions/resolve_service.rb
@@ -47,9 +47,16 @@ module Discussions
MergeRequests::ResolvedDiscussionNotificationService.new(project: project, current_user: current_user).execute(merge_request)
end
+ resolve_user_todos_for(discussion)
SystemNoteService.discussion_continued_in_issue(discussion, project, current_user, follow_up_issue) if follow_up_issue
end
+ def resolve_user_todos_for(discussion)
+ return unless discussion.for_design?
+
+ TodoService.new.resolve_todos_for_target(discussion, current_user)
+ end
+
def first_discussion
@first_discussion ||= discussions.first
end
diff --git a/app/services/feature_flags/disable_service.rb b/app/services/feature_flags/disable_service.rb
deleted file mode 100644
index 8a443ac1795..00000000000
--- a/app/services/feature_flags/disable_service.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-module FeatureFlags
- class DisableService < BaseService
- def execute
- return error('Feature Flag not found', 404) unless feature_flag_by_name
- return error('Feature Flag Scope not found', 404) unless feature_flag_scope_by_environment_scope
- return error('Strategy not found', 404) unless strategy_exist_in_persisted_data?
-
- ::FeatureFlags::UpdateService
- .new(project, current_user, update_params)
- .execute(feature_flag_by_name)
- end
-
- private
-
- def update_params
- if remaining_strategies.empty?
- params_to_destroy_scope
- else
- params_to_update_scope
- end
- end
-
- def remaining_strategies
- strong_memoize(:remaining_strategies) do
- feature_flag_scope_by_environment_scope.strategies.reject do |strategy|
- strategy['name'] == params[:strategy]['name'] &&
- strategy['parameters'] == params[:strategy]['parameters']
- end
- end
- end
-
- def strategy_exist_in_persisted_data?
- feature_flag_scope_by_environment_scope.strategies != remaining_strategies
- end
-
- def params_to_destroy_scope
- { scopes_attributes: [{ id: feature_flag_scope_by_environment_scope.id, _destroy: true }] }
- end
-
- def params_to_update_scope
- { scopes_attributes: [{ id: feature_flag_scope_by_environment_scope.id, strategies: remaining_strategies }] }
- end
- end
-end
diff --git a/app/services/feature_flags/enable_service.rb b/app/services/feature_flags/enable_service.rb
deleted file mode 100644
index b4cbb32e003..00000000000
--- a/app/services/feature_flags/enable_service.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-module FeatureFlags
- class EnableService < BaseService
- def execute
- if feature_flag_by_name
- update_feature_flag
- else
- create_feature_flag
- end
- end
-
- private
-
- def create_feature_flag
- ::FeatureFlags::CreateService
- .new(project, current_user, create_params)
- .execute
- end
-
- def update_feature_flag
- ::FeatureFlags::UpdateService
- .new(project, current_user, update_params)
- .execute(feature_flag_by_name)
- end
-
- def create_params
- if params[:environment_scope] == '*'
- params_to_create_flag_with_default_scope
- else
- params_to_create_flag_with_additional_scope
- end
- end
-
- def update_params
- if feature_flag_scope_by_environment_scope
- params_to_update_scope
- else
- params_to_create_scope
- end
- end
-
- def params_to_create_flag_with_default_scope
- {
- name: params[:name],
- scopes_attributes: [
- {
- active: true,
- environment_scope: '*',
- strategies: [params[:strategy]]
- }
- ]
- }
- end
-
- def params_to_create_flag_with_additional_scope
- {
- name: params[:name],
- scopes_attributes: [
- {
- active: false,
- environment_scope: '*'
- },
- {
- active: true,
- environment_scope: params[:environment_scope],
- strategies: [params[:strategy]]
- }
- ]
- }
- end
-
- def params_to_create_scope
- {
- scopes_attributes: [{
- active: true,
- environment_scope: params[:environment_scope],
- strategies: [params[:strategy]]
- }]
- }
- end
-
- def params_to_update_scope
- {
- scopes_attributes: [{
- id: feature_flag_scope_by_environment_scope.id,
- active: true,
- strategies: feature_flag_scope_by_environment_scope.strategies | [params[:strategy]]
- }]
- }
- end
- end
-end
diff --git a/app/services/groups/create_service.rb b/app/services/groups/create_service.rb
index 8e8efe7d555..f900927793a 100644
--- a/app/services/groups/create_service.rb
+++ b/app/services/groups/create_service.rb
@@ -28,7 +28,7 @@ module Groups
@group.name ||= @group.path.dup
if create_chat_team?
- response = Mattermost::CreateTeamService.new(@group, current_user).execute
+ response = ::Mattermost::CreateTeamService.new(@group, current_user).execute
return @group if @group.errors.any?
@group.build_chat_team(name: response['name'], team_id: response['id'])
diff --git a/app/services/groups/group_links/create_service.rb b/app/services/groups/group_links/create_service.rb
index 0a60140d037..5f81e5972b0 100644
--- a/app/services/groups/group_links/create_service.rb
+++ b/app/services/groups/group_links/create_service.rb
@@ -3,27 +3,51 @@
module Groups
module GroupLinks
class CreateService < Groups::BaseService
- def execute(shared_group)
- unless group && shared_group &&
+ def initialize(shared_group, shared_with_group, user, params)
+ @shared_group = shared_group
+ super(shared_with_group, user, params)
+ end
+
+ def execute
+ unless shared_with_group && shared_group &&
can?(current_user, :admin_group_member, shared_group) &&
- can?(current_user, :read_group, group)
+ can?(current_user, :read_group, shared_with_group) &&
+ sharing_allowed?
return error('Not Found', 404)
end
link = GroupGroupLink.new(
shared_group: shared_group,
- shared_with_group: group,
+ shared_with_group: shared_with_group,
group_access: params[:shared_group_access],
expires_at: params[:expires_at]
)
if link.save
- group.refresh_members_authorized_projects(direct_members_only: true)
+ shared_with_group.refresh_members_authorized_projects(direct_members_only: true)
success(link: link)
else
error(link.errors.full_messages.to_sentence, 409)
end
end
+
+ private
+
+ attr_reader :shared_group
+
+ alias_method :shared_with_group, :group
+
+ def sharing_allowed?
+ sharing_outside_hierarchy_allowed? || within_hierarchy?
+ end
+
+ def sharing_outside_hierarchy_allowed?
+ !shared_group.root_ancestor.namespace_settings.prevent_sharing_groups_outside_hierarchy
+ end
+
+ def within_hierarchy?
+ shared_group.root_ancestor.self_and_descendants_ids.include?(shared_with_group.id)
+ end
end
end
end
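Note on the reworked GroupLinks::CreateService above: the shared group now goes through the constructor and #execute takes no arguments. A minimal caller sketch under that assumption; the variable names and the chosen access level are illustrative, and the result hash shape follows the success/error helpers used in the service:

# Hypothetical caller; shared_group, shared_with_group and current_user are illustrative.
params = {
  shared_group_access: Gitlab::Access::DEVELOPER,
  expires_at: 30.days.from_now
}

result = ::Groups::GroupLinks::CreateService
  .new(shared_group, shared_with_group, current_user, params)
  .execute

if result[:status] == :success
  result[:link]    # the persisted GroupGroupLink
else
  result[:message] # e.g. 'Not Found', with result[:http_status] == 404
end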
diff --git a/app/services/groups/participants_service.rb b/app/services/groups/participants_service.rb
index 0844c98dd6a..1de2b3c5a2e 100644
--- a/app/services/groups/participants_service.rb
+++ b/app/services/groups/participants_service.rb
@@ -23,9 +23,9 @@ module Groups
end
def group_members
- return [] unless noteable
+ return [] unless group
- @group_members ||= sorted(noteable.group.direct_and_indirect_users)
+ @group_members ||= sorted(group.direct_and_indirect_users)
end
end
end
diff --git a/app/services/groups/transfer_service.rb b/app/services/groups/transfer_service.rb
index 56ff1310def..518d061c654 100644
--- a/app/services/groups/transfer_service.rb
+++ b/app/services/groups/transfer_service.rb
@@ -108,10 +108,13 @@ module Groups
@group.parent = @new_parent_group
@group.clear_memoization(:self_and_ancestors_ids)
+ @group.clear_memoization(:root_ancestor) if different_root_ancestor?
inherit_group_shared_runners_settings
@group.save!
+ # #reload is called to make sure traversal_ids are reloaded
+ @group.reload # rubocop:disable Cop/ActiveRecordAssociationReload
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/services/import_export_clean_up_service.rb b/app/services/import_export_clean_up_service.rb
index 66ac7dac4ca..567ac065cf7 100644
--- a/app/services/import_export_clean_up_service.rb
+++ b/app/services/import_export_clean_up_service.rb
@@ -2,6 +2,7 @@
class ImportExportCleanUpService
LAST_MODIFIED_TIME_IN_MINUTES = 1440
+ DIR_DEPTH = 5
attr_reader :mmin, :path
@@ -27,15 +28,42 @@ class ImportExportCleanUpService
end
def clean_up_export_files
- Gitlab::Popen.popen(%W(find #{path} -not -path #{path} -mmin +#{mmin} -delete))
+ old_directories do |dir|
+ FileUtils.remove_entry(dir)
+
+ logger.info(
+ message: 'Removed Import/Export tmp directory',
+ dir_path: dir
+ )
+ end
end
- # rubocop: disable CodeReuse/ActiveRecord
def clean_up_export_object_files
- ImportExportUpload.where('updated_at < ?', mmin.minutes.ago).each do |upload|
+ ImportExportUpload.with_export_file.updated_before(mmin.minutes.ago).each do |upload|
upload.remove_export_file!
upload.save!
+
+ logger.info(
+ message: 'Removed Import/Export export_file',
+ project_id: upload.project_id,
+ group_id: upload.group_id
+ )
+ end
+ end
+
+ def old_directories
+ IO.popen(directories_cmd) do |find|
+ find.each_line(chomp: true) do |directory|
+ yield directory
+ end
end
end
- # rubocop: enable CodeReuse/ActiveRecord
+
+ def directories_cmd
+ %W(find #{path} -mindepth #{DIR_DEPTH} -maxdepth #{DIR_DEPTH} -type d -not -path #{path} -mmin +#{mmin})
+ end
+
+ def logger
+ @logger ||= Gitlab::Import::Logger.build
+ end
end
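For the cleanup change above, a short sketch of what the fixed-depth lookup amounts to; the entry point and the example invocation are assumptions, while the defaults (mmin = 1440, DIR_DEPTH = 5) come from the constants in the class:

# directories_cmd builds an argv equivalent to the shell command
#   find <path> -mindepth 5 -maxdepth 5 -type d -not -path <path> -mmin +1440
# old_directories then streams each matching directory from IO.popen, and
# clean_up_export_files removes it with FileUtils.remove_entry, logging each removal.
ImportExportCleanUpService.new.execute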
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 099e0d81bc9..02c1f078a40 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -27,8 +27,14 @@ class IssuableBaseService < ::BaseProjectService
can?(current_user, ability_name, issuable)
end
+ def can_set_issuable_metadata?(issuable)
+ ability_name = :"set_#{issuable.to_ability_name}_metadata"
+
+ can?(current_user, ability_name, issuable)
+ end
+
def filter_params(issuable)
- unless can_admin_issuable?(issuable)
+ unless can_set_issuable_metadata?(issuable)
params.delete(:milestone)
params.delete(:milestone_id)
params.delete(:labels)
@@ -45,6 +51,7 @@ class IssuableBaseService < ::BaseProjectService
params.delete(:canonical_issue_id)
params.delete(:project)
params.delete(:discussion_locked)
+ params.delete(:confidential)
end
filter_assignees(issuable)
@@ -184,7 +191,7 @@ class IssuableBaseService < ::BaseProjectService
params[:assignee_ids] = process_assignee_ids(params, extra_assignee_ids: issuable.assignee_ids.to_a)
end
- issuable.assign_attributes(params)
+ issuable.assign_attributes(allowed_create_params(params))
before_create(issuable)
@@ -194,6 +201,7 @@ class IssuableBaseService < ::BaseProjectService
if issuable_saved
create_system_notes(issuable, is_update: false) unless skip_system_notes
+ handle_changes(issuable, { params: params })
after_create(issuable)
execute_hooks(issuable)
@@ -233,7 +241,7 @@ class IssuableBaseService < ::BaseProjectService
assign_requested_assignees(issuable)
if issuable.changed? || params.present?
- issuable.assign_attributes(params)
+ issuable.assign_attributes(allowed_update_params(params))
if has_title_or_description_changed?(issuable)
issuable.assign_attributes(last_edited_at: Time.current, last_edited_by: current_user)
@@ -260,7 +268,7 @@ class IssuableBaseService < ::BaseProjectService
issuable, old_labels: old_associations[:labels], old_milestone: old_associations[:milestone]
)
- handle_changes(issuable, old_associations: old_associations)
+ handle_changes(issuable, old_associations: old_associations, params: params)
new_assignees = issuable.assignees.to_a
affected_assignees = (old_associations[:assignees] + new_assignees) - (old_associations[:assignees] & new_assignees)
@@ -432,6 +440,7 @@ class IssuableBaseService < ::BaseProjectService
milestone: issuable.try(:milestone)
}
associations[:total_time_spent] = issuable.total_time_spent if issuable.respond_to?(:total_time_spent)
+ associations[:time_change] = issuable.time_change if issuable.respond_to?(:time_change)
associations[:description] = issuable.description
associations[:reviewers] = issuable.reviewers.to_a if issuable.allows_reviewers?
@@ -505,6 +514,14 @@ class IssuableBaseService < ::BaseProjectService
def update_timestamp?(issuable)
issuable.changes.keys != ["relative_position"]
end
+
+ def allowed_create_params(params)
+ params
+ end
+
+ def allowed_update_params(params)
+ params
+ end
end
IssuableBaseService.prepend_mod_with('IssuableBaseService')
diff --git a/app/services/issue_rebalancing_service.rb b/app/services/issue_rebalancing_service.rb
index 6a8d45b92b2..142d287370f 100644
--- a/app/services/issue_rebalancing_service.rb
+++ b/app/services/issue_rebalancing_service.rb
@@ -15,14 +15,13 @@ class IssueRebalancingService
[5.seconds, 1.second]
].freeze
- def initialize(issue)
- @issue = issue
- @base = Issue.relative_positioning_query_base(issue)
+ def initialize(projects_collection)
+ @root_namespace = projects_collection.take.root_namespace # rubocop:disable CodeReuse/ActiveRecord
+ @base = Issue.in_projects(projects_collection)
end
def execute
- gates = [issue.project, issue.project.group].compact
- return unless gates.any? { |gate| Feature.enabled?(:rebalance_issues, gate) }
+ return unless Feature.enabled?(:rebalance_issues, root_namespace)
raise TooManyIssues, "#{issue_count} issues" if issue_count > MAX_ISSUE_COUNT
@@ -57,7 +56,7 @@ class IssueRebalancingService
private
- attr_reader :issue, :base
+ attr_reader :root_namespace, :base
# rubocop: disable CodeReuse/ActiveRecord
def indexed_ids
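With the constructor change above, the rebalancing service is keyed on a collection of projects under one root namespace rather than a single issue. A minimal caller sketch, assuming Project.id_in is available as the usual id-filter scope:

# All project ids here are assumed to belong to the same root namespace,
# matching how @root_namespace is derived from the first project in the collection.
projects = Project.id_in(project_ids)
IssueRebalancingService.new(projects).execute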
diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb
index 72e906e20f1..1c50bb74176 100644
--- a/app/services/issues/base_service.rb
+++ b/app/services/issues/base_service.rb
@@ -29,7 +29,7 @@ module Issues
gates = [issue.project, issue.project.group].compact
return unless gates.any? { |gate| Feature.enabled?(:rebalance_issues, gate) }
- IssueRebalancingWorker.perform_async(nil, issue.project_id)
+ IssueRebalancingWorker.perform_async(nil, *issue.project.self_or_root_group_ids)
end
private
@@ -38,6 +38,7 @@ module Issues
super
params.delete(:issue_type) unless issue_type_allowed?(issue)
+ filter_incident_label(issue) if params[:issue_type]
moved_issue = params.delete(:moved_issue)
@@ -82,6 +83,37 @@ module Issues
def issue_type_allowed?(object)
can?(current_user, :"create_#{params[:issue_type]}", object)
end
+
+ # @param issue [Issue]
+ def filter_incident_label(issue)
+ return unless add_incident_label?(issue) || remove_incident_label?(issue)
+
+ label = ::IncidentManagement::CreateIncidentLabelService
+ .new(project, current_user)
+ .execute
+ .payload[:label]
+
+      # These (add_label_ids, remove_label_ids) are added ahead of time
+      # to be consumed by #process_label_ids; this allows system notes
+      # to be applied correctly alongside the label updates.
+ if add_incident_label?(issue)
+ params[:add_label_ids] ||= []
+ params[:add_label_ids] << label.id
+ else
+ params[:remove_label_ids] ||= []
+ params[:remove_label_ids] << label.id
+ end
+ end
+
+ # @param issue [Issue]
+ def add_incident_label?(issue)
+ issue.incident?
+ end
+
+ # @param _issue [Issue, nil]
+ def remove_incident_label?(_issue)
+ false
+ end
end
end
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index 1700d1d8586..cc4ad1a9c85 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -25,7 +25,6 @@ module Issues
end
if project.issues_enabled? && issue.close(current_user)
- remove_on_close_labels_from(issue)
event_service.close_issue(issue, current_user)
create_note(issue, closed_via) if system_note
@@ -52,18 +51,6 @@ module Issues
private
- def remove_on_close_labels_from(issue)
- old_labels = issue.labels.to_a
-
- issue.label_links.with_remove_on_close_labels.delete_all
- issue.labels.reset
-
- Issuable::CommonSystemNotesService.new(project: project, current_user: current_user).execute(
- issue,
- old_labels: old_labels
- )
- end
-
def close_external_issue(issue, closed_via)
return unless project.external_issue_tracker&.support_close_issue?
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index 1f4efeb1a8a..53f3dc39ba3 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -34,13 +34,26 @@ module Issues
# Add new items to Issues::AfterCreateService if they can be performed in Sidekiq
def after_create(issue)
- add_incident_label(issue)
user_agent_detail_service.create
resolve_discussions_with_issue(issue)
super
end
+ def handle_changes(issue, options)
+ super
+ old_associations = options.fetch(:old_associations, {})
+ old_assignees = old_associations.fetch(:assignees, [])
+
+ handle_assignee_changes(issue, old_assignees)
+ end
+
+ def handle_assignee_changes(issue, old_assignees)
+ return if issue.assignees == old_assignees
+
+ create_assignee_note(issue, old_assignees)
+ end
+
def resolve_discussions_with_issue(issue)
return if discussions_to_resolve.empty?
@@ -56,22 +69,6 @@ module Issues
def user_agent_detail_service
UserAgentDetailService.new(@issue, request)
end
-
- # Applies label "incident" (creates it if missing) to incident issues.
-    # For use in "after" hooks only to ensure we are not applying
- # labels prematurely.
- def add_incident_label(issue)
- return unless issue.incident?
-
- label = ::IncidentManagement::CreateIncidentLabelService
- .new(project, current_user)
- .execute
- .payload[:label]
-
- return if issue.label_ids.include?(label.id)
-
- issue.labels << label
- end
end
end
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index af5029f8364..cf2892bf413 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -20,17 +20,6 @@ module Issues
super
end
- override :filter_params
- def filter_params(issue)
- super
-
- # filter confidential in `Issues::UpdateService` and not in `IssuableBaseService#filter_params`
- # because we do allow users that cannot admin issues to set confidential flag when creating an issue
- unless can_admin_issuable?(issue)
- params.delete(:confidential)
- end
- end
-
def before_update(issue, skip_spam_check: false)
return if skip_spam_check
@@ -43,6 +32,7 @@ module Issues
end
def handle_changes(issue, options)
+ super
old_associations = options.fetch(:old_associations, {})
old_labels = old_associations.fetch(:labels, [])
old_mentioned_users = old_associations.fetch(:mentioned_users, [])
@@ -204,6 +194,16 @@ module Issues
def create_confidentiality_note(issue)
SystemNoteService.change_issue_confidentiality(issue, issue.project, current_user)
end
+
+ override :add_incident_label?
+ def add_incident_label?(issue)
+ issue.issue_type != params[:issue_type] && !issue.incident?
+ end
+
+ override :remove_incident_label?
+ def remove_incident_label?(issue)
+ issue.issue_type != params[:issue_type] && issue.incident?
+ end
end
end
diff --git a/app/services/issues/zoom_link_service.rb b/app/services/issues/zoom_link_service.rb
index ef48134dec4..1ce459aa7e6 100644
--- a/app/services/issues/zoom_link_service.rb
+++ b/app/services/issues/zoom_link_service.rb
@@ -47,11 +47,11 @@ module Issues
attr_reader :issue
def track_meeting_added_event
- ::Gitlab::Tracking.event('IncidentManagement::ZoomIntegration', 'add_zoom_meeting', label: 'Issue ID', value: issue.id)
+ ::Gitlab::Tracking.event('IncidentManagement::ZoomIntegration', 'add_zoom_meeting', label: 'Issue ID', value: issue.id, user: current_user, project: @project, namespace: @project.namespace)
end
def track_meeting_removed_event
- ::Gitlab::Tracking.event('IncidentManagement::ZoomIntegration', 'remove_zoom_meeting', label: 'Issue ID', value: issue.id)
+ ::Gitlab::Tracking.event('IncidentManagement::ZoomIntegration', 'remove_zoom_meeting', label: 'Issue ID', value: issue.id, user: current_user, project: @project, namespace: @project.namespace)
end
def add_zoom_meeting(link)
diff --git a/app/services/jira_import/users_importer.rb b/app/services/jira_import/users_importer.rb
index 3de165c1014..5b2f91efc38 100644
--- a/app/services/jira_import/users_importer.rb
+++ b/app/services/jira_import/users_importer.rb
@@ -31,21 +31,10 @@ module JiraImport
@users_mapper_service ||= user_mapper_service_factory
end
- def deployment_type
- # TODO: use project.jira_service.deployment_type value when https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37003 is merged
- @deployment_type ||= client.ServerInfo.all.deploymentType
- end
-
- def client
- @client ||= project.jira_service.client
- end
-
def user_mapper_service_factory
- # TODO: use deployment_type enum from jira service when https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37003 is merged
- case deployment_type.upcase
- when JiraService::DEPLOYMENT_TYPES[:server]
+ if project.jira_service.data_fields.deployment_server?
ServerUsersMapperService.new(user, project, start_at)
- when JiraService::DEPLOYMENT_TYPES[:cloud]
+ elsif project.jira_service.data_fields.deployment_cloud?
CloudUsersMapperService.new(user, project, start_at)
else
raise ArgumentError
diff --git a/app/services/mattermost/create_team_service.rb b/app/services/mattermost/create_team_service.rb
index 2cbcaaad5e1..9f6efab1e43 100644
--- a/app/services/mattermost/create_team_service.rb
+++ b/app/services/mattermost/create_team_service.rb
@@ -9,8 +9,8 @@ module Mattermost
def execute
# The user that creates the team will be Team Admin
- Mattermost::Team.new(current_user).create(@group.mattermost_team_params)
- rescue Mattermost::ClientError => e
+ ::Mattermost::Team.new(current_user).create(@group.mattermost_team_params)
+ rescue ::Mattermost::ClientError => e
@group.errors.add(:mattermost_team, e.message)
end
end
diff --git a/app/services/members/create_service.rb b/app/services/members/create_service.rb
index 7b81cc27635..5d3c4a5c54a 100644
--- a/app/services/members/create_service.rb
+++ b/app/services/members/create_service.rb
@@ -16,6 +16,7 @@ module Members
end
def execute
+ validate_invite_source!
validate_invites!
add_members
@@ -33,6 +34,10 @@ module Members
params[:user_ids]
end
+ def validate_invite_source!
+ raise ArgumentError, s_('AddMember|No invite source provided.') unless invite_source.present?
+ end
+
def validate_invites!
raise BlankInvitesError, blank_invites_message if invites.blank?
@@ -72,6 +77,23 @@ module Members
errors << "#{prefix}#{member.errors.full_messages.to_sentence}"
end
+ def after_execute(member:)
+ super
+
+ Gitlab::Tracking.event(self.class.name, 'create_member', label: invite_source, property: tracking_property(member), user: current_user)
+ end
+
+ def invite_source
+ params[:invite_source]
+ end
+
+ def tracking_property(member)
+      # Ideally invites would go through the invite service class instead, but nothing limits an invite
+      # from being used in this class, and if you send emails as a comma-separated list to the api/members
+      # endpoint, it will support invites.
+ member.invite? ? 'net_new_user' : 'existing_user'
+ end
+
def user_limit
limit = params.fetch(:limit, DEFAULT_INVITE_LIMIT)
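Because validate_invite_source! now runs before validate_invites!, callers of Members::CreateService must always supply an invite_source. A sketch of such a call, under the assumption that the source and member attributes travel through the params hash; key names other than invite_source are illustrative:

# invite_source is required; a blank value raises ArgumentError before any member is added.
Members::CreateService.new(current_user, {
  source: project,                          # assumed: the project or group being added to
  user_ids: '42,43',                        # assumed: ids or emails as accepted elsewhere
  access_level: Gitlab::Access::DEVELOPER,
  invite_source: 'project-members-page'     # any non-blank label, used for the tracking event
}).execute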
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index e94274aff9d..7ebdbf94932 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -24,9 +24,42 @@ module MergeRequests
merge_request.project.execute_hooks(merge_data, :merge_request_hooks)
merge_request.project.execute_services(merge_data, :merge_request_hooks)
+ execute_external_hooks(merge_request, merge_data)
+
enqueue_jira_connect_messages_for(merge_request)
end
+ def execute_external_hooks(merge_request, merge_data)
+ # Implemented in EE
+ end
+
+ def handle_changes(merge_request, options)
+ old_associations = options.fetch(:old_associations, {})
+ old_assignees = old_associations.fetch(:assignees, [])
+ old_reviewers = old_associations.fetch(:reviewers, [])
+
+ handle_assignees_change(merge_request, old_assignees) if merge_request.assignees != old_assignees
+ handle_reviewers_change(merge_request, old_reviewers) if merge_request.reviewers != old_reviewers
+ end
+
+ def handle_assignees_change(merge_request, old_assignees)
+ MergeRequests::HandleAssigneesChangeService
+ .new(project: project, current_user: current_user)
+ .async_execute(merge_request, old_assignees)
+ end
+
+ def handle_reviewers_change(merge_request, old_reviewers)
+ affected_reviewers = (old_reviewers + merge_request.reviewers) - (old_reviewers & merge_request.reviewers)
+ create_reviewer_note(merge_request, old_reviewers)
+ notification_service.async.changed_reviewer_of_merge_request(merge_request, current_user, old_reviewers)
+ todo_service.reassigned_reviewable(merge_request, current_user, old_reviewers)
+ invalidate_cache_counts(merge_request, users: affected_reviewers.compact)
+
+ new_reviewers = merge_request.reviewers - old_reviewers
+ merge_request_activity_counter.track_users_review_requested(users: new_reviewers)
+ merge_request_activity_counter.track_reviewers_changed_action(user: current_user)
+ end
+
def cleanup_environments(merge_request)
Ci::StopEnvironmentsService.new(merge_request.source_project, current_user)
.execute_for_merge_request(merge_request)
diff --git a/app/services/merge_requests/handle_assignees_change_service.rb b/app/services/merge_requests/handle_assignees_change_service.rb
index 9ac386110f7..87cd6544406 100644
--- a/app/services/merge_requests/handle_assignees_change_service.rb
+++ b/app/services/merge_requests/handle_assignees_change_service.rb
@@ -8,7 +8,7 @@ module MergeRequests
merge_request.id,
current_user.id,
old_assignees.map(&:id),
- options
+ options.stringify_keys # see: https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1090
)
end
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index d5e2595a9c6..3a4e3ba38fd 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -27,7 +27,6 @@ module MergeRequests
merge_requests_for_source_branch.each do |mr|
outdate_suggestions(mr)
- refresh_pipelines_on_merge_requests(mr)
abort_auto_merges(mr)
mark_pending_todos_done(mr)
end
@@ -44,6 +43,8 @@ module MergeRequests
notify_about_push(mr)
mark_mr_as_draft_from_commits(mr)
execute_mr_web_hooks(mr)
+ # Run at the end of the loop to avoid any potential contention on the MR object
+ refresh_pipelines_on_merge_requests(mr)
merge_request_activity_counter.track_mr_including_ci_config(user: mr.author, merge_request: mr)
end
diff --git a/app/services/merge_requests/update_assignees_service.rb b/app/services/merge_requests/update_assignees_service.rb
index f99db35fd49..d52c1bbbcda 100644
--- a/app/services/merge_requests/update_assignees_service.rb
+++ b/app/services/merge_requests/update_assignees_service.rb
@@ -9,9 +9,11 @@ module MergeRequests
def execute(merge_request)
return merge_request unless current_user&.can?(:update_merge_request, merge_request)
- old_assignees = merge_request.assignees
+ old_assignees = merge_request.assignees.to_a
old_ids = old_assignees.map(&:id)
new_ids = new_assignee_ids(merge_request)
+
+ return merge_request if merge_request.errors.any?
return merge_request if new_ids.size != update_attrs[:assignee_ids].size
return merge_request if old_ids.to_set == new_ids.to_set # no-change
@@ -30,8 +32,11 @@ module MergeRequests
def new_assignee_ids(merge_request)
# prime the cache - prevent N+1 lookup during authorization loop.
- merge_request.project.team.max_member_access_for_user_ids(update_attrs[:assignee_ids])
- User.id_in(update_attrs[:assignee_ids]).map do |user|
+ user_ids = update_attrs[:assignee_ids]
+ return [] if user_ids.empty?
+
+ merge_request.project.team.max_member_access_for_user_ids(user_ids)
+ User.id_in(user_ids).map do |user|
if user.can?(:read_merge_request, merge_request)
user.id
else
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index b613d88aee4..af041de5596 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -15,6 +15,7 @@ module MergeRequests
end
def handle_changes(merge_request, options)
+ super
old_associations = options.fetch(:old_associations, {})
old_labels = old_associations.fetch(:labels, [])
old_mentioned_users = old_associations.fetch(:mentioned_users, [])
@@ -31,8 +32,6 @@ module MergeRequests
end
handle_target_branch_change(merge_request)
- handle_assignees_change(merge_request, old_assignees) if merge_request.assignees != old_assignees
- handle_reviewers_change(merge_request, old_reviewers) if merge_request.reviewers != old_reviewers
handle_milestone_change(merge_request)
handle_draft_status_change(merge_request, changed_fields)
@@ -50,7 +49,7 @@ module MergeRequests
#
if merge_request.previous_changes.include?('target_branch') ||
merge_request.previous_changes.include?('source_branch')
- merge_request.mark_as_unchecked
+ merge_request.mark_as_unchecked unless merge_request.unchecked?
end
end
@@ -220,24 +219,6 @@ module MergeRequests
end
end
- def handle_assignees_change(merge_request, old_assignees)
- MergeRequests::HandleAssigneesChangeService
- .new(project: project, current_user: current_user)
- .async_execute(merge_request, old_assignees)
- end
-
- def handle_reviewers_change(merge_request, old_reviewers)
- affected_reviewers = (old_reviewers + merge_request.reviewers) - (old_reviewers & merge_request.reviewers)
- create_reviewer_note(merge_request, old_reviewers)
- notification_service.async.changed_reviewer_of_merge_request(merge_request, current_user, old_reviewers)
- todo_service.reassigned_reviewable(merge_request, current_user, old_reviewers)
- invalidate_cache_counts(merge_request, users: affected_reviewers.compact)
-
- new_reviewers = merge_request.reviewers - old_reviewers
- merge_request_activity_counter.track_users_review_requested(users: new_reviewers)
- merge_request_activity_counter.track_reviewers_changed_action(user: current_user)
- end
-
def create_branch_change_note(issuable, branch_type, event_type, old_branch, new_branch)
SystemNoteService.change_branch(
issuable, issuable.project, current_user, branch_type, event_type,
@@ -293,7 +274,7 @@ module MergeRequests
def attempt_specialized_update_services(merge_request, attribute)
case attribute
- when :assignee_ids
+ when :assignee_ids, :assignee_id
assignees_service.execute(merge_request)
when :spend_time
add_time_spent_service.execute(merge_request)
diff --git a/app/services/namespace_settings/update_service.rb b/app/services/namespace_settings/update_service.rb
index de54eb87cc0..80f15f7cc22 100644
--- a/app/services/namespace_settings/update_service.rb
+++ b/app/services/namespace_settings/update_service.rb
@@ -14,6 +14,7 @@ module NamespaceSettings
def execute
validate_resource_access_token_creation_allowed_param
+ validate_prevent_sharing_groups_outside_hierarchy_param
if group.namespace_settings
group.namespace_settings.attributes = settings_params
@@ -32,6 +33,15 @@ module NamespaceSettings
group.namespace_settings.errors.add(:resource_access_token_creation_allowed, _('can only be changed by a group admin.'))
end
end
+
+ def validate_prevent_sharing_groups_outside_hierarchy_param
+ return if settings_params[:prevent_sharing_groups_outside_hierarchy].nil?
+
+ unless can?(current_user, :change_prevent_sharing_groups_outside_hierarchy, group)
+ settings_params.delete(:prevent_sharing_groups_outside_hierarchy)
+ group.namespace_settings.errors.add(:prevent_sharing_groups_outside_hierarchy, _('can only be changed by a group admin.'))
+ end
+ end
end
end
diff --git a/app/services/namespaces/in_product_marketing_emails_service.rb b/app/services/namespaces/in_product_marketing_emails_service.rb
index 61d5ed3bdf4..3461362b48c 100644
--- a/app/services/namespaces/in_product_marketing_emails_service.rb
+++ b/app/services/namespaces/in_product_marketing_emails_service.rb
@@ -4,17 +4,37 @@ module Namespaces
class InProductMarketingEmailsService
include Gitlab::Experimentation::GroupTypes
- INTERVAL_DAYS = [1, 5, 10].freeze
TRACKS = {
- create: :git_write,
- verify: :pipeline_created,
- trial: :trial_started,
- team: :user_added
+ create: {
+ interval_days: [1, 5, 10],
+ completed_actions: [:created],
+ incomplete_actions: [:git_write]
+ },
+ verify: {
+ interval_days: [1, 5, 10],
+ completed_actions: [:git_write],
+ incomplete_actions: [:pipeline_created]
+ },
+ trial: {
+ interval_days: [1, 5, 10],
+ completed_actions: [:git_write, :pipeline_created],
+ incomplete_actions: [:trial_started]
+ },
+ team: {
+ interval_days: [1, 5, 10],
+ completed_actions: [:git_write, :pipeline_created, :trial_started],
+ incomplete_actions: [:user_added]
+ },
+ experience: {
+ interval_days: [30],
+ completed_actions: [:created, :git_write],
+ incomplete_actions: []
+ }
}.freeze
def self.send_for_all_tracks_and_intervals
TRACKS.each_key do |track|
- INTERVAL_DAYS.each do |interval|
+ TRACKS[track][:interval_days].each do |interval|
new(track, interval).execute
end
end
@@ -69,7 +89,7 @@ module Namespaces
def groups_for_track
onboarding_progress_scope = OnboardingProgress
.completed_actions_with_latest_in_range(completed_actions, range)
- .incomplete_actions(incomplete_action)
+ .incomplete_actions(incomplete_actions)
# Filtering out sub-groups is a temporary fix to prevent calling
# `.root_ancestor` on groups that are not root groups.
@@ -103,6 +123,8 @@ module Namespaces
user.can?(:start_trial, group)
when :team
user.can?(:admin_group_member, group)
+ when :experience
+ true
end
end
@@ -111,8 +133,7 @@ module Namespaces
end
def completed_actions
- index = TRACKS.keys.index(track)
- index == 0 ? [:created] : TRACKS.values[0..index - 1]
+ TRACKS[track][:completed_actions]
end
def range
@@ -120,12 +141,12 @@ module Namespaces
date.beginning_of_day..date.end_of_day
end
- def incomplete_action
- TRACKS[track]
+ def incomplete_actions
+ TRACKS[track][:incomplete_actions]
end
def series
- INTERVAL_DAYS.index(interval)
+ TRACKS[track][:interval_days].index(interval)
end
def save_tracked_emails!
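With the restructured TRACKS hash above, each track carries its own interval_days, completed_actions and incomplete_actions, and the series index is derived per track rather than from a shared constant. For instance:

# :create sends on days 1, 5 and 10, so interval 5 is series 1;
# the new :experience track has a single 30-day interval, so it is always series 0.
Namespaces::InProductMarketingEmailsService.new(:create, 5).execute
Namespaces::InProductMarketingEmailsService.new(:experience, 30).execute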
diff --git a/app/services/notification_recipients/builder/base.rb b/app/services/notification_recipients/builder/base.rb
index e8f783136cc..4f1bb0dc877 100644
--- a/app/services/notification_recipients/builder/base.rb
+++ b/app/services/notification_recipients/builder/base.rb
@@ -100,25 +100,6 @@ module NotificationRecipients
# Get project/group users with CUSTOM notification level
# rubocop: disable CodeReuse/ActiveRecord
def add_custom_notifications
- return new_add_custom_notifications if Feature.enabled?(:notification_setting_recipient_refactor, project, default_enabled: :yaml)
-
- user_ids = []
-
- # Users with a notification setting on group or project
- user_ids += user_ids_notifiable_on(project, :custom)
- user_ids += user_ids_notifiable_on(group, :custom)
-
- # Users with global level custom
- user_ids_with_project_level_global = user_ids_notifiable_on(project, :global)
- user_ids_with_group_level_global = user_ids_notifiable_on(group, :global)
-
- global_users_ids = user_ids_with_project_level_global.concat(user_ids_with_group_level_global)
- user_ids += user_ids_with_global_level_custom(global_users_ids, custom_action)
-
- add_recipients(user_scope.where(id: user_ids), :custom, nil)
- end
-
- def new_add_custom_notifications
notification_by_sources = related_notification_settings_sources(:custom)
return if notification_by_sources.blank?
@@ -172,22 +153,6 @@ module NotificationRecipients
# Get project users with WATCH notification level
# rubocop: disable CodeReuse/ActiveRecord
def project_watchers
- return new_project_watchers if Feature.enabled?(:notification_setting_recipient_refactor, project, default_enabled: :yaml)
-
- project_members_ids = user_ids_notifiable_on(project)
-
- user_ids_with_project_global = user_ids_notifiable_on(project, :global)
- user_ids_with_group_global = user_ids_notifiable_on(project.group, :global)
-
- user_ids = user_ids_with_global_level_watch((user_ids_with_project_global + user_ids_with_group_global).uniq)
-
- user_ids_with_project_setting = select_project_members_ids(user_ids_with_project_global, user_ids)
- user_ids_with_group_setting = select_group_members_ids(project.group, project_members_ids, user_ids_with_group_global, user_ids)
-
- user_scope.where(id: user_ids_with_project_setting.concat(user_ids_with_group_setting).uniq)
- end
-
- def new_project_watchers
notification_by_sources = related_notification_settings_sources(:watch)
return if notification_by_sources.blank?
@@ -200,16 +165,6 @@ module NotificationRecipients
# rubocop: disable CodeReuse/ActiveRecord
def group_watchers
- return new_group_watchers if Feature.enabled?(:notification_setting_recipient_refactor, project, default_enabled: :yaml)
-
- user_ids_with_group_global = user_ids_notifiable_on(group, :global)
- user_ids = user_ids_with_global_level_watch(user_ids_with_group_global)
- user_ids_with_group_setting = select_group_members_ids(group, [], user_ids_with_group_global, user_ids)
-
- user_scope.where(id: user_ids_with_group_setting)
- end
-
- def new_group_watchers
return [] unless group
user_ids = group
diff --git a/app/services/packages/debian/create_distribution_service.rb b/app/services/packages/debian/create_distribution_service.rb
index f947d2e4293..b4b1ec952ef 100644
--- a/app/services/packages/debian/create_distribution_service.rb
+++ b/app/services/packages/debian/create_distribution_service.rb
@@ -38,14 +38,19 @@ module Packages
append_errors(distribution)
return error unless errors.empty?
- distribution.transaction do
- if distribution.save
- create_components
- create_architectures
-
- success
- end
- end || error
+ result = distribution.transaction do
+ next unless distribution.save
+
+ create_components
+ create_architectures
+ success
+ end
+
+ result ||= error
+
+ ::Packages::Debian::GenerateDistributionWorker.perform_async(distribution.class.container_type, distribution.reset.id) if result.success?
+
+ result
end
def create_components
diff --git a/app/services/packages/debian/destroy_distribution_service.rb b/app/services/packages/debian/destroy_distribution_service.rb
deleted file mode 100644
index bef1127fece..00000000000
--- a/app/services/packages/debian/destroy_distribution_service.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-module Packages
- module Debian
- class DestroyDistributionService
- def initialize(distribution)
- @distribution = distribution
- end
-
- def execute
- destroy_distribution
- end
-
- private
-
- def destroy_distribution
- if @distribution.destroy
- success
- else
- error("Unable to destroy Debian #{@distribution.model_name.human.downcase}")
- end
- end
-
- def success
- ServiceResponse.success
- end
-
- def error(message)
- ServiceResponse.error(message: message, payload: { distribution: @distribution })
- end
- end
- end
-end
diff --git a/app/services/packages/debian/generate_distribution_service.rb b/app/services/packages/debian/generate_distribution_service.rb
index 67348af1a49..651325c49a0 100644
--- a/app/services/packages/debian/generate_distribution_service.rb
+++ b/app/services/packages/debian/generate_distribution_service.rb
@@ -6,6 +6,8 @@ module Packages
include Gitlab::Utils::StrongMemoize
include ExclusiveLeaseGuard
+ ONE_HOUR = 1.hour.freeze
+
# used by ExclusiveLeaseGuard
DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
@@ -62,7 +64,7 @@ module Packages
def initialize(distribution)
@distribution = distribution
- @last_generated_at = nil
+ @oldest_kept_generated_at = nil
@md5sum = []
@sha256 = []
end
@@ -70,7 +72,10 @@ module Packages
def execute
try_obtain_lease do
@distribution.transaction do
- @last_generated_at = @distribution.component_files.maximum(:created_at)
+          # We assume `apt-get update` takes at most one hour,
+          # so we keep all generations younger than one hour
+          # as well as the previous generation
+ @oldest_kept_generated_at = @distribution.component_files.updated_before(release_date - ONE_HOUR).maximum(:updated_at)
generate_component_files
generate_release
destroy_old_component_files
@@ -96,7 +101,7 @@ module Packages
.with_debian_file_type(package_file_type)
.find_each
.map(&method(:package_stanza_from_fields))
- create_component_file(component, component_file_type, architecture, package_file_type, paragraphs.join("\n"))
+ reuse_or_create_component_file(component, component_file_type, architecture, paragraphs.join("\n"))
end
def package_stanza_from_fields(package_file)
@@ -127,17 +132,32 @@ module Packages
end
end
- def create_component_file(component, component_file_type, architecture, package_file_type, content)
- component_file = component.files.create!(
- file_type: component_file_type,
- architecture: architecture,
- compression_type: nil,
- file: CarrierWaveStringFile.new(content),
- file_md5: Digest::MD5.hexdigest(content),
- file_sha256: Digest::SHA256.hexdigest(content)
- )
- @md5sum.append(" #{component_file.file_md5} #{component_file.size.to_s.rjust(8)} #{component_file.relative_path}")
- @sha256.append(" #{component_file.file_sha256} #{component_file.size.to_s.rjust(8)} #{component_file.relative_path}")
+ def reuse_or_create_component_file(component, component_file_type, architecture, content)
+ file_md5 = Digest::MD5.hexdigest(content)
+ file_sha256 = Digest::SHA256.hexdigest(content)
+ component_file = component.files
+ .with_file_type(component_file_type)
+ .with_architecture(architecture)
+ .with_compression_type(nil)
+ .with_file_sha256(file_sha256)
+ .last
+
+ if component_file
+ component_file.touch(time: release_date)
+ else
+ component_file = component.files.create!(
+ updated_at: release_date,
+ file_type: component_file_type,
+ architecture: architecture,
+ compression_type: nil,
+ file: CarrierWaveStringFile.new(content),
+ file_md5: file_md5,
+ file_sha256: file_sha256
+ )
+ end
+
+ @md5sum.append(" #{file_md5} #{component_file.size.to_s.rjust(8)} #{component_file.relative_path}")
+ @sha256.append(" #{file_sha256} #{component_file.size.to_s.rjust(8)} #{component_file.relative_path}")
end
def generate_release
@@ -187,10 +207,9 @@ module Packages
end
def destroy_old_component_files
- # Only keep the last generation and one hour before
- return if @last_generated_at.nil?
+ return if @oldest_kept_generated_at.nil?
- @distribution.component_files.created_before(@last_generated_at - 1.hour).destroy_all # rubocop:disable Cop/DestroyAll
+ @distribution.component_files.updated_before(@oldest_kept_generated_at).destroy_all # rubocop:disable Cop/DestroyAll
end
# used by ExclusiveLeaseGuard
diff --git a/app/services/packages/debian/process_changes_service.rb b/app/services/packages/debian/process_changes_service.rb
index 881ad2c46f4..b6e81012656 100644
--- a/app/services/packages/debian/process_changes_service.rb
+++ b/app/services/packages/debian/process_changes_service.rb
@@ -25,6 +25,8 @@ module Packages
update_files_metadata
update_changes_metadata
end
+
+ ::Packages::Debian::GenerateDistributionWorker.perform_async(:project, package.debian_distribution.id)
end
end
diff --git a/app/services/packages/debian/update_distribution_service.rb b/app/services/packages/debian/update_distribution_service.rb
index 95face912d5..218167ecdc5 100644
--- a/app/services/packages/debian/update_distribution_service.rb
+++ b/app/services/packages/debian/update_distribution_service.rb
@@ -31,7 +31,7 @@ module Packages
end
def update_distribution
- distribution.transaction do
+ result = distribution.transaction do
if distribution.update(params)
update_components if components
update_architectures if architectures
@@ -41,7 +41,13 @@ module Packages
append_errors(distribution)
error
end
- end || error
+ end
+
+ result ||= error
+
+ ::Packages::Debian::GenerateDistributionWorker.perform_async(distribution.class.container_type, distribution.id) if result.success?
+
+ result
end
def update_components
diff --git a/app/services/packages/helm/extract_file_metadata_service.rb b/app/services/packages/helm/extract_file_metadata_service.rb
new file mode 100644
index 00000000000..e7373d8ea8f
--- /dev/null
+++ b/app/services/packages/helm/extract_file_metadata_service.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'rubygems/package'
+
+module Packages
+ module Helm
+ class ExtractFileMetadataService
+ ExtractionError = Class.new(StandardError)
+
+ def initialize(package_file)
+ @package_file = package_file
+ end
+
+ def execute
+ raise ExtractionError, 'invalid package file' unless valid_package_file?
+
+ metadata
+ end
+
+ private
+
+ def valid_package_file?
+ @package_file &&
+ @package_file.package&.helm? &&
+ @package_file.file.size > 0 # rubocop:disable Style/ZeroLengthPredicate
+ end
+
+ def metadata
+ YAML.safe_load(chart_yaml_content)
+ rescue Psych::Exception => e
+ raise ExtractionError, "Error while parsing Chart.yaml: #{e.message}"
+ end
+
+ def chart_yaml_content
+ @package_file.file.use_open_file do |file|
+ tar_reader = Gem::Package::TarReader.new(Zlib::GzipReader.new(file))
+
+ chart_yaml = tar_reader.find do |entry|
+ next unless entry.file?
+
+ entry.full_name.end_with?('/Chart.yaml')
+ end
+
+ raise ExtractionError, 'Chart.yaml not found within a directory' unless chart_yaml
+
+ chart_yaml.read
+ ensure
+ tar_reader.close
+ end
+ end
+ end
+ end
+end
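A usage sketch for the new Helm metadata service; the Chart.yaml keys shown are the standard Helm chart fields rather than something the service itself enforces:

begin
  metadata = ::Packages::Helm::ExtractFileMetadataService.new(package_file).execute
  metadata['name']    # from Chart.yaml
  metadata['version'] # from Chart.yaml
rescue ::Packages::Helm::ExtractFileMetadataService::ExtractionError => e
  Rails.logger.error("Helm metadata extraction failed: #{e.message}")
end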
diff --git a/app/services/packages/nuget/metadata_extraction_service.rb b/app/services/packages/nuget/metadata_extraction_service.rb
index dd5f1bcc936..63da98dde43 100644
--- a/app/services/packages/nuget/metadata_extraction_service.rb
+++ b/app/services/packages/nuget/metadata_extraction_service.rb
@@ -28,7 +28,7 @@ module Packages
def execute
raise ExtractionError, 'invalid package file' unless valid_package_file?
- extract_metadata(nuspec_file)
+ extract_metadata(nuspec_file_content)
end
private
@@ -39,6 +39,10 @@ module Packages
end
end
+ def project
+ package_file.package.project
+ end
+
def valid_package_file?
package_file &&
package_file.package&.nuget? &&
@@ -89,16 +93,21 @@ module Packages
tags.split(::Packages::Tag::NUGET_TAGS_SEPARATOR)
end
- def nuspec_file
- package_file.file.use_file do |file_path|
- Zip::File.open(file_path) do |zip_file|
- entry = zip_file.glob('*.nuspec').first
+ def nuspec_file_content
+ with_zip_file do |zip_file|
+ entry = zip_file.glob('*.nuspec').first
- raise ExtractionError, 'nuspec file not found' unless entry
- raise ExtractionError, 'nuspec file too big' if entry.size > MAX_FILE_SIZE
+ raise ExtractionError, 'nuspec file not found' unless entry
+ raise ExtractionError, 'nuspec file too big' if entry.size > MAX_FILE_SIZE
- entry.get_input_stream.read
- end
+ entry.get_input_stream.read
+ end
+ end
+
+ def with_zip_file(&block)
+ package_file.file.use_open_file do |open_file|
+ zip_file = Zip::File.new(open_file, false, true)
+ yield(zip_file)
end
end
end
diff --git a/app/services/pages/delete_service.rb b/app/services/pages/delete_service.rb
index c4009dcc4ec..f7d3d70aad6 100644
--- a/app/services/pages/delete_service.rb
+++ b/app/services/pages/delete_service.rb
@@ -3,8 +3,13 @@
module Pages
class DeleteService < BaseService
def execute
- project.mark_pages_as_not_deployed # prevents domain from updating config when deleted
- project.pages_domains.delete_all
+ project.mark_pages_as_not_deployed
+
+ # project.pages_domains.delete_all will just nullify project_id:
+ # > If no :dependent option is given, then it will follow the default
+ # > strategy for `has_many :through` associations.
+ # > The default strategy is :nullify which sets the foreign keys to NULL.
+ PagesDomain.for_project(project).delete_all
DestroyPagesDeploymentsWorker.perform_async(project.id)
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 97ea7d87545..7dd9280e5b1 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -11,6 +11,9 @@ module Projects
@initialize_with_readme = Gitlab::Utils.to_boolean(@params.delete(:initialize_with_readme))
@import_data = @params.delete(:import_data)
@relations_block = @params.delete(:relations_block)
+ @default_branch = @params.delete(:default_branch)
+
+ build_topics
end
def execute
@@ -128,20 +131,16 @@ module Projects
access_level: group_access_level)
end
- if Feature.enabled?(:specialized_project_authorization_workers, default_enabled: :yaml)
- AuthorizedProjectUpdate::ProjectCreateWorker.perform_async(@project.id)
- # AuthorizedProjectsWorker uses an exclusive lease per user but
- # specialized workers might have synchronization issues. Until we
- # compare the inconsistency rates of both approaches, we still run
- # AuthorizedProjectsWorker but with some delay and lower urgency as a
- # safety net.
- @project.group.refresh_members_authorized_projects(
- blocking: false,
- priority: UserProjectAccessChangedService::LOW_PRIORITY
- )
- else
- @project.group.refresh_members_authorized_projects(blocking: false)
- end
+ AuthorizedProjectUpdate::ProjectCreateWorker.perform_async(@project.id)
+ # AuthorizedProjectsWorker uses an exclusive lease per user but
+ # specialized workers might have synchronization issues. Until we
+ # compare the inconsistency rates of both approaches, we still run
+ # AuthorizedProjectsWorker but with some delay and lower urgency as a
+ # safety net.
+ @project.group.refresh_members_authorized_projects(
+ blocking: false,
+ priority: UserProjectAccessChangedService::LOW_PRIORITY
+ )
else
@project.add_maintainer(@project.namespace.owner, current_user: current_user)
end
@@ -149,7 +148,7 @@ module Projects
def create_readme
commit_attrs = {
- branch_name: @project.default_branch_or_main,
+ branch_name: @default_branch.presence || @project.default_branch_or_main,
commit_message: 'Initial commit',
file_path: 'README.md',
file_content: "# #{@project.name}\n\n#{@project.description}"
@@ -261,6 +260,14 @@ module Projects
.new(current_user, @project, project_params: { import_data: @import_data })
.level_restricted?
end
+
+ def build_topics
+ topics = params.delete(:topics)
+ tag_list = params.delete(:tag_list)
+ topic_list = topics || tag_list
+
+ params[:topic_list] ||= topic_list if topic_list
+ end
end
end
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index 0682f3013d4..e27ea7c07e5 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -116,7 +116,7 @@ module Projects
log_destroy_event
trash_relation_repositories!
trash_project_repositories!
- destroy_web_hooks! if Feature.enabled?(:destroy_webhooks_before_the_project, project, default_enabled: :yaml)
+ destroy_web_hooks!
# Rails attempts to load all related records into memory before
# destroying: https://github.com/rails/rails/issues/22510
diff --git a/app/services/projects/group_links/create_service.rb b/app/services/projects/group_links/create_service.rb
index d8fa2f36fcc..fc5c936b378 100644
--- a/app/services/projects/group_links/create_service.rb
+++ b/app/services/projects/group_links/create_service.rb
@@ -23,22 +23,18 @@ module Projects
private
def setup_authorizations(group, group_access = nil)
- if Feature.enabled?(:specialized_project_authorization_project_share_worker, default_enabled: :yaml)
- AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(
- project.id, group.id, group_access)
+ AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(
+ project.id, group.id, group_access)
- # AuthorizedProjectsWorker uses an exclusive lease per user but
- # specialized workers might have synchronization issues. Until we
- # compare the inconsistency rates of both approaches, we still run
- # AuthorizedProjectsWorker but with some delay and lower urgency as a
- # safety net.
- group.refresh_members_authorized_projects(
- blocking: false,
- priority: UserProjectAccessChangedService::LOW_PRIORITY
- )
- else
- group.refresh_members_authorized_projects(blocking: false)
- end
+ # AuthorizedProjectsWorker uses an exclusive lease per user but
+ # specialized workers might have synchronization issues. Until we
+ # compare the inconsistency rates of both approaches, we still run
+ # AuthorizedProjectsWorker but with some delay and lower urgency as a
+ # safety net.
+ group.refresh_members_authorized_projects(
+ blocking: false,
+ priority: UserProjectAccessChangedService::LOW_PRIORITY
+ )
end
end
end
diff --git a/app/services/projects/group_links/destroy_service.rb b/app/services/projects/group_links/destroy_service.rb
index bfe704cd780..01a5b617b46 100644
--- a/app/services/projects/group_links/destroy_service.rb
+++ b/app/services/projects/group_links/destroy_service.rb
@@ -13,9 +13,27 @@ module Projects
end
group_link.destroy.tap do |link|
- link.group.refresh_members_authorized_projects
+ if Feature.enabled?(:use_specialized_worker_for_project_auth_recalculation)
+ refresh_project_authorizations_asynchronously(link.project)
+
+ # Until we compare the inconsistency rates of the new specialized worker and
+ # the old approach, we still run AuthorizedProjectsWorker
+ # but with some delay and lower urgency as a safety net.
+ link.group.refresh_members_authorized_projects(
+ blocking: false,
+ priority: UserProjectAccessChangedService::LOW_PRIORITY
+ )
+ else
+ link.group.refresh_members_authorized_projects
+ end
end
end
+
+ private
+
+ def refresh_project_authorizations_asynchronously(project)
+ AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project.id)
+ end
end
end
end
diff --git a/app/services/projects/prometheus/alerts/notify_service.rb b/app/services/projects/prometheus/alerts/notify_service.rb
index db640a54745..e1eb1374d14 100644
--- a/app/services/projects/prometheus/alerts/notify_service.rb
+++ b/app/services/projects/prometheus/alerts/notify_service.rb
@@ -105,9 +105,9 @@ module Projects
cluster = alert.environment.deployment_platform&.cluster
return unless cluster&.enabled?
- return unless cluster.application_prometheus_available?
+ return unless cluster.integration_prometheus_available?
- cluster.application_prometheus || cluster.integration_prometheus
+ cluster.integration_prometheus
end
def find_alert(project, metric)
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index 541b333aae3..4351a66351d 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -8,6 +8,7 @@ module Projects
ValidationError = Class.new(StandardError)
def execute
+ build_topics
remove_unallowed_params
validate!
@@ -167,6 +168,14 @@ module Projects
project.repository_storage != new_repository_storage &&
can?(current_user, :change_repository_storage, project)
end
+
+ def build_topics
+ topics = params.delete(:topics)
+ tag_list = params.delete(:tag_list)
+ topic_list = topics || tag_list
+
+ params[:topic_list] ||= topic_list if topic_list
+ end
end
end
diff --git a/app/services/prometheus/create_default_alerts_service.rb b/app/services/prometheus/create_default_alerts_service.rb
index e59b0a8e8e3..eb8a9d45658 100644
--- a/app/services/prometheus/create_default_alerts_service.rb
+++ b/app/services/prometheus/create_default_alerts_service.rb
@@ -1,5 +1,6 @@
# frozen_string_literal: true
+# DEPRECATED: To be removed as part of https://gitlab.com/groups/gitlab-org/-/epics/5877
module Prometheus
class CreateDefaultAlertsService < BaseService
include Gitlab::Utils::StrongMemoize
@@ -53,12 +54,12 @@ module Prometheus
end
def schedule_prometheus_update
- return unless prometheus_application
+ return unless prometheus_adapter
- ::Clusters::Applications::ScheduleUpdateService.new(prometheus_application, project).execute
+ ::Clusters::Applications::ScheduleUpdateService.new(prometheus_adapter, project).execute
end
- def prometheus_application
+ def prometheus_adapter
environment.cluster_prometheus_adapter
end
diff --git a/app/services/resource_access_tokens/create_service.rb b/app/services/resource_access_tokens/create_service.rb
index 84f4478f20f..6ff8767a525 100644
--- a/app/services/resource_access_tokens/create_service.rb
+++ b/app/services/resource_access_tokens/create_service.rb
@@ -48,7 +48,7 @@ module ResourceAccessTokens
# since someone like a project maintainer does not inherently have the ability
# to create a new user in the system.
- Users::CreateService.new(current_user, default_user_params).execute(skip_authorization: true)
+ ::Users::AuthorizedCreateService.new(current_user, default_user_params).execute
end
def delete_failed_user(user)
diff --git a/app/services/search/project_service.rb b/app/services/search/project_service.rb
index 3181c0098cc..3e93346bfdf 100644
--- a/app/services/search/project_service.rb
+++ b/app/services/search/project_service.rb
@@ -8,8 +8,8 @@ module Search
attr_accessor :project, :current_user, :params
- def initialize(project, user, params)
- @project = project
+ def initialize(project_or_projects, user, params)
+ @project = project_or_projects
@current_user = user
@params = params.dup
end
diff --git a/app/services/search_service.rb b/app/services/search_service.rb
index 389cf17e115..cce7821a226 100644
--- a/app/services/search_service.rb
+++ b/app/services/search_service.rb
@@ -41,6 +41,10 @@ class SearchService
end
# rubocop: enable CodeReuse/ActiveRecord
+ def projects
+ # overridden in EE
+ end
+
def show_snippets?
return @show_snippets if defined?(@show_snippets)
diff --git a/app/services/security/ci_configuration/sast_parser_service.rb b/app/services/security/ci_configuration/sast_parser_service.rb
index 5220525d552..cae9a90f0a0 100644
--- a/app/services/security/ci_configuration/sast_parser_service.rb
+++ b/app/services/security/ci_configuration/sast_parser_service.rb
@@ -74,19 +74,11 @@ module Security
def sast_excluded_analyzers
strong_memoize(:sast_excluded_analyzers) do
- all_analyzers = Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS.split(', ') rescue []
- enabled_analyzers = sast_default_analyzers.split(',').map(&:strip) rescue []
-
excluded_analyzers = gitlab_ci_yml_attributes["SAST_EXCLUDED_ANALYZERS"] || sast_template_attributes["SAST_EXCLUDED_ANALYZERS"]
- excluded_analyzers = excluded_analyzers.split(',').map(&:strip) rescue []
- ((all_analyzers - enabled_analyzers) + excluded_analyzers).uniq
+ excluded_analyzers.split(',').map(&:strip) rescue []
end
end
- def sast_default_analyzers
- @sast_default_analyzers ||= gitlab_ci_yml_attributes["SAST_DEFAULT_ANALYZERS"] || sast_template_attributes["SAST_DEFAULT_ANALYZERS"]
- end
-
def sast_template_attributes
@sast_template_attributes ||= build_sast_attributes(sast_template_content)
end
@@ -109,17 +101,17 @@ module Security
yaml_result = Gitlab::Ci::YamlProcessor.new(content, options).execute
return {} unless yaml_result.valid?
- sast_attributes = yaml_result.build_attributes(:sast)
- extract_required_attributes(sast_attributes)
+ extract_required_attributes(yaml_result)
end
- def extract_required_attributes(attributes)
+ def extract_required_attributes(yaml_result)
result = {}
- attributes[:yaml_variables].each do |variable|
+
+ yaml_result.yaml_variables_for(:sast).each do |variable|
result[variable[:key]] = variable[:value]
end
- result[:stage] = attributes[:stage]
+ result[:stage] = yaml_result.stage_for(:sast)
result.with_indifferent_access
end
end
diff --git a/app/services/snippets/base_service.rb b/app/services/snippets/base_service.rb
index 415cfcb7d8f..1a04c4fcedd 100644
--- a/app/services/snippets/base_service.rb
+++ b/app/services/snippets/base_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Snippets
- class BaseService < ::BaseService
+ class BaseService < ::BaseProjectService
UPDATE_COMMIT_MSG = 'Update snippet'
INITIAL_COMMIT_MSG = 'Initial commit'
@@ -9,7 +9,7 @@ module Snippets
attr_reader :uploaded_assets, :snippet_actions
- def initialize(project, user = nil, params = {})
+ def initialize(project: nil, current_user: nil, params: {})
super
@uploaded_assets = Array(@params.delete(:files).presence)
@@ -20,7 +20,7 @@ module Snippets
private
- def visibility_allowed?(snippet, visibility_level)
+ def visibility_allowed?(visibility_level)
Gitlab::VisibilityLevel.allowed_for?(current_user, visibility_level)
end
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index aadf9b865b8..8f1b481d307 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -12,7 +12,7 @@ module Snippets
return invalid_params_error(@snippet) unless valid_params?
- unless visibility_allowed?(snippet, snippet.visibility_level)
+ unless visibility_allowed?(snippet.visibility_level)
return forbidden_visibility_error(snippet)
end
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
index 4088a08272d..8571bc9c869 100644
--- a/app/services/snippets/update_service.rb
+++ b/app/services/snippets/update_service.rb
@@ -14,7 +14,7 @@ module Snippets
return invalid_params_error(snippet) unless valid_params?
- if visibility_changed?(snippet) && !visibility_allowed?(snippet, visibility_level)
+ if visibility_changed?(snippet) && !visibility_allowed?(visibility_level)
return forbidden_visibility_error(snippet)
end
diff --git a/app/services/todo_service.rb b/app/services/todo_service.rb
index fc6543a8efc..71bb813f384 100644
--- a/app/services/todo_service.rb
+++ b/app/services/todo_service.rb
@@ -316,6 +316,8 @@ class TodoService
attributes.merge!(target_id: nil, commit_id: target.id)
elsif target.is_a?(Issue)
attributes[:issue_type] = target.issue_type
+ elsif target.is_a?(Discussion)
+ attributes.merge!(target_type: nil, target_id: nil, discussion: target)
end
attributes
diff --git a/app/services/user_project_access_changed_service.rb b/app/services/user_project_access_changed_service.rb
index 80490bd4c9a..f52502e0379 100644
--- a/app/services/user_project_access_changed_service.rb
+++ b/app/services/user_project_access_changed_service.rb
@@ -13,17 +13,20 @@ class UserProjectAccessChangedService
def execute(blocking: true, priority: HIGH_PRIORITY)
bulk_args = @user_ids.map { |id| [id] }
- if blocking
- AuthorizedProjectsWorker.bulk_perform_and_wait(bulk_args)
- else
- if priority == HIGH_PRIORITY
- AuthorizedProjectsWorker.bulk_perform_async(bulk_args) # rubocop:disable Scalability/BulkPerformWithContext
+ result =
+ if blocking
+ AuthorizedProjectsWorker.bulk_perform_and_wait(bulk_args)
else
- AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker.bulk_perform_in( # rubocop:disable Scalability/BulkPerformWithContext
- DELAY, bulk_args, batch_size: 100, batch_delay: 30.seconds)
+ if priority == HIGH_PRIORITY
+ AuthorizedProjectsWorker.bulk_perform_async(bulk_args) # rubocop:disable Scalability/BulkPerformWithContext
+ else
+ AuthorizedProjectUpdate::UserRefreshFromReplicaWorker.bulk_perform_in( # rubocop:disable Scalability/BulkPerformWithContext
+ DELAY, bulk_args, batch_size: 100, batch_delay: 30.seconds)
+ end
end
- end
+
+ ::Gitlab::Database::LoadBalancing::Sticking.bulk_stick(:user, @user_ids)
+
+ result
end
end
-
-UserProjectAccessChangedService.prepend_mod_with('UserProjectAccessChangedService')
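
The refactor captures the worker-scheduling result, sticks the affected users to the primary database via load-balancing Sticking, and then returns that result. A hedged sketch of the two call shapes; LOW_PRIORITY is assumed to be the service's other priority constant and is not visible in this hunk:

    # Blocking refresh: waits for the AuthorizedProjectsWorker jobs to complete.
    UserProjectAccessChangedService.new(user_ids).execute(blocking: true)

    # Deferred, low-urgency refresh: batched onto the replica-backed worker.
    UserProjectAccessChangedService.new(user_ids).execute(
      blocking: false,
      priority: UserProjectAccessChangedService::LOW_PRIORITY
    )
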
diff --git a/app/services/users/activity_service.rb b/app/services/users/activity_service.rb
index c89a286cc8b..20594bec28d 100644
--- a/app/services/users/activity_service.rb
+++ b/app/services/users/activity_service.rb
@@ -17,7 +17,7 @@ module Users
def execute
return unless @user
- record_activity
+ ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes { record_activity }
end
private
@@ -37,5 +37,3 @@ module Users
end
end
end
-
-Users::ActivityService.prepend_mod
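
A sketch of what without_sticky_writes is for here: recording activity is a best-effort write that should not force the user's subsequent reads onto the primary database. The column name below is an assumption for illustration:

    ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
      # Do not mark the session as "recently wrote", so later reads in the same
      # request can still be served by replicas.
      user.update_column(:last_activity_on, Date.today)  # column name illustrative
    end
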
diff --git a/app/services/users/authorized_build_service.rb b/app/services/users/authorized_build_service.rb
new file mode 100644
index 00000000000..eb2386198d3
--- /dev/null
+++ b/app/services/users/authorized_build_service.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module Users
+ class AuthorizedBuildService < BuildService
+ extend ::Gitlab::Utils::Override
+
+ private
+
+ override :validate_access!
+ def validate_access!
+ # no-op
+ end
+
+ def signup_params
+ super + [:skip_confirmation]
+ end
+ end
+end
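
A hedged usage sketch of the new builder: it skips validate_access! and additionally permits :skip_confirmation, so internal callers can build users that a plain BuildService would reject. Attribute values are illustrative:

    user = Users::AuthorizedBuildService.new(
      current_user,
      name: 'Example User',
      username: 'example',
      email: 'user@example.com',
      password: User.random_password,
      skip_confirmation: true
    ).execute

    user.save! if user.valid?  # execute only builds; persisting is the caller's job
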
diff --git a/app/services/users/authorized_create_service.rb b/app/services/users/authorized_create_service.rb
new file mode 100644
index 00000000000..b6109f0c191
--- /dev/null
+++ b/app/services/users/authorized_create_service.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Users
+ class AuthorizedCreateService < CreateService
+ extend ::Gitlab::Utils::Override
+
+ private
+
+ override :build_class
+ def build_class
+ Users::AuthorizedBuildService
+ end
+ end
+end
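
Together with the builder above, this replaces the removed execute(skip_authorization: true) pattern: callers that previously bypassed the signup check now choose the authorized service instead of passing a flag. A sketch:

    # Before (removed): Users::CreateService.new(current_user, params).execute(skip_authorization: true)
    # After:
    Users::AuthorizedCreateService.new(current_user, params).execute
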
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index 649cf281ab0..ddb20a835e1 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -5,7 +5,6 @@ module Users
delegate :user_default_internal_regex_enabled?,
:user_default_internal_regex_instance,
to: :'Gitlab::CurrentSettings.current_application_settings'
- attr_reader :identity_params
def initialize(current_user, params = {})
@current_user = current_user
@@ -13,46 +12,128 @@ module Users
@identity_params = params.slice(*identity_attributes)
end
- def execute(skip_authorization: false)
- @skip_authorization = skip_authorization
+ def execute
+ build_user
+ build_identity
+ update_canonical_email
- raise Gitlab::Access::AccessDeniedError unless skip_authorization || can_create_user?
+ user
+ end
- user_params = build_user_params
- user = User.new(user_params)
+ private
- if current_user&.admin?
- @reset_token = user.generate_reset_token if params[:reset_password]
+ attr_reader :identity_params, :user_params, :user
- if user_params[:force_random_password]
- random_password = User.random_password
- user.password = user.password_confirmation = random_password
- end
+ def identity_attributes
+ [:extern_uid, :provider]
+ end
+
+ def build_user
+ if admin?
+ admin_build_user
+ else
+ standard_build_user
end
+ end
- build_identity(user)
+ def admin?
+ return false unless current_user
- Users::UpdateCanonicalEmailService.new(user: user).execute
+ current_user.admin?
+ end
- user
+ def admin_build_user
+ build_user_params_for_admin
+ init_user
+ password_reset
end
- private
+ def standard_build_user
+ # current_user is either a non-admin user or nil
+ validate_access!
+ build_user_params_for_non_admin
+ init_user
+ end
- attr_reader :skip_authorization
+ def build_user_params_for_admin
+ @user_params = params.slice(*admin_create_params)
+ @user_params.merge!(force_random_password: true, password_expires_at: nil) if params[:reset_password]
+ end
- def identity_attributes
- [:extern_uid, :provider]
+ def init_user
+ assign_common_user_params
+
+ @user = User.new(user_params)
+ end
+
+ def assign_common_user_params
+ @user_params[:created_by_id] = current_user&.id
+ @user_params[:external] = user_external? if set_external_param?
+
+ @user_params.delete(:user_type) unless project_bot?
+ end
+
+ def set_external_param?
+ user_default_internal_regex_enabled? && !user_params.key?(:external)
+ end
+
+ def user_external?
+ user_default_internal_regex_instance.match(params[:email]).nil?
+ end
+
+ def project_bot?
+ user_params[:user_type]&.to_sym == :project_bot
+ end
+
+ def password_reset
+ @reset_token = user.generate_reset_token if params[:reset_password]
+
+ if user_params[:force_random_password]
+ random_password = User.random_password
+ @user.password = user.password_confirmation = random_password
+ end
+ end
+
+ def validate_access!
+ return if can_create_user?
+
+ raise Gitlab::Access::AccessDeniedError
+ end
+
+ def can_create_user?
+ current_user.nil? && Gitlab::CurrentSettings.allow_signup?
+ end
+
+ def build_user_params_for_non_admin
+ @user_params = params.slice(*signup_params)
+ @user_params[:skip_confirmation] = skip_user_confirmation_email_from_setting if assign_skip_confirmation_from_settings?
+ @user_params[:name] = fallback_name if use_fallback_name?
+ end
+
+ def assign_skip_confirmation_from_settings?
+ user_params[:skip_confirmation].nil?
end
- def build_identity(user)
+ def skip_user_confirmation_email_from_setting
+ !Gitlab::CurrentSettings.send_user_confirmation_email
+ end
+
+ def use_fallback_name?
+ user_params[:name].blank? && fallback_name.present?
+ end
+
+ def fallback_name
+ "#{user_params[:first_name]} #{user_params[:last_name]}"
+ end
+
+ def build_identity
return if identity_params.empty?
user.identities.build(identity_params)
end
- def can_create_user?
- (current_user.nil? && Gitlab::CurrentSettings.allow_signup?) || current_user&.admin?
+ def update_canonical_email
+ Users::UpdateCanonicalEmailService.new(user: user).execute
end
# Allowed params for creating a user (admins only)
@@ -96,69 +177,15 @@ module Users
def signup_params
[
:email,
- :password_automatically_set,
:name,
- :first_name,
- :last_name,
:password,
+ :password_automatically_set,
:username,
- :user_type
+ :user_type,
+ :first_name,
+ :last_name
]
end
-
- def build_user_params
- if current_user&.admin?
- user_params = params.slice(*admin_create_params)
-
- if params[:reset_password]
- user_params.merge!(force_random_password: true, password_expires_at: nil)
- end
- else
- allowed_signup_params = signup_params
- allowed_signup_params << :skip_confirmation if allow_caller_to_request_skip_confirmation?
-
- user_params = params.slice(*allowed_signup_params)
- if assign_skip_confirmation_from_settings?(user_params)
- user_params[:skip_confirmation] = skip_user_confirmation_email_from_setting
- end
-
- fallback_name = "#{user_params[:first_name]} #{user_params[:last_name]}"
-
- if user_params[:name].blank? && fallback_name.present?
- user_params = user_params.merge(name: fallback_name)
- end
- end
-
- user_params[:created_by_id] = current_user&.id
-
- if user_default_internal_regex_enabled? && !user_params.key?(:external)
- user_params[:external] = user_external?
- end
-
- user_params.delete(:user_type) unless project_bot?(user_params[:user_type])
-
- user_params
- end
-
- def allow_caller_to_request_skip_confirmation?
- skip_authorization
- end
-
- def assign_skip_confirmation_from_settings?(user_params)
- user_params[:skip_confirmation].nil?
- end
-
- def skip_user_confirmation_email_from_setting
- !Gitlab::CurrentSettings.send_user_confirmation_email
- end
-
- def user_external?
- user_default_internal_regex_instance.match(params[:email]).nil?
- end
-
- def project_bot?(user_type)
- user_type&.to_sym == :project_bot
- end
end
end
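
The rewritten execute splits into an admin path (admin_create_params plus optional reset-password handling) and a standard path (validate_access! followed by signup_params only). A minimal sketch of the standard path; the failure mode is the access error raised when instance-wide signup is disabled:

    # Anonymous signup: allowed only when Gitlab::CurrentSettings.allow_signup? is
    # true, otherwise validate_access! raises Gitlab::Access::AccessDeniedError.
    Users::BuildService.new(
      nil,  # no current_user
      name: 'Example User',
      username: 'example',
      email: 'user@example.com',
      password: User.random_password
    ).execute
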
diff --git a/app/services/users/create_service.rb b/app/services/users/create_service.rb
index 757ebd783ee..591d88b275e 100644
--- a/app/services/users/create_service.rb
+++ b/app/services/users/create_service.rb
@@ -9,8 +9,8 @@ module Users
@params = params.dup
end
- def execute(skip_authorization: false)
- user = Users::BuildService.new(current_user, params).execute(skip_authorization: skip_authorization)
+ def execute
+ user = build_class.new(current_user, params).execute
reset_token = user.generate_reset_token if user.recently_sent_password_reset?
after_create_hook(user, reset_token) if user.save
@@ -23,6 +23,11 @@ module Users
def after_create_hook(user, reset_token)
notify_new_user(user, reset_token)
end
+
+ def build_class
+ # overridden by inheriting classes
+ Users::BuildService
+ end
end
end
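
build_class is now the single point subclasses override; ordinary callers see no change beyond the dropped skip_authorization flag. A sketch:

    # Standard creation path, unchanged for ordinary callers:
    Users::CreateService.new(current_user, params).execute

    # Callers that relied on execute(skip_authorization: true) switch to the
    # subclass that overrides build_class (see AuthorizedCreateService above).
    Users::AuthorizedCreateService.new(current_user, params).execute
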
diff --git a/app/services/users/refresh_authorized_projects_service.rb b/app/services/users/refresh_authorized_projects_service.rb
index d28ff45bfdf..1850fa9747d 100644
--- a/app/services/users/refresh_authorized_projects_service.rb
+++ b/app/services/users/refresh_authorized_projects_service.rb
@@ -24,11 +24,6 @@ module Users
@source = source
@incorrect_auth_found_callback = incorrect_auth_found_callback
@missing_auth_found_callback = missing_auth_found_callback
-
- # We need an up to date User object that has access to all relations that
- # may have been created earlier. The only way to ensure this is to reload
- # the User object.
- user.reset
end
def execute
@@ -43,6 +38,10 @@ module Users
end
begin
+ # We need an up to date User object that has access to all relations that
+ # may have been created earlier. The only way to ensure this is to reload
+ # the User object.
+ user.reset
execute_without_lease
ensure
Gitlab::ExclusiveLease.cancel(lease_key, uuid)
diff --git a/app/services/users/registrations_build_service.rb b/app/services/users/registrations_build_service.rb
index 9d7bf0a7e18..2d367e7b185 100644
--- a/app/services/users/registrations_build_service.rb
+++ b/app/services/users/registrations_build_service.rb
@@ -6,13 +6,12 @@ module Users
private
- override :allow_caller_to_request_skip_confirmation?
- def allow_caller_to_request_skip_confirmation?
- true
+ def signup_params
+ super + [:skip_confirmation]
end
override :assign_skip_confirmation_from_settings?
- def assign_skip_confirmation_from_settings?(user_params)
+ def assign_skip_confirmation_from_settings?
user_params[:skip_confirmation].blank?
end
end
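
With signup_params extended rather than a separate override flag, the registration builder always accepts :skip_confirmation from params and otherwise falls back to the send_user_confirmation_email setting. A hedged sketch, with illustrative attribute values:

    Users::RegistrationsBuildService.new(
      nil,
      name: 'Example User',
      username: 'example',
      email: 'user@example.com',
      password: User.random_password
    ).execute
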
diff --git a/app/services/users/update_assigned_open_issue_count_service.rb b/app/services/users/update_assigned_open_issue_count_service.rb
deleted file mode 100644
index 2ed05853b2f..00000000000
--- a/app/services/users/update_assigned_open_issue_count_service.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-module Users
- # Service class for calculating and caching the number of assigned open issues for a user.
- class UpdateAssignedOpenIssueCountService
- attr_accessor :target_user
-
- def initialize(target_user:)
- @target_user = target_user
-
- raise ArgumentError, "Please provide a target user" unless target_user.is_a?(User)
- end
-
- def execute
- value = calculate_count
- Rails.cache.write(cache_key, value, expires_in: User::COUNT_CACHE_VALIDITY_PERIOD)
-
- ServiceResponse.success(payload: { count: value })
- rescue StandardError => e
- ServiceResponse.error(message: e.message)
- end
-
- private
-
- def cache_key
- ['users', target_user.id, 'assigned_open_issues_count']
- end
-
- def calculate_count
- IssuesFinder.new(target_user, assignee_id: target_user.id, state: 'opened', non_archived: true).execute.count
- end
- end
-end
diff --git a/app/services/web_hook_service.rb b/app/services/web_hook_service.rb
index 654d9356739..77d2139b3d1 100644
--- a/app/services/web_hook_service.rb
+++ b/app/services/web_hook_service.rb
@@ -27,18 +27,19 @@ class WebHookService
REQUEST_BODY_SIZE_LIMIT = 25.megabytes
GITLAB_EVENT_HEADER = 'X-Gitlab-Event'
- MAX_FAILURES = 100
attr_accessor :hook, :data, :hook_name, :request_options
+ attr_reader :uniqueness_token
def self.hook_to_event(hook_name)
hook_name.to_s.singularize.titleize
end
- def initialize(hook, data, hook_name)
+ def initialize(hook, data, hook_name, uniqueness_token = nil)
@hook = hook
@data = data
@hook_name = hook_name.to_s
+ @uniqueness_token = uniqueness_token
@request_options = {
timeout: Gitlab.config.gitlab.webhook_timeout,
allow_local_requests: hook.allow_local_requests?
@@ -69,8 +70,7 @@ class WebHookService
http_status: response.code,
message: response.to_s
}
- rescue SocketError, OpenSSL::SSL::SSLError, Errno::ECONNRESET, Errno::ECONNREFUSED, Errno::EHOSTUNREACH,
- Net::OpenTimeout, Net::ReadTimeout, Gitlab::HTTP::BlockedUrlError, Gitlab::HTTP::RedirectionTooDeep,
+ rescue *Gitlab::HTTP::HTTP_ERRORS,
Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError => e
execution_duration = Gitlab::Metrics::System.monotonic_time - start_time
log_execution(
@@ -91,9 +91,9 @@ class WebHookService
end
def async_execute
- if rate_limited?(hook)
- log_rate_limit(hook)
- else
+ Gitlab::ApplicationContext.with_context(hook.application_context) do
+ break log_rate_limit if rate_limited?
+
WebHookWorker.perform_async(hook.id, data, hook_name)
end
end
@@ -123,10 +123,8 @@ class WebHookService
end
def log_execution(trigger:, url:, request_data:, response:, execution_duration:, error_message: nil)
- handle_failure(response, hook)
-
- WebHookLog.create(
- web_hook: hook,
+ category = response_category(response)
+ log_data = {
trigger: trigger,
url: url,
execution_duration: execution_duration,
@@ -136,17 +134,19 @@ class WebHookService
response_body: safe_response_body(response),
response_status: response.code,
internal_error_message: error_message
- )
+ }
+
+ ::WebHooks::LogExecutionWorker
+ .perform_async(hook.id, log_data, category, uniqueness_token)
end
- def handle_failure(response, hook)
+ def response_category(response)
if response.success? || response.redirection?
- hook.enable!
+ :ok
elsif response.internal_server_error?
- next_backoff = hook.next_backoff
- hook.update!(disabled_until: next_backoff.from_now, backoff_count: hook.backoff_count + 1)
+ :error
else
- hook.update!(recent_failures: hook.recent_failures + 1) if hook.recent_failures < MAX_FAILURES
+ :failed
end
end
@@ -175,7 +175,7 @@ class WebHookService
response.body.encode('UTF-8', invalid: :replace, undef: :replace, replace: '')
end
- def rate_limited?(hook)
+ def rate_limited?
return false unless Feature.enabled?(:web_hooks_rate_limit, default_enabled: :yaml)
return false if rate_limit.nil?
@@ -190,18 +190,13 @@ class WebHookService
@rate_limit ||= hook.rate_limit
end
- def log_rate_limit(hook)
- payload = {
+ def log_rate_limit
+ Gitlab::AuthLogger.error(
message: 'Webhook rate limit exceeded',
hook_id: hook.id,
hook_type: hook.type,
- hook_name: hook_name
- }
-
- Gitlab::AuthLogger.error(payload)
-
- # Also log into application log for now, so we can use this information
- # to determine suitable limits for gitlab.com
- Gitlab::AppLogger.error(payload)
+ hook_name: hook_name,
+ **Gitlab::ApplicationContext.current
+ )
end
end
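
Hook failure bookkeeping moves off the request path: execute now only classifies the response (:ok, :error, :failed) and log_execution enqueues WebHooks::LogExecutionWorker with that category plus the optional uniqueness token, instead of mutating the hook inline. A hedged sketch of a caller; the hook name and token are illustrative, and the token is assumed to be supplied by the enqueuing worker:

    service = WebHookService.new(hook, data, :push_hooks, SecureRandom.uuid)
    result = service.execute
    # result is roughly { status: ..., http_status: ..., message: ... };
    # enabling, backing off, or failing the hook now happens asynchronously
    # in WebHooks::LogExecutionWorker rather than here.
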
diff --git a/app/services/web_hooks/log_execution_service.rb b/app/services/web_hooks/log_execution_service.rb
new file mode 100644
index 00000000000..6e58e15f093
--- /dev/null
+++ b/app/services/web_hooks/log_execution_service.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module WebHooks
+ class LogExecutionService
+ attr_reader :hook, :log_data, :response_category
+
+ def initialize(hook:, log_data:, response_category:)
+ @hook = hook
+ @log_data = log_data
+ @response_category = response_category
+ end
+
+ def execute
+ update_hook_executability
+ log_execution
+ end
+
+ private
+
+ def log_execution
+ WebHookLog.create!(web_hook: hook, **log_data.transform_keys(&:to_sym))
+ end
+
+ def update_hook_executability
+ case response_category
+ when :ok
+ hook.enable!
+ when :error
+ hook.backoff!
+ when :failed
+ hook.failed!
+ end
+ end
+ end
+end
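
A hedged usage sketch of the new service, pairing it with the categories produced by WebHookService#response_category; the log_data keys mirror those assembled in log_execution and are abbreviated here:

    WebHooks::LogExecutionService.new(
      hook: hook,
      log_data: {
        trigger: 'push_hooks',
        url: hook.url,
        execution_duration: 1.2,
        response_status: 200,
        response_body: '',
        internal_error_message: nil
        # request/response header fields omitted for brevity
      },
      response_category: :ok  # :ok enables the hook, :error backs it off, :failed records a failure
    ).execute
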