author    GitLab Bot <gitlab-bot@gitlab.com>  2021-04-20 23:50:22 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-04-20 23:50:22 +0000
commit    9dc93a4519d9d5d7be48ff274127136236a3adb3
tree      70467ae3692a0e35e5ea56bcb803eb512a10bedb /app/services
parent    4b0f34b6d759d6299322b3a54453e930c6121ff0
Add latest changes from gitlab-org/gitlab@13-11-stable-ee (tag: v13.11.0-rc43)
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/application_settings/base_service.rb | 4
-rw-r--r--  app/services/authorized_project_update/find_records_due_for_refresh_service.rb | 92
-rw-r--r--  app/services/base_container_service.rb | 4
-rw-r--r--  app/services/base_service.rb | 4
-rw-r--r--  app/services/boards/base_item_move_service.rb | 20
-rw-r--r--  app/services/boards/base_service.rb | 4
-rw-r--r--  app/services/boards/destroy_service.rb | 8
-rw-r--r--  app/services/boards/issues/move_service.rb | 23
-rw-r--r--  app/services/boards/lists/base_update_service.rb | 58
-rw-r--r--  app/services/boards/lists/list_service.rb | 10
-rw-r--r--  app/services/boards/lists/update_service.rb | 45
-rw-r--r--  app/services/ci/abort_pipelines_service.rb | 32
-rw-r--r--  app/services/ci/abort_project_pipelines_service.rb | 25
-rw-r--r--  app/services/ci/after_requeue_job_service.rb | 28
-rw-r--r--  app/services/ci/cancel_user_pipelines_service.rb | 18
-rw-r--r--  app/services/ci/create_job_artifacts_service.rb | 172
-rw-r--r--  app/services/ci/create_pipeline_service.rb | 3
-rw-r--r--  app/services/ci/create_web_ide_terminal_service.rb | 3
-rw-r--r--  app/services/ci/destroy_expired_job_artifacts_service.rb | 56
-rw-r--r--  app/services/ci/disable_user_pipeline_schedules_service.rb | 11
-rw-r--r--  app/services/ci/drop_pipeline_service.rb | 37
-rw-r--r--  app/services/ci/generate_coverage_reports_service.rb | 8
-rw-r--r--  app/services/ci/job_artifacts/create_service.rb | 174
-rw-r--r--  app/services/ci/job_artifacts/destroy_all_expired_service.rb | 58
-rw-r--r--  app/services/ci/job_artifacts/destroy_batch_service.rb | 74
-rw-r--r--  app/services/ci/job_artifacts_destroy_batch_service.rb | 72
-rw-r--r--  app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb (renamed from app/services/ci/pipeline_artifacts/destroy_expired_artifacts_service.rb) | 2
-rw-r--r--  app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb | 8
-rw-r--r--  app/services/ci/pipeline_trigger_service.rb | 23
-rw-r--r--  app/services/ci/play_bridge_service.rb | 4
-rw-r--r--  app/services/ci/play_build_service.rb | 8
-rw-r--r--  app/services/ci/process_build_service.rb | 25
-rw-r--r--  app/services/ci/process_pipeline_service.rb | 2
-rw-r--r--  app/services/ci/register_job_service.rb | 30
-rw-r--r--  app/services/ci/retry_build_service.rb | 18
-rw-r--r--  app/services/ci/retry_pipeline_service.rb | 2
-rw-r--r--  app/services/ci/stop_environments_service.rb | 2
-rw-r--r--  app/services/ci/test_failure_history_service.rb | 48
-rw-r--r--  app/services/clusters/create_service.rb | 3
-rw-r--r--  app/services/clusters/destroy_service.rb | 3
-rw-r--r--  app/services/clusters/integrations/create_service.rb | 43
-rw-r--r--  app/services/clusters/update_service.rb | 3
-rw-r--r--  app/services/concerns/integrations/project_test_data.rb | 2
-rw-r--r--  app/services/concerns/suggestible.rb | 2
-rw-r--r--  app/services/deployments/link_merge_requests_service.rb | 28
-rw-r--r--  app/services/draft_notes/base_service.rb | 4
-rw-r--r--  app/services/git/wiki_push_service.rb | 4
-rw-r--r--  app/services/git/wiki_push_service/change.rb | 4
-rw-r--r--  app/services/groups/base_service.rb | 16
-rw-r--r--  app/services/groups/count_service.rb | 49
-rw-r--r--  app/services/groups/create_service.rb | 9
-rw-r--r--  app/services/groups/group_links/create_service.rb | 2
-rw-r--r--  app/services/groups/merge_requests_count_service.rb | 22
-rw-r--r--  app/services/groups/nested_create_service.rb | 3
-rw-r--r--  app/services/groups/open_issues_count_service.rb | 41
-rw-r--r--  app/services/groups/update_service.rb | 12
-rw-r--r--  app/services/issuable/bulk_update_service.rb | 17
-rw-r--r--  app/services/issuable/destroy_service.rb | 34
-rw-r--r--  app/services/issuable/process_assignees.rb | 9
-rw-r--r--  app/services/issuable_base_service.rb | 74
-rw-r--r--  app/services/issuable_links/create_service.rb | 8
-rw-r--r--  app/services/issuable_links/destroy_service.rb | 8
-rw-r--r--  app/services/issuable_links/list_service.rb | 3
-rw-r--r--  app/services/issue_rebalancing_service.rb | 2
-rw-r--r--  app/services/issues/after_create_service.rb | 13
-rw-r--r--  app/services/issues/base_service.rb | 2
-rw-r--r--  app/services/issues/create_service.rb | 8
-rw-r--r--  app/services/issues/update_service.rb | 18
-rw-r--r--  app/services/jira_connect_subscriptions/base_service.rb | 4
-rw-r--r--  app/services/keys/base_service.rb | 3
-rw-r--r--  app/services/keys/create_service.rb | 3
-rw-r--r--  app/services/keys/expiry_notification_service.rb | 42
-rw-r--r--  app/services/mattermost/create_team_service.rb | 3
-rw-r--r--  app/services/members/create_service.rb | 111
-rw-r--r--  app/services/members/invite_service.rb | 92
-rw-r--r--  app/services/merge_requests/add_context_service.rb | 4
-rw-r--r--  app/services/merge_requests/after_create_service.rb | 12
-rw-r--r--  app/services/merge_requests/base_service.rb | 16
-rw-r--r--  app/services/merge_requests/build_service.rb | 38
-rw-r--r--  app/services/merge_requests/create_service.rb | 12
-rw-r--r--  app/services/merge_requests/handle_assignees_change_service.rb | 43
-rw-r--r--  app/services/merge_requests/merge_to_ref_service.rb | 8
-rw-r--r--  app/services/merge_requests/migrate_external_diffs_service.rb | 2
-rw-r--r--  app/services/merge_requests/push_options_handler_service.rb | 7
-rw-r--r--  app/services/merge_requests/refresh_service.rb | 9
-rw-r--r--  app/services/merge_requests/resolve_todos_service.rb | 28
-rw-r--r--  app/services/merge_requests/retarget_chain_service.rb | 2
-rw-r--r--  app/services/merge_requests/update_assignees_service.rb | 64
-rw-r--r--  app/services/merge_requests/update_service.rb | 77
-rw-r--r--  app/services/metrics/dashboard/annotations/create_service.rb | 3
-rw-r--r--  app/services/metrics/dashboard/annotations/delete_service.rb | 3
-rw-r--r--  app/services/metrics/dashboard/grafana_metric_embed_service.rb | 6
-rw-r--r--  app/services/metrics/dashboard/panel_preview_service.rb | 4
-rw-r--r--  app/services/metrics/users_starred_dashboards/create_service.rb | 4
-rw-r--r--  app/services/metrics/users_starred_dashboards/delete_service.rb | 4
-rw-r--r--  app/services/milestones/base_service.rb | 4
-rw-r--r--  app/services/milestones/find_or_create_service.rb | 4
-rw-r--r--  app/services/milestones/merge_requests_count_service.rb | 17
-rw-r--r--  app/services/milestones/transfer_service.rb | 9
-rw-r--r--  app/services/namespace_settings/update_service.rb | 13
-rw-r--r--  app/services/namespaces/in_product_marketing_emails_service.rb | 47
-rw-r--r--  app/services/notes/create_service.rb | 25
-rw-r--r--  app/services/notification_recipients/builder/base.rb | 44
-rw-r--r--  app/services/notification_recipients/builder/request_review.rb | 4
-rw-r--r--  app/services/notification_service.rb | 16
-rw-r--r--  app/services/packages/composer/composer_json_service.rb | 3
-rw-r--r--  app/services/packages/composer/version_parser_service.rb | 3
-rw-r--r--  app/services/packages/debian/create_distribution_service.rb | 3
-rw-r--r--  app/services/packages/debian/extract_changes_metadata_service.rb | 112
-rw-r--r--  app/services/packages/debian/extract_metadata_service.rb | 21
-rw-r--r--  app/services/packages/debian/parse_debian822_service.rb | 2
-rw-r--r--  app/services/packages/debian/process_changes_service.rb | 102
-rw-r--r--  app/services/packages/debian/update_distribution_service.rb | 3
-rw-r--r--  app/services/packages/go/create_package_service.rb | 70
-rw-r--r--  app/services/packages/go/sync_packages_service.rb | 24
-rw-r--r--  app/services/packages/maven/find_or_create_package_service.rb | 3
-rw-r--r--  app/services/packages/maven/metadata/sync_service.rb | 15
-rw-r--r--  app/services/packages/nuget/create_dependency_service.rb | 4
-rw-r--r--  app/services/packages/rubygems/create_dependencies_service.rb | 44
-rw-r--r--  app/services/packages/rubygems/create_gemspec_service.rb | 42
-rw-r--r--  app/services/packages/rubygems/metadata_extraction_service.rb | 56
-rw-r--r--  app/services/packages/rubygems/process_gem_service.rb | 124
-rw-r--r--  app/services/pages/delete_service.rb | 2
-rw-r--r--  app/services/pages/migrate_from_legacy_storage_service.rb | 43
-rw-r--r--  app/services/pages/migrate_legacy_storage_to_deployment_service.rb | 17
-rw-r--r--  app/services/pages/zip_directory_service.rb | 4
-rw-r--r--  app/services/pod_logs/kubernetes_service.rb | 2
-rw-r--r--  app/services/post_receive_service.rb | 2
-rw-r--r--  app/services/projects/alerting/notify_service.rb | 2
-rw-r--r--  app/services/projects/branches_by_mode_service.rb | 3
-rw-r--r--  app/services/projects/create_from_template_service.rb | 3
-rw-r--r--  app/services/projects/create_service.rb | 17
-rw-r--r--  app/services/projects/destroy_service.rb | 4
-rw-r--r--  app/services/projects/download_service.rb | 3
-rw-r--r--  app/services/projects/gitlab_projects_import_service.rb | 4
-rw-r--r--  app/services/projects/update_pages_configuration_service.rb | 2
-rw-r--r--  app/services/projects/update_pages_service.rb | 15
-rw-r--r--  app/services/projects/update_remote_mirror_service.rb | 14
-rw-r--r--  app/services/prometheus/create_default_alerts_service.rb | 2
-rw-r--r--  app/services/prometheus/proxy_service.rb | 4
-rw-r--r--  app/services/prometheus/proxy_variable_substitution_service.rb | 3
-rw-r--r--  app/services/releases/base_service.rb | 4
-rw-r--r--  app/services/repositories/changelog_service.rb | 10
-rw-r--r--  app/services/resource_access_tokens/create_service.rb | 2
-rw-r--r--  app/services/resource_access_tokens/revoke_service.rb | 12
-rw-r--r--  app/services/resource_events/base_synthetic_notes_builder_service.rb | 4
-rw-r--r--  app/services/resource_events/change_labels_service.rb | 3
-rw-r--r--  app/services/resource_events/change_state_service.rb | 3
-rw-r--r--  app/services/search/global_service.rb | 3
-rw-r--r--  app/services/search/project_service.rb | 4
-rw-r--r--  app/services/snippets/create_service.rb | 2
-rw-r--r--  app/services/snippets/update_service.rb | 2
-rw-r--r--  app/services/spam/spam_action_service.rb | 32
-rw-r--r--  app/services/spam/spam_params.rb | 8
-rw-r--r--  app/services/submit_usage_ping_service.rb | 8
-rw-r--r--  app/services/system_hooks_service.rb | 24
-rw-r--r--  app/services/system_note_service.rb | 2
-rw-r--r--  app/services/system_notes/alert_management_service.rb | 2
-rw-r--r--  app/services/system_notes/commit_service.rb | 2
-rw-r--r--  app/services/task_list_toggle_service.rb | 8
-rw-r--r--  app/services/todo_service.rb | 25
-rw-r--r--  app/services/todos/destroy/base_service.rb | 2
-rw-r--r--  app/services/todos/destroy/destroyed_issuable_service.rb | 46
-rw-r--r--  app/services/todos/destroy/entity_leave_service.rb | 15
-rw-r--r--  app/services/todos/destroy/private_features_service.rb | 2
-rw-r--r--  app/services/two_factor/base_service.rb | 3
-rw-r--r--  app/services/upload_service.rb | 10
-rw-r--r--  app/services/user_agent_detail_service.rb | 3
-rw-r--r--  app/services/user_preferences/update_service.rb | 20
-rw-r--r--  app/services/users/activity_service.rb | 2
-rw-r--r--  app/services/users/batch_status_cleaner_service.rb | 2
-rw-r--r--  app/services/users/refresh_authorized_projects_service.rb | 71
-rw-r--r--  app/services/users/respond_to_terms_service.rb | 3
-rw-r--r--  app/services/users/set_status_service.rb | 3
-rw-r--r--  app/services/users/update_canonical_email_service.rb | 2
-rw-r--r--  app/services/users/update_todo_count_cache_service.rb | 34
176 files changed, 2553 insertions, 1050 deletions
diff --git a/app/services/application_settings/base_service.rb b/app/services/application_settings/base_service.rb
index ebe067536ca..0929b30b7e9 100644
--- a/app/services/application_settings/base_service.rb
+++ b/app/services/application_settings/base_service.rb
@@ -3,7 +3,9 @@
module ApplicationSettings
class BaseService < ::BaseService
def initialize(application_setting, user, params = {})
- @application_setting, @current_user, @params = application_setting, user, params.dup
+ @application_setting = application_setting
+ @current_user = user
+ @params = params.dup
end
end
end
diff --git a/app/services/authorized_project_update/find_records_due_for_refresh_service.rb b/app/services/authorized_project_update/find_records_due_for_refresh_service.rb
new file mode 100644
index 00000000000..c4b18a26d0e
--- /dev/null
+++ b/app/services/authorized_project_update/find_records_due_for_refresh_service.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+module AuthorizedProjectUpdate
+ # Service for finding the authorized_projects records of a user that need addition or removal.
+ #
+ # Usage:
+ #
+ # user = User.find_by(username: 'alice')
+ # service = AuthorizedProjectUpdate::FindRecordsDueForRefreshService.new(user)
+ # service.execute
+ class FindRecordsDueForRefreshService
+ def initialize(user, source: nil, incorrect_auth_found_callback: nil, missing_auth_found_callback: nil)
+ @user = user
+ @source = source
+ @incorrect_auth_found_callback = incorrect_auth_found_callback
+ @missing_auth_found_callback = missing_auth_found_callback
+ end
+
+ def execute
+ current = current_authorizations_per_project
+ fresh = fresh_access_levels_per_project
+
+ # Projects that have more than one authorization associated with
+ # the user need to be deleted.
+ # The correct authorization is added to the ``add`` array in the
+ # next stage.
+ remove = projects_with_duplicates
+ current.except!(*projects_with_duplicates)
+
+ remove |= current.each_with_object([]) do |(project_id, row), array|
+ # rows not in the new list or with a different access level should be
+ # removed.
+ if !fresh[project_id] || fresh[project_id] != row.access_level
+ if incorrect_auth_found_callback
+ incorrect_auth_found_callback.call(project_id, row.access_level)
+ end
+
+ array << row.project_id
+ end
+ end
+
+ add = fresh.each_with_object([]) do |(project_id, level), array|
+ # rows not in the old list or with a different access level should be
+ # added.
+ if !current[project_id] || current[project_id].access_level != level
+ if missing_auth_found_callback
+ missing_auth_found_callback.call(project_id, level)
+ end
+
+ array << [user.id, project_id, level]
+ end
+ end
+
+ [remove, add]
+ end
+
+ def needs_refresh?
+ remove, add = execute
+
+ remove.present? || add.present?
+ end
+
+ def fresh_access_levels_per_project
+ fresh_authorizations.each_with_object({}) do |row, hash|
+ hash[row.project_id] = row.access_level
+ end
+ end
+
+ def current_authorizations_per_project
+ current_authorizations.index_by(&:project_id)
+ end
+
+ def current_authorizations
+ @current_authorizations ||= user.project_authorizations.select(:project_id, :access_level)
+ end
+
+ def fresh_authorizations
+ Gitlab::ProjectAuthorizations.new(user).calculate
+ end
+
+ private
+
+ attr_reader :user, :source, :incorrect_auth_found_callback, :missing_auth_found_callback
+
+ def projects_with_duplicates
+ @projects_with_duplicates ||= current_authorizations
+ .group_by(&:project_id)
+ .select { |project_id, authorizations| authorizations.count > 1 }
+ .keys
+ end
+ end
+end
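The usage note at the top of the new service shows how it is constructed. A rough sketch of consuming its return value (not part of this diff; assumes a Rails console and an existing user record):

    user = User.find_by(username: 'alice')
    service = AuthorizedProjectUpdate::FindRecordsDueForRefreshService.new(user)

    remove, add = service.execute
    # remove: project IDs whose authorization rows are duplicated or carry a stale access level
    # add:    [user_id, project_id, access_level] tuples that are missing entirely

    service.needs_refresh? # => true when either array is non-empty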
diff --git a/app/services/base_container_service.rb b/app/services/base_container_service.rb
index 56e4b8c908c..6852237dc25 100644
--- a/app/services/base_container_service.rb
+++ b/app/services/base_container_service.rb
@@ -7,6 +7,8 @@ class BaseContainerService
attr_reader :container, :current_user, :params
def initialize(container:, current_user: nil, params: {})
- @container, @current_user, @params = container, current_user, params.dup
+ @container = container
+ @current_user = current_user
+ @params = params.dup
end
end
diff --git a/app/services/base_service.rb b/app/services/base_service.rb
index b4c4b6980a8..20dfeb67815 100644
--- a/app/services/base_service.rb
+++ b/app/services/base_service.rb
@@ -15,7 +15,9 @@ class BaseService
attr_accessor :project, :current_user, :params
def initialize(project, user = nil, params = {})
- @project, @current_user, @params = project, user, params.dup
+ @project = project
+ @current_user = user
+ @params = params.dup
end
delegate :repository, to: :project
diff --git a/app/services/boards/base_item_move_service.rb b/app/services/boards/base_item_move_service.rb
index bf3e29df54b..28fb1e43043 100644
--- a/app/services/boards/base_item_move_service.rb
+++ b/app/services/boards/base_item_move_service.rb
@@ -22,6 +22,12 @@ module Boards
)
end
+ reposition_ids = move_between_ids(params)
+ if reposition_ids
+ attrs[:move_between_ids] = reposition_ids
+ attrs.merge!(reposition_parent)
+ end
+
attrs
end
@@ -63,10 +69,22 @@ module Boards
if moving_to_list.movable?
moving_from_list.label_id
else
- ::Label.ids_on_board(board.id)
+ board_label_ids
end
Array(label_ids).compact
end
+
+ def board_label_ids
+ ::Label.ids_on_board(board.id)
+ end
+
+ def move_between_ids(move_params)
+ ids = [move_params[:move_after_id], move_params[:move_before_id]]
+ .map(&:to_i)
+ .map { |m| m > 0 ? m : nil }
+
+ ids.any? ? ids : nil
+ end
end
end
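The move_between_ids helper hoisted into this base class normalizes the two positioning params. A small plain-Ruby illustration of that logic (not part of this diff):

    move_params = { move_after_id: '42', move_before_id: nil }
    ids = [move_params[:move_after_id], move_params[:move_before_id]]
      .map(&:to_i)
      .map { |m| m > 0 ? m : nil }
    ids.any? ? ids : nil # => [42, nil], so reposition attributes get merged into attrs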
diff --git a/app/services/boards/base_service.rb b/app/services/boards/base_service.rb
index 439a5c06223..83bb69b3822 100644
--- a/app/services/boards/base_service.rb
+++ b/app/services/boards/base_service.rb
@@ -6,7 +6,9 @@ module Boards
attr_accessor :parent, :current_user, :params
def initialize(parent, user, params = {})
- @parent, @current_user, @params = parent, user, params.dup
+ @parent = parent
+ @current_user = user
+ @params = params.dup
end
end
end
diff --git a/app/services/boards/destroy_service.rb b/app/services/boards/destroy_service.rb
index 8f3d4b58b7b..0b1cd61b119 100644
--- a/app/services/boards/destroy_service.rb
+++ b/app/services/boards/destroy_service.rb
@@ -3,7 +3,7 @@
module Boards
class DestroyService < Boards::BaseService
def execute(board)
- if parent.boards.size == 1
+ if boards.size == 1
return ServiceResponse.error(message: "The board could not be deleted, because the parent doesn't have any other boards.")
end
@@ -11,5 +11,11 @@ module Boards
ServiceResponse.success
end
+
+ private
+
+ def boards
+ parent.boards
+ end
end
end
diff --git a/app/services/boards/issues/move_service.rb b/app/services/boards/issues/move_service.rb
index 99374fa01ae..76ea57968b2 100644
--- a/app/services/boards/issues/move_service.rb
+++ b/app/services/boards/issues/move_service.rb
@@ -3,8 +3,6 @@
module Boards
module Issues
class MoveService < Boards::BaseItemMoveService
- extend ::Gitlab::Utils::Override
-
def execute_multiple(issues)
return execute_multiple_empty_result if issues.empty?
@@ -57,25 +55,8 @@ module Boards
::Issues::UpdateService.new(issue.project, current_user, issue_modification_params).execute(issue)
end
- override :issuable_params
- def issuable_params(issuable)
- attrs = super
-
- move_between_ids = move_between_ids(params)
- if move_between_ids
- attrs[:move_between_ids] = move_between_ids
- attrs[:board_group_id] = board.group&.id
- end
-
- attrs
- end
-
- def move_between_ids(move_params)
- ids = [move_params[:move_after_id], move_params[:move_before_id]]
- .map(&:to_i)
- .map { |m| m > 0 ? m : nil }
-
- ids.any? ? ids : nil
+ def reposition_parent
+ { board_group_id: board.group&.id }
end
end
end
diff --git a/app/services/boards/lists/base_update_service.rb b/app/services/boards/lists/base_update_service.rb
new file mode 100644
index 00000000000..faf58e405fc
--- /dev/null
+++ b/app/services/boards/lists/base_update_service.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Boards
+ module Lists
+ class BaseUpdateService < Boards::BaseService
+ def execute(list)
+ if execute_by_params(list)
+ success(list: list)
+ else
+ error(list.errors.messages, 422)
+ end
+ end
+
+ private
+
+ def execute_by_params(list)
+ update_preferences_result = update_preferences(list) if can_read?(list)
+ update_position_result = update_position(list) if can_admin?(list)
+
+ update_preferences_result || update_position_result
+ end
+
+ def update_preferences(list)
+ return unless preferences?
+
+ list.update_preferences_for(current_user, preferences)
+ end
+
+ def update_position(list)
+ return unless position?
+
+ move_service = Boards::Lists::MoveService.new(parent, current_user, params)
+
+ move_service.execute(list)
+ end
+
+ def preferences
+ { collapsed: Gitlab::Utils.to_boolean(params[:collapsed]) }
+ end
+
+ def preferences?
+ params.has_key?(:collapsed)
+ end
+
+ def position?
+ params.has_key?(:position)
+ end
+
+ def can_read?(list)
+ raise NotImplementedError
+ end
+
+ def can_admin?(list)
+ raise NotImplementedError
+ end
+ end
+ end
+end
diff --git a/app/services/boards/lists/list_service.rb b/app/services/boards/lists/list_service.rb
index 3c296cde51e..03d54a8c74c 100644
--- a/app/services/boards/lists/list_service.rb
+++ b/app/services/boards/lists/list_service.rb
@@ -23,12 +23,10 @@ module Boards
end
def hidden_lists_for(board)
- hidden = []
-
- hidden << ::List.list_types[:backlog] if board.hide_backlog_list
- hidden << ::List.list_types[:closed] if board.hide_closed_list
-
- hidden
+ [].tap do |hidden|
+ hidden << ::List.list_types[:backlog] if board.hide_backlog_list?
+ hidden << ::List.list_types[:closed] if board.hide_closed_list?
+ end
end
end
end
diff --git a/app/services/boards/lists/update_service.rb b/app/services/boards/lists/update_service.rb
index e2d9c371ca2..2e1a6592cd9 100644
--- a/app/services/boards/lists/update_service.rb
+++ b/app/services/boards/lists/update_service.rb
@@ -2,50 +2,7 @@
module Boards
module Lists
- class UpdateService < Boards::BaseService
- def execute(list)
- if execute_by_params(list)
- success(list: list)
- else
- error(list.errors.messages, 422)
- end
- end
-
- private
-
- def execute_by_params(list)
- update_preferences_result = update_preferences(list) if can_read?(list)
- update_position_result = update_position(list) if can_admin?(list)
-
- update_preferences_result || update_position_result
- end
-
- def update_preferences(list)
- return unless preferences?
-
- list.update_preferences_for(current_user, preferences)
- end
-
- def update_position(list)
- return unless position?
-
- move_service = Boards::Lists::MoveService.new(parent, current_user, params)
-
- move_service.execute(list)
- end
-
- def preferences
- { collapsed: Gitlab::Utils.to_boolean(params[:collapsed]) }
- end
-
- def preferences?
- params.has_key?(:collapsed)
- end
-
- def position?
- params.has_key?(:position)
- end
-
+ class UpdateService < Boards::Lists::BaseUpdateService
def can_read?(list)
Ability.allowed?(current_user, :read_issue_board_list, parent)
end
diff --git a/app/services/ci/abort_pipelines_service.rb b/app/services/ci/abort_pipelines_service.rb
new file mode 100644
index 00000000000..43734c4dd39
--- /dev/null
+++ b/app/services/ci/abort_pipelines_service.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Ci
+ class AbortPipelinesService
+ # NOTE: This call fails pipelines in bulk without running callbacks.
+ # Only for pipeline abandonment scenarios (example: project deletion)
+ def execute(pipelines, failure_reason)
+ pipelines.cancelable.each_batch(of: 100) do |pipeline_batch|
+ now = Time.current
+
+ basic_attributes = { status: :failed }
+ all_attributes = basic_attributes.merge(failure_reason: failure_reason, finished_at: now)
+
+ bulk_fail_for(Ci::Stage, pipeline_batch, basic_attributes)
+ bulk_fail_for(CommitStatus, pipeline_batch, all_attributes)
+
+ pipeline_batch.update_all(all_attributes)
+ end
+
+ ServiceResponse.success(message: 'Pipelines stopped')
+ end
+
+ private
+
+ def bulk_fail_for(klass, pipelines, attributes)
+ klass.in_pipelines(pipelines)
+ .cancelable
+ .in_batches(of: 150) # rubocop:disable Cop/InBatches
+ .update_all(attributes)
+ end
+ end
+end
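As the NOTE in the new service warns, this path fails pipelines in bulk without callbacks and is only for abandonment scenarios. A minimal invocation sketch (not part of this diff; the caller and the :project_deleted failure reason are assumptions):

    pipelines = project.all_pipelines # must be an ActiveRecord relation, not an array
    response = Ci::AbortPipelinesService.new.execute(pipelines, :project_deleted) # failure reason assumed
    response.message # => "Pipelines stopped"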
diff --git a/app/services/ci/abort_project_pipelines_service.rb b/app/services/ci/abort_project_pipelines_service.rb
deleted file mode 100644
index 0b2fa9ed3c0..00000000000
--- a/app/services/ci/abort_project_pipelines_service.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- class AbortProjectPipelinesService
- # Danger: Cancels in bulk without callbacks
- # Only for pipeline abandonment scenarios (current example: project delete)
- def execute(project)
- return unless Feature.enabled?(:abort_deleted_project_pipelines, default_enabled: :yaml)
-
- pipelines = project.all_pipelines.cancelable
- bulk_abort!(pipelines, status: :canceled)
-
- ServiceResponse.success(message: 'Pipelines canceled')
- end
-
- private
-
- def bulk_abort!(pipelines, status:)
- pipelines.each_batch do |pipeline_batch|
- CommitStatus.in_pipelines(pipeline_batch).in_batches.update_all(status: status) # rubocop: disable Cop/InBatches
- pipeline_batch.update_all(status: status)
- end
- end
- end
-end
diff --git a/app/services/ci/after_requeue_job_service.rb b/app/services/ci/after_requeue_job_service.rb
new file mode 100644
index 00000000000..3858ee9d550
--- /dev/null
+++ b/app/services/ci/after_requeue_job_service.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Ci
+ class AfterRequeueJobService < ::BaseService
+ def execute(processable)
+ process_subsequent_jobs(processable)
+ reset_ancestor_bridges(processable)
+ end
+
+ private
+
+ def process_subsequent_jobs(processable)
+ processable.pipeline.processables.skipped.after_stage(processable.stage_idx).find_each do |processable|
+ process(processable)
+ end
+ end
+
+ def reset_ancestor_bridges(processable)
+ processable.pipeline.reset_ancestor_bridges!
+ end
+
+ def process(processable)
+ Gitlab::OptimisticLocking.retry_lock(processable, name: 'ci_requeue_job') do |processable|
+ processable.process(current_user)
+ end
+ end
+ end
+end
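This new service is what the play and retry services further down delegate to after re-enqueuing a job. A direct-call sketch mirroring those call sites (not part of this diff; project, current_user and build are assumed to be in scope):

    Ci::AfterRequeueJobService.new(project, current_user).execute(build)
    # processes skipped jobs in later stages and resets the pipeline's ancestor bridges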
diff --git a/app/services/ci/cancel_user_pipelines_service.rb b/app/services/ci/cancel_user_pipelines_service.rb
deleted file mode 100644
index 3d3a8032e8e..00000000000
--- a/app/services/ci/cancel_user_pipelines_service.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- class CancelUserPipelinesService
- # rubocop: disable CodeReuse/ActiveRecord
- # This is a bug with CodeReuse/ActiveRecord cop
- # https://gitlab.com/gitlab-org/gitlab/issues/32332
- def execute(user)
- # TODO: fix N+1 queries https://gitlab.com/gitlab-org/gitlab/-/issues/300685
- user.pipelines.cancelable.find_each(&:cancel_running)
-
- ServiceResponse.success(message: 'Pipeline canceled')
- rescue ActiveRecord::StaleObjectError
- ServiceResponse.error(message: 'Error canceling pipeline')
- end
- # rubocop: enable CodeReuse/ActiveRecord
- end
-end
diff --git a/app/services/ci/create_job_artifacts_service.rb b/app/services/ci/create_job_artifacts_service.rb
deleted file mode 100644
index f1fdc8e2490..00000000000
--- a/app/services/ci/create_job_artifacts_service.rb
+++ /dev/null
@@ -1,172 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- class CreateJobArtifactsService < ::BaseService
- include Gitlab::Utils::UsageData
-
- ArtifactsExistError = Class.new(StandardError)
-
- LSIF_ARTIFACT_TYPE = 'lsif'
- METRICS_REPORT_UPLOAD_EVENT_NAME = 'i_testing_metrics_report_artifact_uploaders'
-
- OBJECT_STORAGE_ERRORS = [
- Errno::EIO,
- Google::Apis::ServerError,
- Signet::RemoteServerError
- ].freeze
-
- def initialize(job)
- @job = job
- @project = job.project
- end
-
- def authorize(artifact_type:, filesize: nil)
- result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
- return result unless result[:status] == :success
-
- headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))
-
- if lsif?(artifact_type)
- headers[:ProcessLsif] = true
- track_usage_event('i_source_code_code_intelligence', project.id)
- end
-
- success(headers: headers)
- end
-
- def execute(artifacts_file, params, metadata_file: nil)
- result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
- return result unless result[:status] == :success
-
- return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)
-
- artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
- result = parse_artifact(artifact)
-
- track_artifact_uploader(artifact)
-
- return result unless result[:status] == :success
-
- persist_artifact(artifact, artifact_metadata, params)
- end
-
- private
-
- attr_reader :job, :project
-
- def validate_requirements(artifact_type:, filesize:)
- return too_large_error if too_large?(artifact_type, filesize)
-
- success
- end
-
- def too_large?(type, size)
- size > max_size(type) if size
- end
-
- def lsif?(type)
- type == LSIF_ARTIFACT_TYPE
- end
-
- def max_size(type)
- Ci::JobArtifact.max_artifact_size(type: type, project: project)
- end
-
- def forbidden_type_error(type)
- error("#{type} artifacts are forbidden", :forbidden)
- end
-
- def too_large_error
- error('file size has reached maximum size limit', :payload_too_large)
- end
-
- def build_artifact(artifacts_file, params, metadata_file)
- expire_in = params['expire_in'] ||
- Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
-
- artifact = Ci::JobArtifact.new(
- job_id: job.id,
- project: project,
- file: artifacts_file,
- file_type: params[:artifact_type],
- file_format: params[:artifact_format],
- file_sha256: artifacts_file.sha256,
- expire_in: expire_in)
-
- artifact_metadata = if metadata_file
- Ci::JobArtifact.new(
- job_id: job.id,
- project: project,
- file: metadata_file,
- file_type: :metadata,
- file_format: :gzip,
- file_sha256: metadata_file.sha256,
- expire_in: expire_in)
- end
-
- [artifact, artifact_metadata]
- end
-
- def parse_artifact(artifact)
- unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
- return success
- end
-
- case artifact.file_type
- when 'dotenv' then parse_dotenv_artifact(artifact)
- when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
- else success
- end
- end
-
- def persist_artifact(artifact, artifact_metadata, params)
- Ci::JobArtifact.transaction do
- artifact.save!
- artifact_metadata&.save!
-
- # NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
- job.update_column(:artifacts_expire_at, artifact.expire_at)
- end
-
- success
- rescue ActiveRecord::RecordNotUnique => error
- track_exception(error, params)
- error('another artifact of the same type already exists', :bad_request)
- rescue *OBJECT_STORAGE_ERRORS => error
- track_exception(error, params)
- error(error.message, :service_unavailable)
- rescue => error
- track_exception(error, params)
- error(error.message, :bad_request)
- end
-
- def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
- existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
- return false unless existing_artifact
-
- existing_artifact.file_sha256 == artifacts_file.sha256
- end
-
- def track_exception(error, params)
- Gitlab::ErrorTracking.track_exception(error,
- job_id: job.id,
- project_id: job.project_id,
- uploading_type: params[:artifact_type]
- )
- end
-
- def track_artifact_uploader(artifact)
- return unless artifact.file_type == 'metrics'
-
- track_usage_event(METRICS_REPORT_UPLOAD_EVENT_NAME, @job.user_id)
- end
-
- def parse_dotenv_artifact(artifact)
- Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
- end
-
- def parse_cluster_applications_artifact(artifact)
- Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
- end
- end
-end
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 0fd47e625fd..ca936307acc 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -96,7 +96,8 @@ module Ci
# rubocop: enable Metrics/ParameterLists
def execute!(*args, &block)
- source, params = args[0], Hash(args[1])
+ source = args[0]
+ params = Hash(args[1])
execute(source, **params, &block).tap do |pipeline|
unless pipeline.persisted?
diff --git a/app/services/ci/create_web_ide_terminal_service.rb b/app/services/ci/create_web_ide_terminal_service.rb
index 785d82094b9..3b89a599180 100644
--- a/app/services/ci/create_web_ide_terminal_service.rb
+++ b/app/services/ci/create_web_ide_terminal_service.rb
@@ -58,7 +58,8 @@ module Ci
builds: [terminal_build_seed]
}
- Gitlab::Ci::Pipeline::Seed::Stage.new(pipeline, attributes, [])
+ seed_context = Gitlab::Ci::Pipeline::Seed::Context.new(pipeline)
+ Gitlab::Ci::Pipeline::Seed::Stage.new(seed_context, attributes, [])
end
def terminal_build_seed
diff --git a/app/services/ci/destroy_expired_job_artifacts_service.rb b/app/services/ci/destroy_expired_job_artifacts_service.rb
deleted file mode 100644
index d91cfb3cc82..00000000000
--- a/app/services/ci/destroy_expired_job_artifacts_service.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- class DestroyExpiredJobArtifactsService
- include ::Gitlab::ExclusiveLeaseHelpers
- include ::Gitlab::LoopHelpers
-
- BATCH_SIZE = 100
- LOOP_TIMEOUT = 5.minutes
- LOOP_LIMIT = 1000
- EXCLUSIVE_LOCK_KEY = 'expired_job_artifacts:destroy:lock'
- LOCK_TIMEOUT = 6.minutes
-
- def initialize
- @removed_artifacts_count = 0
- end
-
- ##
- # Destroy expired job artifacts on GitLab instance
- #
- # This destroy process cannot run for more than 6 minutes. This is for
- # preventing multiple `ExpireBuildArtifactsWorker` CRON jobs run concurrently,
- # which is scheduled every 7 minutes.
- def execute
- in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
- destroy_job_artifacts_with_slow_iteration(Time.current)
- end
-
- @removed_artifacts_count
- end
-
- private
-
- def destroy_job_artifacts_with_slow_iteration(start_at)
- Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
- # For performance reasons, join with ci_pipelines after the batch is queried.
- # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
- artifacts = relation.unlocked
-
- service_response = destroy_batch_async(artifacts)
- @removed_artifacts_count += service_response[:destroyed_artifacts_count]
-
- break if loop_timeout?(start_at)
- break if index >= LOOP_LIMIT
- end
- end
-
- def destroy_batch_async(artifacts)
- Ci::JobArtifactsDestroyBatchService.new(artifacts).execute
- end
-
- def loop_timeout?(start_at)
- Time.current > start_at + LOOP_TIMEOUT
- end
- end
-end
diff --git a/app/services/ci/disable_user_pipeline_schedules_service.rb b/app/services/ci/disable_user_pipeline_schedules_service.rb
new file mode 100644
index 00000000000..6499fbba0ec
--- /dev/null
+++ b/app/services/ci/disable_user_pipeline_schedules_service.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Ci
+ class DisableUserPipelineSchedulesService
+ def execute(user)
+ Ci::PipelineSchedule.active.owned_by(user).each_batch do |relation|
+ relation.update_all(active: false)
+ end
+ end
+ end
+end
diff --git a/app/services/ci/drop_pipeline_service.rb b/app/services/ci/drop_pipeline_service.rb
new file mode 100644
index 00000000000..f510943251b
--- /dev/null
+++ b/app/services/ci/drop_pipeline_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Ci
+ class DropPipelineService
+ # execute service asynchronously for each cancelable pipeline
+ def execute_async_for_all(pipelines, failure_reason, context_user)
+ pipelines.cancelable.select(:id).find_in_batches do |pipelines_batch|
+ Ci::DropPipelineWorker.bulk_perform_async_with_contexts(
+ pipelines_batch,
+ arguments_proc: -> (pipeline) { [pipeline.id, failure_reason] },
+ context_proc: -> (_) { { user: context_user } }
+ )
+ end
+ end
+
+ def execute(pipeline, failure_reason, retries: 3)
+ Gitlab::OptimisticLocking.retry_lock(pipeline.cancelable_statuses, retries, name: 'ci_pipeline_drop_running') do |cancelables|
+ cancelables.find_in_batches do |batch|
+ preload_associations_for_drop(batch)
+
+ batch.each do |job|
+ job.drop(failure_reason)
+ end
+ end
+ end
+ end
+
+ private
+
+ def preload_associations_for_drop(builds_batch)
+ ActiveRecord::Associations::Preloader.new.preload( # rubocop: disable CodeReuse/ActiveRecord
+ builds_batch,
+ [:project, :pipeline, :metadata, :deployment, :taggings]
+ )
+ end
+ end
+end
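A sketch of how the two entry points of the new DropPipelineService relate (not part of this diff; the relation, the user, and the :user_blocked failure reason are assumptions):

    # Asynchronous path: enqueues one Ci::DropPipelineWorker per cancelable pipeline
    Ci::DropPipelineService.new.execute_async_for_all(user.pipelines, :user_blocked, current_user)

    # Synchronous path, typically run from that worker for a single pipeline
    Ci::DropPipelineService.new.execute(pipeline, :user_blocked)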
diff --git a/app/services/ci/generate_coverage_reports_service.rb b/app/services/ci/generate_coverage_reports_service.rb
index b3aa7b3091b..4e6fbc5462a 100644
--- a/app/services/ci/generate_coverage_reports_service.rb
+++ b/app/services/ci/generate_coverage_reports_service.rb
@@ -15,7 +15,13 @@ module Ci
data: head_pipeline.pipeline_artifacts.find_by_file_type(:code_coverage).present.for_files(merge_request.new_paths)
}
rescue => e
- Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
+ Gitlab::ErrorTracking.track_exception(
+ e,
+ project_id: project.id,
+ base_pipeline_id: base_pipeline&.id,
+ head_pipeline_id: head_pipeline&.id
+ )
+
{
status: :error,
key: key(base_pipeline, head_pipeline),
diff --git a/app/services/ci/job_artifacts/create_service.rb b/app/services/ci/job_artifacts/create_service.rb
new file mode 100644
index 00000000000..65752e56c64
--- /dev/null
+++ b/app/services/ci/job_artifacts/create_service.rb
@@ -0,0 +1,174 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobArtifacts
+ class CreateService < ::BaseService
+ include Gitlab::Utils::UsageData
+
+ ArtifactsExistError = Class.new(StandardError)
+
+ LSIF_ARTIFACT_TYPE = 'lsif'
+ METRICS_REPORT_UPLOAD_EVENT_NAME = 'i_testing_metrics_report_artifact_uploaders'
+
+ OBJECT_STORAGE_ERRORS = [
+ Errno::EIO,
+ Google::Apis::ServerError,
+ Signet::RemoteServerError
+ ].freeze
+
+ def initialize(job)
+ @job = job
+ @project = job.project
+ end
+
+ def authorize(artifact_type:, filesize: nil)
+ result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
+ return result unless result[:status] == :success
+
+ headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))
+
+ if lsif?(artifact_type)
+ headers[:ProcessLsif] = true
+ track_usage_event('i_source_code_code_intelligence', project.id)
+ end
+
+ success(headers: headers)
+ end
+
+ def execute(artifacts_file, params, metadata_file: nil)
+ result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
+ return result unless result[:status] == :success
+
+ return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)
+
+ artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
+ result = parse_artifact(artifact)
+
+ track_artifact_uploader(artifact)
+
+ return result unless result[:status] == :success
+
+ persist_artifact(artifact, artifact_metadata, params)
+ end
+
+ private
+
+ attr_reader :job, :project
+
+ def validate_requirements(artifact_type:, filesize:)
+ return too_large_error if too_large?(artifact_type, filesize)
+
+ success
+ end
+
+ def too_large?(type, size)
+ size > max_size(type) if size
+ end
+
+ def lsif?(type)
+ type == LSIF_ARTIFACT_TYPE
+ end
+
+ def max_size(type)
+ Ci::JobArtifact.max_artifact_size(type: type, project: project)
+ end
+
+ def forbidden_type_error(type)
+ error("#{type} artifacts are forbidden", :forbidden)
+ end
+
+ def too_large_error
+ error('file size has reached maximum size limit', :payload_too_large)
+ end
+
+ def build_artifact(artifacts_file, params, metadata_file)
+ expire_in = params['expire_in'] ||
+ Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
+
+ artifact = Ci::JobArtifact.new(
+ job_id: job.id,
+ project: project,
+ file: artifacts_file,
+ file_type: params[:artifact_type],
+ file_format: params[:artifact_format],
+ file_sha256: artifacts_file.sha256,
+ expire_in: expire_in)
+
+ artifact_metadata = if metadata_file
+ Ci::JobArtifact.new(
+ job_id: job.id,
+ project: project,
+ file: metadata_file,
+ file_type: :metadata,
+ file_format: :gzip,
+ file_sha256: metadata_file.sha256,
+ expire_in: expire_in)
+ end
+
+ [artifact, artifact_metadata]
+ end
+
+ def parse_artifact(artifact)
+ unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
+ return success
+ end
+
+ case artifact.file_type
+ when 'dotenv' then parse_dotenv_artifact(artifact)
+ when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
+ else success
+ end
+ end
+
+ def persist_artifact(artifact, artifact_metadata, params)
+ Ci::JobArtifact.transaction do
+ artifact.save!
+ artifact_metadata&.save!
+
+ # NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
+ job.update_column(:artifacts_expire_at, artifact.expire_at)
+ end
+
+ success
+ rescue ActiveRecord::RecordNotUnique => error
+ track_exception(error, params)
+ error('another artifact of the same type already exists', :bad_request)
+ rescue *OBJECT_STORAGE_ERRORS => error
+ track_exception(error, params)
+ error(error.message, :service_unavailable)
+ rescue => error
+ track_exception(error, params)
+ error(error.message, :bad_request)
+ end
+
+ def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
+ existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
+ return false unless existing_artifact
+
+ existing_artifact.file_sha256 == artifacts_file.sha256
+ end
+
+ def track_exception(error, params)
+ Gitlab::ErrorTracking.track_exception(error,
+ job_id: job.id,
+ project_id: job.project_id,
+ uploading_type: params[:artifact_type]
+ )
+ end
+
+ def track_artifact_uploader(artifact)
+ return unless artifact.file_type == 'metrics'
+
+ track_usage_event(METRICS_REPORT_UPLOAD_EVENT_NAME, @job.user_id)
+ end
+
+ def parse_dotenv_artifact(artifact)
+ Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
+ end
+
+ def parse_cluster_applications_artifact(artifact)
+ Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
+ end
+ end
+ end
+end
diff --git a/app/services/ci/job_artifacts/destroy_all_expired_service.rb b/app/services/ci/job_artifacts/destroy_all_expired_service.rb
new file mode 100644
index 00000000000..3e9cc95d135
--- /dev/null
+++ b/app/services/ci/job_artifacts/destroy_all_expired_service.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobArtifacts
+ class DestroyAllExpiredService
+ include ::Gitlab::ExclusiveLeaseHelpers
+ include ::Gitlab::LoopHelpers
+
+ BATCH_SIZE = 100
+ LOOP_TIMEOUT = 5.minutes
+ LOOP_LIMIT = 1000
+ EXCLUSIVE_LOCK_KEY = 'expired_job_artifacts:destroy:lock'
+ LOCK_TIMEOUT = 6.minutes
+
+ def initialize
+ @removed_artifacts_count = 0
+ end
+
+ ##
+ # Destroy expired job artifacts on GitLab instance
+ #
+ # This destroy process cannot run for more than 6 minutes. This prevents
+ # overlapping runs of the `ExpireBuildArtifactsWorker` CRON job, which is
+ # scheduled every 7 minutes.
+ def execute
+ in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
+ destroy_job_artifacts_with_slow_iteration(Time.current)
+ end
+
+ @removed_artifacts_count
+ end
+
+ private
+
+ def destroy_job_artifacts_with_slow_iteration(start_at)
+ Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
+ # For performance reasons, join with ci_pipelines after the batch is queried.
+ # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
+ artifacts = relation.unlocked
+
+ service_response = destroy_batch_async(artifacts)
+ @removed_artifacts_count += service_response[:destroyed_artifacts_count]
+
+ break if loop_timeout?(start_at)
+ break if index >= LOOP_LIMIT
+ end
+ end
+
+ def destroy_batch_async(artifacts)
+ Ci::JobArtifacts::DestroyBatchService.new(artifacts).execute
+ end
+
+ def loop_timeout?(start_at)
+ Time.current > start_at + LOOP_TIMEOUT
+ end
+ end
+ end
+end
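Given the exclusive lease described in the comment, the service is meant to be driven by a single scheduled worker. A sketch of a manual run (not part of this diff; console context assumed):

    removed = Ci::JobArtifacts::DestroyAllExpiredService.new.execute
    removed # => number of expired, unlocked artifacts handed off for deletion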
diff --git a/app/services/ci/job_artifacts/destroy_batch_service.rb b/app/services/ci/job_artifacts/destroy_batch_service.rb
new file mode 100644
index 00000000000..95315dd11ec
--- /dev/null
+++ b/app/services/ci/job_artifacts/destroy_batch_service.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module Ci
+ module JobArtifacts
+ class DestroyBatchService
+ include BaseServiceUtility
+ include ::Gitlab::Utils::StrongMemoize
+
+ # Danger: Private - Should only be called in Ci Services that pass a batch of job artifacts
+ # Not for use outside of the Ci:: namespace
+
+ # Adds the passed batch of job artifacts to the `ci_deleted_objects` table
+ # for asynchronous destruction of the objects in Object Storage via the `Ci::DeleteObjectsService`
+ # and then deletes the batch of related `ci_job_artifacts` records.
+ # Params:
+ # +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
+ # +pick_up_at+:: When to pick up for deletion of files
+ # Returns:
+ # +Hash+:: A hash with status and destroyed_artifacts_count keys
+ def initialize(job_artifacts, pick_up_at: nil)
+ @job_artifacts = job_artifacts.with_destroy_preloads.to_a
+ @pick_up_at = pick_up_at
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def execute
+ return success(destroyed_artifacts_count: artifacts_count) if @job_artifacts.empty?
+
+ Ci::DeletedObject.transaction do
+ Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
+ Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
+ destroy_related_records(@job_artifacts)
+ end
+
+ # This is executed outside of the transaction because it depends on Redis
+ update_project_statistics
+ increment_monitoring_statistics(artifacts_count)
+
+ success(destroyed_artifacts_count: artifacts_count)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ private
+
+ # This method is implemented in EE and it must do only database work
+ def destroy_related_records(artifacts); end
+
+ def update_project_statistics
+ artifacts_by_project = @job_artifacts.group_by(&:project)
+ artifacts_by_project.each do |project, artifacts|
+ delta = -artifacts.sum { |artifact| artifact.size.to_i }
+ ProjectStatistics.increment_statistic(
+ project, Ci::JobArtifact.project_statistics_name, delta)
+ end
+ end
+
+ def increment_monitoring_statistics(size)
+ metrics.increment_destroyed_artifacts(size)
+ end
+
+ def metrics
+ @metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
+ end
+
+ def artifacts_count
+ strong_memoize(:artifacts_count) do
+ @job_artifacts.count
+ end
+ end
+ end
+ end
+end
+
+Ci::JobArtifacts::DestroyBatchService.prepend_if_ee('EE::Ci::JobArtifacts::DestroyBatchService')
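The doc comment above spells out the contract. A caller sketch inside the Ci:: namespace, mirroring how DestroyAllExpiredService builds its batches (not part of this diff; the limit(100) batch size is an assumption):

    artifacts = Ci::JobArtifact.expired_before(Time.current).unlocked.limit(100) # batch size assumed
    result = Ci::JobArtifacts::DestroyBatchService.new(artifacts).execute
    result[:destroyed_artifacts_count] # rows copied to ci_deleted_objects and then deleted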
diff --git a/app/services/ci/job_artifacts_destroy_batch_service.rb b/app/services/ci/job_artifacts_destroy_batch_service.rb
deleted file mode 100644
index f8ece27fe86..00000000000
--- a/app/services/ci/job_artifacts_destroy_batch_service.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- class JobArtifactsDestroyBatchService
- include BaseServiceUtility
- include ::Gitlab::Utils::StrongMemoize
-
- # Danger: Private - Should only be called in Ci Services that pass a batch of job artifacts
- # Not for use outsie of the ci namespace
-
- # Adds the passed batch of job artifacts to the `ci_deleted_objects` table
- # for asyncronous destruction of the objects in Object Storage via the `Ci::DeleteObjectsService`
- # and then deletes the batch of related `ci_job_artifacts` records.
- # Params:
- # +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
- # +pick_up_at+:: When to pick up for deletion of files
- # Returns:
- # +Hash+:: A hash with status and destroyed_artifacts_count keys
- def initialize(job_artifacts, pick_up_at: nil)
- @job_artifacts = job_artifacts.with_destroy_preloads.to_a
- @pick_up_at = pick_up_at
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def execute
- return success(destroyed_artifacts_count: artifacts_count) if @job_artifacts.empty?
-
- Ci::DeletedObject.transaction do
- Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
- Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
- destroy_related_records(@job_artifacts)
- end
-
- # This is executed outside of the transaction because it depends on Redis
- update_project_statistics
- increment_monitoring_statistics(artifacts_count)
-
- success(destroyed_artifacts_count: artifacts_count)
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- private
-
- # This method is implemented in EE and it must do only database work
- def destroy_related_records(artifacts); end
-
- def update_project_statistics
- artifacts_by_project = @job_artifacts.group_by(&:project)
- artifacts_by_project.each do |project, artifacts|
- delta = -artifacts.sum { |artifact| artifact.size.to_i }
- ProjectStatistics.increment_statistic(
- project, Ci::JobArtifact.project_statistics_name, delta)
- end
- end
-
- def increment_monitoring_statistics(size)
- metrics.increment_destroyed_artifacts(size)
- end
-
- def metrics
- @metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
- end
-
- def artifacts_count
- strong_memoize(:artifacts_count) do
- @job_artifacts.count
- end
- end
- end
-end
-
-Ci::JobArtifactsDestroyBatchService.prepend_if_ee('EE::Ci::JobArtifactsDestroyBatchService')
diff --git a/app/services/ci/pipeline_artifacts/destroy_expired_artifacts_service.rb b/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb
index 0dbabe178da..fed40aef697 100644
--- a/app/services/ci/pipeline_artifacts/destroy_expired_artifacts_service.rb
+++ b/app/services/ci/pipeline_artifacts/destroy_all_expired_service.rb
@@ -2,7 +2,7 @@
module Ci
module PipelineArtifacts
- class DestroyExpiredArtifactsService
+ class DestroyAllExpiredService
include ::Gitlab::LoopHelpers
include ::Gitlab::Utils::StrongMemoize
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
index 35818e2cf3d..883a70c9795 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
@@ -91,17 +91,17 @@ module Ci
def all_statuses_by_id
strong_memoize(:all_statuses_by_id) do
- all_statuses.map do |row|
+ all_statuses.to_h do |row|
[row[:id], row]
- end.to_h
+ end
end
end
def all_statuses_by_name
strong_memoize(:statuses_by_name) do
- all_statuses.map do |row|
+ all_statuses.to_h do |row|
[row[:name], row]
- end.to_h
+ end
end
end
diff --git a/app/services/ci/pipeline_trigger_service.rb b/app/services/ci/pipeline_trigger_service.rb
index dbbaefb2b2f..a5f70d62e13 100644
--- a/app/services/ci/pipeline_trigger_service.rb
+++ b/app/services/ci/pipeline_trigger_service.rb
@@ -6,8 +6,10 @@ module Ci
def execute
if trigger_from_token
+ set_application_context_from_trigger(trigger_from_token)
create_pipeline_from_trigger(trigger_from_token)
elsif job_from_token
+ set_application_context_from_job(job_from_token)
create_pipeline_from_job(job_from_token)
end
@@ -73,11 +75,7 @@ module Ci
end
def variables
- if ::Feature.enabled?(:ci_trigger_payload_into_pipeline, project, default_enabled: :yaml)
- param_variables + [payload_variable]
- else
- param_variables
- end
+ param_variables + [payload_variable]
end
def param_variables
@@ -91,5 +89,20 @@ module Ci
value: params.except(*PAYLOAD_VARIABLE_HIDDEN_PARAMS).to_json,
variable_type: :file }
end
+
+ def set_application_context_from_trigger(trigger)
+ Gitlab::ApplicationContext.push(
+ user: trigger.owner,
+ project: trigger.project
+ )
+ end
+
+ def set_application_context_from_job(job)
+ Gitlab::ApplicationContext.push(
+ user: job.user,
+ project: job.project,
+ runner: job.runner
+ )
+ end
end
end
diff --git a/app/services/ci/play_bridge_service.rb b/app/services/ci/play_bridge_service.rb
index 70c4a8e6136..c5b19a3963a 100644
--- a/app/services/ci/play_bridge_service.rb
+++ b/app/services/ci/play_bridge_service.rb
@@ -8,6 +8,10 @@ module Ci
bridge.tap do |bridge|
bridge.user = current_user
bridge.enqueue!
+
+ next unless ::Feature.enabled?(:ci_fix_pipeline_status_for_dag_needs_manual, project, default_enabled: :yaml)
+
+ AfterRequeueJobService.new(project, current_user).execute(bridge)
end
end
end
diff --git a/app/services/ci/play_build_service.rb b/app/services/ci/play_build_service.rb
index ebc980a9053..4953b1ea5fc 100644
--- a/app/services/ci/play_build_service.rb
+++ b/app/services/ci/play_build_service.rb
@@ -12,7 +12,13 @@ module Ci
# Try to enqueue the build, otherwise create a duplicate.
#
if build.enqueue
- build.tap { |action| action.update(user: current_user, job_variables_attributes: job_variables_attributes || []) }
+ build.tap do |build|
+ build.update(user: current_user, job_variables_attributes: job_variables_attributes || [])
+
+ next unless ::Feature.enabled?(:ci_fix_pipeline_status_for_dag_needs_manual, project, default_enabled: :yaml)
+
+ AfterRequeueJobService.new(project, current_user).execute(build)
+ end
else
Ci::Build.retry(build, current_user)
end
diff --git a/app/services/ci/process_build_service.rb b/app/services/ci/process_build_service.rb
index 733aa75f255..73cf3308fe7 100644
--- a/app/services/ci/process_build_service.rb
+++ b/app/services/ci/process_build_service.rb
@@ -26,14 +26,6 @@ module Ci
end
def valid_statuses_for_build(build)
- if ::Feature.enabled?(:skip_dag_manual_and_delayed_jobs, build.project, default_enabled: :yaml)
- current_valid_statuses_for_build(build)
- else
- legacy_valid_statuses_for_build(build)
- end
- end
-
- def current_valid_statuses_for_build(build)
case build.when
when 'on_success', 'manual', 'delayed'
build.scheduling_type_dag? ? %w[success] : %w[success skipped]
@@ -45,23 +37,6 @@ module Ci
[]
end
end
-
- def legacy_valid_statuses_for_build(build)
- case build.when
- when 'on_success'
- build.scheduling_type_dag? ? %w[success] : %w[success skipped]
- when 'on_failure'
- %w[failed]
- when 'always'
- %w[success failed skipped]
- when 'manual'
- %w[success skipped]
- when 'delayed'
- %w[success skipped]
- else
- []
- end
- end
end
end
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index 970652b4da3..6c69df0c616 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -19,7 +19,7 @@ module Ci
end
def metrics
- @metrics ||= ::Gitlab::Ci::Pipeline::Metrics.new
+ @metrics ||= ::Gitlab::Ci::Pipeline::Metrics
end
private
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index ed9e44d60f1..90341b26fd6 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -10,7 +10,11 @@ module Ci
Result = Struct.new(:build, :build_json, :valid?)
- MAX_QUEUE_DEPTH = 50
+ ##
+ # The queue depth limit has been determined by observing the 95th
+ # percentile of effective queue depth on gitlab.com. This is only likely to
+ # affect 5% of the worst-case scenarios.
+ MAX_QUEUE_DEPTH = 45
def initialize(runner)
@runner = runner
@@ -20,7 +24,7 @@ module Ci
def execute(params = {})
@metrics.increment_queue_operation(:queue_attempt)
- @metrics.observe_queue_time do
+ @metrics.observe_queue_time(:process, @runner.runner_type) do
process_queue(params)
end
end
@@ -105,22 +109,30 @@ module Ci
builds = builds.queued_before(params[:job_age].seconds.ago)
end
- if Feature.enabled?(:ci_register_job_service_one_by_one, runner)
- build_ids = builds.pluck(:id)
+ if Feature.enabled?(:ci_register_job_service_one_by_one, runner, default_enabled: true)
+ build_ids = retrieve_queue(-> { builds.pluck(:id) })
- @metrics.observe_queue_size(-> { build_ids.size })
+ @metrics.observe_queue_size(-> { build_ids.size }, @runner.runner_type)
build_ids.each do |build_id|
yield Ci::Build.find(build_id)
end
else
- @metrics.observe_queue_size(-> { builds.to_a.size })
+ builds_array = retrieve_queue(-> { builds.to_a })
- builds.each(&blk)
+ @metrics.observe_queue_size(-> { builds_array.size }, @runner.runner_type)
+
+ builds_array.each(&blk)
end
end
# rubocop: enable CodeReuse/ActiveRecord
+ def retrieve_queue(queue_query_proc)
+ @metrics.observe_queue_time(:retrieve, @runner.runner_type) do
+ queue_query_proc.call
+ end
+ end
+
def process_build(build, params)
unless build.pending?
@metrics.increment_queue_operation(:build_not_pending)
@@ -171,7 +183,7 @@ module Ci
def max_queue_depth
@max_queue_depth ||= begin
- if Feature.enabled?(:gitlab_ci_builds_queue_limit, runner, default_enabled: false)
+ if Feature.enabled?(:gitlab_ci_builds_queue_limit, runner, default_enabled: true)
MAX_QUEUE_DEPTH
else
::Gitlab::Database::MAX_INT_VALUE
@@ -266,7 +278,7 @@ module Ci
# Workaround for weird Rails bug, that makes `runner.groups.to_sql` to return `runner_id = NULL`
groups = ::Group.joins(:runner_namespaces).merge(runner.runner_namespaces)
- hierarchy_groups = Gitlab::ObjectHierarchy.new(groups).base_and_descendants
+ hierarchy_groups = Gitlab::ObjectHierarchy.new(groups, options: { use_distinct: Feature.enabled?(:use_distinct_in_register_job_object_hierarchy) }).base_and_descendants
projects = Project.where(namespace_id: hierarchy_groups)
.with_group_runners_enabled
.with_builds_enabled
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index b2c5249a0c7..e3de7f43fda 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -2,8 +2,6 @@
module Ci
class RetryBuildService < ::BaseService
- include Gitlab::OptimisticLocking
-
def self.clone_accessors
%i[pipeline project ref tag options name
allow_failure stage stage_id stage_idx trigger_request
@@ -16,12 +14,10 @@ module Ci
build.ensure_scheduling_type!
reprocess!(build).tap do |new_build|
- mark_subsequent_stages_as_processable(build)
- build.pipeline.reset_ancestor_bridges!
-
Gitlab::OptimisticLocking.retry_lock(new_build, name: 'retry_build', &:enqueue)
+ AfterRequeueJobService.new(project, current_user).execute(build)
- MergeRequests::AddTodoWhenBuildFailsService
+ ::MergeRequests::AddTodoWhenBuildFailsService
.new(project, current_user)
.close(new_build)
end
@@ -33,9 +29,9 @@ module Ci
raise Gitlab::Access::AccessDeniedError
end
- attributes = self.class.clone_accessors.map do |attribute|
+ attributes = self.class.clone_accessors.to_h do |attribute|
[attribute, build.public_send(attribute)] # rubocop:disable GitlabSecurity/PublicSend
- end.to_h
+ end
attributes[:user] = current_user
@@ -65,12 +61,6 @@ module Ci
end
build
end
-
- def mark_subsequent_stages_as_processable(build)
- build.pipeline.processables.skipped.after_stage(build.stage_idx).find_each do |skipped|
- retry_optimistic_lock(skipped, name: 'ci_retry_build_mark_subsequent_stages') { |build| build.process(current_user) }
- end
- end
end
end
diff --git a/app/services/ci/retry_pipeline_service.rb b/app/services/ci/retry_pipeline_service.rb
index 90ee7b9b3ba..bb8590a769c 100644
--- a/app/services/ci/retry_pipeline_service.rb
+++ b/app/services/ci/retry_pipeline_service.rb
@@ -28,7 +28,7 @@ module Ci
pipeline.reset_ancestor_bridges!
- MergeRequests::AddTodoWhenBuildFailsService
+ ::MergeRequests::AddTodoWhenBuildFailsService
.new(project, current_user)
.close_all(pipeline)
diff --git a/app/services/ci/stop_environments_service.rb b/app/services/ci/stop_environments_service.rb
index b6c5b398cb1..81457130fa0 100644
--- a/app/services/ci/stop_environments_service.rb
+++ b/app/services/ci/stop_environments_service.rb
@@ -35,7 +35,7 @@ module Ci
private
def environments
- @environments ||= EnvironmentsFinder
+ @environments ||= EnvironmentsByDeploymentsFinder
.new(project, current_user, ref: @ref, recently_updated: true)
.execute
end
diff --git a/app/services/ci/test_failure_history_service.rb b/app/services/ci/test_failure_history_service.rb
index 61fda79a4a2..58bbc716ff0 100644
--- a/app/services/ci/test_failure_history_service.rb
+++ b/app/services/ci/test_failure_history_service.rb
@@ -34,7 +34,7 @@ module Ci
# We fetch up to MAX_TRACKABLE_FAILURES + 1 builds. So if we ever get
# 201 builds in total, with the assumption that each job has at least
- # 1 failed test case, then we have at least 201 failed test cases which exceeds
+ # 1 failed unit test, then we have at least 201 failed unit tests which exceeds
# the MAX_TRACKABLE_FAILURES of 200. If this is the case, let's early exit so we
# don't have to parse each JUnit report of each of the 201 builds.
failed_builds.length <= MAX_TRACKABLE_FAILURES
@@ -51,25 +51,29 @@ module Ci
end
def track_failures
- failed_test_cases = gather_failed_test_cases(failed_builds)
+ failed_unit_tests = gather_failed_unit_tests_from_reports(failed_builds)
- return if failed_test_cases.size > MAX_TRACKABLE_FAILURES
+ return if failed_unit_tests.size > MAX_TRACKABLE_FAILURES
- failed_test_cases.keys.each_slice(100) do |key_hashes|
- Ci::TestCase.transaction do
- ci_test_cases = Ci::TestCase.find_or_create_by_batch(project, key_hashes)
- failures = test_case_failures(ci_test_cases, failed_test_cases)
+ failed_unit_tests.each_slice(100) do |batch|
+ Ci::UnitTest.transaction do
+ unit_test_attrs = ci_unit_test_attrs(batch)
+ ci_unit_tests = Ci::UnitTest.find_or_create_by_batch(project, unit_test_attrs)
- Ci::TestCaseFailure.insert_all(failures)
+ failures = ci_unit_test_failure_attrs(ci_unit_tests, failed_unit_tests)
+ Ci::UnitTestFailure.insert_all(failures)
end
end
end
- def gather_failed_test_cases(failed_builds)
- failed_builds.each_with_object({}) do |build, failed_test_cases|
+ def gather_failed_unit_tests_from_reports(failed_builds)
+ failed_builds.each_with_object({}) do |build, failed_unit_tests|
test_suite = generate_test_suite!(build)
- test_suite.failed.keys.each do |key|
- failed_test_cases[key] = build
+ test_suite.failed.each do |key, unit_test|
+ failed_unit_tests[key] = {
+ build: build, # This will be used in ci_unit_test_failure_attrs
+ unit_test: unit_test # This will be used in ci_unit_test_attrs
+ }
end
end
end
@@ -79,12 +83,24 @@ module Ci
build.collect_test_reports!(Gitlab::Ci::Reports::TestReports.new)
end
- def test_case_failures(ci_test_cases, failed_test_cases)
- ci_test_cases.map do |test_case|
- build = failed_test_cases[test_case.key_hash]
+ def ci_unit_test_attrs(batch)
+ batch.map do |item|
+ unit_test = item.last[:unit_test]
{
- test_case_id: test_case.id,
+ key_hash: unit_test.key,
+ name: unit_test.name,
+ suite_name: unit_test.suite_name
+ }
+ end
+ end
+
+ def ci_unit_test_failure_attrs(ci_unit_tests, failed_unit_tests)
+ ci_unit_tests.map do |ci_unit_test|
+ build = failed_unit_tests[ci_unit_test.key_hash][:build]
+
+ {
+ unit_test_id: ci_unit_test.id,
build_id: build.id,
failed_at: build.finished_at
}
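
Because gather_failed_unit_tests_from_reports now returns a hash of hashes, the `each_slice`/`item.last` access in ci_unit_test_attrs is worth spelling out. A plain-Ruby illustration with made-up values:

# Slicing a Hash yields [key, value] pairs, so `item.last` is the value hash
# built in gather_failed_unit_tests_from_reports (values are illustrative).
failed_unit_tests = {
  'key_hash_1' => { build: :build_a, unit_test: :unit_test_a },
  'key_hash_2' => { build: :build_b, unit_test: :unit_test_b }
}

failed_unit_tests.each_slice(100) do |batch|
  batch.map { |item| item.last[:unit_test] } # => [:unit_test_a, :unit_test_b]
end
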
diff --git a/app/services/clusters/create_service.rb b/app/services/clusters/create_service.rb
index 6693a58683f..cb2de8b943c 100644
--- a/app/services/clusters/create_service.rb
+++ b/app/services/clusters/create_service.rb
@@ -5,7 +5,8 @@ module Clusters
attr_reader :current_user, :params
def initialize(user = nil, params = {})
- @current_user, @params = user, params.dup
+ @current_user = user
+ @params = params.dup
end
def execute(access_token: nil)
diff --git a/app/services/clusters/destroy_service.rb b/app/services/clusters/destroy_service.rb
index a8de04683fa..371f947add7 100644
--- a/app/services/clusters/destroy_service.rb
+++ b/app/services/clusters/destroy_service.rb
@@ -5,7 +5,8 @@ module Clusters
attr_reader :current_user, :params
def initialize(user = nil, params = {})
- @current_user, @params = user, params.dup
+ @current_user = user
+ @params = params.dup
@response = {}
end
diff --git a/app/services/clusters/integrations/create_service.rb b/app/services/clusters/integrations/create_service.rb
new file mode 100644
index 00000000000..f9e9dd3e457
--- /dev/null
+++ b/app/services/clusters/integrations/create_service.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Integrations
+ class CreateService < BaseContainerService
+ attr_accessor :cluster
+
+ def initialize(container:, cluster:, current_user: nil, params: {})
+ @cluster = cluster
+
+ super(container: container, current_user: current_user, params: params)
+ end
+
+ def execute
+ return ServiceResponse.error(message: 'Unauthorized') unless authorized?
+
+ integration.enabled = params[:enabled]
+ integration.save!
+
+ if integration.enabled?
+ ServiceResponse.success(message: s_('ClusterIntegration|Integration enabled'), payload: { integration: integration })
+ else
+ ServiceResponse.success(message: s_('ClusterIntegration|Integration disabled'), payload: { integration: integration })
+ end
+ end
+
+ private
+
+ def integration
+ case params[:application_type]
+ when 'prometheus'
+ cluster.find_or_build_integration_prometheus
+ else
+ raise ArgumentError, "invalid application_type: #{params[:application_type]}"
+ end
+ end
+
+ def authorized?
+ Ability.allowed?(current_user, :admin_cluster, cluster)
+ end
+ end
+ end
+end
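
A hypothetical invocation of the new service; only the keyword arguments and the params shape are taken from the definition above, while project, cluster, and user are placeholders.

# Hypothetical call sketch, not documented GitLab usage.
response = Clusters::Integrations::CreateService
  .new(container: project, cluster: cluster, current_user: user,
       params: { application_type: 'prometheus', enabled: true })
  .execute

response.success? # => true when the user is allowed to :admin_cluster
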
diff --git a/app/services/clusters/update_service.rb b/app/services/clusters/update_service.rb
index ba20826848d..5432d9fbca1 100644
--- a/app/services/clusters/update_service.rb
+++ b/app/services/clusters/update_service.rb
@@ -5,7 +5,8 @@ module Clusters
attr_reader :current_user, :params
def initialize(user = nil, params = {})
- @current_user, @params = user, params.dup
+ @current_user = user
+ @params = params.dup
end
def execute(cluster)
diff --git a/app/services/concerns/integrations/project_test_data.rb b/app/services/concerns/integrations/project_test_data.rb
index 57bcba98b49..5968b90f8fe 100644
--- a/app/services/concerns/integrations/project_test_data.rb
+++ b/app/services/concerns/integrations/project_test_data.rb
@@ -9,7 +9,7 @@ module Integrations
end
def note_events_data
- note = NotesFinder.new(current_user, project: project, target: project).execute.reorder(nil).last # rubocop: disable CodeReuse/ActiveRecord
+ note = NotesFinder.new(current_user, project: project, target: project, sort: 'id_desc').execute.first
return { error: s_('TestHooks|Ensure the project has notes.') } unless note.present?
diff --git a/app/services/concerns/suggestible.rb b/app/services/concerns/suggestible.rb
index 0cba9bf1b8a..82e43c856f8 100644
--- a/app/services/concerns/suggestible.rb
+++ b/app/services/concerns/suggestible.rb
@@ -5,7 +5,7 @@ module Suggestible
include Gitlab::Utils::StrongMemoize
# This translates into limiting suggestion changes to `suggestion:-100+100`.
- MAX_LINES_CONTEXT = 100.freeze
+ MAX_LINES_CONTEXT = 100
def diff_lines
strong_memoize(:diff_lines) do
diff --git a/app/services/deployments/link_merge_requests_service.rb b/app/services/deployments/link_merge_requests_service.rb
index eba5082e6c3..39fbef5dee2 100644
--- a/app/services/deployments/link_merge_requests_service.rb
+++ b/app/services/deployments/link_merge_requests_service.rb
@@ -18,7 +18,22 @@ module Deployments
# app deployments, as this is not useful.
return if deployment.environment.environment_type
- if (prev = deployment.previous_environment_deployment)
+ # This service is triggered by a Sidekiq worker, which only runs when a
+ # deployment is successful. We add an extra check here in case we ever
+ # call this service elsewhere and forget to check the status there.
+ #
+ # The reason we only want to link successful deployments is as follows:
+ # when we link a merge request, we don't link it to future deployments for
+ # the same environment. If we were to link an MR to a failed deploy, we
+ # wouldn't be able to link it to a successful deploy later on (e.g. after
+ # the deploy is retried).
+ #
+ # In addition, showing failed deploys in the UI of a merge request isn't
+ # useful to users, as they can't act upon the information in any
+ # meaningful way (i.e. they can't just retry the deploy themselves).
+ return unless deployment.success?
+
+ if (prev = deployment.previous_deployment)
link_merge_requests_for_range(prev.sha, deployment.sha)
else
# When no previous deployment is found we fall back to linking all merge
@@ -51,8 +66,15 @@ module Deployments
deployment.link_merge_requests(merge_requests)
- picked_merge_requests =
- project.merge_requests.by_cherry_pick_sha(slice)
+ # The cherry picked commits are tracked via `notes.commit_id`
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22209
+ #
+ # NOTE: cross-joining the `merge_requests` and `notes` tables can
+ # result in very poor performance because the PG planner often picks an
+ # inappropriate index.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/321032.
+ mr_ids = project.notes.cherry_picked_merge_requests(slice)
+ picked_merge_requests = project.merge_requests.id_in(mr_ids)
deployment.link_merge_requests(picked_merge_requests)
end
diff --git a/app/services/draft_notes/base_service.rb b/app/services/draft_notes/base_service.rb
index 95c291ea800..66f9e04ef24 100644
--- a/app/services/draft_notes/base_service.rb
+++ b/app/services/draft_notes/base_service.rb
@@ -5,7 +5,9 @@ module DraftNotes
attr_accessor :merge_request, :current_user, :params
def initialize(merge_request, current_user, params = nil)
- @merge_request, @current_user, @params = merge_request, current_user, params.dup
+ @merge_request = merge_request
+ @current_user = current_user
+ @params = params.dup
end
def merge_request_activity_counter
diff --git a/app/services/git/wiki_push_service.rb b/app/services/git/wiki_push_service.rb
index 0905b2d98df..82958abfe6e 100644
--- a/app/services/git/wiki_push_service.rb
+++ b/app/services/git/wiki_push_service.rb
@@ -8,7 +8,9 @@ module Git
attr_reader :wiki
def initialize(wiki, current_user, params)
- @wiki, @current_user, @params = wiki, current_user, params.dup
+ @wiki = wiki
+ @current_user = current_user
+ @params = params.dup
end
def execute
diff --git a/app/services/git/wiki_push_service/change.rb b/app/services/git/wiki_push_service/change.rb
index 3d1d0fe8c4e..9109a7f9d58 100644
--- a/app/services/git/wiki_push_service/change.rb
+++ b/app/services/git/wiki_push_service/change.rb
@@ -9,7 +9,9 @@ module Git
# @param [Hash] change - must have keys `:oldrev` and `:newrev`
# @param [Gitlab::Git::RawDiffChange] raw_change
def initialize(wiki, change, raw_change)
- @wiki, @raw_change, @change = wiki, raw_change, change
+ @wiki = wiki
+ @raw_change = raw_change
+ @change = change
end
def page
diff --git a/app/services/groups/base_service.rb b/app/services/groups/base_service.rb
index 019cd047ae9..06136aff50e 100644
--- a/app/services/groups/base_service.rb
+++ b/app/services/groups/base_service.rb
@@ -5,11 +5,25 @@ module Groups
attr_accessor :group, :current_user, :params
def initialize(group, user, params = {})
- @group, @current_user, @params = group, user, params.dup
+ @group = group
+ @current_user = user
+ @params = params.dup
end
private
+ def handle_namespace_settings
+ settings_params = params.slice(*::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS)
+
+ return if settings_params.empty?
+
+ ::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS.each do |nsp|
+ params.delete(nsp)
+ end
+
+ ::NamespaceSettings::UpdateService.new(current_user, group, settings_params).execute
+ end
+
def remove_unallowed_params
# overridden in EE
end
diff --git a/app/services/groups/count_service.rb b/app/services/groups/count_service.rb
new file mode 100644
index 00000000000..2a15ae3bc57
--- /dev/null
+++ b/app/services/groups/count_service.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Groups
+ class CountService < BaseCountService
+ include Gitlab::Utils::StrongMemoize
+
+ VERSION = 1
+ CACHED_COUNT_THRESHOLD = 1000
+ EXPIRATION_TIME = 24.hours
+
+ attr_reader :group, :user
+
+ def initialize(group, user = nil)
+ @group = group
+ @user = user
+ end
+
+ def count
+ cached_count = Rails.cache.read(cache_key)
+ return cached_count unless cached_count.blank?
+
+ refreshed_count = uncached_count
+ update_cache_for_key(cache_key) { refreshed_count } if refreshed_count > CACHED_COUNT_THRESHOLD
+ refreshed_count
+ end
+
+ def cache_key
+ ['groups', "#{issuable_key}_count_service", VERSION, group.id, cache_key_name]
+ end
+
+ private
+
+ def relation_for_count
+ raise NotImplementedError
+ end
+
+ def cache_options
+ super.merge({ expires_in: EXPIRATION_TIME })
+ end
+
+ def cache_key_name
+ raise NotImplementedError, 'cache_key_name must be implemented and return a String'
+ end
+
+ def issuable_key
+ raise NotImplementedError, 'issuable_key must be implemented and return a String'
+ end
+ end
+end
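
The caching rule in #count above (only large counts are written back, so small groups are always recounted) reduces to a small pure-Ruby sketch; the hash stands in for Rails.cache and the threshold mirrors CACHED_COUNT_THRESHOLD.

# Pure-Ruby sketch of the decision in Groups::CountService#count.
def count_with_threshold(cache, key, threshold: 1000)
  cached = cache[key]
  return cached unless cached.nil?

  fresh = yield
  cache[key] = fresh if fresh > threshold
  fresh
end

cache = {}
count_with_threshold(cache, :open_merge_requests) { 42 }    # => 42, not cached
count_with_threshold(cache, :open_merge_requests) { 4_200 } # => 4200, cached
count_with_threshold(cache, :open_merge_requests) { 0 }     # => 4200, served from cache
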
diff --git a/app/services/groups/create_service.rb b/app/services/groups/create_service.rb
index 3ead2323588..9ddb8ae7695 100644
--- a/app/services/groups/create_service.rb
+++ b/app/services/groups/create_service.rb
@@ -3,7 +3,8 @@
module Groups
class CreateService < Groups::BaseService
def initialize(user, params = {})
- @current_user, @params = user, params.dup
+ @current_user = user
+ @params = params.dup
@chat_team = @params.delete(:create_chat_team)
end
@@ -11,7 +12,10 @@ module Groups
remove_unallowed_params
set_visibility_level
- @group = Group.new(params)
+ @group = Group.new(params.except(*::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS))
+
+ @group.build_namespace_settings
+ handle_namespace_settings
after_build_hook(@group, params)
@@ -33,7 +37,6 @@ module Groups
Group.transaction do
if @group.save
@group.add_owner(current_user)
- @group.create_namespace_settings unless @group.namespace_settings
Service.create_from_active_default_integrations(@group, :group_id)
OnboardingProgress.onboard(@group)
end
diff --git a/app/services/groups/group_links/create_service.rb b/app/services/groups/group_links/create_service.rb
index 57c746c3841..0a60140d037 100644
--- a/app/services/groups/group_links/create_service.rb
+++ b/app/services/groups/group_links/create_service.rb
@@ -2,7 +2,7 @@
module Groups
module GroupLinks
- class CreateService < BaseService
+ class CreateService < Groups::BaseService
def execute(shared_group)
unless group && shared_group &&
can?(current_user, :admin_group_member, shared_group) &&
diff --git a/app/services/groups/merge_requests_count_service.rb b/app/services/groups/merge_requests_count_service.rb
new file mode 100644
index 00000000000..bb49efe571a
--- /dev/null
+++ b/app/services/groups/merge_requests_count_service.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Groups
+ # Service class for counting and caching the number of open merge requests of a group.
+ class MergeRequestsCountService < Groups::CountService
+ private
+
+ def cache_key_name
+ 'open_merge_requests_count'
+ end
+
+ def relation_for_count
+ MergeRequestsFinder
+ .new(user, group_id: group.id, state: 'opened', non_archived: true, include_subgroups: true)
+ .execute
+ end
+
+ def issuable_key
+ 'open_merge_requests'
+ end
+ end
+end
diff --git a/app/services/groups/nested_create_service.rb b/app/services/groups/nested_create_service.rb
index a51ac9aa593..35d45aaf0cc 100644
--- a/app/services/groups/nested_create_service.rb
+++ b/app/services/groups/nested_create_service.rb
@@ -5,7 +5,8 @@ module Groups
attr_reader :group_path, :visibility_level
def initialize(user, params)
- @current_user, @params = user, params.dup
+ @current_user = user
+ @params = params.dup
@group_path = @params.delete(:group_path)
@visibility_level = @params.delete(:visibility_level) ||
Gitlab::CurrentSettings.current_application_settings.default_group_visibility
diff --git a/app/services/groups/open_issues_count_service.rb b/app/services/groups/open_issues_count_service.rb
index db1ca09212a..ef787a04315 100644
--- a/app/services/groups/open_issues_count_service.rb
+++ b/app/services/groups/open_issues_count_service.rb
@@ -2,47 +2,12 @@
module Groups
# Service class for counting and caching the number of open issues of a group.
- class OpenIssuesCountService < BaseCountService
- include Gitlab::Utils::StrongMemoize
-
- VERSION = 1
+ class OpenIssuesCountService < Groups::CountService
PUBLIC_COUNT_KEY = 'group_public_open_issues_count'
TOTAL_COUNT_KEY = 'group_total_open_issues_count'
- CACHED_COUNT_THRESHOLD = 1000
- EXPIRATION_TIME = 24.hours
-
- attr_reader :group, :user
-
- def initialize(group, user = nil)
- @group = group
- @user = user
- end
-
- # Reads count value from cache and return it if present.
- # If empty or expired, #uncached_count will calculate the issues count for the group and
- # compare it with the threshold. If it is greater, it will be written to the cache and returned.
- # If below, it will be returned without being cached.
- # This results in only caching large counts and calculating the rest with every call to maintain
- # accuracy.
- def count
- cached_count = Rails.cache.read(cache_key)
- return cached_count unless cached_count.blank?
-
- refreshed_count = uncached_count
- update_cache_for_key(cache_key) { refreshed_count } if refreshed_count > CACHED_COUNT_THRESHOLD
- refreshed_count
- end
-
- def cache_key(key = nil)
- ['groups', 'open_issues_count_service', VERSION, group.id, cache_key_name]
- end
private
- def cache_options
- super.merge({ expires_in: EXPIRATION_TIME })
- end
-
def cache_key_name
public_only? ? PUBLIC_COUNT_KEY : TOTAL_COUNT_KEY
end
@@ -60,5 +25,9 @@ module Groups
def relation_for_count
IssuesFinder.new(user, group_id: group.id, state: 'opened', non_archived: true, include_subgroups: true, public_only: public_only?).execute
end
+
+ def issuable_key
+ 'open_issues'
+ end
end
end
diff --git a/app/services/groups/update_service.rb b/app/services/groups/update_service.rb
index 84385f5da25..ff369d01efc 100644
--- a/app/services/groups/update_service.rb
+++ b/app/services/groups/update_service.rb
@@ -46,18 +46,6 @@ module Groups
private
- def handle_namespace_settings
- settings_params = params.slice(*::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS)
-
- return if settings_params.empty?
-
- ::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS.each do |nsp|
- params.delete(nsp)
- end
-
- ::NamespaceSettings::UpdateService.new(current_user, group, settings_params).execute
- end
-
def valid_path_change_with_npm_packages?
return true unless group.packages_feature_enabled?
return true if params[:path].blank?
diff --git a/app/services/issuable/bulk_update_service.rb b/app/services/issuable/bulk_update_service.rb
index d3d543edcd7..8bcbb92cd0e 100644
--- a/app/services/issuable/bulk_update_service.rb
+++ b/app/services/issuable/bulk_update_service.rb
@@ -7,7 +7,9 @@ module Issuable
attr_accessor :parent, :current_user, :params
def initialize(parent, user = nil, params = {})
- @parent, @current_user, @params = parent, user, params.dup
+ @parent = parent
+ @current_user = user
+ @params = params.dup
end
def execute(type)
@@ -15,7 +17,7 @@ module Issuable
set_update_params(type)
items = update_issuables(type, ids)
- response_success(payload: { count: items.count })
+ response_success(payload: { count: items.size })
rescue ArgumentError => e
response_error(e.message, 422)
end
@@ -59,10 +61,17 @@ module Issuable
def find_issuables(parent, model_class, ids)
if parent.is_a?(Project)
- model_class.id_in(ids).of_projects(parent)
+ projects = parent
elsif parent.is_a?(Group)
- model_class.id_in(ids).of_projects(parent.all_projects)
+ projects = parent.all_projects
+ else
+ return
end
+
+ model_class
+ .id_in(ids)
+ .of_projects(projects)
+ .includes_for_bulk_update
end
def response_success(message: nil, payload: nil)
diff --git a/app/services/issuable/destroy_service.rb b/app/services/issuable/destroy_service.rb
index 4c64655a622..d5aa84d8d6c 100644
--- a/app/services/issuable/destroy_service.rb
+++ b/app/services/issuable/destroy_service.rb
@@ -3,12 +3,36 @@
module Issuable
class DestroyService < IssuableBaseService
def execute(issuable)
- TodoService.new.destroy_target(issuable) do |issuable|
- if issuable.destroy
- issuable.update_project_counter_caches
- issuable.assignees.each(&:invalidate_cache_counts)
- end
+ if issuable.destroy
+ after_destroy(issuable)
+ end
+ end
+
+ private
+
+ def after_destroy(issuable)
+ delete_todos(issuable)
+ issuable.update_project_counter_caches
+ issuable.assignees.each(&:invalidate_cache_counts)
+ end
+
+ def group_for(issuable)
+ issuable.resource_parent.group
+ end
+
+ def delete_todos(issuable)
+ actor = group_for(issuable)
+
+ if Feature.enabled?(:destroy_issuable_todos_async, actor, default_enabled: :yaml)
+ TodosDestroyer::DestroyedIssuableWorker
+ .perform_async(issuable.id, issuable.class.name)
+ else
+ TodosDestroyer::DestroyedIssuableWorker
+ .new
+ .perform(issuable.id, issuable.class.name)
end
end
end
end
+
+Issuable::DestroyService.prepend_if_ee('EE::Issuable::DestroyService')
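
The flag-gated branch in delete_todos either enqueues the worker or runs it inline. A minimal Sidekiq sketch of the two call styles; the worker class and body here are placeholders, not the real TodosDestroyer::DestroyedIssuableWorker.

require 'sidekiq'

class DestroyedIssuableWorker
  include Sidekiq::Worker

  def perform(issuable_id, issuable_class_name)
    # delete todos that point at the destroyed issuable ...
  end
end

# Flag on: enqueue to Sidekiq (needs a Redis connection) and return immediately.
DestroyedIssuableWorker.perform_async(42, 'Issue')
# Flag off: run the same code inline, in the current process.
DestroyedIssuableWorker.new.perform(42, 'Issue')
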
diff --git a/app/services/issuable/process_assignees.rb b/app/services/issuable/process_assignees.rb
index c9c6b0bed85..1ef6d3d9c42 100644
--- a/app/services/issuable/process_assignees.rb
+++ b/app/services/issuable/process_assignees.rb
@@ -14,12 +14,13 @@ module Issuable
end
def execute
- if assignee_ids.blank?
- updated_new_assignees = new_assignee_ids
+ updated_new_assignees = new_assignee_ids
+
+ if add_assignee_ids.blank? && remove_assignee_ids.blank?
+ updated_new_assignees = assignee_ids if assignee_ids
+ else
updated_new_assignees |= add_assignee_ids if add_assignee_ids
updated_new_assignees -= remove_assignee_ids if remove_assignee_ids
- else
- updated_new_assignees = assignee_ids
end
updated_new_assignees.uniq
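
With the reordering above, explicit add/remove ids are applied to new_assignee_ids and a plain assignee_ids value only wins when both are blank. The same branch in plain Ruby, with illustrative ids:

new_assignee_ids    = [1, 2]
assignee_ids        = [9]
add_assignee_ids    = [3]
remove_assignee_ids = [2]

updated = new_assignee_ids
if add_assignee_ids.empty? && remove_assignee_ids.empty?
  updated = assignee_ids if assignee_ids
else
  updated |= add_assignee_ids
  updated -= remove_assignee_ids
end
updated.uniq # => [1, 3]
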
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 094b31b4ad6..add53bc6267 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -29,32 +29,48 @@ class IssuableBaseService < BaseService
params.delete(:label_ids)
params.delete(:assignee_ids)
params.delete(:assignee_id)
+ params.delete(:add_assignee_ids)
+ params.delete(:remove_assignee_ids)
params.delete(:due_date)
params.delete(:canonical_issue_id)
params.delete(:project)
params.delete(:discussion_locked)
end
- filter_assignee(issuable)
+ filter_assignees(issuable)
filter_milestone
filter_labels
end
- def filter_assignee(issuable)
- return if params[:assignee_ids].blank?
+ def filter_assignees(issuable)
+ filter_assignees_with_key(issuable, :assignee_ids, :assignees)
+ filter_assignees_with_key(issuable, :add_assignee_ids, :add_assignees)
+ filter_assignees_with_key(issuable, :remove_assignee_ids, :remove_assignees)
+ end
+
+ def filter_assignees_with_key(issuable, id_key, key)
+ if params[key] && params[id_key].blank?
+ params[id_key] = params[key].map(&:id)
+ end
+
+ return if params[id_key].blank?
+
+ filter_assignees_using_checks(issuable, id_key)
+ end
+ def filter_assignees_using_checks(issuable, id_key)
unless issuable.allows_multiple_assignees?
- params[:assignee_ids] = params[:assignee_ids].first(1)
+ params[id_key] = params[id_key].first(1)
end
- assignee_ids = params[:assignee_ids].select { |assignee_id| user_can_read?(issuable, assignee_id) }
+ assignee_ids = params[id_key].select { |assignee_id| user_can_read?(issuable, assignee_id) }
- if params[:assignee_ids].map(&:to_s) == [IssuableFinder::Params::NONE]
- params[:assignee_ids] = []
+ if params[id_key].map(&:to_s) == [IssuableFinder::Params::NONE]
+ params[id_key] = []
elsif assignee_ids.any?
- params[:assignee_ids] = assignee_ids
+ params[id_key] = assignee_ids
else
- params.delete(:assignee_ids)
+ params.delete(id_key)
end
end
@@ -116,6 +132,15 @@ class IssuableBaseService < BaseService
new_label_ids.uniq
end
+ def process_assignee_ids(attributes, existing_assignee_ids: nil, extra_assignee_ids: [])
+ process = Issuable::ProcessAssignees.new(assignee_ids: attributes.delete(:assignee_ids),
+ add_assignee_ids: attributes.delete(:add_assignee_ids),
+ remove_assignee_ids: attributes.delete(:remove_assignee_ids),
+ existing_assignee_ids: existing_assignee_ids,
+ extra_assignee_ids: extra_assignee_ids)
+ process.execute
+ end
+
def handle_quick_actions(issuable)
merge_quick_actions_into_params!(issuable)
end
@@ -145,6 +170,10 @@ class IssuableBaseService < BaseService
params[:author] ||= current_user
params[:label_ids] = process_label_ids(params, extra_label_ids: issuable.label_ids.to_a)
+ if issuable.respond_to?(:assignee_ids)
+ params[:assignee_ids] = process_assignee_ids(params, extra_assignee_ids: issuable.assignee_ids.to_a)
+ end
+
issuable.assign_attributes(params)
before_create(issuable)
@@ -191,6 +220,7 @@ class IssuableBaseService < BaseService
old_associations = associations_before_update(issuable)
assign_requested_labels(issuable)
+ assign_requested_assignees(issuable)
if issuable.changed? || params.present?
issuable.assign_attributes(params)
@@ -354,6 +384,16 @@ class IssuableBaseService < BaseService
issuable.touch
end
+ def assign_requested_assignees(issuable)
+ return if issuable.is_a?(Epic)
+
+ assignee_ids = process_assignee_ids(params, existing_assignee_ids: issuable.assignee_ids)
+ if ids_changing?(issuable.assignee_ids, assignee_ids)
+ params[:assignee_ids] = assignee_ids
+ issuable.touch
+ end
+ end
+
# Arrays of ids are used, but we should really use sets of ids, so
# let's have an helper to properly check if some ids are changing
def ids_changing?(old_array, new_array)
@@ -384,6 +424,20 @@ class IssuableBaseService < BaseService
associations
end
+ def handle_move_between_ids(issuable_position)
+ return unless params[:move_between_ids]
+
+ after_id, before_id = params.delete(:move_between_ids)
+ positioning_scope_id = params.delete(positioning_scope_key)
+
+ issuable_before = issuable_for_positioning(before_id, positioning_scope_id)
+ issuable_after = issuable_for_positioning(after_id, positioning_scope_id)
+
+ raise ActiveRecord::RecordNotFound unless issuable_before || issuable_after
+
+ issuable_position.move_between(issuable_before, issuable_after)
+ end
+
def has_changes?(issuable, old_labels: [], old_assignees: [], old_reviewers: [])
valid_attrs = [:title, :description, :assignee_ids, :reviewer_ids, :milestone_id, :target_branch]
@@ -429,6 +483,8 @@ class IssuableBaseService < BaseService
# we need to check this because milestone from milestone_id param is displayed on "new" page
# where private project milestone could leak without this check
def ensure_milestone_available(issuable)
+ return unless issuable.supports_milestone? && issuable.milestone_id.present?
+
issuable.milestone_id = nil unless issuable.milestone_available?
end
diff --git a/app/services/issuable_links/create_service.rb b/app/services/issuable_links/create_service.rb
index f148c503dcf..cbb81f1f521 100644
--- a/app/services/issuable_links/create_service.rb
+++ b/app/services/issuable_links/create_service.rb
@@ -7,7 +7,9 @@ module IssuableLinks
attr_reader :issuable, :current_user, :params
def initialize(issuable, user, params)
- @issuable, @current_user, @params = issuable, user, params.dup
+ @issuable = issuable
+ @current_user = user
+ @params = params.dup
end
def execute
@@ -107,11 +109,11 @@ module IssuableLinks
end
def issuables_assigned_message
- 'Issue(s) already assigned'
+ _("Issue(s) already assigned")
end
def issuables_not_found_message
- 'No Issue found for given params'
+ _("No matching issue found. Make sure that you are adding a valid issue URL.")
end
end
end
diff --git a/app/services/issuable_links/destroy_service.rb b/app/services/issuable_links/destroy_service.rb
index 57e1314e0da..28035bbb291 100644
--- a/app/services/issuable_links/destroy_service.rb
+++ b/app/services/issuable_links/destroy_service.rb
@@ -15,14 +15,18 @@ module IssuableLinks
return error(not_found_message, 404) unless permission_to_remove_relation?
remove_relation
- create_notes
- track_event
+ after_destroy
success(message: 'Relation was removed')
end
private
+ def after_destroy
+ create_notes
+ track_event
+ end
+
def remove_relation
link.destroy!
end
diff --git a/app/services/issuable_links/list_service.rb b/app/services/issuable_links/list_service.rb
index 10a2da7eb03..fe9678dcc32 100644
--- a/app/services/issuable_links/list_service.rb
+++ b/app/services/issuable_links/list_service.rb
@@ -7,7 +7,8 @@ module IssuableLinks
attr_reader :issuable, :current_user
def initialize(issuable, user)
- @issuable, @current_user = issuable, user
+ @issuable = issuable
+ @current_user = user
end
def execute
diff --git a/app/services/issue_rebalancing_service.rb b/app/services/issue_rebalancing_service.rb
index db5c5ddfb84..f9c3388204f 100644
--- a/app/services/issue_rebalancing_service.rb
+++ b/app/services/issue_rebalancing_service.rb
@@ -62,7 +62,7 @@ class IssueRebalancingService
def run_update_query(values, query_name)
Issue.connection.exec_query(<<~SQL, query_name)
- WITH cte(cte_id, new_pos) AS (
+ WITH cte(cte_id, new_pos) AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
SELECT *
FROM (VALUES #{values}) as t (id, pos)
)
diff --git a/app/services/issues/after_create_service.rb b/app/services/issues/after_create_service.rb
new file mode 100644
index 00000000000..0c6ec65f0e2
--- /dev/null
+++ b/app/services/issues/after_create_service.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Issues
+ class AfterCreateService < Issues::BaseService
+ def execute(issue)
+ todo_service.new_issue(issue, current_user)
+ delete_milestone_total_issue_counter_cache(issue.milestone)
+ track_incident_action(current_user, issue, :incident_created)
+ end
+ end
+end
+
+Issues::AfterCreateService.prepend_ee_mod
diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb
index 25f319da03b..87615d1b4f2 100644
--- a/app/services/issues/base_service.rb
+++ b/app/services/issues/base_service.rb
@@ -52,7 +52,7 @@ module Issues
end
def execute_hooks(issue, action = 'open', old_associations: {})
- issue_data = hook_data(issue, action, old_associations: old_associations)
+ issue_data = Gitlab::Lazy.new { hook_data(issue, action, old_associations: old_associations) }
hooks_scope = issue.confidential? ? :confidential_issue_hooks : :issue_hooks
issue.project.execute_hooks(issue_data, hooks_scope)
issue.project.execute_services(issue_data, hooks_scope)
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index 3fdc66ed84e..68660b35bee 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -6,7 +6,7 @@ module Issues
def execute(skip_system_notes: false)
@request = params.delete(:request)
- @spam_params = Spam::SpamActionService.filter_spam_params!(params)
+ @spam_params = Spam::SpamActionService.filter_spam_params!(params, @request)
@issue = BuildService.new(project, current_user, params).execute
@@ -32,13 +32,11 @@ module Issues
end
end
+ # Add new items to Issues::AfterCreateService if they can be performed in Sidekiq
def after_create(issue)
add_incident_label(issue)
- todo_service.new_issue(issue, current_user)
user_agent_detail_service.create
resolve_discussions_with_issue(issue)
- delete_milestone_total_issue_counter_cache(issue.milestone)
- track_incident_action(current_user, issue, :incident_created)
super
end
@@ -77,4 +75,4 @@ module Issues
end
end
-Issues::CreateService.prepend_if_ee('EE::Issues::CreateService')
+Issues::CreateService.prepend_ee_mod
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index 2906bdf62a7..702527d80a7 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -8,7 +8,7 @@ module Issues
handle_move_between_ids(issue)
@request = params.delete(:request)
- @spam_params = Spam::SpamActionService.filter_spam_params!(params)
+ @spam_params = Spam::SpamActionService.filter_spam_params!(params, @request)
change_issue_duplicate(issue)
move_issue_to_new_project(issue) || clone_issue(issue) || update_task_event(issue) || update(issue)
@@ -96,19 +96,15 @@ module Issues
end
def handle_move_between_ids(issue)
- return unless params[:move_between_ids]
-
- after_id, before_id = params.delete(:move_between_ids)
- board_group_id = params.delete(:board_group_id)
-
- issue_before = get_issue_if_allowed(before_id, board_group_id)
- issue_after = get_issue_if_allowed(after_id, board_group_id)
- raise ActiveRecord::RecordNotFound unless issue_before || issue_after
+ super
- issue.move_between(issue_before, issue_after)
rebalance_if_needed(issue)
end
+ def positioning_scope_key
+ :board_group_id
+ end
+
# rubocop: disable CodeReuse/ActiveRecord
def change_issue_duplicate(issue)
canonical_issue_id = params.delete(:canonical_issue_id)
@@ -185,7 +181,7 @@ module Issues
end
# rubocop: disable CodeReuse/ActiveRecord
- def get_issue_if_allowed(id, board_group_id = nil)
+ def issuable_for_positioning(id, board_group_id = nil)
return unless id
issue =
diff --git a/app/services/jira_connect_subscriptions/base_service.rb b/app/services/jira_connect_subscriptions/base_service.rb
index 0e5bb91660e..042169acb6f 100644
--- a/app/services/jira_connect_subscriptions/base_service.rb
+++ b/app/services/jira_connect_subscriptions/base_service.rb
@@ -5,7 +5,9 @@ module JiraConnectSubscriptions
attr_accessor :jira_connect_installation, :current_user, :params
def initialize(jira_connect_installation, user = nil, params = {})
- @jira_connect_installation, @current_user, @params = jira_connect_installation, user, params.dup
+ @jira_connect_installation = jira_connect_installation
+ @current_user = user
+ @params = params.dup
end
end
end
diff --git a/app/services/keys/base_service.rb b/app/services/keys/base_service.rb
index 113e22b01ce..9b238e2f176 100644
--- a/app/services/keys/base_service.rb
+++ b/app/services/keys/base_service.rb
@@ -5,7 +5,8 @@ module Keys
attr_accessor :user, :params
def initialize(user, params = {})
- @user, @params = user, params
+ @user = user
+ @params = params
@ip_address = @params.delete(:ip_address)
end
diff --git a/app/services/keys/create_service.rb b/app/services/keys/create_service.rb
index c256de7b35d..c1c3ef8792f 100644
--- a/app/services/keys/create_service.rb
+++ b/app/services/keys/create_service.rb
@@ -5,7 +5,8 @@ module Keys
attr_accessor :current_user
def initialize(current_user, params = {})
- @current_user, @params = current_user, params
+ @current_user = current_user
+ @params = params
@ip_address = @params.delete(:ip_address)
@user = params.delete(:user) || current_user
end
diff --git a/app/services/keys/expiry_notification_service.rb b/app/services/keys/expiry_notification_service.rb
new file mode 100644
index 00000000000..b486f77ced2
--- /dev/null
+++ b/app/services/keys/expiry_notification_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Keys
+ class ExpiryNotificationService < ::Keys::BaseService
+ attr_accessor :keys, :expiring_soon
+
+ def initialize(user, params)
+ @keys = params[:keys]
+ @expiring_soon = params[:expiring_soon]
+
+ super
+ end
+
+ def execute
+ return unless allowed?
+
+ if expiring_soon
+ trigger_expiring_soon_notification
+ else
+ trigger_expired_notification
+ end
+ end
+
+ private
+
+ def allowed?
+ user.can?(:receive_notifications)
+ end
+
+ def trigger_expiring_soon_notification
+ notification_service.ssh_key_expiring_soon(user, keys.map(&:fingerprint))
+
+ keys.update_all(before_expiry_notification_delivered_at: Time.current.utc)
+ end
+
+ def trigger_expired_notification
+ notification_service.ssh_key_expired(user, keys.map(&:fingerprint))
+
+ keys.update_all(expiry_notification_delivered_at: Time.current.utc)
+ end
+ end
+end
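
A hypothetical invocation; only the params shape (keys:, expiring_soon:) comes from the initializer above, and the relation is a placeholder rather than a documented scope. Whatever is passed as keys must respond to update_all and map(&:fingerprint), i.e. an ActiveRecord relation of Key.

expiring_keys = user.keys # placeholder relation
Keys::ExpiryNotificationService.new(user, { keys: expiring_keys, expiring_soon: true }).execute
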
diff --git a/app/services/mattermost/create_team_service.rb b/app/services/mattermost/create_team_service.rb
index afcd6439a14..2cbcaaad5e1 100644
--- a/app/services/mattermost/create_team_service.rb
+++ b/app/services/mattermost/create_team_service.rb
@@ -3,7 +3,8 @@
module Mattermost
class CreateTeamService < ::BaseService
def initialize(group, current_user)
- @group, @current_user = group, current_user
+ @group = group
+ @current_user = current_user
end
def execute
diff --git a/app/services/members/create_service.rb b/app/services/members/create_service.rb
index cffccda1a44..953cf7f5bf6 100644
--- a/app/services/members/create_service.rb
+++ b/app/services/members/create_service.rb
@@ -2,67 +2,98 @@
module Members
class CreateService < Members::BaseService
- include Gitlab::Utils::StrongMemoize
+ BlankInvitesError = Class.new(StandardError)
+ TooManyInvitesError = Class.new(StandardError)
- DEFAULT_LIMIT = 100
+ DEFAULT_INVITE_LIMIT = 100
- def execute(source)
- return error(s_('AddMember|No users specified.')) if user_ids.blank?
+ def initialize(*args)
+ super
- return error(s_("AddMember|Too many users specified (limit is %{user_limit})") % { user_limit: user_limit }) if
- user_limit && user_ids.size > user_limit
+ @errors = []
+ @invites = invites_from_params&.split(',')&.uniq&.flatten
+ @source = params[:source]
+ end
+
+ def execute
+ validate_invites!
+
+ add_members
+ enqueue_onboarding_progress_action
+ result
+ rescue BlankInvitesError, TooManyInvitesError => e
+ error(e.message)
+ end
+
+ private
+
+ attr_reader :source, :errors, :invites, :member_created_namespace_id
+
+ def invites_from_params
+ params[:user_ids]
+ end
+
+ def validate_invites!
+ raise BlankInvitesError, blank_invites_message if invites.blank?
+
+ return unless user_limit && invites.size > user_limit
+
+ raise TooManyInvitesError,
+ format(s_("AddMember|Too many users specified (limit is %{user_limit})"), user_limit: user_limit)
+ end
+
+ def blank_invites_message
+ s_('AddMember|No users specified.')
+ end
+ def add_members
members = source.add_users(
- user_ids,
+ invites,
params[:access_level],
expires_at: params[:expires_at],
current_user: current_user
)
- errors = []
-
- members.each do |member|
- if member.invalid?
- current_error =
- # Invited users may not have an associated user
- if member.user.present?
- "#{member.user.username}: "
- else
- ""
- end
-
- current_error += member.errors.full_messages.to_sentence
- errors << current_error
- else
- after_execute(member: member)
- end
- end
-
- enqueue_onboarding_progress_action(source) if members.size > errors.size
-
- return success unless errors.any?
+ members.each { |member| process_result(member) }
+ end
- error(errors.to_sentence)
+ def process_result(member)
+ if member.invalid?
+ add_error_for_member(member)
+ else
+ after_execute(member: member)
+ @member_created_namespace_id ||= member.namespace_id
+ end
end
- private
+ def add_error_for_member(member)
+ prefix = "#{member.user.username}: " if member.user.present?
- def user_ids
- strong_memoize(:user_ids) do
- ids = params[:user_ids] || ''
- ids.split(',').uniq.flatten
- end
+ errors << "#{prefix}#{member.errors.full_messages.to_sentence}"
end
def user_limit
- limit = params.fetch(:limit, DEFAULT_LIMIT)
+ limit = params.fetch(:limit, DEFAULT_INVITE_LIMIT)
limit && limit < 0 ? nil : limit
end
- def enqueue_onboarding_progress_action(source)
- namespace_id = source.is_a?(Project) ? source.namespace_id : source.id
- Namespaces::OnboardingUserAddedWorker.perform_async(namespace_id)
+ def enqueue_onboarding_progress_action
+ return unless member_created_namespace_id
+
+ Namespaces::OnboardingUserAddedWorker.perform_async(member_created_namespace_id)
+ end
+
+ def result
+ if errors.any?
+ error(formatted_errors)
+ else
+ success
+ end
+ end
+
+ def formatted_errors
+ errors.to_sentence
end
end
end
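
How the aggregated errors read back to the caller: formatted_errors uses ActiveSupport's Array#to_sentence. The messages below are illustrative; in the service each entry is the member's error messages, prefixed with the username when the member has an associated user.

require 'active_support/core_ext/array/conversions'

errors = [
  'alice: Access level is not included in the list',
  'bob@example.com: Email has already been taken'
]
errors.to_sentence
# => "alice: Access level is not included in the list and bob@example.com: Email has already been taken"
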
diff --git a/app/services/members/invite_service.rb b/app/services/members/invite_service.rb
index 169500d08f0..48010f9c8e7 100644
--- a/app/services/members/invite_service.rb
+++ b/app/services/members/invite_service.rb
@@ -1,98 +1,46 @@
# frozen_string_literal: true
module Members
- class InviteService < Members::BaseService
- BlankEmailsError = Class.new(StandardError)
- TooManyEmailsError = Class.new(StandardError)
+ class InviteService < Members::CreateService
+ extend ::Gitlab::Utils::Override
def initialize(*args)
super
@errors = {}
- @emails = params[:email]&.split(',')&.uniq&.flatten
- end
-
- def execute(source)
- validate_emails!
-
- @source = source
- emails.each(&method(:process_email))
- result
- rescue BlankEmailsError, TooManyEmailsError => e
- error(e.message)
end
private
- attr_reader :source, :errors, :emails
-
- def validate_emails!
- raise BlankEmailsError, s_('AddMember|Email cannot be blank') if emails.blank?
-
- if user_limit && emails.size > user_limit
- raise TooManyEmailsError, s_("AddMember|Too many users specified (limit is %{user_limit})") % { user_limit: user_limit }
- end
- end
-
- def user_limit
- limit = params.fetch(:limit, Members::CreateService::DEFAULT_LIMIT)
-
- limit < 0 ? nil : limit
- end
-
- def process_email(email)
- return if existing_member?(email)
- return if existing_invite?(email)
- return if existing_request?(email)
-
- add_member(email)
- end
-
- def existing_member?(email)
- existing_member = source.members.with_user_by_email(email).exists?
-
- if existing_member
- errors[email] = s_("AddMember|Already a member of %{source_name}") % { source_name: source.name }
- return true
- end
+ alias_method :formatted_errors, :errors
- false
+ def invites_from_params
+ params[:email]
end
- def existing_invite?(email)
- existing_invite = source.members.search_invite_email(email).exists?
+ def validate_invites!
+ super
- if existing_invite
- errors[email] = s_("AddMember|Member already invited to %{source_name}") % { source_name: source.name }
- return true
- end
+ # We need this because add_users goes through Member#parse_users_list, which ignores invalid emails.
+ # Ideally we wouldn't need it, but we can't really change the add_users method.
+ valid, invalid = invites.partition { |email| Member.valid_email?(email) }
+ @invites = valid
- false
+ invalid.each { |email| errors[email] = s_('AddMember|Invite email is invalid') }
end
- def existing_request?(email)
- existing_request = source.requesters.with_user_by_email(email).exists?
-
- if existing_request
- errors[email] = s_("AddMember|Member cannot be invited because they already requested to join %{source_name}") % { source_name: source.name }
- return true
- end
-
- false
+ override :blank_invites_message
+ def blank_invites_message
+ s_('AddMember|Emails cannot be blank')
end
- def add_member(email)
- new_member = source.add_user(email, params[:access_level], current_user: current_user, expires_at: params[:expires_at])
-
- errors[email] = new_member.errors.full_messages.to_sentence if new_member.invalid?
+ override :add_error_for_member
+ def add_error_for_member(member)
+ errors[invite_email(member)] = member.errors.full_messages.to_sentence
end
- def result
- if errors.any?
- error(errors)
- else
- success
- end
+ def invite_email(member)
+ member.invite_email || member.user.email
end
end
end
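
The partition in validate_invites! in plain Ruby; the regexp below is a stand-in for Member.valid_email?, not the actual check.

require 'uri'

invites = ['dev@example.com', 'not-an-email']
valid, invalid = invites.partition { |email| email.match?(URI::MailTo::EMAIL_REGEXP) }
valid   # => ["dev@example.com"]
invalid # => ["not-an-email"]
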
diff --git a/app/services/merge_requests/add_context_service.rb b/app/services/merge_requests/add_context_service.rb
index b693f8509a2..77b00f645c9 100644
--- a/app/services/merge_requests/add_context_service.rb
+++ b/app/services/merge_requests/add_context_service.rb
@@ -49,11 +49,9 @@ module MergeRequests
def duplicates
existing_oids = merge_request.merge_request_context_commits.map { |commit| commit.sha.to_s }
- duplicate_oids = existing_oids.select do |existing_oid|
+ existing_oids.select do |existing_oid|
commit_ids.select { |commit_id| existing_oid.start_with?(commit_id) }.count > 0
end
-
- duplicate_oids
end
def build_context_commit_rows(merge_request_id, commits)
diff --git a/app/services/merge_requests/after_create_service.rb b/app/services/merge_requests/after_create_service.rb
index b22afe8a20d..ed9747a8c99 100644
--- a/app/services/merge_requests/after_create_service.rb
+++ b/app/services/merge_requests/after_create_service.rb
@@ -24,6 +24,18 @@ module MergeRequests
merge_request.create_cross_references!(current_user)
OnboardingProgressService.new(merge_request.target_project.namespace).execute(action: :merge_request_created)
+
+ todo_service.new_merge_request(merge_request, current_user)
+ merge_request.cache_merge_request_closes_issues!(current_user)
+
+ Gitlab::UsageDataCounters::MergeRequestCounter.count(:create)
+ link_lfs_objects(merge_request)
+
+ delete_milestone_total_merge_requests_counter_cache(merge_request.milestone)
+ end
+
+ def link_lfs_objects(merge_request)
+ LinkLfsObjectsService.new(merge_request.target_project).execute(merge_request)
end
end
end
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index 317cd11a69d..3a3765355d8 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -143,8 +143,12 @@ module MergeRequests
merge_request, merge_request.project, current_user, old_reviewers)
end
- def create_pipeline_for(merge_request, user)
- MergeRequests::CreatePipelineService.new(project, user).execute(merge_request)
+ def create_pipeline_for(merge_request, user, async: false)
+ if async
+ MergeRequests::CreatePipelineWorker.perform_async(project.id, user.id, merge_request.id)
+ else
+ MergeRequests::CreatePipelineService.new(project, user).execute(merge_request)
+ end
end
def abort_auto_merge(merge_request, reason)
@@ -164,7 +168,7 @@ module MergeRequests
def pipeline_merge_requests(pipeline)
pipeline.all_merge_requests.opened.each do |merge_request|
- next unless pipeline == merge_request.head_pipeline
+ next unless pipeline.id == merge_request.head_pipeline_id
yield merge_request
end
@@ -195,6 +199,12 @@ module MergeRequests
merge_request.update(merge_error: message) if save_message_on_model
end
+
+ def delete_milestone_total_merge_requests_counter_cache(milestone)
+ return unless milestone
+
+ Milestones::MergeRequestsCountService.new(milestone).delete_cache
+ end
end
end
diff --git a/app/services/merge_requests/build_service.rb b/app/services/merge_requests/build_service.rb
index e4d3c91d13e..ecc55eae5de 100644
--- a/app/services/merge_requests/build_service.rb
+++ b/app/services/merge_requests/build_service.rb
@@ -16,17 +16,7 @@ module MergeRequests
merge_request.source_project = find_source_project
merge_request.target_project = find_target_project
- # Force remove the source branch?
- merge_request.merge_params['force_remove_source_branch'] = force_remove_source_branch
-
- # Only assign merge requests params that are allowed
- self.params = assign_allowed_merge_params(merge_request, params)
-
- # Filter out params that are either not allowed or invalid
- filter_params(merge_request)
-
- # Filter out :add_label_ids and :remove_label_ids params
- filter_label_id_params
+ process_params
merge_request.compare_commits = []
set_merge_request_target_branch
@@ -70,21 +60,41 @@ module MergeRequests
end
end
- def filter_label_id_params
+ def filter_id_params
# merge_request.assign_attributes(...) below is a Rails
# method that only work if all the params it is passed have
# corresponding fields in the database. As there are no fields
- # in the database for :add_label_ids and :remove_label_ids, we
+ # in the database for :add_label_ids, :remove_label_ids,
+ # :add_assignee_ids and :remove_assignee_ids, we
# need to remove them from the params before the call to
# merge_request.assign_attributes(...)
#
- # IssuableBaseService#process_label_ids takes care
+ # IssuableBaseService#process_label_ids and
+ # IssuableBaseService#process_assignee_ids take care
# of the removal.
params[:label_ids] = process_label_ids(params, extra_label_ids: merge_request.label_ids.to_a)
+ params[:assignee_ids] = process_assignee_ids(params, extra_assignee_ids: merge_request.assignee_ids.to_a)
+
merge_request.assign_attributes(params.to_h.compact)
end
+ def process_params
+ # Force remove the source branch?
+ merge_request.merge_params['force_remove_source_branch'] = force_remove_source_branch
+
+ # Only assign merge requests params that are allowed
+ self.params = assign_allowed_merge_params(merge_request, params)
+
+ # Filter out params that are either not allowed or invalid
+ filter_params(merge_request)
+
+ # Filter out the following from params:
+ # - :add_label_ids and :remove_label_ids
+ # - :add_assignee_ids and :remove_assignee_ids
+ filter_id_params
+ end
+
def find_source_project
source_project = project_from_params(:source_project)
return source_project if source_project.present? && can?(current_user, :create_merge_request_from, source_project)
diff --git a/app/services/merge_requests/create_service.rb b/app/services/merge_requests/create_service.rb
index ac84a13f437..8186472ec65 100644
--- a/app/services/merge_requests/create_service.rb
+++ b/app/services/merge_requests/create_service.rb
@@ -14,16 +14,12 @@ module MergeRequests
end
def after_create(issuable)
+ issuable.mark_as_preparing
+
# Add new items to MergeRequests::AfterCreateService if they can
# be performed in Sidekiq
NewMergeRequestWorker.perform_async(issuable.id, current_user.id)
- todo_service.new_merge_request(issuable, current_user)
- issuable.cache_merge_request_closes_issues!(current_user)
-
- Gitlab::UsageDataCounters::MergeRequestCounter.count(:create)
- link_lfs_objects(issuable)
-
super
end
@@ -54,10 +50,6 @@ module MergeRequests
raise Gitlab::Access::AccessDeniedError
end
end
-
- def link_lfs_objects(issuable)
- LinkLfsObjectsService.new(issuable.target_project).execute(issuable)
- end
end
end
diff --git a/app/services/merge_requests/handle_assignees_change_service.rb b/app/services/merge_requests/handle_assignees_change_service.rb
new file mode 100644
index 00000000000..77ff0791eb4
--- /dev/null
+++ b/app/services/merge_requests/handle_assignees_change_service.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ class HandleAssigneesChangeService < MergeRequests::BaseService
+ def async_execute(merge_request, old_assignees, options = {})
+ if Feature.enabled?(:async_handle_merge_request_assignees_change, merge_request.target_project, default_enabled: :yaml)
+ MergeRequests::HandleAssigneesChangeWorker
+ .perform_async(
+ merge_request.id,
+ current_user.id,
+ old_assignees.map(&:id),
+ options
+ )
+ else
+ execute(merge_request, old_assignees, options)
+ end
+ end
+
+ def execute(merge_request, old_assignees, options = {})
+ create_assignee_note(merge_request, old_assignees)
+ notification_service.async.reassigned_merge_request(merge_request, current_user, old_assignees.to_a)
+ todo_service.reassigned_assignable(merge_request, current_user, old_assignees)
+
+ new_assignees = merge_request.assignees - old_assignees
+ merge_request_activity_counter.track_users_assigned_to_mr(users: new_assignees)
+ merge_request_activity_counter.track_assignees_changed_action(user: current_user)
+
+ execute_assignees_hooks(merge_request, old_assignees) if options[:execute_hooks]
+ end
+
+ private
+
+ def execute_assignees_hooks(merge_request, old_assignees)
+ execute_hooks(
+ merge_request,
+ 'update',
+ old_associations: { assignees: old_assignees }
+ )
+ end
+ end
+end
+
+MergeRequests::HandleAssigneesChangeService.prepend_if_ee('EE::MergeRequests::HandleAssigneesChangeService')
diff --git a/app/services/merge_requests/merge_to_ref_service.rb b/app/services/merge_requests/merge_to_ref_service.rb
index c0115e94903..e07e0c985b4 100644
--- a/app/services/merge_requests/merge_to_ref_service.rb
+++ b/app/services/merge_requests/merge_to_ref_service.rb
@@ -66,7 +66,13 @@ module MergeRequests
end
def commit
- repository.merge_to_ref(current_user, source, merge_request, target_ref, commit_message, first_parent_ref, allow_conflicts)
+ repository.merge_to_ref(current_user,
+ source_sha: source,
+ branch: merge_request.target_branch,
+ target_ref: target_ref,
+ message: commit_message,
+ first_parent_ref: first_parent_ref,
+ allow_conflicts: allow_conflicts)
rescue Gitlab::Git::PreReceiveError, Gitlab::Git::CommandError => error
raise MergeError, error.message
end
diff --git a/app/services/merge_requests/migrate_external_diffs_service.rb b/app/services/merge_requests/migrate_external_diffs_service.rb
index 89b1e594c95..b1d2cd5d1c7 100644
--- a/app/services/merge_requests/migrate_external_diffs_service.rb
+++ b/app/services/merge_requests/migrate_external_diffs_service.rb
@@ -2,7 +2,7 @@
module MergeRequests
class MigrateExternalDiffsService < ::BaseService
- MAX_JOBS = 1000.freeze
+ MAX_JOBS = 1000
attr_reader :diff
diff --git a/app/services/merge_requests/push_options_handler_service.rb b/app/services/merge_requests/push_options_handler_service.rb
index 821558b8d6f..05ec87c7d60 100644
--- a/app/services/merge_requests/push_options_handler_service.rb
+++ b/app/services/merge_requests/push_options_handler_service.rb
@@ -129,7 +129,9 @@ module MergeRequests
target_branch: push_options[:target],
force_remove_source_branch: push_options[:remove_source_branch],
label: push_options[:label],
- unlabel: push_options[:unlabel]
+ unlabel: push_options[:unlabel],
+ assign: push_options[:assign],
+ unassign: push_options[:unassign]
}
params.compact!
@@ -137,6 +139,9 @@ module MergeRequests
params[:add_labels] = params.delete(:label).keys if params.has_key?(:label)
params[:remove_labels] = params.delete(:unlabel).keys if params.has_key?(:unlabel)
+ params[:add_assignee_ids] = params.delete(:assign).keys if params.has_key?(:assign)
+ params[:remove_assignee_ids] = params.delete(:unassign).keys if params.has_key?(:unassign)
+
params
end
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index 0fb16597aff..e04c5168cef 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -162,9 +162,12 @@ module MergeRequests
end
def refresh_pipelines_on_merge_requests(merge_request)
- create_pipeline_for(merge_request, current_user)
-
- UpdateHeadPipelineForMergeRequestWorker.perform_async(merge_request.id)
+ if Feature.enabled?(:code_review_async_pipeline_creation, project, default_enabled: :yaml)
+ create_pipeline_for(merge_request, current_user, async: true)
+ else
+ create_pipeline_for(merge_request, current_user, async: false)
+ UpdateHeadPipelineForMergeRequestWorker.perform_async(merge_request.id)
+ end
end
def abort_auto_merges(merge_request)
diff --git a/app/services/merge_requests/resolve_todos_service.rb b/app/services/merge_requests/resolve_todos_service.rb
new file mode 100644
index 00000000000..0010b596eee
--- /dev/null
+++ b/app/services/merge_requests/resolve_todos_service.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ class ResolveTodosService
+ include BaseServiceUtility
+
+ def initialize(merge_request, user)
+ @merge_request = merge_request
+ @user = user
+ end
+
+ def async_execute
+ if Feature.enabled?(:resolve_merge_request_todos_async, merge_request.target_project, default_enabled: :yaml)
+ MergeRequests::ResolveTodosWorker.perform_async(merge_request.id, user.id)
+ else
+ execute
+ end
+ end
+
+ def execute
+ todo_service.resolve_todos_for_target(merge_request, user)
+ end
+
+ private
+
+ attr_reader :merge_request, :user
+ end
+end
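
Callers interact with this service through async_execute; a short usage sketch, where merge_request and user stand in for real records:

    # With :resolve_merge_request_todos_async enabled for the target project this
    # enqueues MergeRequests::ResolveTodosWorker; otherwise it resolves inline.
    MergeRequests::ResolveTodosService.new(merge_request, user).async_execute

    # Synchronous resolution remains available directly:
    MergeRequests::ResolveTodosService.new(merge_request, user).execute
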
diff --git a/app/services/merge_requests/retarget_chain_service.rb b/app/services/merge_requests/retarget_chain_service.rb
index f24d67243c9..e8101e447d2 100644
--- a/app/services/merge_requests/retarget_chain_service.rb
+++ b/app/services/merge_requests/retarget_chain_service.rb
@@ -17,7 +17,7 @@ module MergeRequests
.opened
.by_target_branch(merge_request.source_branch)
.preload_source_project
- .at_most(MAX_RETARGET_MERGE_REQUESTS)
+ .limit(MAX_RETARGET_MERGE_REQUESTS)
other_merge_requests.find_each do |other_merge_request|
# Update only MRs on projects that we have access to
diff --git a/app/services/merge_requests/update_assignees_service.rb b/app/services/merge_requests/update_assignees_service.rb
new file mode 100644
index 00000000000..b339a644e8c
--- /dev/null
+++ b/app/services/merge_requests/update_assignees_service.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ class UpdateAssigneesService < UpdateService
+ # A stripped-down service that only does what it must to update the
+ # assignees, and knows that it does not have to check for other updates.
+ # This avoids many queries for attributes that cannot possibly change
+ # during the execution of this service.
+ def execute(merge_request)
+ return merge_request unless current_user&.can?(:update_merge_request, merge_request)
+
+ old_assignees = merge_request.assignees
+ old_ids = old_assignees.map(&:id)
+ new_ids = new_assignee_ids(merge_request)
+ return merge_request if new_ids.size != update_attrs[:assignee_ids].size
+ return merge_request if old_ids.to_set == new_ids.to_set # no-change
+
+ attrs = update_attrs.merge(assignee_ids: new_ids)
+ merge_request.update!(**attrs)
+
+ # Defer the more expensive operations (handle_assignee_changes) to the background
+ MergeRequests::HandleAssigneesChangeService
+ .new(project, current_user)
+ .async_execute(merge_request, old_assignees, execute_hooks: true)
+
+ merge_request
+ end
+
+ private
+
+ def new_assignee_ids(merge_request)
+ # Prime the cache to prevent N+1 lookups during the authorization loop.
+ merge_request.project.team.max_member_access_for_user_ids(update_attrs[:assignee_ids])
+ User.id_in(update_attrs[:assignee_ids]).map do |user|
+ if user.can?(:read_merge_request, merge_request)
+ user.id
+ else
+ merge_request.errors.add(
+ :assignees,
+ "Cannot assign #{user.to_reference} to #{merge_request.to_reference}"
+ )
+ nil
+ end
+ end.compact
+ end
+
+ def assignee_ids
+ params.fetch(:assignee_ids).first(1)
+ end
+
+ def params
+ ps = super
+
+ # allow either assignee_id or assignee_ids, preferring assignee_id if passed.
+ { assignee_ids: ps.key?(:assignee_id) ? Array.wrap(ps[:assignee_id]) : ps[:assignee_ids] }
+ end
+
+ def update_attrs
+ @attrs ||= { updated_at: Time.current, updated_by: current_user, assignee_ids: assignee_ids }
+ end
+ end
+end
+
+MergeRequests::UpdateAssigneesService.prepend_if_ee('EE::MergeRequests::UpdateAssigneesService')
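
A sketch of how the specialized service is invoked, assuming the positional (project, current_user, params) construction used by UpdateService elsewhere in this commit; assignee is an illustrative User record, and #assignee_ids above caps the update to a single ID:

    # Either assignee_ids or assignee_id is accepted; assignee_id wins if both
    # are present, and only the first ID is applied.
    MergeRequests::UpdateAssigneesService
      .new(project, current_user, { assignee_ids: [assignee.id] })
      .execute(merge_request)
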
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index f5e14797f7e..8995c5f2411 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -11,18 +11,7 @@ module MergeRequests
end
def execute(merge_request)
- # We don't allow change of source/target projects and source branch
- # after merge request was created
- params.delete(:source_project_id)
- params.delete(:target_project_id)
- params.delete(:source_branch)
-
- if merge_request.closed_or_merged_without_fork?
- params.delete(:target_branch)
- params.delete(:force_remove_source_branch)
- end
-
- update_task_event(merge_request) || update(merge_request)
+ update_merge_request_with_specialized_service(merge_request) || general_fallback(merge_request)
end
def handle_changes(merge_request, options)
@@ -86,6 +75,21 @@ module MergeRequests
attr_reader :target_branch_was_deleted
+ def general_fallback(merge_request)
+ # We don't allow changing the source/target projects or the source branch
+ # after the merge request has been created
+ params.delete(:source_project_id)
+ params.delete(:target_project_id)
+ params.delete(:source_branch)
+
+ if merge_request.closed_or_merged_without_fork?
+ params.delete(:target_branch)
+ params.delete(:force_remove_source_branch)
+ end
+
+ update_task_event(merge_request) || update(merge_request)
+ end
+
def track_title_and_desc_edits(changed_fields)
tracked_fields = %w(title description)
@@ -147,7 +151,11 @@ module MergeRequests
def resolve_todos(merge_request, old_labels, old_assignees, old_reviewers)
return unless has_changes?(merge_request, old_labels: old_labels, old_assignees: old_assignees, old_reviewers: old_reviewers)
- todo_service.resolve_todos_for_target(merge_request, current_user)
+ service_user = current_user
+
+ merge_request.run_after_commit_or_now do
+ ::MergeRequests::ResolveTodosService.new(merge_request, service_user).async_execute
+ end
end
def handle_target_branch_change(merge_request)
@@ -200,21 +208,22 @@ module MergeRequests
merge_request_activity_counter.track_milestone_changed_action(user: current_user)
+ previous_milestone = Milestone.find_by_id(merge_request.previous_changes['milestone_id'].first)
+ delete_milestone_total_merge_requests_counter_cache(previous_milestone)
+
if merge_request.milestone.nil?
notification_service.async.removed_milestone_merge_request(merge_request, current_user)
else
notification_service.async.changed_milestone_merge_request(merge_request, merge_request.milestone, current_user)
+
+ delete_milestone_total_merge_requests_counter_cache(merge_request.milestone)
end
end
def handle_assignees_change(merge_request, old_assignees)
- create_assignee_note(merge_request, old_assignees)
- notification_service.async.reassigned_merge_request(merge_request, current_user, old_assignees)
- todo_service.reassigned_assignable(merge_request, current_user, old_assignees)
-
- new_assignees = merge_request.assignees - old_assignees
- merge_request_activity_counter.track_users_assigned_to_mr(users: new_assignees)
- merge_request_activity_counter.track_assignees_changed_action(user: current_user)
+ MergeRequests::HandleAssigneesChangeService
+ .new(project, current_user)
+ .async_execute(merge_request, old_assignees)
end
def handle_reviewers_change(merge_request, old_reviewers)
@@ -267,6 +276,34 @@ module MergeRequests
def quick_action_options
{ merge_request_diff_head_sha: params.delete(:merge_request_diff_head_sha) }
end
+
+ def update_merge_request_with_specialized_service(merge_request)
+ return unless params.delete(:use_specialized_service)
+
+ # If we're attempting to modify only a single attribute, look up whether
+ # we have a specialized, targeted service we should use instead. We may
+ # in the future extend this to include specialized services that operate
+ # on multiple attributes, but for now we limit this to single-attribute
+ # updates.
+ #
+ return unless params.one?
+
+ attempt_specialized_update_services(merge_request, params.each_key.first.to_sym)
+ end
+
+ def attempt_specialized_update_services(merge_request, attribute)
+ case attribute
+ when :assignee_ids
+ assignees_service.execute(merge_request)
+ else
+ nil
+ end
+ end
+
+ def assignees_service
+ @assignees_service ||= ::MergeRequests::UpdateAssigneesService
+ .new(project, current_user, params)
+ end
end
end
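
How the dispatch above is expected to be exercised, per the code: the caller opts in with use_specialized_service and passes exactly one updatable attribute (a hedged sketch; the param values are illustrative):

    # With a single remaining attribute plus the opt-in flag, UpdateService
    # delegates to UpdateAssigneesService; any other shape falls back to
    # general_fallback.
    MergeRequests::UpdateService
      .new(project, current_user, { use_specialized_service: true, assignee_ids: [assignee.id] })
      .execute(merge_request)
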
diff --git a/app/services/metrics/dashboard/annotations/create_service.rb b/app/services/metrics/dashboard/annotations/create_service.rb
index c04f4c56b51..54f4e96378c 100644
--- a/app/services/metrics/dashboard/annotations/create_service.rb
+++ b/app/services/metrics/dashboard/annotations/create_service.rb
@@ -13,7 +13,8 @@ module Metrics
:create
def initialize(user, params)
- @user, @params = user, params
+ @user = user
+ @params = params
end
def execute
diff --git a/app/services/metrics/dashboard/annotations/delete_service.rb b/app/services/metrics/dashboard/annotations/delete_service.rb
index c6a6c4f5fbf..3efe6924a9b 100644
--- a/app/services/metrics/dashboard/annotations/delete_service.rb
+++ b/app/services/metrics/dashboard/annotations/delete_service.rb
@@ -11,7 +11,8 @@ module Metrics
:delete
def initialize(user, annotation)
- @user, @annotation = user, annotation
+ @user = user
+ @annotation = annotation
end
def execute
diff --git a/app/services/metrics/dashboard/grafana_metric_embed_service.rb b/app/services/metrics/dashboard/grafana_metric_embed_service.rb
index b8c5c17c738..6069d236e82 100644
--- a/app/services/metrics/dashboard/grafana_metric_embed_service.rb
+++ b/app/services/metrics/dashboard/grafana_metric_embed_service.rb
@@ -122,7 +122,8 @@ module Metrics
# Identifies the uid of the dashboard based on url format
class GrafanaUidParser
def initialize(grafana_url, project)
- @grafana_url, @project = grafana_url, project
+ @grafana_url = grafana_url
+ @project = project
end
def parse
@@ -145,7 +146,8 @@ module Metrics
# If no panel is specified, defaults to the first valid panel.
class DatasourceNameParser
def initialize(grafana_url, grafana_dashboard)
- @grafana_url, @grafana_dashboard = grafana_url, grafana_dashboard
+ @grafana_url = grafana_url
+ @grafana_dashboard = grafana_dashboard
end
def parse
diff --git a/app/services/metrics/dashboard/panel_preview_service.rb b/app/services/metrics/dashboard/panel_preview_service.rb
index 5b24d817fb6..02dd908e229 100644
--- a/app/services/metrics/dashboard/panel_preview_service.rb
+++ b/app/services/metrics/dashboard/panel_preview_service.rb
@@ -22,7 +22,9 @@ module Metrics
].freeze
def initialize(project, panel_yaml, environment)
- @project, @panel_yaml, @environment = project, panel_yaml, environment
+ @project = project
+ @panel_yaml = panel_yaml
+ @environment = environment
end
def execute
diff --git a/app/services/metrics/users_starred_dashboards/create_service.rb b/app/services/metrics/users_starred_dashboards/create_service.rb
index 7784ed4eb4e..9642df87861 100644
--- a/app/services/metrics/users_starred_dashboards/create_service.rb
+++ b/app/services/metrics/users_starred_dashboards/create_service.rb
@@ -11,7 +11,9 @@ module Metrics
:create
def initialize(user, project, dashboard_path)
- @user, @project, @dashboard_path = user, project, dashboard_path
+ @user = user
+ @project = project
+ @dashboard_path = dashboard_path
end
def execute
diff --git a/app/services/metrics/users_starred_dashboards/delete_service.rb b/app/services/metrics/users_starred_dashboards/delete_service.rb
index 579715bd49f..229c0e8cfc0 100644
--- a/app/services/metrics/users_starred_dashboards/delete_service.rb
+++ b/app/services/metrics/users_starred_dashboards/delete_service.rb
@@ -5,7 +5,9 @@ module Metrics
module UsersStarredDashboards
class DeleteService < ::BaseService
def initialize(user, project, dashboard_path = nil)
- @user, @project, @dashboard_path = user, project, dashboard_path
+ @user = user
+ @project = project
+ @dashboard_path = dashboard_path
end
def execute
diff --git a/app/services/milestones/base_service.rb b/app/services/milestones/base_service.rb
index f30194c0bfe..0d7d855bf5e 100644
--- a/app/services/milestones/base_service.rb
+++ b/app/services/milestones/base_service.rb
@@ -6,7 +6,9 @@ module Milestones
attr_accessor :parent, :current_user, :params
def initialize(parent, user, params = {})
- @parent, @current_user, @params = parent, user, params.dup
+ @parent = parent
+ @current_user = user
+ @params = params.dup
super
end
end
diff --git a/app/services/milestones/find_or_create_service.rb b/app/services/milestones/find_or_create_service.rb
index 881011e5106..b467ff98f54 100644
--- a/app/services/milestones/find_or_create_service.rb
+++ b/app/services/milestones/find_or_create_service.rb
@@ -5,7 +5,9 @@ module Milestones
attr_accessor :project, :current_user, :params
def initialize(project, user, params = {})
- @project, @current_user, @params = project, user, params.dup
+ @project = project
+ @current_user = user
+ @params = params.dup
end
def execute
diff --git a/app/services/milestones/merge_requests_count_service.rb b/app/services/milestones/merge_requests_count_service.rb
new file mode 100644
index 00000000000..be9ce3af44d
--- /dev/null
+++ b/app/services/milestones/merge_requests_count_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Milestones
+ class MergeRequestsCountService < BaseCountService
+ def initialize(milestone)
+ @milestone = milestone
+ end
+
+ def cache_key
+ "milestone_merge_requests_count_#{@milestone.milestoneish_id}"
+ end
+
+ def relation_for_count
+ @milestone.merge_requests
+ end
+ end
+end
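
A hedged usage sketch, assuming BaseCountService exposes a cached count reader (under cache_key) alongside the delete_cache call used by the transfer and update services in this commit:

    service = Milestones::MergeRequestsCountService.new(milestone)
    service.count        # cached count of merge requests in the milestone
    service.delete_cache # invalidated when MRs move between milestones
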
diff --git a/app/services/milestones/transfer_service.rb b/app/services/milestones/transfer_service.rb
index 18d7e41adc7..b9bd259ca8b 100644
--- a/app/services/milestones/transfer_service.rb
+++ b/app/services/milestones/transfer_service.rb
@@ -24,6 +24,9 @@ module Milestones
update_issues_milestone(milestone, new_milestone)
update_merge_requests_milestone(milestone.id, new_milestone&.id)
+
+ delete_milestone_counts_caches(milestone)
+ delete_milestone_counts_caches(new_milestone)
end
end
end
@@ -71,9 +74,6 @@ module Milestones
def update_issues_milestone(old_milestone, new_milestone)
Issue.where(project: project, milestone_id: old_milestone.id)
.update_all(milestone_id: new_milestone&.id)
-
- delete_milestone_issues_caches(old_milestone)
- delete_milestone_issues_caches(new_milestone)
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -84,11 +84,12 @@ module Milestones
end
# rubocop: enable CodeReuse/ActiveRecord
- def delete_milestone_issues_caches(milestone)
+ def delete_milestone_counts_caches(milestone)
return unless milestone
Milestones::IssuesCountService.new(milestone).delete_cache
Milestones::ClosedIssuesCountService.new(milestone).delete_cache
+ Milestones::MergeRequestsCountService.new(milestone).delete_cache
end
end
end
diff --git a/app/services/namespace_settings/update_service.rb b/app/services/namespace_settings/update_service.rb
index 3c9b7b637ac..c6c04b63690 100644
--- a/app/services/namespace_settings/update_service.rb
+++ b/app/services/namespace_settings/update_service.rb
@@ -13,12 +13,25 @@ module NamespaceSettings
end
def execute
+ validate_resource_access_token_creation_allowed_param
+
if group.namespace_settings
group.namespace_settings.attributes = settings_params
else
group.build_namespace_settings(settings_params)
end
end
+
+ private
+
+ def validate_resource_access_token_creation_allowed_param
+ return if settings_params[:resource_access_token_creation_allowed].nil?
+
+ unless can?(current_user, :admin_group, group)
+ settings_params.delete(:resource_access_token_creation_allowed)
+ group.namespace_settings.errors.add(:resource_access_token_creation_allowed, _('can only be changed by a group admin.'))
+ end
+ end
end
end
diff --git a/app/services/namespaces/in_product_marketing_emails_service.rb b/app/services/namespaces/in_product_marketing_emails_service.rb
index f009f5d8538..eb81253bc08 100644
--- a/app/services/namespaces/in_product_marketing_emails_service.rb
+++ b/app/services/namespaces/in_product_marketing_emails_service.rb
@@ -23,10 +23,12 @@ module Namespaces
def initialize(track, interval)
@track = track
@interval = interval
- @sent_email_user_ids = []
+ @in_product_marketing_email_records = []
end
def execute
+ raise ArgumentError, "Track #{track} not defined" unless TRACKS.key?(track)
+
groups_for_track.each_batch do |groups|
groups.each do |group|
send_email_for_group(group)
@@ -36,16 +38,23 @@ module Namespaces
private
- attr_reader :track, :interval, :sent_email_user_ids
+ attr_reader :track, :interval, :in_product_marketing_email_records
def send_email_for_group(group)
- experiment_enabled_for_group = experiment_enabled_for_group?(group)
- experiment_add_group(group, experiment_enabled_for_group)
- return unless experiment_enabled_for_group
+ if Gitlab.com?
+ experiment_enabled_for_group = experiment_enabled_for_group?(group)
+ experiment_add_group(group, experiment_enabled_for_group)
+ return unless experiment_enabled_for_group
+ end
users_for_group(group).each do |user|
- send_email(user, group) if can_perform_action?(user, group)
+ if can_perform_action?(user, group)
+ send_email(user, group)
+ track_sent_email(user, track, series)
+ end
end
+
+ save_tracked_emails!
end
def experiment_enabled_for_group?(group)
@@ -70,8 +79,9 @@ module Namespaces
end
def users_for_group(group)
- group.users.where(email_opted_in: true)
- .where.not(id: sent_email_user_ids)
+ group.users
+ .where(email_opted_in: true)
+ .merge(Users::InProductMarketingEmail.without_track_and_series(track, series))
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -85,14 +95,11 @@ module Namespaces
user.can?(:start_trial, group)
when :team
user.can?(:admin_group_member, group)
- else
- raise NotImplementedError, "No ability defined for track #{track}"
end
end
def send_email(user, group)
NotificationService.new.in_product_marketing(user.id, group.id, track, series)
- sent_email_user_ids << user.id
end
def completed_actions
@@ -101,7 +108,8 @@ module Namespaces
end
def range
- (interval + 1).days.ago.beginning_of_day..(interval + 1).days.ago.end_of_day
+ date = (interval + 1).days.ago
+ date.beginning_of_day..date.end_of_day
end
def incomplete_action
@@ -111,5 +119,20 @@ module Namespaces
def series
INTERVAL_DAYS.index(interval)
end
+
+ def save_tracked_emails!
+ Users::InProductMarketingEmail.bulk_insert!(in_product_marketing_email_records)
+ @in_product_marketing_email_records = []
+ end
+
+ def track_sent_email(user, track, series)
+ in_product_marketing_email_records << Users::InProductMarketingEmail.new(
+ user: user,
+ track: track,
+ series: series,
+ created_at: Time.zone.now,
+ updated_at: Time.zone.now
+ )
+ end
end
end
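
The tracking change buffers one Users::InProductMarketingEmail row per delivered email and flushes them per group; a condensed sketch of that pattern (delivered_users is a hypothetical collection, and bulk_insert! is assumed to come from the BulkInsertSafe concern on the model):

    now = Time.zone.now
    records = delivered_users.map do |user|
      Users::InProductMarketingEmail.new(
        user: user, track: track, series: series,
        created_at: now, updated_at: now
      )
    end
    Users::InProductMarketingEmail.bulk_insert!(records) # one INSERT per group batch
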
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index 488c847dcbb..e63099a0820 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -75,16 +75,9 @@ module Notes
increment_usage_counter(note)
track_event(note, current_user)
- if Feature.enabled?(:notes_create_service_tracking, project)
- Gitlab::Tracking.event('Notes::CreateService', 'execute', **tracking_data_for(note))
- end
-
if note.for_merge_request? && note.diff_note? && note.start_of_discussion?
Discussions::CaptureDiffNotePositionService.new(note.noteable, note.diff_file&.paths).execute(note.discussion)
end
-
- track_note_creation_usage_for_issues(note) if note.for_issue?
- track_note_creation_usage_for_merge_requests(note) if note.for_merge_request?
end
def do_commands(note, update_params, message, only_commands)
@@ -111,6 +104,16 @@ module Notes
}
end
+ def track_event(note, user)
+ track_note_creation_usage_for_issues(note) if note.for_issue?
+ track_note_creation_usage_for_merge_requests(note) if note.for_merge_request?
+ track_usage_event(:incident_management_incident_comment, user.id) if note.for_issue? && note.noteable.incident?
+
+ if Feature.enabled?(:notes_create_service_tracking, project)
+ Gitlab::Tracking.event('Notes::CreateService', 'execute', **tracking_data_for(note))
+ end
+ end
+
def tracking_data_for(note)
label = Gitlab.ee? && note.author == User.visual_review_bot ? 'anonymous_visual_review_note' : 'note'
@@ -120,12 +123,6 @@ module Notes
}
end
- def track_event(note, user)
- return unless note.noteable.is_a?(Issue) && note.noteable.incident?
-
- track_usage_event(:incident_management_incident_comment, user.id)
- end
-
def track_note_creation_usage_for_issues(note)
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_added_action(author: note.author)
end
@@ -135,3 +132,5 @@ module Notes
end
end
end
+
+Notes::CreateService.prepend_if_ee('EE::Notes::CreateService')
diff --git a/app/services/notification_recipients/builder/base.rb b/app/services/notification_recipients/builder/base.rb
index 81e6750a4ca..b41b969ad7c 100644
--- a/app/services/notification_recipients/builder/base.rb
+++ b/app/services/notification_recipients/builder/base.rb
@@ -100,6 +100,8 @@ module NotificationRecipients
# Get project/group users with CUSTOM notification level
# rubocop: disable CodeReuse/ActiveRecord
def add_custom_notifications
+ return new_add_custom_notifications if Feature.enabled?(:notification_setting_recipient_refactor, project)
+
user_ids = []
# Users with a notification setting on group or project
@@ -115,6 +117,48 @@ module NotificationRecipients
add_recipients(user_scope.where(id: user_ids), :custom, nil)
end
+
+ def new_add_custom_notifications
+ notification_by_sources = related_notification_settings_sources(:custom)
+
+ return if notification_by_sources.blank?
+
+ user_ids = NotificationSetting.from_union(notification_by_sources).select(:user_id)
+
+ add_recipients(user_scope.where(id: user_ids), :custom, nil)
+ end
+
+ def related_notification_settings_sources(level)
+ sources = [project, group].compact
+
+ sources.map do |source|
+ source
+ .notification_settings
+ .where(source_or_global_setting_by_level_query(level)).select(:user_id)
+ end
+ end
+
+ def global_setting_by_level_query(level)
+ table = NotificationSetting.arel_table
+ aliased_table = table.alias
+
+ table
+ .project('true')
+ .from(aliased_table)
+ .where(
+ aliased_table[:user_id].eq(table[:user_id])
+ .and(aliased_table[:source_id].eq(nil))
+ .and(aliased_table[:source_type].eq(nil))
+ .and(aliased_table[:level].eq(level))
+ ).exists
+ end
+
+ def source_or_global_setting_by_level_query(level)
+ table = NotificationSetting.arel_table
+ table.grouping(
+ table[:level].eq(:global).and(global_setting_by_level_query(level))
+ ).or(table[:level].eq(level))
+ end
# rubocop: enable CodeReuse/ActiveRecord
def add_project_watchers
diff --git a/app/services/notification_recipients/builder/request_review.rb b/app/services/notification_recipients/builder/request_review.rb
index 911d89c6a8e..8dd0c5d1587 100644
--- a/app/services/notification_recipients/builder/request_review.rb
+++ b/app/services/notification_recipients/builder/request_review.rb
@@ -6,7 +6,9 @@ module NotificationRecipients
attr_reader :merge_request, :current_user, :reviewer
def initialize(merge_request, current_user, reviewer)
- @merge_request, @current_user, @reviewer = merge_request, current_user, reviewer
+ @merge_request = merge_request
+ @current_user = current_user
+ @reviewer = reviewer
end
def target
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index fc2eb1dc4e4..6f1f3309ad9 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -79,6 +79,20 @@ class NotificationService
mailer.access_token_expired_email(user).deliver_later
end
+ # Notify the user when at least one of their SSH keys has expired today
+ def ssh_key_expired(user, fingerprints)
+ return unless user.can?(:receive_notifications)
+
+ mailer.ssh_key_expired_email(user, fingerprints).deliver_later
+ end
+
+ # Notify the user when at least one of their SSH keys is expiring soon
+ def ssh_key_expiring_soon(user, fingerprints)
+ return unless user.can?(:receive_notifications)
+
+ mailer.ssh_key_expiring_soon_email(user, fingerprints).deliver_later
+ end
+
# Notify a user when a previously unknown IP or device is used to
# sign in to their account
def unknown_sign_in(user, ip, time)
@@ -857,7 +871,7 @@ class NotificationService
end
def warn_skipping_notifications(user, object)
- Gitlab::AppLogger.warn(message: "Skipping sending notifications", user: user.id, klass: object.class, object_id: object.id)
+ Gitlab::AppLogger.warn(message: "Skipping sending notifications", user: user.id, klass: object.class.to_s, object_id: object.id)
end
end
diff --git a/app/services/packages/composer/composer_json_service.rb b/app/services/packages/composer/composer_json_service.rb
index 98aabd84d3d..f346b654c59 100644
--- a/app/services/packages/composer/composer_json_service.rb
+++ b/app/services/packages/composer/composer_json_service.rb
@@ -6,7 +6,8 @@ module Packages
InvalidJson = Class.new(StandardError)
def initialize(project, target)
- @project, @target = project, target
+ @project = project
+ @target = target
end
def execute
diff --git a/app/services/packages/composer/version_parser_service.rb b/app/services/packages/composer/version_parser_service.rb
index 811cac0b3b7..36275d1b680 100644
--- a/app/services/packages/composer/version_parser_service.rb
+++ b/app/services/packages/composer/version_parser_service.rb
@@ -4,7 +4,8 @@ module Packages
module Composer
class VersionParserService
def initialize(tag_name: nil, branch_name: nil)
- @tag_name, @branch_name = tag_name, branch_name
+ @tag_name = tag_name
+ @branch_name = branch_name
end
def execute
diff --git a/app/services/packages/debian/create_distribution_service.rb b/app/services/packages/debian/create_distribution_service.rb
index c6df033e3c1..f947d2e4293 100644
--- a/app/services/packages/debian/create_distribution_service.rb
+++ b/app/services/packages/debian/create_distribution_service.rb
@@ -4,7 +4,8 @@ module Packages
module Debian
class CreateDistributionService
def initialize(container, user, params)
- @container, @params = container, params
+ @container = container
+ @params = params
@params[:creator] = user
@components = params.delete(:components) || ['main']
diff --git a/app/services/packages/debian/extract_changes_metadata_service.rb b/app/services/packages/debian/extract_changes_metadata_service.rb
new file mode 100644
index 00000000000..eb5baa7e53f
--- /dev/null
+++ b/app/services/packages/debian/extract_changes_metadata_service.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+module Packages
+ module Debian
+ class ExtractChangesMetadataService
+ include Gitlab::Utils::StrongMemoize
+
+ ExtractionError = Class.new(StandardError)
+
+ def initialize(package_file)
+ @package_file = package_file
+ @entries = {}
+ end
+
+ def execute
+ {
+ file_type: file_type,
+ architecture: metadata[:architecture],
+ fields: fields,
+ files: files
+ }
+ rescue ActiveModel::ValidationError => e
+ raise ExtractionError.new(e.message)
+ end
+
+ private
+
+ def metadata
+ strong_memoize(:metadata) do
+ ::Packages::Debian::ExtractMetadataService.new(@package_file).execute
+ end
+ end
+
+ def file_type
+ metadata[:file_type]
+ end
+
+ def fields
+ metadata[:fields]
+ end
+
+ def files
+ strong_memoize(:files) do
+ raise ExtractionError.new("is not a changes file") unless file_type == :changes
+ raise ExtractionError.new("Files field is missing") if fields['Files'].blank?
+ raise ExtractionError.new("Checksums-Sha1 field is missing") if fields['Checksums-Sha1'].blank?
+ raise ExtractionError.new("Checksums-Sha256 field is missing") if fields['Checksums-Sha256'].blank?
+
+ init_entries_from_files
+ entries_from_checksums_sha1
+ entries_from_checksums_sha256
+ entries_from_package_files
+
+ @entries
+ end
+ end
+
+ def init_entries_from_files
+ each_lines_for('Files') do |line|
+ md5sum, size, section, priority, filename = line.split
+ entry = FileEntry.new(
+ filename: filename,
+ size: size.to_i,
+ md5sum: md5sum,
+ section: section,
+ priority: priority)
+
+ @entries[filename] = entry
+ end
+ end
+
+ def entries_from_checksums_sha1
+ each_lines_for('Checksums-Sha1') do |line|
+ sha1sum, size, filename = line.split
+ entry = @entries[filename]
+ raise ExtractionError.new("#{filename} is listed in Checksums-Sha1 but not in Files") unless entry
+ raise ExtractionError.new("Size for #{filename} in Files and Checksums-Sha1 differ") unless entry.size == size.to_i
+
+ entry.sha1sum = sha1sum
+ end
+ end
+
+ def entries_from_checksums_sha256
+ each_lines_for('Checksums-Sha256') do |line|
+ sha256sum, size, filename = line.split
+ entry = @entries[filename]
+ raise ExtractionError.new("#{filename} is listed in Checksums-Sha256 but not in Files") unless entry
+ raise ExtractionError.new("Size for #{filename} in Files and Checksums-Sha256 differ") unless entry.size == size.to_i
+
+ entry.sha256sum = sha256sum
+ end
+ end
+
+ def each_lines_for(field)
+ fields[field].split("\n").each do |line|
+ next if line.blank?
+
+ yield(line)
+ end
+ end
+
+ def entries_from_package_files
+ @entries.each do |filename, entry|
+ entry.package_file = ::Packages::PackageFileFinder.new(@package_file.package, filename).execute!
+ entry.validate!
+ rescue ActiveRecord::RecordNotFound
+ raise ExtractionError.new("#{filename} is listed in Files but was not uploaded")
+ end
+ end
+ end
+ end
+end
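
The Files/Checksums parsing relies on Debian's whitespace-separated field lines; a self-contained sketch of what a single Files entry yields (the sample values are made up):

    line = " 1fb6e9daa1a056a0d1ef8b784c0b6cc1 671 libs optional sample_1.2.3~alpha2.tar.xz"
    md5sum, size, section, priority, filename = line.split

    md5sum    # => "1fb6e9daa1a056a0d1ef8b784c0b6cc1"
    size.to_i # => 671
    section   # => "libs"
    priority  # => "optional"
    filename  # => "sample_1.2.3~alpha2.tar.xz"
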
diff --git a/app/services/packages/debian/extract_metadata_service.rb b/app/services/packages/debian/extract_metadata_service.rb
index fd5832bc0ba..015f472c7c9 100644
--- a/app/services/packages/debian/extract_metadata_service.rb
+++ b/app/services/packages/debian/extract_metadata_service.rb
@@ -58,21 +58,22 @@ module Packages
file_type == :dsc || file_type == :buildinfo || file_type == :changes
end
- def extracted_fields
- if file_type_debian?
- package_file.file.use_file do |file_path|
- ::Packages::Debian::ExtractDebMetadataService.new(file_path).execute
- end
- elsif file_type_meta?
- package_file.file.use_file do |file_path|
- ::Packages::Debian::ParseDebian822Service.new(File.read(file_path)).execute.each_value.first
+ def fields
+ strong_memoize(:fields) do
+ if file_type_debian?
+ package_file.file.use_file do |file_path|
+ ::Packages::Debian::ExtractDebMetadataService.new(file_path).execute
+ end
+ elsif file_type_meta?
+ package_file.file.use_file do |file_path|
+ ::Packages::Debian::ParseDebian822Service.new(File.read(file_path)).execute.each_value.first
+ end
end
end
end
def extract_metadata
- fields = extracted_fields
- architecture = fields.delete(:Architecture) if file_type_debian?
+ architecture = fields['Architecture'] if file_type_debian?
{
file_type: file_type,
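
The switch from extracted_fields to a memoized fields reader uses Gitlab::Utils::StrongMemoize; a minimal sketch of how it differs from a plain ||= (Example and expensive_parse are illustrative, not the real service):

    class Example
      include Gitlab::Utils::StrongMemoize

      def fields
        # Runs expensive_parse once and caches the result; unlike
        # `@fields ||= expensive_parse`, falsy results are cached too.
        strong_memoize(:fields) do
          expensive_parse
        end
      end
    end
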
diff --git a/app/services/packages/debian/parse_debian822_service.rb b/app/services/packages/debian/parse_debian822_service.rb
index 665929d2324..8be5fdf3b66 100644
--- a/app/services/packages/debian/parse_debian822_service.rb
+++ b/app/services/packages/debian/parse_debian822_service.rb
@@ -26,7 +26,7 @@ module Packages
section[field] += line[1..] unless paragraph_separator?(line)
elsif match = match_section_line(line)
section_name = match[:name] if section_name.nil?
- field = match[:field].to_sym
+ field = match[:field]
raise InvalidDebian822Error, "Duplicate field '#{field}' in section '#{section_name}'" if section.include?(field)
diff --git a/app/services/packages/debian/process_changes_service.rb b/app/services/packages/debian/process_changes_service.rb
new file mode 100644
index 00000000000..881ad2c46f4
--- /dev/null
+++ b/app/services/packages/debian/process_changes_service.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+module Packages
+ module Debian
+ class ProcessChangesService
+ include ExclusiveLeaseGuard
+ include Gitlab::Utils::StrongMemoize
+
+ # used by ExclusiveLeaseGuard
+ DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
+
+ def initialize(package_file, creator)
+ @package_file = package_file
+ @creator = creator
+ end
+
+ def execute
+ try_obtain_lease do
+ # return if the changes file has already been processed
+ break if package_file.debian_file_metadatum&.changes?
+
+ validate!
+
+ package_file.transaction do
+ update_files_metadata
+ update_changes_metadata
+ end
+ end
+ end
+
+ private
+
+ attr_reader :package_file, :creator
+
+ def validate!
+ raise ArgumentError, 'invalid package file' unless package_file.debian_file_metadatum
+ raise ArgumentError, 'invalid package file' unless package_file.debian_file_metadatum.unknown?
+ raise ArgumentError, 'invalid package file' unless metadata[:file_type] == :changes
+ end
+
+ def update_files_metadata
+ files.each do |filename, entry|
+ entry.package_file.package = package
+
+ file_metadata = ::Packages::Debian::ExtractMetadataService.new(entry.package_file).execute
+
+ entry.package_file.debian_file_metadatum.update!(
+ file_type: file_metadata[:file_type],
+ component: files[filename].component,
+ architecture: file_metadata[:architecture],
+ fields: file_metadata[:fields]
+ )
+ entry.package_file.save!
+ end
+ end
+
+ def update_changes_metadata
+ package_file.update!(package: package)
+ package_file.debian_file_metadatum.update!(
+ file_type: metadata[:file_type],
+ fields: metadata[:fields]
+ )
+ end
+
+ def metadata
+ strong_memoize(:metadata) do
+ ::Packages::Debian::ExtractChangesMetadataService.new(package_file).execute
+ end
+ end
+
+ def files
+ metadata[:files]
+ end
+
+ def project
+ package_file.package.project
+ end
+
+ def package
+ strong_memoize(:package) do
+ params = {
+ 'name': metadata[:fields]['Source'],
+ 'version': metadata[:fields]['Version'],
+ 'distribution_name': metadata[:fields]['Distribution']
+ }
+ response = Packages::Debian::FindOrCreatePackageService.new(project, creator, params).execute
+ response.payload[:package]
+ end
+ end
+
+ # used by ExclusiveLeaseGuard
+ def lease_key
+ "packages:debian:process_changes_service:package_file:#{package_file.id}"
+ end
+
+ # used by ExclusiveLeaseGuard
+ def lease_timeout
+ DEFAULT_LEASE_TIMEOUT
+ end
+ end
+ end
+end
diff --git a/app/services/packages/debian/update_distribution_service.rb b/app/services/packages/debian/update_distribution_service.rb
index 5bb59b854e9..95face912d5 100644
--- a/app/services/packages/debian/update_distribution_service.rb
+++ b/app/services/packages/debian/update_distribution_service.rb
@@ -4,7 +4,8 @@ module Packages
module Debian
class UpdateDistributionService
def initialize(distribution, params)
- @distribution, @params = distribution, params
+ @distribution = distribution
+ @params = params
@components = params.delete(:components)
diff --git a/app/services/packages/go/create_package_service.rb b/app/services/packages/go/create_package_service.rb
new file mode 100644
index 00000000000..4e8b8ef8d6b
--- /dev/null
+++ b/app/services/packages/go/create_package_service.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+module Packages
+ module Go
+ class CreatePackageService < BaseService
+ GoZipSizeError = Class.new(StandardError)
+
+ attr_accessor :version
+
+ def initialize(project, user = nil, version:)
+ super(project, user)
+
+ @version = version
+ end
+
+ def execute
+ # check for an existing package to avoid SQL errors due to the index
+ package = ::Packages::Go::PackageFinder.new(version.mod.project, version.mod.name, version.name).execute
+ return package if package
+
+ # this can be expensive, so do it outside the transaction
+ files = {}
+ files[:mod] = prepare_file(version, :mod, version.gomod)
+ files[:zip] = prepare_file(version, :zip, version.archive.string)
+
+ ActiveRecord::Base.transaction do
+ # create new package and files
+ package = create_package
+ files.each { |type, (file, digests)| create_file(package, type, file, digests) }
+ package
+ end
+ end
+
+ private
+
+ def prepare_file(version, type, content)
+ file = CarrierWaveStringFile.new(content)
+ raise GoZipSizeError, "#{version.mod.name}@#{version.name}.#{type} exceeds size limit" if file.size > project.actual_limits.golang_max_file_size
+
+ digests = {
+ md5: Digest::MD5.hexdigest(content),
+ sha1: Digest::SHA1.hexdigest(content),
+ sha256: Digest::SHA256.hexdigest(content)
+ }
+
+ [file, digests]
+ end
+
+ def create_package
+ version.mod.project.packages.create!(
+ name: version.mod.name,
+ version: version.name,
+ package_type: :golang,
+ created_at: version.commit.committed_date
+ )
+ end
+
+ def create_file(package, type, file, digests)
+ CreatePackageFileService.new(package,
+ file: file,
+ size: file.size,
+ file_name: "#{version.name}.#{type}",
+ file_md5: digests[:md5],
+ file_sha1: digests[:sha1],
+ file_sha256: digests[:sha256]
+ ).execute
+ end
+ end
+ end
+end
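
prepare_file computes three digests over the raw file content before anything touches the database; a standalone sketch of that step (the content string is illustrative):

    require 'digest'

    content = "module example.com/project\n"
    digests = {
      md5:    Digest::MD5.hexdigest(content),
      sha1:   Digest::SHA1.hexdigest(content),
      sha256: Digest::SHA256.hexdigest(content)
    }
    # Each value is a hex string, e.g. digests[:sha256].length # => 64
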
diff --git a/app/services/packages/go/sync_packages_service.rb b/app/services/packages/go/sync_packages_service.rb
new file mode 100644
index 00000000000..c35d3600388
--- /dev/null
+++ b/app/services/packages/go/sync_packages_service.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Packages
+ module Go
+ class SyncPackagesService < BaseService
+ include Gitlab::Golang
+
+ def initialize(project, ref, path = '')
+ super(project)
+
+ @ref = ref
+ @path = path
+
+ raise ArgumentError, 'project is required' unless project
+ raise ArgumentError, 'ref is required' unless ref
+ raise ArgumentError, "ref #{ref} not found" unless project.repository.find_tag(ref) || project.repository.find_branch(ref)
+ end
+
+ def execute_async
+ Packages::Go::SyncPackagesWorker.perform_async(project.id, @ref, @path)
+ end
+ end
+ end
+end
diff --git a/app/services/packages/maven/find_or_create_package_service.rb b/app/services/packages/maven/find_or_create_package_service.rb
index 401e52f7e51..a6cffa3038c 100644
--- a/app/services/packages/maven/find_or_create_package_service.rb
+++ b/app/services/packages/maven/find_or_create_package_service.rb
@@ -33,7 +33,8 @@ module Packages
#
# The first upload has to create the proper package (the one with the version set).
if params[:file_name] == Packages::Maven::Metadata.filename && !params[:path]&.ends_with?(SNAPSHOT_TERM)
- package_name, version = params[:path], nil
+ package_name = params[:path]
+ version = nil
else
package_name, _, version = params[:path].rpartition('/')
end
diff --git a/app/services/packages/maven/metadata/sync_service.rb b/app/services/packages/maven/metadata/sync_service.rb
index a6534aa706d..48e157d4930 100644
--- a/app/services/packages/maven/metadata/sync_service.rb
+++ b/app/services/packages/maven/metadata/sync_service.rb
@@ -13,16 +13,20 @@ module Packages
def execute
return error('Blank package name') unless package_name
return error('Not allowed') unless Ability.allowed?(current_user, :destroy_package, project)
- return error('Non existing versionless package') unless versionless_package_for_versions
- return error('Non existing metadata file for versions') unless metadata_package_file_for_versions
+ result = success('Non existing versionless package(s). Nothing to do.')
+
+ # update the versionless package for plugins, if it exists
if metadata_package_file_for_plugins
result = update_plugins_xml
return result if result.error?
end
- update_versions_xml
+ # update the versionless package for versions, if it exists
+ return update_versions_xml if metadata_package_file_for_versions
+
+ result
end
private
@@ -79,6 +83,9 @@ module Packages
def metadata_package_file_for_plugins
strong_memoize(:metadata_package_file_for_plugins) do
+ pkg_name = package_name_for_plugins
+ next unless pkg_name
+
metadata_package_file_for(versionless_package_named(package_name_for_plugins))
end
end
@@ -106,6 +113,8 @@ module Packages
end
def package_name_for_plugins
+ return unless versionless_package_for_versions
+
group = versionless_package_for_versions.maven_metadatum.app_group
group.tr('.', '/')
end
diff --git a/app/services/packages/nuget/create_dependency_service.rb b/app/services/packages/nuget/create_dependency_service.rb
index 19143fe3778..62ab485c0fc 100644
--- a/app/services/packages/nuget/create_dependency_service.rb
+++ b/app/services/packages/nuget/create_dependency_service.rb
@@ -54,9 +54,9 @@ module Packages
end
def dependencies_for_create_dependency_service
- names_and_versions = @dependencies.map do |dependency|
+ names_and_versions = @dependencies.to_h do |dependency|
[dependency[:name], version_or_empty_string(dependency[:version])]
- end.to_h
+ end
{ 'dependencies' => names_and_versions }
end
diff --git a/app/services/packages/rubygems/create_dependencies_service.rb b/app/services/packages/rubygems/create_dependencies_service.rb
new file mode 100644
index 00000000000..dea429148cf
--- /dev/null
+++ b/app/services/packages/rubygems/create_dependencies_service.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Packages
+ module Rubygems
+ class CreateDependenciesService
+ include BulkInsertSafe
+
+ def initialize(package, gemspec)
+ @package = package
+ @gemspec = gemspec
+ end
+
+ def execute
+ set_dependencies
+ end
+
+ private
+
+ attr_reader :package, :gemspec
+
+ def set_dependencies
+ Packages::Dependency.transaction do
+ dependency_type_rows = gemspec.dependencies.map do |dependency|
+ dependency = Packages::Dependency.safe_find_or_create_by!(
+ name: dependency.name,
+ version_pattern: dependency.requirement.to_s
+ )
+
+ {
+ dependency_id: dependency.id,
+ package_id: package.id,
+ dependency_type: :dependencies
+ }
+ end
+
+ package.dependency_links.upsert_all(
+ dependency_type_rows,
+ unique_by: %i[package_id dependency_id dependency_type]
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rubygems/create_gemspec_service.rb b/app/services/packages/rubygems/create_gemspec_service.rb
new file mode 100644
index 00000000000..22533264480
--- /dev/null
+++ b/app/services/packages/rubygems/create_gemspec_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Packages
+ module Rubygems
+ class CreateGemspecService
+ def initialize(package, gemspec)
+ @package = package
+ @gemspec = gemspec
+ end
+
+ def execute
+ write_gemspec_to_file
+ end
+
+ private
+
+ attr_reader :package, :gemspec
+
+ def write_gemspec_to_file
+ file = Tempfile.new
+
+ begin
+ content = gemspec.to_ruby
+ file.write(content)
+ file.flush
+
+ package.package_files.create!(
+ file: file,
+ size: file.size,
+ file_name: "#{gemspec.name}.gemspec",
+ file_sha1: Digest::SHA1.hexdigest(content),
+ file_md5: Digest::MD5.hexdigest(content),
+ file_sha256: Digest::SHA256.hexdigest(content)
+ )
+ ensure
+ file.close
+ file.unlink
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rubygems/metadata_extraction_service.rb b/app/services/packages/rubygems/metadata_extraction_service.rb
new file mode 100644
index 00000000000..b3bac1854d7
--- /dev/null
+++ b/app/services/packages/rubygems/metadata_extraction_service.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+module Packages
+ module Rubygems
+ class MetadataExtractionService
+ def initialize(package, gemspec)
+ @package = package
+ @gemspec = gemspec
+ end
+
+ def execute
+ write_metadata
+ end
+
+ private
+
+ attr_reader :package, :gemspec
+
+ # rubocop:disable Metrics/AbcSize
+ # rubocop:disable Metrics/PerceivedComplexity
+ # rubocop:disable Metrics/CyclomaticComplexity
+ def write_metadata
+ metadatum.update!(
+ authors: gemspec&.authors,
+ files: gemspec&.files&.to_json,
+ summary: gemspec&.summary,
+ description: gemspec&.description,
+ email: gemspec&.email,
+ homepage: gemspec&.homepage,
+ licenses: gemspec&.licenses&.to_json,
+ metadata: gemspec&.metadata&.to_json,
+ author: gemspec&.author,
+ bindir: gemspec&.bindir,
+ executables: gemspec&.executables&.to_json,
+ extensions: gemspec&.extensions&.to_json,
+ extra_rdoc_files: gemspec&.extra_rdoc_files&.to_json,
+ platform: gemspec&.platform,
+ post_install_message: gemspec&.post_install_message,
+ rdoc_options: gemspec&.rdoc_options&.to_json,
+ require_paths: gemspec&.require_paths&.to_json,
+ required_ruby_version: gemspec&.required_ruby_version&.to_s,
+ required_rubygems_version: gemspec&.required_rubygems_version&.to_s,
+ requirements: gemspec&.requirements&.to_json,
+ rubygems_version: gemspec&.rubygems_version
+ )
+ end
+ # rubocop:enable Metrics/AbcSize
+ # rubocop:enable Metrics/PerceivedComplexity
+ # rubocop:enable Metrics/CyclomaticComplexity
+
+ def metadatum
+ Packages::Rubygems::Metadatum.safe_find_or_create_by!(package: package)
+ end
+ end
+ end
+end
diff --git a/app/services/packages/rubygems/process_gem_service.rb b/app/services/packages/rubygems/process_gem_service.rb
new file mode 100644
index 00000000000..59bf2a1ec28
--- /dev/null
+++ b/app/services/packages/rubygems/process_gem_service.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'rubygems/package'
+
+module Packages
+ module Rubygems
+ class ProcessGemService
+ include Gitlab::Utils::StrongMemoize
+ include ExclusiveLeaseGuard
+
+ ExtractionError = Class.new(StandardError)
+ DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
+
+ def initialize(package_file)
+ @package_file = package_file
+ end
+
+ def execute
+ return success if process_gem
+
+ error('Gem was not processed')
+ end
+
+ private
+
+ attr_reader :package_file
+
+ def process_gem
+ return false unless package_file
+
+ try_obtain_lease do
+ package.transaction do
+ rename_package_and_set_version
+ rename_package_file
+ ::Packages::Rubygems::MetadataExtractionService.new(package, gemspec).execute
+ ::Packages::Rubygems::CreateGemspecService.new(package, gemspec).execute
+ ::Packages::Rubygems::CreateDependenciesService.new(package, gemspec).execute
+ cleanup_temp_package
+ end
+ end
+
+ true
+ end
+
+ def rename_package_and_set_version
+ package.update!(
+ name: gemspec.name,
+ version: gemspec.version,
+ status: :default
+ )
+ end
+
+ def rename_package_file
+ # Updating file_name updates the path where the file is stored.
+ # We must pass the file again so that CarrierWave can handle the update
+ package_file.update!(
+ file_name: "#{gemspec.name}-#{gemspec.version}.gem",
+ file: package_file.file,
+ package_id: package.id
+ )
+ end
+
+ def cleanup_temp_package
+ temp_package.destroy if package.id != temp_package.id
+ end
+
+ def gemspec
+ strong_memoize(:gemspec) do
+ gem.spec
+ end
+ end
+
+ def success
+ ServiceResponse.success(payload: { package: package })
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def temp_package
+ strong_memoize(:temp_package) do
+ package_file.package
+ end
+ end
+
+ def package
+ strong_memoize(:package) do
+ # if a package with the same name/version already exists, use that package
+ package = temp_package.project
+ .packages
+ .rubygems
+ .with_name(gemspec.name)
+ .with_version(gemspec.version.to_s)
+ .last
+ package || temp_package
+ end
+ end
+
+ def gem
+ # use_file sets an exclusive lease on the file for as long as the
+ # resulting gem object is in use, which means we cannot rename the
+ # package_file while the gem object is still reading it.
+ # We therefore build the gem from a separately loaded AR object so that
+ # `package_file` stays free for its update, hence the re-find here.
+ Packages::PackageFile.find(package_file.id).file.use_file do |file_path|
+ Gem::Package.new(File.open(file_path))
+ end
+ rescue
+ raise ExtractionError.new('Unable to read gem file')
+ end
+
+ # used by ExclusiveLeaseGuard
+ def lease_key
+ "packages:rubygems:process_gem_service:package:#{package.id}"
+ end
+
+ # used by ExclusiveLeaseGuard
+ def lease_timeout
+ DEFAULT_LEASE_TIMEOUT
+ end
+ end
+ end
+end
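
Reading the gemspec out of an uploaded .gem uses the stock RubyGems API, as in #gem above; a standalone sketch (the path is illustrative):

    require 'rubygems/package'

    spec = Gem::Package.new(File.open('/tmp/upload.gem')).spec
    spec.name    # => e.g. "my_gem"
    spec.version # => Gem::Version, used to rename the package and its file
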
diff --git a/app/services/pages/delete_service.rb b/app/services/pages/delete_service.rb
index 3dc9254718e..c4009dcc4ec 100644
--- a/app/services/pages/delete_service.rb
+++ b/app/services/pages/delete_service.rb
@@ -9,7 +9,7 @@ module Pages
DestroyPagesDeploymentsWorker.perform_async(project.id)
# TODO: remove this call https://gitlab.com/gitlab-org/gitlab/-/issues/320775
- PagesRemoveWorker.perform_async(project.id) if Feature.enabled?(:pages_update_legacy_storage, default_enabled: true)
+ PagesRemoveWorker.perform_async(project.id) if ::Settings.pages.local_store.enabled
end
end
end
diff --git a/app/services/pages/migrate_from_legacy_storage_service.rb b/app/services/pages/migrate_from_legacy_storage_service.rb
index 9b36b3f11b4..b6aa08bba01 100644
--- a/app/services/pages/migrate_from_legacy_storage_service.rb
+++ b/app/services/pages/migrate_from_legacy_storage_service.rb
@@ -2,36 +2,45 @@
module Pages
class MigrateFromLegacyStorageService
- def initialize(logger, migration_threads:, batch_size:, ignore_invalid_entries:)
+ def initialize(logger, ignore_invalid_entries:, mark_projects_as_not_deployed:)
@logger = logger
- @migration_threads = migration_threads
- @batch_size = batch_size
@ignore_invalid_entries = ignore_invalid_entries
+ @mark_projects_as_not_deployed = mark_projects_as_not_deployed
@migrated = 0
@errored = 0
@counters_lock = Mutex.new
end
- def execute
+ def execute_with_threads(threads:, batch_size:)
@queue = SizedQueue.new(1)
- threads = start_migration_threads
+ migration_threads = start_migration_threads(threads)
- ProjectPagesMetadatum.only_on_legacy_storage.each_batch(of: @batch_size) do |batch|
+ ProjectPagesMetadatum.only_on_legacy_storage.each_batch(of: batch_size) do |batch|
@queue.push(batch)
end
@queue.close
- @logger.info("Waiting for threads to finish...")
- threads.each(&:join)
+ @logger.info(message: "Pages legacy storage migration: Waiting for threads to finish...")
+ migration_threads.each(&:join)
{ migrated: @migrated, errored: @errored }
end
- def start_migration_threads
- Array.new(@migration_threads) do
+ def execute_for_batch(project_ids)
+ batch = ProjectPagesMetadatum.only_on_legacy_storage.where(project_id: project_ids) # rubocop: disable CodeReuse/ActiveRecord
+
+ process_batch(batch)
+
+ { migrated: @migrated, errored: @errored }
+ end
+
+ private
+
+ def start_migration_threads(count)
+ Array.new(count) do
Thread.new do
while batch = @queue.pop
Rails.application.executor.wrap do
@@ -49,30 +58,32 @@ module Pages
migrate_project(project)
end
- @logger.info("#{@migrated} projects are migrated successfully, #{@errored} projects failed to be migrated")
+ @logger.info(message: "Pages legacy storage migration: batch processed", migrated: @migrated, errored: @errored)
rescue => e
# This method should never raise an exception, otherwise all threads might be killed
# and this would result in queue starvation (and deadlock)
Gitlab::ErrorTracking.track_exception(e)
- @logger.error("failed processing a batch: #{e.message}")
+ @logger.error(message: "Pages legacy storage migration: failed processing a batch: #{e.message}")
end
def migrate_project(project)
result = nil
time = Benchmark.realtime do
- result = ::Pages::MigrateLegacyStorageToDeploymentService.new(project, ignore_invalid_entries: @ignore_invalid_entries).execute
+ result = ::Pages::MigrateLegacyStorageToDeploymentService.new(project,
+ ignore_invalid_entries: @ignore_invalid_entries,
+ mark_projects_as_not_deployed: @mark_projects_as_not_deployed).execute
end
if result[:status] == :success
- @logger.info("project_id: #{project.id} #{project.pages_path} has been migrated in #{time.round(2)} seconds")
+ @logger.info(message: "Pages legacy storage migration: project migrated: #{result[:message]}", project_id: project.id, pages_path: project.pages_path, duration: time.round(2))
@counters_lock.synchronize { @migrated += 1 }
else
- @logger.error("project_id: #{project.id} #{project.pages_path} failed to be migrated in #{time.round(2)} seconds: #{result[:message]}")
+ @logger.error(message: "Pages legacy storage migration: project failed to be migrated: #{result[:message]}", project_id: project.id, pages_path: project.pages_path, duration: time.round(2))
@counters_lock.synchronize { @errored += 1 }
end
rescue => e
@counters_lock.synchronize { @errored += 1 }
- @logger.error("project_id: #{project&.id} #{project&.pages_path} failed to be migrated: #{e.message}")
+ @logger.error(message: "Pages legacy storage migration: project failed to be migrated: #{result[:message]}", project_id: project&.id, pages_path: project&.pages_path)
Gitlab::ErrorTracking.track_exception(e, project_id: project&.id)
end
end
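
execute_with_threads is a bounded producer/consumer over SizedQueue; a reduced, runnable sketch of the same shape (the per-item work is a stand-in for process_batch):

    queue = SizedQueue.new(1) # producer blocks until a consumer takes the batch

    workers = Array.new(3) do
      Thread.new do
        while (batch = queue.pop) # pop returns nil once the queue is closed and drained
          batch.each { |item| puts "processing #{item}" } # stand-in for process_batch
        end
      end
    end

    [[1, 2], [3, 4], [5, 6]].each { |batch| queue.push(batch) }
    queue.close
    workers.each(&:join)
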
diff --git a/app/services/pages/migrate_legacy_storage_to_deployment_service.rb b/app/services/pages/migrate_legacy_storage_to_deployment_service.rb
index 63410b9fe4a..95c7107eb62 100644
--- a/app/services/pages/migrate_legacy_storage_to_deployment_service.rb
+++ b/app/services/pages/migrate_legacy_storage_to_deployment_service.rb
@@ -9,9 +9,10 @@ module Pages
attr_reader :project
- def initialize(project, ignore_invalid_entries: false)
+ def initialize(project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false)
@project = project
@ignore_invalid_entries = ignore_invalid_entries
+ @mark_projects_as_not_deployed = mark_projects_as_not_deployed
end
def execute
@@ -30,16 +31,20 @@ module Pages
zip_result = ::Pages::ZipDirectoryService.new(project.pages_path, ignore_invalid_entries: @ignore_invalid_entries).execute
if zip_result[:status] == :error
- if !project.pages_metadatum&.reload&.pages_deployment &&
- Feature.enabled?(:pages_migration_mark_as_not_deployed, project)
- project.mark_pages_as_not_deployed
- end
-
return error("Can't create zip archive: #{zip_result[:message]}")
end
archive_path = zip_result[:archive_path]
+ unless archive_path
+ return error("Archive not created. Missing public directory in #{@project.pages_path}") unless @mark_projects_as_not_deployed
+
+ project.set_first_pages_deployment!(nil)
+
+ return success(
+ message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed")
+ end
+
deployment = nil
File.open(archive_path) do |file|
deployment = project.pages_deployments.create!(
diff --git a/app/services/pages/zip_directory_service.rb b/app/services/pages/zip_directory_service.rb
index ae08d40ee37..6cb79452e1b 100644
--- a/app/services/pages/zip_directory_service.rb
+++ b/app/services/pages/zip_directory_service.rb
@@ -18,9 +18,7 @@ module Pages
end
def execute
- unless resolve_public_dir
- return error("Can not find valid public dir in #{@input_dir}")
- end
+ return success unless resolve_public_dir
output_file = File.join(real_dir, "@migrated.zip") # '@' to avoid any name collision with groups or projects
diff --git a/app/services/pod_logs/kubernetes_service.rb b/app/services/pod_logs/kubernetes_service.rb
index 03b84f98973..28b1a179635 100644
--- a/app/services/pod_logs/kubernetes_service.rb
+++ b/app/services/pod_logs/kubernetes_service.rb
@@ -2,7 +2,7 @@
module PodLogs
class KubernetesService < PodLogs::BaseService
- LOGS_LIMIT = 500.freeze
+ LOGS_LIMIT = 500
REPLACEMENT_CHAR = "\u{FFFD}"
EncodingHelperError = Class.new(StandardError)
diff --git a/app/services/post_receive_service.rb b/app/services/post_receive_service.rb
index 84d9db5435b..3dc8fd8929a 100644
--- a/app/services/post_receive_service.rb
+++ b/app/services/post_receive_service.rb
@@ -48,7 +48,7 @@ class PostReceiveService
end
def process_mr_push_options(push_options, changes)
- Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/61359')
+ Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/28494')
return unless repository
unless repository.repo_type.project?
diff --git a/app/services/projects/alerting/notify_service.rb b/app/services/projects/alerting/notify_service.rb
index 2ba64b73699..a5ee7173bdf 100644
--- a/app/services/projects/alerting/notify_service.rb
+++ b/app/services/projects/alerting/notify_service.rb
@@ -36,7 +36,7 @@ module Projects
override :alert_source
def alert_source
- alert.monitoring_tool || integration&.name || 'Generic Alert Endpoint'
+ super || integration&.name || 'Generic Alert Endpoint'
end
def active_integration?
diff --git a/app/services/projects/branches_by_mode_service.rb b/app/services/projects/branches_by_mode_service.rb
index fb66bfa073b..dbdcef066f4 100644
--- a/app/services/projects/branches_by_mode_service.rb
+++ b/app/services/projects/branches_by_mode_service.rb
@@ -71,7 +71,8 @@ class Projects::BranchesByModeService
# And increase it whenever we go to the next page
previous_offset = params[:offset].to_i
- previous_path, next_path = nil, nil
+ previous_path = nil
+ next_path = nil
return [branches, previous_path, next_path] if branches.blank?
diff --git a/app/services/projects/create_from_template_service.rb b/app/services/projects/create_from_template_service.rb
index 45b52a1861c..3c66ff709c9 100644
--- a/app/services/projects/create_from_template_service.rb
+++ b/app/services/projects/create_from_template_service.rb
@@ -7,7 +7,8 @@ module Projects
attr_reader :template_name
def initialize(user, params)
- @current_user, @params = user, params.to_h.dup
+ @current_user = user
+ @params = params.to_h.dup
@template_name = @params.delete(:template_name).presence
end
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index e3b1fd5f4c0..5fb0bda912e 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -5,11 +5,12 @@ module Projects
include ValidatesClassificationLabel
def initialize(user, params)
- @current_user, @params = user, params.dup
- @skip_wiki = @params.delete(:skip_wiki)
+ @current_user = user
+ @params = params.dup
+ @skip_wiki = @params.delete(:skip_wiki)
@initialize_with_readme = Gitlab::Utils.to_boolean(@params.delete(:initialize_with_readme))
- @import_data = @params.delete(:import_data)
- @relations_block = @params.delete(:relations_block)
+ @import_data = @params.delete(:import_data)
+ @relations_block = @params.delete(:relations_block)
end
def execute
@@ -110,7 +111,12 @@ module Projects
setup_authorizations
current_user.invalidate_personal_projects_count
- create_prometheus_service
+
+ if Feature.enabled?(:projects_post_creation_worker, current_user, default_enabled: :yaml)
+ Projects::PostCreationWorker.perform_async(@project.id)
+ else
+ create_prometheus_service
+ end
create_readme if @initialize_with_readme
end
@@ -193,6 +199,7 @@ module Projects
@project
end
+ # Deprecated: https://gitlab.com/gitlab-org/gitlab/-/issues/326665
def create_prometheus_service
service = @project.find_or_initialize_service(::PrometheusService.to_param)
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index 6840c395a76..4ba48f74273 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -27,7 +27,9 @@ module Projects
# Git data (e.g. a list of branch names).
flush_caches(project)
- ::Ci::AbortProjectPipelinesService.new.execute(project)
+ if Feature.enabled?(:abort_deleted_project_pipelines, default_enabled: :yaml)
+ ::Ci::AbortPipelinesService.new.execute(project.all_pipelines, :project_deleted)
+ end
Projects::UnlinkForkService.new(project, current_user).execute
diff --git a/app/services/projects/download_service.rb b/app/services/projects/download_service.rb
index 9810db84605..72cb3997045 100644
--- a/app/services/projects/download_service.rb
+++ b/app/services/projects/download_service.rb
@@ -7,7 +7,8 @@ module Projects
].freeze
def initialize(project, url)
- @project, @url = project, url
+ @project = project
+ @url = url
end
def execute
diff --git a/app/services/projects/gitlab_projects_import_service.rb b/app/services/projects/gitlab_projects_import_service.rb
index 27cce15f97d..38f0e2f7c1a 100644
--- a/app/services/projects/gitlab_projects_import_service.rb
+++ b/app/services/projects/gitlab_projects_import_service.rb
@@ -11,7 +11,9 @@ module Projects
attr_reader :current_user, :params
def initialize(user, import_params, override_params = nil)
- @current_user, @params, @override_params = user, import_params.dup, override_params
+ @current_user = user
+ @params = import_params.dup
+ @override_params = override_params
end
def execute
diff --git a/app/services/projects/update_pages_configuration_service.rb b/app/services/projects/update_pages_configuration_service.rb
index 01539d58545..b63903c6c61 100644
--- a/app/services/projects/update_pages_configuration_service.rb
+++ b/app/services/projects/update_pages_configuration_service.rb
@@ -11,7 +11,7 @@ module Projects
end
def execute
- return success unless Feature.enabled?(:pages_update_legacy_storage, default_enabled: true)
+ return success unless ::Settings.pages.local_store.enabled
# If the pages were never deployed, we can't write out the config, as the
# directory would not exist.
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index 2b59fdd539d..6fa42b293c5 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -23,7 +23,8 @@ module Projects
attr_reader :build
def initialize(project, build)
- @project, @build = project, build
+ @project = project
+ @build = build
end
def execute
@@ -31,9 +32,9 @@ module Projects
# Create a status that notifies about the Pages deployment
@status = create_status
+ @status.update_older_statuses_retried! if Feature.enabled?(:ci_fix_commit_status_retried, project, default_enabled: :yaml)
@status.enqueue!
@status.run!
- @status.update_older_statuses_retried! if Feature.enabled?(:ci_fix_commit_status_retried, project, default_enabled: :yaml)
raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
raise InvalidStateError, 'build SHA is outdated for this ref' unless latest?
@@ -83,7 +84,9 @@ module Projects
def deploy_to_legacy_storage(artifacts_path)
# A path used by one project today can later be used by another,
# so we can't really scope this feature flag by project or group.
- return unless Feature.enabled?(:pages_update_legacy_storage, default_enabled: true)
+ return unless ::Settings.pages.local_store.enabled
+
+ return if Feature.enabled?(:skip_pages_deploy_to_legacy_storage, project, default_enabled: :yaml)
# Create a temporary directory in which we will extract the artifacts
make_secure_tmp_dir(tmp_path) do |tmp_path|
@@ -250,13 +253,17 @@ module Projects
def make_secure_tmp_dir(tmp_path)
FileUtils.mkdir_p(tmp_path)
- path = Dir.mktmpdir(nil, tmp_path)
+ path = Dir.mktmpdir(tmp_dir_prefix, tmp_path)
begin
yield(path)
ensure
FileUtils.remove_entry_secure(path)
end
end
+
+ def tmp_dir_prefix
+ "project-#{project.id}-build-#{build.id}-"
+ end
end
end
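As an illustrative sketch (not part of this diff): Dir.mktmpdir appends its own date/pid/random suffix to the prefix, so with hypothetical IDs the extraction directory becomes easy to attribute to a project and build if it is ever left behind:

  # Hypothetical IDs, shown only to illustrate the naming scheme.
  Dir.mktmpdir("project-42-build-1337-", tmp_path)
  # => ".../project-42-build-1337-20210420-12345-abc123" (suffix generated by Ruby's tmpdir)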
diff --git a/app/services/projects/update_remote_mirror_service.rb b/app/services/projects/update_remote_mirror_service.rb
index 6115db54829..8832a1bc027 100644
--- a/app/services/projects/update_remote_mirror_service.rb
+++ b/app/services/projects/update_remote_mirror_service.rb
@@ -9,8 +9,10 @@ module Projects
def execute(remote_mirror, tries)
return success unless remote_mirror.enabled?
+ # Blocked URLs are a hard failure; there is no need to attempt a retry
if Gitlab::UrlBlocker.blocked_url?(normalized_url(remote_mirror.url))
- return error("The remote mirror URL is invalid.")
+ hard_retry_or_fail(remote_mirror, _('The remote mirror URL is invalid.'), tries)
+ return error(remote_mirror.last_error)
end
update_mirror(remote_mirror)
@@ -19,11 +21,11 @@ module Projects
rescue Gitlab::Git::CommandError => e
# This happens if one of the Gitaly calls above fails, for example when
# branches have diverged, or the pre-receive hook fails.
- retry_or_fail(remote_mirror, e.message, tries)
+ hard_retry_or_fail(remote_mirror, e.message, tries)
error(e.message)
rescue => e
- remote_mirror.mark_as_failed!(e.message)
+ remote_mirror.hard_fail!(e.message)
raise e
end
@@ -70,15 +72,15 @@ module Projects
).execute
end
- def retry_or_fail(mirror, message, tries)
+ def hard_retry_or_fail(mirror, message, tries)
if tries < MAX_TRIES
- mirror.mark_for_retry!(message)
+ mirror.hard_retry!(message)
else
# It's not likely we'll be able to recover from this ourselves, so we'll
# notify the users of the problem and won't trigger any Sidekiq retries.
# Instead, we'll wait for the next change to try the push again, or until
# a user manually retries.
- mirror.mark_as_failed!(message)
+ mirror.hard_fail!(message)
end
end
end
diff --git a/app/services/prometheus/create_default_alerts_service.rb b/app/services/prometheus/create_default_alerts_service.rb
index 53baf6a650e..4ae2743cc28 100644
--- a/app/services/prometheus/create_default_alerts_service.rb
+++ b/app/services/prometheus/create_default_alerts_service.rb
@@ -84,7 +84,7 @@ module Prometheus
def environment
strong_memoize(:environment) do
- EnvironmentsFinder.new(project, nil, name: 'production').find.first ||
+ EnvironmentsFinder.new(project, nil, name: 'production').execute.first ||
project.environments.first
end
end
diff --git a/app/services/prometheus/proxy_service.rb b/app/services/prometheus/proxy_service.rb
index c1bafd03b48..33635796771 100644
--- a/app/services/prometheus/proxy_service.rb
+++ b/app/services/prometheus/proxy_service.rb
@@ -44,8 +44,8 @@ module Prometheus
def self.from_cache(proxyable_class_name, proxyable_id, method, path, params)
proxyable_class = begin
proxyable_class_name.constantize
- rescue NameError
- nil
+ rescue NameError
+ nil
end
return unless proxyable_class
diff --git a/app/services/prometheus/proxy_variable_substitution_service.rb b/app/services/prometheus/proxy_variable_substitution_service.rb
index 820b551c30a..846dfeb33ce 100644
--- a/app/services/prometheus/proxy_variable_substitution_service.rb
+++ b/app/services/prometheus/proxy_variable_substitution_service.rb
@@ -41,7 +41,8 @@ module Prometheus
# }
# })
def initialize(environment, params = {})
- @environment, @params = environment, params.deep_dup
+ @environment = environment
+ @params = params.deep_dup
end
# @return - params [Hash<Symbol,Any>] Returns a Hash containing a params key which is
diff --git a/app/services/releases/base_service.rb b/app/services/releases/base_service.rb
index d0e1577bd8d..de7c97b3518 100644
--- a/app/services/releases/base_service.rb
+++ b/app/services/releases/base_service.rb
@@ -8,7 +8,9 @@ module Releases
attr_accessor :project, :current_user, :params
def initialize(project, user = nil, params = {})
- @project, @current_user, @params = project, user, params.dup
+ @project = project
+ @current_user = user
+ @params = params.dup
end
def tag_name
diff --git a/app/services/repositories/changelog_service.rb b/app/services/repositories/changelog_service.rb
index 3981e91e7f3..0122bfb154d 100644
--- a/app/services/repositories/changelog_service.rb
+++ b/app/services/repositories/changelog_service.rb
@@ -61,14 +61,14 @@ module Repositories
# rubocop: enable Metrics/ParameterLists
def execute
- from = start_of_commit_range
+ config = Gitlab::Changelog::Config.from_git(@project)
+ from = start_of_commit_range(config)
# For every entry we want to only include the merge request that
# originally introduced the commit, which is the oldest merge request that
# contains the commit. We fetch these merge requests in batches, reducing
# the number of SQL queries needed to get this data.
mrs_finder = MergeRequests::OldestPerCommitFinder.new(@project)
- config = Gitlab::Changelog::Config.from_git(@project)
release = Gitlab::Changelog::Release
.new(version: @version, date: @date, config: config)
@@ -98,10 +98,12 @@ module Repositories
.commit(release: release, file: @file, branch: @branch, message: @message)
end
- def start_of_commit_range
+ def start_of_commit_range(config)
return @from if @from
- if (prev_tag = PreviousTagFinder.new(@project).execute(@version))
+ finder = ChangelogTagFinder.new(@project, regex: config.tag_regex)
+
+ if (prev_tag = finder.execute(@version))
return prev_tag.target_commit.id
end
diff --git a/app/services/resource_access_tokens/create_service.rb b/app/services/resource_access_tokens/create_service.rb
index 36858f33b49..620dfff91e2 100644
--- a/app/services/resource_access_tokens/create_service.rb
+++ b/app/services/resource_access_tokens/create_service.rb
@@ -39,7 +39,7 @@ module ResourceAccessTokens
attr_reader :resource_type, :resource
def has_permission_to_create?
- %w(project group).include?(resource_type) && can?(current_user, :admin_resource_access_tokens, resource)
+ %w(project group).include?(resource_type) && can?(current_user, :create_resource_access_tokens, resource)
end
def create_user
diff --git a/app/services/resource_access_tokens/revoke_service.rb b/app/services/resource_access_tokens/revoke_service.rb
index 59402701ddc..0924ca3bac4 100644
--- a/app/services/resource_access_tokens/revoke_service.rb
+++ b/app/services/resource_access_tokens/revoke_service.rb
@@ -14,7 +14,7 @@ module ResourceAccessTokens
end
def execute
- return error("#{current_user.name} cannot delete #{bot_user.name}") unless can_destroy_bot_member?
+ return error("#{current_user.name} cannot delete #{bot_user.name}") unless can_destroy_token?
return error("Failed to find bot user") unless find_member
access_token.revoke!
@@ -37,14 +37,8 @@ module ResourceAccessTokens
DeleteUserWorker.perform_async(current_user.id, bot_user.id, skip_authorization: true)
end
- def can_destroy_bot_member?
- if resource.is_a?(Project)
- can?(current_user, :admin_project_member, @resource)
- elsif resource.is_a?(Group)
- can?(current_user, :admin_group_member, @resource)
- else
- false
- end
+ def can_destroy_token?
+ %w(project group).include?(resource.class.name.downcase) && can?(current_user, :destroy_resource_access_tokens, resource)
end
def find_member
diff --git a/app/services/resource_events/base_synthetic_notes_builder_service.rb b/app/services/resource_events/base_synthetic_notes_builder_service.rb
index a2d78ec67c3..5939b9d2f9c 100644
--- a/app/services/resource_events/base_synthetic_notes_builder_service.rb
+++ b/app/services/resource_events/base_synthetic_notes_builder_service.rb
@@ -25,9 +25,7 @@ module ResourceEvents
def apply_common_filters(events)
events = apply_last_fetched_at(events)
- events = apply_fetch_until(events)
-
- events
+ apply_fetch_until(events)
end
def apply_last_fetched_at(events)
diff --git a/app/services/resource_events/change_labels_service.rb b/app/services/resource_events/change_labels_service.rb
index ddf3b05ac10..89eb90e9360 100644
--- a/app/services/resource_events/change_labels_service.rb
+++ b/app/services/resource_events/change_labels_service.rb
@@ -5,7 +5,8 @@ module ResourceEvents
attr_reader :resource, :user
def initialize(resource, user)
- @resource, @user = resource, user
+ @resource = resource
+ @user = user
end
def execute(added_labels: [], removed_labels: [])
diff --git a/app/services/resource_events/change_state_service.rb b/app/services/resource_events/change_state_service.rb
index c5120ba82e1..d68b86a1513 100644
--- a/app/services/resource_events/change_state_service.rb
+++ b/app/services/resource_events/change_state_service.rb
@@ -5,7 +5,8 @@ module ResourceEvents
attr_reader :resource, :user
def initialize(user:, resource:)
- @user, @resource = user, resource
+ @user = user
+ @resource = resource
end
def execute(params)
diff --git a/app/services/search/global_service.rb b/app/services/search/global_service.rb
index 9038650adb7..055034d87a1 100644
--- a/app/services/search/global_service.rb
+++ b/app/services/search/global_service.rb
@@ -9,7 +9,8 @@ module Search
attr_accessor :current_user, :params
def initialize(user, params)
- @current_user, @params = user, params.dup
+ @current_user = user
+ @params = params.dup
end
def execute
diff --git a/app/services/search/project_service.rb b/app/services/search/project_service.rb
index e5fc5a7a438..4227dfe2fac 100644
--- a/app/services/search/project_service.rb
+++ b/app/services/search/project_service.rb
@@ -9,7 +9,9 @@ module Search
attr_accessor :project, :current_user, :params
def initialize(project, user, params)
- @project, @current_user, @params = project, user, params.dup
+ @project = project
+ @current_user = user
+ @params = params.dup
end
def execute
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index 802bfd813dc..c95b459cd2a 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -6,7 +6,7 @@ module Snippets
# NOTE: disable_spam_action_service can be removed when the ':snippet_spam' feature flag is removed.
disable_spam_action_service = params.delete(:disable_spam_action_service) == true
@request = params.delete(:request)
- @spam_params = Spam::SpamActionService.filter_spam_params!(params)
+ @spam_params = Spam::SpamActionService.filter_spam_params!(params, @request)
@snippet = build_from_params
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
index 5b427817a02..aedb6a4819d 100644
--- a/app/services/snippets/update_service.rb
+++ b/app/services/snippets/update_service.rb
@@ -10,7 +10,7 @@ module Snippets
# NOTE: disable_spam_action_service can be removed when the ':snippet_spam' feature flag is removed.
disable_spam_action_service = params.delete(:disable_spam_action_service) == true
@request = params.delete(:request)
- @spam_params = Spam::SpamActionService.filter_spam_params!(params)
+ @spam_params = Spam::SpamActionService.filter_spam_params!(params, @request)
return invalid_params_error(snippet) unless valid_params?
diff --git a/app/services/spam/spam_action_service.rb b/app/services/spam/spam_action_service.rb
index 185b9e39070..2220198583c 100644
--- a/app/services/spam/spam_action_service.rb
+++ b/app/services/spam/spam_action_service.rb
@@ -11,22 +11,30 @@ module Spam
# Takes a hash of parameters from an incoming request to modify a model (via a controller,
# service, or GraphQL mutation). The parameters will either be camelCase (if they are
# received directly via controller params) or underscore_case (if they have come from
- # a GraphQL mutation which has converted them to underscore)
+ # a GraphQL mutation which has converted them to underscore), or in the
+ # headers when using the header-based flow.
#
# Deletes the parameters which are related to spam and captcha processing, and returns
# them in a SpamParams parameters object. See:
# https://refactoring.com/catalog/introduceParameterObject.html
- def self.filter_spam_params!(params)
+ def self.filter_spam_params!(params, request)
# NOTE: The 'captcha_response' field can be expanded to multiple fields when we move to future
# alternative captcha implementations such as FriendlyCaptcha. See
# https://gitlab.com/gitlab-org/gitlab/-/issues/273480
- captcha_response = params.delete(:captcha_response) || params.delete(:captchaResponse)
+ headers = request&.headers || {}
+ api = params.delete(:api)
+ captcha_response = read_parameter(:captcha_response, params, headers)
+ spam_log_id = read_parameter(:spam_log_id, params, headers)&.to_i
- SpamParams.new(
- api: params.delete(:api),
- captcha_response: captcha_response,
- spam_log_id: params.delete(:spam_log_id) || params.delete(:spamLogId)
- )
+ SpamParams.new(api: api, captcha_response: captcha_response, spam_log_id: spam_log_id)
+ end
+
+ def self.read_parameter(name, params, headers)
+ [
+ params.delete(name),
+ params.delete(name.to_s.camelize(:lower).to_sym),
+ headers["X-GitLab-#{name.to_s.titlecase(keep_id_suffix: true).tr(' ', '-')}"]
+ ].compact.first
end
attr_accessor :target, :request, :options
@@ -40,6 +48,7 @@ module Spam
@options = {}
end
+ # rubocop:disable Metrics/AbcSize
def execute(spam_params:)
if request
options[:ip_address] = request.env['action_dispatch.remote_ip'].to_s
@@ -58,19 +67,20 @@ module Spam
)
if recaptcha_verified
- # If it's a request which is already verified through captcha,
+ # If it's a request which is already verified through CAPTCHA,
# update the spam log accordingly.
SpamLog.verify_recaptcha!(user_id: user.id, id: spam_params.spam_log_id)
- ServiceResponse.success(message: "Captcha was successfully verified")
+ ServiceResponse.success(message: "CAPTCHA successfully verified")
else
return ServiceResponse.success(message: 'Skipped spam check because user was allowlisted') if allowlisted?(user)
return ServiceResponse.success(message: 'Skipped spam check because request was not present') unless request
return ServiceResponse.success(message: 'Skipped spam check because it was not required') unless check_for_spam?
perform_spam_service_check(spam_params.api)
- ServiceResponse.success(message: "Spam check performed, check #{target.class.name} spammable model for any errors or captcha requirement")
+ ServiceResponse.success(message: "Spam check performed. Check #{target.class.name} spammable model for any errors or CAPTCHA requirement")
end
end
+ # rubocop:enable Metrics/AbcSize
delegate :check_for_spam?, to: :target
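As a minimal sketch of the resolution order introduced by read_parameter above (not part of this diff; the header name is derived from the titlecase/tr calls and should be treated as an assumption):

  # Resolution order for :spam_log_id — underscore param, camelCase param, then header.
  def read_spam_log_id(params, headers)
    [
      params.delete(:spam_log_id),     # underscore_case, e.g. from GraphQL mutations
      params.delete(:spamLogId),       # camelCase, e.g. from controller params
      headers['X-GitLab-Spam-Log-Id']  # header-based flow
    ].compact.first&.to_i
  end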
diff --git a/app/services/spam/spam_params.rb b/app/services/spam/spam_params.rb
index fef5355c7f3..3420748822d 100644
--- a/app/services/spam/spam_params.rb
+++ b/app/services/spam/spam_params.rb
@@ -23,10 +23,10 @@ module Spam
end
def ==(other)
- other.class == self.class &&
- other.api == self.api &&
- other.captcha_response == self.captcha_response &&
- other.spam_log_id == self.spam_log_id
+ other.class <= self.class &&
+ other.api == api &&
+ other.captcha_response == captcha_response &&
+ other.spam_log_id == spam_log_id
end
end
end
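A brief illustration of the relaxed equality check (hypothetical subclass, not part of this diff): Module#<= is true when the receiver is the same class or a subclass, so instances of a SpamParams subclass can still compare equal to plain SpamParams with matching attributes.

  # Hypothetical subclass, used only to show what Module#<= returns.
  class ExtendedSpamParams < Spam::SpamParams; end

  ExtendedSpamParams <= Spam::SpamParams  # => true (same class or subclass)
  String <= Spam::SpamParams              # => nil  (unrelated classes)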
diff --git a/app/services/submit_usage_ping_service.rb b/app/services/submit_usage_ping_service.rb
index 8ab1193b04f..d628b1ea7c7 100644
--- a/app/services/submit_usage_ping_service.rb
+++ b/app/services/submit_usage_ping_service.rb
@@ -35,7 +35,13 @@ class SubmitUsagePingService
raise SubmissionError.new("Unsuccessful response code: #{response.code}") unless response.success?
- raw_usage_data.update_sent_at! if raw_usage_data
+ version_usage_data_id = response.dig('conv_index', 'usage_data_id') || response.dig('dev_ops_score', 'usage_data_id')
+
+ unless version_usage_data_id.is_a?(Integer) && version_usage_data_id > 0
+ raise SubmissionError.new("Invalid usage_data_id in response: #{version_usage_data_id}")
+ end
+
+ raw_usage_data.update_version_metadata!(usage_data_id: version_usage_data_id)
store_metrics(response)
end
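For illustration (response shapes assumed beyond the two dig paths shown above), a valid payload carries the id under either section:

  # Illustrative fragments only; real responses contain additional keys.
  { 'conv_index'    => { 'usage_data_id' => 123 } }.dig('conv_index', 'usage_data_id')    # => 123
  { 'dev_ops_score' => { 'usage_data_id' => 456 } }.dig('dev_ops_score', 'usage_data_id') # => 456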
diff --git a/app/services/system_hooks_service.rb b/app/services/system_hooks_service.rb
index d854b95cb93..53e810035c5 100644
--- a/app/services/system_hooks_service.rb
+++ b/app/services/system_hooks_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class SystemHooksService
- BUILDER_DRIVEN_EVENT_DATA_AVAILABLE_FOR_CLASSES = [GroupMember, Group, ProjectMember].freeze
+ BUILDER_DRIVEN_EVENT_DATA_AVAILABLE_FOR_CLASSES = [GroupMember, Group, ProjectMember, User].freeze
def execute_hooks_for(model, event)
data = build_event_data(model, event)
@@ -47,15 +47,6 @@ class SystemHooksService
if event == :rename || event == :transfer
data[:old_path_with_namespace] = model.old_path_with_namespace
end
- when User
- data.merge!(user_data(model))
-
- case event
- when :rename
- data[:old_username] = model.username_before_last_save
- when :failed_login
- data[:state] = model.state
- end
end
data
@@ -79,15 +70,6 @@ class SystemHooksService
}
end
- def user_data(model)
- {
- name: model.name,
- email: model.email,
- user_id: model.id,
- username: model.username
- }
- end
-
def builder_driven_event_data_available?(model)
model.class.in?(BUILDER_DRIVEN_EVENT_DATA_AVAILABLE_FOR_CLASSES)
end
@@ -100,10 +82,10 @@ class SystemHooksService
Gitlab::HookData::GroupBuilder
when ProjectMember
Gitlab::HookData::ProjectMemberBuilder
+ when User
+ Gitlab::HookData::UserBuilder
end
builder_class.new(model).build(event)
end
end
-
-SystemHooksService.prepend_if_ee('EE::SystemHooksService')
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 082ed93eca2..4377bd8554b 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -7,7 +7,7 @@
module SystemNoteService
extend self
- # Called when commits are added to a Merge Request
+ # Called when commits are added to a merge request
#
# noteable - Noteable object
# project - Project owning noteable
diff --git a/app/services/system_notes/alert_management_service.rb b/app/services/system_notes/alert_management_service.rb
index 27ddf2e36f1..70cdd5c6434 100644
--- a/app/services/system_notes/alert_management_service.rb
+++ b/app/services/system_notes/alert_management_service.rb
@@ -73,7 +73,7 @@ module SystemNotes
#
# Returns the created Note object
def log_resolving_alert(monitoring_tool)
- body = "logged a resolving alert from **#{monitoring_tool}**"
+ body = "logged a recovery alert from **#{monitoring_tool}**"
create_note(NoteSummary.new(noteable, project, User.alert_bot, body, action: 'new_alert_added'))
end
diff --git a/app/services/system_notes/commit_service.rb b/app/services/system_notes/commit_service.rb
index 11119956e0f..c89998f77c7 100644
--- a/app/services/system_notes/commit_service.rb
+++ b/app/services/system_notes/commit_service.rb
@@ -2,7 +2,7 @@
module SystemNotes
class CommitService < ::SystemNotes::BaseService
- # Called when commits are added to a Merge Request
+ # Called when commits are added to a merge request
#
# new_commits - Array of Commits added since last push
# existing_commits - Array of Commits added in a previous push
diff --git a/app/services/task_list_toggle_service.rb b/app/services/task_list_toggle_service.rb
index f6602a35033..32cfa198ce8 100644
--- a/app/services/task_list_toggle_service.rb
+++ b/app/services/task_list_toggle_service.rb
@@ -9,9 +9,11 @@ class TaskListToggleService
attr_reader :updated_markdown, :updated_markdown_html
def initialize(markdown, markdown_html, line_source:, line_number:, toggle_as_checked:)
- @markdown, @markdown_html = markdown, markdown_html
- @line_source, @line_number = line_source, line_number
- @toggle_as_checked = toggle_as_checked
+ @markdown = markdown
+ @markdown_html = markdown_html
+ @line_source = line_source
+ @line_number = line_number
+ @toggle_as_checked = toggle_as_checked
@updated_markdown, @updated_markdown_html = nil
end
diff --git a/app/services/todo_service.rb b/app/services/todo_service.rb
index dea116c8546..e473a6dc594 100644
--- a/app/services/todo_service.rb
+++ b/app/services/todo_service.rb
@@ -47,7 +47,7 @@ class TodoService
yield target
- todo_users.each(&:update_todos_count_cache)
+ Users::UpdateTodoCountCacheService.new(todo_users).execute if todo_users.present?
end
# When we reassign an assignable object (issuable, alert) we should:
@@ -177,7 +177,7 @@ class TodoService
def resolve_todos_for_target(target, current_user)
attributes = attributes_for_target(target)
- resolve_todos(pending_todos(current_user, attributes), current_user)
+ resolve_todos(pending_todos([current_user], attributes), current_user)
end
def resolve_todos(todos, current_user, resolution: :done, resolved_by_action: :system_done)
@@ -220,16 +220,23 @@ class TodoService
private
def create_todos(users, attributes)
- Array(users).map do |user|
- next if pending_todos(user, attributes).exists? && Feature.disabled?(:multiple_todos, user)
+ users = Array(users)
+ return if users.empty?
+
+ users_with_pending_todos = pending_todos(users, attributes).pluck_user_id
+ users.reject! { |user| users_with_pending_todos.include?(user.id) && Feature.disabled?(:multiple_todos, user) }
+
+ todos = users.map do |user|
issue_type = attributes.delete(:issue_type)
track_todo_creation(user, issue_type)
- todo = Todo.create(attributes.merge(user_id: user.id))
- user.update_todos_count_cache
- todo
+ Todo.create(attributes.merge(user_id: user.id))
end
+
+ Users::UpdateTodoCountCacheService.new(users).execute
+
+ todos
end
def new_issuable(issuable, author)
@@ -353,8 +360,8 @@ class TodoService
end
end
- def pending_todos(user, criteria = {})
- PendingTodosFinder.new(user, criteria).execute
+ def pending_todos(users, criteria = {})
+ PendingTodosFinder.new(users, criteria).execute
end
def track_todo_creation(user, issue_type)
diff --git a/app/services/todos/destroy/base_service.rb b/app/services/todos/destroy/base_service.rb
index 7378f10e7c4..4e971246185 100644
--- a/app/services/todos/destroy/base_service.rb
+++ b/app/services/todos/destroy/base_service.rb
@@ -13,7 +13,7 @@ module Todos
# rubocop: disable CodeReuse/ActiveRecord
def without_authorized(items)
- items.where('todos.user_id NOT IN (?)', authorized_users)
+ items.where.not('todos.user_id' => authorized_users)
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/todos/destroy/destroyed_issuable_service.rb b/app/services/todos/destroy/destroyed_issuable_service.rb
new file mode 100644
index 00000000000..db12965224b
--- /dev/null
+++ b/app/services/todos/destroy/destroyed_issuable_service.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Todos
+ module Destroy
+ class DestroyedIssuableService
+ BATCH_SIZE = 100
+
+ def initialize(target_id, target_type)
+ @target_id = target_id
+ @target_type = target_type
+ end
+
+ def execute
+ inner_query = Todo.select(:id).for_target(target_id).for_type(target_type).limit(BATCH_SIZE)
+
+ delete_query = <<~SQL
+ DELETE FROM "#{Todo.table_name}"
+ WHERE id IN (#{inner_query.to_sql})
+ RETURNING user_id
+ SQL
+
+ loop do
+ result = ActiveRecord::Base.connection.execute(delete_query)
+
+ break if result.cmd_tuples == 0
+
+ user_ids = result.map { |row| row['user_id'] }.uniq
+
+ invalidate_todos_cache_counts(user_ids)
+ end
+ end
+
+ private
+
+ attr_reader :target_id, :target_type
+
+ def invalidate_todos_cache_counts(user_ids)
+ user_ids.each do |id|
+ # Only build a user instance since we only need its ID for
+ # `User#invalidate_todos_cache_counts` to work.
+ User.new(id: id).invalidate_todos_cache_counts
+ end
+ end
+ end
+ end
+end
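A hedged usage sketch (the call site is not part of this diff): the service takes the deleted record's id and type, and the DELETE ... RETURNING loop keeps each statement bounded to BATCH_SIZE rows while collecting the affected user ids for cache invalidation.

  # Hypothetical caller; `merge_request` stands in for any destroyed issuable.
  Todos::Destroy::DestroyedIssuableService.new(merge_request.id, merge_request.class.name).execute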
diff --git a/app/services/todos/destroy/entity_leave_service.rb b/app/services/todos/destroy/entity_leave_service.rb
index 7cfedc2233a..6d4fc3865ac 100644
--- a/app/services/todos/destroy/entity_leave_service.rb
+++ b/app/services/todos/destroy/entity_leave_service.rb
@@ -65,8 +65,10 @@ module Todos
end
def remove_group_todos
+ return unless entity.is_a?(Namespace)
+
Todo
- .for_group(non_authorized_groups)
+ .for_group(non_authorized_non_public_groups)
.for_user(user)
.delete_all
end
@@ -102,12 +104,19 @@ module Todos
GroupsFinder.new(user, min_access_level: Gitlab::Access::REPORTER).execute.select(:id)
end
- def non_authorized_groups
+ # Since the entity is a private group, we can assume all subgroups are also
+ # private. We can therefore limit GroupsFinder with `all_available: false`.
+ # Otherwise it tries to include all public groups, which generates expensive
+ # SQL queries: https://gitlab.com/gitlab-org/gitlab/-/issues/325133
+ # rubocop: disable CodeReuse/ActiveRecord
+ def non_authorized_non_public_groups
return [] unless entity.is_a?(Namespace)
+ return [] unless entity.private?
entity.self_and_descendants.select(:id)
- .id_not_in(GroupsFinder.new(user).execute.select(:id))
+ .id_not_in(GroupsFinder.new(user, all_available: false).execute.select(:id).reorder(nil))
end
+ # rubocop: enable CodeReuse/ActiveRecord
def non_authorized_reporter_groups
entity.self_and_descendants.select(:id)
diff --git a/app/services/todos/destroy/private_features_service.rb b/app/services/todos/destroy/private_features_service.rb
index bd49519d694..44c3ff231f8 100644
--- a/app/services/todos/destroy/private_features_service.rb
+++ b/app/services/todos/destroy/private_features_service.rb
@@ -36,7 +36,7 @@ module Todos
items = Todo.where(project_id: project_id)
items = items.where(user_id: user_id) if user_id
- items.where('user_id NOT IN (?)', authorized_users)
+ items.where.not(user_id: authorized_users)
.where(target_type: target_types)
.delete_all
end
diff --git a/app/services/two_factor/base_service.rb b/app/services/two_factor/base_service.rb
index 7d3f63f3442..0957d7ebabd 100644
--- a/app/services/two_factor/base_service.rb
+++ b/app/services/two_factor/base_service.rb
@@ -7,7 +7,8 @@ module TwoFactor
attr_reader :current_user, :params, :user
def initialize(current_user, params = {})
- @current_user, @params = current_user, params
+ @current_user = current_user
+ @params = params
@user = params.delete(:user)
end
end
diff --git a/app/services/upload_service.rb b/app/services/upload_service.rb
index ba6ead41836..39d1ffa4d6b 100644
--- a/app/services/upload_service.rb
+++ b/app/services/upload_service.rb
@@ -1,8 +1,14 @@
# frozen_string_literal: true
class UploadService
+ # Temporarily introduced for upload API: https://gitlab.com/gitlab-org/gitlab/-/issues/325788
+ attr_accessor :override_max_attachment_size
+
def initialize(model, file, uploader_class = FileUploader, **uploader_context)
- @model, @file, @uploader_class, @uploader_context = model, file, uploader_class, uploader_context
+ @model = model
+ @file = file
+ @uploader_class = uploader_class
+ @uploader_context = uploader_context
end
def execute
@@ -19,6 +25,6 @@ class UploadService
attr_reader :model, :file, :uploader_class, :uploader_context
def max_attachment_size
- Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i
+ override_max_attachment_size || Gitlab::CurrentSettings.max_attachment_size.megabytes.to_i
end
end
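A minimal usage sketch of the new override (the call site and value are assumptions, not shown in this diff):

  # Hypothetical usage; 100.megabytes is an arbitrary example limit.
  service = UploadService.new(project, uploaded_file)
  service.override_max_attachment_size = 100.megabytes
  service.execute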
diff --git a/app/services/user_agent_detail_service.rb b/app/services/user_agent_detail_service.rb
index 5cb42e879a0..9302c86d3e6 100644
--- a/app/services/user_agent_detail_service.rb
+++ b/app/services/user_agent_detail_service.rb
@@ -4,7 +4,8 @@ class UserAgentDetailService
attr_accessor :spammable, :request
def initialize(spammable, request)
- @spammable, @request = spammable, request
+ @spammable = spammable
+ @request = request
end
def create
diff --git a/app/services/user_preferences/update_service.rb b/app/services/user_preferences/update_service.rb
new file mode 100644
index 00000000000..a1ee35d4580
--- /dev/null
+++ b/app/services/user_preferences/update_service.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module UserPreferences
+ class UpdateService < BaseService
+ def initialize(user, params = {})
+ @preferences = user.user_preference
+ @params = params.to_h.dup.with_indifferent_access
+ end
+
+ def execute
+ if @preferences.update(@params)
+ ServiceResponse.success(
+ message: 'Preference was updated',
+ payload: { preferences: @preferences })
+ else
+ ServiceResponse.error(message: 'Could not update preference')
+ end
+ end
+ end
+end
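A hedged usage sketch (the preference attribute name is an assumption chosen for illustration):

  # `view_diffs_file_by_file` is a hypothetical preference key used here as an example.
  result = UserPreferences::UpdateService.new(current_user, view_diffs_file_by_file: true).execute
  result.success? # => true when the user_preference row was updated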
diff --git a/app/services/users/activity_service.rb b/app/services/users/activity_service.rb
index 85855f45e33..64844a3f002 100644
--- a/app/services/users/activity_service.rb
+++ b/app/services/users/activity_service.rb
@@ -37,3 +37,5 @@ module Users
end
end
end
+
+Users::ActivityService.prepend_ee_mod
diff --git a/app/services/users/batch_status_cleaner_service.rb b/app/services/users/batch_status_cleaner_service.rb
index ea6142f13cc..533794f8d60 100644
--- a/app/services/users/batch_status_cleaner_service.rb
+++ b/app/services/users/batch_status_cleaner_service.rb
@@ -2,7 +2,7 @@
module Users
class BatchStatusCleanerService
- BATCH_SIZE = 100.freeze
+ BATCH_SIZE = 100
# Clean up BATCH_SIZE user_statuses records
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/services/users/refresh_authorized_projects_service.rb b/app/services/users/refresh_authorized_projects_service.rb
index 070713929e4..d28ff45bfdf 100644
--- a/app/services/users/refresh_authorized_projects_service.rb
+++ b/app/services/users/refresh_authorized_projects_service.rb
@@ -51,38 +51,12 @@ module Users
# This method returns the updated User object.
def execute_without_lease
- current = current_authorizations_per_project
- fresh = fresh_access_levels_per_project
-
- # Delete projects that have more than one authorizations associated with
- # the user. The correct authorization is added to the ``add`` array in the
- # next stage.
- remove = projects_with_duplicates
- current.except!(*projects_with_duplicates)
-
- remove |= current.each_with_object([]) do |(project_id, row), array|
- # rows not in the new list or with a different access level should be
- # removed.
- if !fresh[project_id] || fresh[project_id] != row.access_level
- if incorrect_auth_found_callback
- incorrect_auth_found_callback.call(project_id, row.access_level)
- end
-
- array << row.project_id
- end
- end
-
- add = fresh.each_with_object([]) do |(project_id, level), array|
- # rows not in the old list or with a different access level should be
- # added.
- if !current[project_id] || current[project_id].access_level != level
- if missing_auth_found_callback
- missing_auth_found_callback.call(project_id, level)
- end
-
- array << [user.id, project_id, level]
- end
- end
+ remove, add = AuthorizedProjectUpdate::FindRecordsDueForRefreshService.new(
+ user,
+ source: source,
+ incorrect_auth_found_callback: incorrect_auth_found_callback,
+ missing_auth_found_callback: missing_auth_found_callback
+ ).execute
update_authorizations(remove, add)
end
@@ -104,6 +78,10 @@ module Users
user.reset
end
+ private
+
+ attr_reader :incorrect_auth_found_callback, :missing_auth_found_callback
+
def log_refresh_details(remove, add)
Gitlab::AppJsonLogger.info(event: 'authorized_projects_refresh',
user_id: user.id,
@@ -115,34 +93,5 @@ module Users
'authorized_projects_refresh.rows_deleted_slice': remove.first(5),
'authorized_projects_refresh.rows_added_slice': add.first(5))
end
-
- def fresh_access_levels_per_project
- fresh_authorizations.each_with_object({}) do |row, hash|
- hash[row.project_id] = row.access_level
- end
- end
-
- def current_authorizations_per_project
- current_authorizations.index_by(&:project_id)
- end
-
- def current_authorizations
- @current_authorizations ||= user.project_authorizations.select(:project_id, :access_level)
- end
-
- def fresh_authorizations
- Gitlab::ProjectAuthorizations.new(user).calculate
- end
-
- private
-
- attr_reader :incorrect_auth_found_callback, :missing_auth_found_callback
-
- def projects_with_duplicates
- @projects_with_duplicates ||= current_authorizations
- .group_by(&:project_id)
- .select { |project_id, authorizations| authorizations.count > 1 }
- .keys
- end
end
end
diff --git a/app/services/users/respond_to_terms_service.rb b/app/services/users/respond_to_terms_service.rb
index 254480304f9..7cdfef1489b 100644
--- a/app/services/users/respond_to_terms_service.rb
+++ b/app/services/users/respond_to_terms_service.rb
@@ -3,7 +3,8 @@
module Users
class RespondToTermsService
def initialize(user, term)
- @user, @term = user, term
+ @user = user
+ @term = term
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/services/users/set_status_service.rb b/app/services/users/set_status_service.rb
index a907937070f..2b4be8c833b 100644
--- a/app/services/users/set_status_service.rb
+++ b/app/services/users/set_status_service.rb
@@ -7,7 +7,8 @@ module Users
attr_reader :current_user, :target_user, :params
def initialize(current_user, params)
- @current_user, @params = current_user, params.dup
+ @current_user = current_user
+ @params = params.dup
@target_user = params.delete(:user) || current_user
end
diff --git a/app/services/users/update_canonical_email_service.rb b/app/services/users/update_canonical_email_service.rb
index 1400fd58eb4..e75452f60fd 100644
--- a/app/services/users/update_canonical_email_service.rb
+++ b/app/services/users/update_canonical_email_service.rb
@@ -7,7 +7,7 @@ module Users
INCLUDED_DOMAINS_PATTERN = [/gmail.com/].freeze
def initialize(user:)
- raise ArgumentError.new("Please provide a user") unless user&.is_a?(User)
+ raise ArgumentError.new("Please provide a user") unless user.is_a?(User)
@user = user
end
diff --git a/app/services/users/update_todo_count_cache_service.rb b/app/services/users/update_todo_count_cache_service.rb
new file mode 100644
index 00000000000..03ab66bd64a
--- /dev/null
+++ b/app/services/users/update_todo_count_cache_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Users
+ class UpdateTodoCountCacheService < BaseService
+ QUERY_BATCH_SIZE = 10
+
+ attr_reader :users
+
+ # users - An array of User objects
+ def initialize(users)
+ @users = users
+ end
+
+ def execute
+ users.each_slice(QUERY_BATCH_SIZE) do |users_batch|
+ todo_counts = Todo.for_user(users_batch).count_grouped_by_user_id_and_state
+
+ users_batch.each do |user|
+ update_count_cache(user, todo_counts, :done)
+ update_count_cache(user, todo_counts, :pending)
+ end
+ end
+ end
+
+ private
+
+ def update_count_cache(user, todo_counts, state)
+ count = todo_counts.fetch([user.id, state.to_s], 0)
+ expiration_time = user.count_cache_validity_period
+
+ Rails.cache.write(['users', user.id, "todos_#{state}_count"], count, expires_in: expiration_time)
+ end
+ end
+end
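A short usage sketch grounded in the service above: counts are recomputed in slices of QUERY_BATCH_SIZE users and written directly to the Rails cache.

  # Refreshes both the pending and done todo counts for each user, 10 users per query.
  Users::UpdateTodoCountCacheService.new([user_a, user_b]).execute
  # Cached under keys like ['users', user_a.id, 'todos_pending_count'].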