path: root/app/services
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/admin/propagate_integration_service.rb | 67
-rw-r--r--  app/services/admin/propagate_service_template.rb | 6
-rw-r--r--  app/services/alert_management/alerts/update_service.rb | 22
-rw-r--r--  app/services/alert_management/process_prometheus_alert_service.rb | 2
-rw-r--r--  app/services/audit_event_service.rb | 21
-rw-r--r--  app/services/boards/create_service.rb | 18
-rw-r--r--  app/services/boards/issues/list_service.rb | 5
-rw-r--r--  app/services/boards/lists/destroy_service.rb | 8
-rw-r--r--  app/services/bulk_create_integration_service.rb | 59
-rw-r--r--  app/services/bulk_update_integration_service.rb | 32
-rw-r--r--  app/services/ci/archive_trace_service.rb | 2
-rw-r--r--  app/services/ci/build_report_result_service.rb | 29
-rw-r--r--  app/services/ci/create_downstream_pipeline_service.rb | 15
-rw-r--r--  app/services/ci/create_job_artifacts_service.rb | 9
-rw-r--r--  app/services/ci/create_pipeline_service.rb | 5
-rw-r--r--  app/services/ci/daily_build_group_report_result_service.rb | 2
-rw-r--r--  app/services/ci/delete_objects_service.rb | 62
-rw-r--r--  app/services/ci/destroy_expired_job_artifacts_service.rb | 7
-rw-r--r--  app/services/ci/expire_pipeline_cache_service.rb | 13
-rw-r--r--  app/services/ci/list_config_variables_service.rb | 13
-rw-r--r--  app/services/ci/pipelines/create_artifact_service.rb | 1
-rw-r--r--  app/services/ci/play_bridge_service.rb | 14
-rw-r--r--  app/services/ci/play_build_service.rb | 4
-rw-r--r--  app/services/ci/play_manual_stage_service.rb | 8
-rw-r--r--  app/services/ci/process_pipeline_service.rb | 4
-rw-r--r--  app/services/ci/retry_build_service.rb | 2
-rw-r--r--  app/services/ci/update_build_queue_service.rb | 4
-rw-r--r--  app/services/ci/update_build_state_service.rb | 142
-rw-r--r--  app/services/clusters/kubernetes/create_or_update_service_account_service.rb | 4
-rw-r--r--  app/services/concerns/admin/propagate_service.rb | 51
-rw-r--r--  app/services/deployments/update_environment_service.rb (renamed from app/services/deployments/after_create_service.rb) | 4
-rw-r--r--  app/services/design_management/copy_design_collection.rb | 6
-rw-r--r--  app/services/design_management/copy_design_collection/copy_service.rb | 309
-rw-r--r--  app/services/design_management/copy_design_collection/queue_service.rb | 42
-rw-r--r--  app/services/design_management/delete_designs_service.rb | 1
-rw-r--r--  app/services/design_management/design_service.rb | 1
-rw-r--r--  app/services/design_management/generate_image_versions_service.rb | 3
-rw-r--r--  app/services/design_management/runs_design_actions.rb | 11
-rw-r--r--  app/services/design_management/save_designs_service.rb | 21
-rw-r--r--  app/services/feature_flags/base_service.rb | 55
-rw-r--r--  app/services/feature_flags/create_service.rb | 52
-rw-r--r--  app/services/feature_flags/destroy_service.rb | 33
-rw-r--r--  app/services/feature_flags/disable_service.rb | 46
-rw-r--r--  app/services/feature_flags/enable_service.rb | 93
-rw-r--r--  app/services/feature_flags/update_service.rb | 87
-rw-r--r--  app/services/git/branch_hooks_service.rb | 8
-rw-r--r--  app/services/git/wiki_push_service.rb | 4
-rw-r--r--  app/services/groups/create_service.rb | 19
-rw-r--r--  app/services/groups/import_export/import_service.rb | 2
-rw-r--r--  app/services/groups/transfer_service.rb | 35
-rw-r--r--  app/services/groups/update_service.rb | 58
-rw-r--r--  app/services/groups/update_shared_runners_service.rb | 32
-rw-r--r--  app/services/incident_management/incidents/create_service.rb | 6
-rw-r--r--  app/services/incident_management/incidents/update_severity_service.rb | 38
-rw-r--r--  app/services/incident_management/pager_duty/process_webhook_service.rb | 2
-rw-r--r--  app/services/issuable/clone/attributes_rewriter.rb | 13
-rw-r--r--  app/services/issuable_base_service.rb | 3
-rw-r--r--  app/services/issues/base_service.rb | 12
-rw-r--r--  app/services/issues/build_service.rb | 20
-rw-r--r--  app/services/issues/move_service.rb | 23
-rw-r--r--  app/services/issues/reopen_service.rb | 8
-rw-r--r--  app/services/jira/requests/base.rb | 7
-rw-r--r--  app/services/lfs/push_service.rb | 16
-rw-r--r--  app/services/members/create_service.rb | 2
-rw-r--r--  app/services/members/invitation_reminder_email_service.rb | 54
-rw-r--r--  app/services/merge_requests/base_service.rb | 9
-rw-r--r--  app/services/merge_requests/cleanup_refs_service.rb | 2
-rw-r--r--  app/services/merge_requests/export_csv_service.rb | 55
-rw-r--r--  app/services/merge_requests/ff_merge_service.rb | 2
-rw-r--r--  app/services/merge_requests/merge_service.rb | 2
-rw-r--r--  app/services/merge_requests/merge_to_ref_service.rb | 9
-rw-r--r--  app/services/merge_requests/mergeability_check_service.rb | 17
-rw-r--r--  app/services/merge_requests/refresh_service.rb | 17
-rw-r--r--  app/services/merge_requests/update_service.rb | 2
-rw-r--r--  app/services/metrics/dashboard/custom_dashboard_service.rb | 6
-rw-r--r--  app/services/metrics/dashboard/dynamic_embed_service.rb | 2
-rw-r--r--  app/services/namespace_settings/update_service.rb | 25
-rw-r--r--  app/services/notes/create_service.rb | 2
-rw-r--r--  app/services/notification_recipients/build_service.rb | 8
-rw-r--r--  app/services/notification_recipients/builder/default.rb | 3
-rw-r--r--  app/services/notification_service.rb | 31
-rw-r--r--  app/services/packages/composer/composer_json_service.rb | 6
-rw-r--r--  app/services/packages/create_event_service.rb | 37
-rw-r--r--  app/services/packages/create_package_service.rb | 1
-rw-r--r--  app/services/packages/generic/create_package_file_service.rb | 38
-rw-r--r--  app/services/packages/generic/find_or_create_package_service.rb | 15
-rw-r--r--  app/services/personal_access_tokens/revoke_service.rb | 7
-rw-r--r--  app/services/pod_logs/base_service.rb | 2
-rw-r--r--  app/services/pod_logs/elasticsearch_service.rb | 1
-rw-r--r--  app/services/pod_logs/kubernetes_service.rb | 1
-rw-r--r--  app/services/projects/alerting/notify_service.rb | 75
-rw-r--r--  app/services/projects/container_repository/cleanup_tags_service.rb | 5
-rw-r--r--  app/services/projects/container_repository/delete_tags_service.rb | 4
-rw-r--r--  app/services/projects/create_from_template_service.rb | 18
-rw-r--r--  app/services/projects/create_service.rb | 22
-rw-r--r--  app/services/projects/gitlab_projects_import_service.rb | 1
-rw-r--r--  app/services/projects/operations/update_service.rb | 10
-rw-r--r--  app/services/projects/overwrite_project_service.rb | 12
-rw-r--r--  app/services/projects/prometheus/alerts/notify_service.rb | 14
-rw-r--r--  app/services/projects/transfer_service.rb | 4
-rw-r--r--  app/services/projects/update_pages_service.rb | 16
-rw-r--r--  app/services/projects/update_remote_mirror_service.rb | 11
-rw-r--r--  app/services/quick_actions/target_service.rb | 4
-rw-r--r--  app/services/releases/base_service.rb | 89
-rw-r--r--  app/services/releases/concerns.rb | 77
-rw-r--r--  app/services/releases/create_evidence_service.rb | 2
-rw-r--r--  app/services/releases/create_service.rb | 4
-rw-r--r--  app/services/releases/destroy_service.rb | 4
-rw-r--r--  app/services/releases/update_service.rb | 20
-rw-r--r--  app/services/repository_archive_clean_up_service.rb | 24
-rw-r--r--  app/services/resource_access_tokens/create_service.rb | 18
-rw-r--r--  app/services/resource_access_tokens/revoke_service.rb | 27
-rw-r--r--  app/services/search/global_service.rb | 10
-rw-r--r--  app/services/search/group_service.rb | 3
-rw-r--r--  app/services/search/project_service.rb | 19
-rw-r--r--  app/services/search_service.rb | 4
-rw-r--r--  app/services/snippets/base_service.rb | 18
-rw-r--r--  app/services/snippets/create_service.rb | 9
-rw-r--r--  app/services/snippets/repository_validation_service.rb | 2
-rw-r--r--  app/services/snippets/update_service.rb | 39
-rw-r--r--  app/services/spam/spam_action_service.rb | 2
-rw-r--r--  app/services/static_site_editor/config_service.rb | 66
-rw-r--r--  app/services/system_note_service.rb | 8
-rw-r--r--  app/services/system_notes/incident_service.rb | 29
-rw-r--r--  app/services/system_notes/issuables_service.rb | 82
-rw-r--r--  app/services/system_notes/time_tracking_service.rb | 12
-rw-r--r--  app/services/todos/destroy/entity_leave_service.rb | 74
-rw-r--r--  app/services/users/approve_service.rb | 37
-rw-r--r--  app/services/users/block_service.rb | 2
-rw-r--r--  app/services/users/build_service.rb | 3
-rw-r--r--  app/services/users/destroy_service.rb | 2
-rw-r--r--  app/services/users/validate_otp_service.rb | 25
-rw-r--r--  app/services/web_hooks/destroy_service.rb | 78
-rw-r--r--  app/services/webauthn/authenticate_service.rb | 12
134 files changed, 2420 insertions, 634 deletions
diff --git a/app/services/admin/propagate_integration_service.rb b/app/services/admin/propagate_integration_service.rb
index 34d6008cb6a..96a6d861e47 100644
--- a/app/services/admin/propagate_integration_service.rb
+++ b/app/services/admin/propagate_integration_service.rb
@@ -7,66 +7,45 @@ module Admin
def propagate
update_inherited_integrations
- create_integration_for_groups_without_integration if Feature.enabled?(:group_level_integrations)
- create_integration_for_projects_without_integration
+ if integration.instance?
+ create_integration_for_groups_without_integration if Feature.enabled?(:group_level_integrations)
+ create_integration_for_projects_without_integration
+ else
+ create_integration_for_groups_without_integration_belonging_to_group
+ create_integration_for_projects_without_integration_belonging_to_group
+ end
end
private
# rubocop: disable Cop/InBatches
- # rubocop: disable CodeReuse/ActiveRecord
def update_inherited_integrations
- Service.where(type: integration.type, inherit_from_id: integration.id).in_batches(of: BATCH_SIZE) do |batch|
- bulk_update_from_integration(batch)
+ Service.by_type(integration.type).inherit_from_id(integration.id).in_batches(of: BATCH_SIZE) do |services|
+ min_id, max_id = services.pick("MIN(services.id), MAX(services.id)")
+ PropagateIntegrationInheritWorker.perform_async(integration.id, min_id, max_id)
end
end
# rubocop: enable Cop/InBatches
- # rubocop: enable CodeReuse/ActiveRecord
-
- # rubocop: disable CodeReuse/ActiveRecord
- def bulk_update_from_integration(batch)
- # Retrieving the IDs instantiates the ActiveRecord relation (batch)
- # into concrete models, otherwise update_all will clear the relation.
- # https://stackoverflow.com/q/34811646/462015
- batch_ids = batch.pluck(:id)
-
- Service.transaction do
- batch.update_all(service_hash)
-
- if data_fields_present?
- integration.data_fields.class.where(service_id: batch_ids).update_all(data_fields_hash)
- end
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
def create_integration_for_groups_without_integration
- loop do
- batch = Group.uncached { group_ids_without_integration(integration, BATCH_SIZE) }
-
- bulk_create_from_integration(batch, 'group') unless batch.empty?
-
- break if batch.size < BATCH_SIZE
+ Group.without_integration(integration).each_batch(of: BATCH_SIZE) do |groups|
+ min_id, max_id = groups.pick("MIN(namespaces.id), MAX(namespaces.id)")
+ PropagateIntegrationGroupWorker.perform_async(integration.id, min_id, max_id)
end
end
- def service_hash
- @service_hash ||= integration.to_service_hash
- .tap { |json| json['inherit_from_id'] = integration.id }
+ def create_integration_for_groups_without_integration_belonging_to_group
+ integration.group.descendants.without_integration(integration).each_batch(of: BATCH_SIZE) do |groups|
+ min_id, max_id = groups.pick("MIN(namespaces.id), MAX(namespaces.id)")
+ PropagateIntegrationGroupWorker.perform_async(integration.id, min_id, max_id)
+ end
end
- # rubocop:disable CodeReuse/ActiveRecord
- def group_ids_without_integration(integration, limit)
- services = Service
- .select('1')
- .where('services.group_id = namespaces.id')
- .where(type: integration.type)
-
- Group
- .where('NOT EXISTS (?)', services)
- .limit(limit)
- .pluck(:id)
+ def create_integration_for_projects_without_integration_belonging_to_group
+ Project.without_integration(integration).in_namespace(integration.group.self_and_descendants).each_batch(of: BATCH_SIZE) do |projects|
+ min_id, max_id = projects.pick("MIN(projects.id), MAX(projects.id)")
+ PropagateIntegrationProjectWorker.perform_async(integration.id, min_id, max_id)
+ end
end
- # rubocop:enable CodeReuse/ActiveRecord
end
end
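The propagation service above now only computes MIN/MAX id pairs per batch and defers the work to Sidekiq workers that are not part of this diff. A minimal sketch, assuming a worker of roughly this shape, of how the id range would be expanded back into a relation and handed to the new BulkUpdateIntegrationService:

# Assumed worker shape, for illustration only; the scopes come from the diff above.
class PropagateIntegrationInheritWorker
  include ApplicationWorker

  def perform(integration_id, min_id, max_id)
    integration = Service.find_by_id(integration_id)
    return unless integration

    batch = Service.where(id: min_id..max_id).by_type(integration.type).inherit_from_id(integration.id)
    BulkUpdateIntegrationService.new(integration, batch).execute
  end
end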
diff --git a/app/services/admin/propagate_service_template.rb b/app/services/admin/propagate_service_template.rb
index cd0d2d5d03f..07be3c1027d 100644
--- a/app/services/admin/propagate_service_template.rb
+++ b/app/services/admin/propagate_service_template.rb
@@ -9,11 +9,5 @@ module Admin
create_integration_for_projects_without_integration
end
-
- private
-
- def service_hash
- @service_hash ||= integration.to_service_hash
- end
end
end
diff --git a/app/services/alert_management/alerts/update_service.rb b/app/services/alert_management/alerts/update_service.rb
index 18d615aa7e7..464d5f2ecea 100644
--- a/app/services/alert_management/alerts/update_service.rb
+++ b/app/services/alert_management/alerts/update_service.rb
@@ -13,6 +13,7 @@ module AlertManagement
@current_user = current_user
@params = params
@param_errors = []
+ @status = params.delete(:status)
end
def execute
@@ -35,7 +36,7 @@ module AlertManagement
private
- attr_reader :alert, :current_user, :params, :param_errors
+ attr_reader :alert, :current_user, :params, :param_errors, :status
delegate :resolved?, to: :alert
def allowed?
@@ -68,8 +69,12 @@ module AlertManagement
param_errors << message
end
+ def param_errors?
+ params.empty? && status.blank?
+ end
+
def filter_params
- param_errors << _('Please provide attributes to update') if params.empty?
+ param_errors << _('Please provide attributes to update') if param_errors?
filter_status
filter_assignees
@@ -110,9 +115,9 @@ module AlertManagement
# ------ Status-related behavior -------
def filter_status
- return unless params[:status]
+ return unless status
- status_event = AlertManagement::Alert::STATUS_EVENTS[status_key]
+ status_event = alert.status_event_for(status)
unless status_event
param_errors << _('Invalid status')
@@ -122,13 +127,6 @@ module AlertManagement
params[:status_event] = status_event
end
- def status_key
- strong_memoize(:status_key) do
- status = params.delete(:status)
- AlertManagement::Alert::STATUSES.key(status)
- end
- end
-
def handle_status_change
add_status_change_system_note
resolve_todos if resolved?
@@ -144,7 +142,7 @@ module AlertManagement
def filter_duplicate
# Only need to check if changing to an open status
- return unless params[:status_event] && AlertManagement::Alert::OPEN_STATUSES.include?(status_key)
+ return unless params[:status_event] && AlertManagement::Alert.open_status?(status)
param_errors << unresolved_alert_error if duplicate_alert?
end
diff --git a/app/services/alert_management/process_prometheus_alert_service.rb b/app/services/alert_management/process_prometheus_alert_service.rb
index 95ae84a85a4..5c7698f724a 100644
--- a/app/services/alert_management/process_prometheus_alert_service.rb
+++ b/app/services/alert_management/process_prometheus_alert_service.rb
@@ -47,7 +47,7 @@ module AlertManagement
def create_alert_management_alert
if alert.save
alert.execute_services
- SystemNoteService.create_new_alert(alert, Gitlab::AlertManagement::AlertParams::MONITORING_TOOLS[:prometheus])
+ SystemNoteService.create_new_alert(alert, Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus])
return
end
diff --git a/app/services/audit_event_service.rb b/app/services/audit_event_service.rb
index d7630dbdac9..3c21844ec62 100644
--- a/app/services/audit_event_service.rb
+++ b/app/services/audit_event_service.rb
@@ -53,7 +53,6 @@ class AuditEventService
private
- attr_accessor :authentication_event
attr_reader :ip_address
def build_author(author)
@@ -99,23 +98,35 @@ class AuditEventService
end
def mark_as_authentication_event!
- self.authentication_event = true
+ @authentication_event = true
end
def authentication_event?
- authentication_event
+ @authentication_event
end
def log_security_event_to_database
return if Gitlab::Database.read_only?
- AuditEvent.create(base_payload.merge(details: @details))
+ event = AuditEvent.new(base_payload.merge(details: @details))
+ save_or_track event
+
+ event
end
def log_authentication_event_to_database
return unless Gitlab::Database.read_write? && authentication_event?
- AuthenticationEvent.create(authentication_event_payload)
+ event = AuthenticationEvent.new(authentication_event_payload)
+ save_or_track event
+
+ event
+ end
+
+ def save_or_track(event)
+ event.save!
+ rescue => e
+ Gitlab::ErrorTracking.track_exception(e, audit_event_type: event.class.to_s)
end
end
diff --git a/app/services/boards/create_service.rb b/app/services/boards/create_service.rb
index 1a5dc790c41..2ccaea64d14 100644
--- a/app/services/boards/create_service.rb
+++ b/app/services/boards/create_service.rb
@@ -3,7 +3,11 @@
module Boards
class CreateService < Boards::BaseService
def execute
- create_board! if can_create_board?
+ unless can_create_board?
+ return ServiceResponse.error(message: "You don't have the permission to create a board for this resource.")
+ end
+
+ create_board!
end
private
@@ -15,12 +19,16 @@ module Boards
def create_board!
board = parent.boards.create(params)
- if board.persisted?
- board.lists.create(list_type: :backlog)
- board.lists.create(list_type: :closed)
+ unless board.persisted?
+ return ServiceResponse.error(message: "There was an error when creating a board.", payload: board)
+ end
+
+ board.tap do |created_board|
+ created_board.lists.create(list_type: :backlog)
+ created_board.lists.create(list_type: :closed)
end
- board
+ ServiceResponse.success(payload: board)
end
end
end
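Boards::CreateService now returns a ServiceResponse instead of a bare board (or nothing when permission is denied). A minimal sketch of how a caller could consume the new return value, assuming the standard ServiceResponse interface and the usual Boards service signature of (parent, user, params); the caller is illustrative only:

response = Boards::CreateService.new(parent, current_user, name: 'Development').execute

if response.success?
  board = response.payload    # persisted board, already with backlog and closed lists
else
  error = response.message    # permission or persistence error message
end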
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index 140420a32bd..ab9d11abe98 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -80,6 +80,7 @@ module Boards
set_scope
set_non_archived
set_attempt_search_optimizations
+ set_issue_types
params
end
@@ -116,6 +117,10 @@ module Boards
end
end
+ def set_issue_types
+ params[:issue_types] = Issue::TYPES_FOR_LIST
+ end
+
# rubocop: disable CodeReuse/ActiveRecord
def board_label_ids
@board_label_ids ||= board.lists.movable.pluck(:label_id)
diff --git a/app/services/boards/lists/destroy_service.rb b/app/services/boards/lists/destroy_service.rb
index e20805d0405..ebac0f07fe1 100644
--- a/app/services/boards/lists/destroy_service.rb
+++ b/app/services/boards/lists/destroy_service.rb
@@ -4,7 +4,9 @@ module Boards
module Lists
class DestroyService < Boards::BaseService
def execute(list)
- return false unless list.destroyable?
+ unless list.destroyable?
+ return ServiceResponse.error(message: "The list cannot be destroyed. Only label lists can be destroyed.")
+ end
@board = list.board
@@ -12,6 +14,8 @@ module Boards
decrement_higher_lists(list)
remove_list(list)
end
+
+ ServiceResponse.success
end
private
@@ -26,7 +30,7 @@ module Boards
# rubocop: enable CodeReuse/ActiveRecord
def remove_list(list)
- list.destroy
+ list.destroy!
end
end
end
diff --git a/app/services/bulk_create_integration_service.rb b/app/services/bulk_create_integration_service.rb
new file mode 100644
index 00000000000..23b89b0d8a9
--- /dev/null
+++ b/app/services/bulk_create_integration_service.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+class BulkCreateIntegrationService
+ def initialize(integration, batch, association)
+ @integration = integration
+ @batch = batch
+ @association = association
+ end
+
+ def execute
+ service_list = ServiceList.new(batch, service_hash, association).to_array
+
+ Service.transaction do
+ results = bulk_insert(*service_list)
+
+ if integration.data_fields_present?
+ data_list = DataList.new(results, data_fields_hash, integration.data_fields.class).to_array
+
+ bulk_insert(*data_list)
+ end
+
+ run_callbacks(batch) if association == 'project'
+ end
+ end
+
+ private
+
+ attr_reader :integration, :batch, :association
+
+ def bulk_insert(klass, columns, values_array)
+ items_to_insert = values_array.map { |array| Hash[columns.zip(array)] }
+
+ klass.insert_all(items_to_insert, returning: [:id])
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def run_callbacks(batch)
+ if integration.issue_tracker?
+ Project.where(id: batch.select(:id)).update_all(has_external_issue_tracker: true)
+ end
+
+ if integration.type == 'ExternalWikiService'
+ Project.where(id: batch.select(:id)).update_all(has_external_wiki: true)
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def service_hash
+ if integration.template?
+ integration.to_service_hash
+ else
+ integration.to_service_hash.tap { |json| json['inherit_from_id'] = integration.id }
+ end
+ end
+
+ def data_fields_hash
+ integration.to_data_fields_hash
+ end
+end
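BulkCreateIntegrationService extracts the bulk-insert half of the old propagation concern. A usage sketch, assuming it is invoked from a propagation worker with an id-bounded project batch (only the service itself is defined in this diff):

integration = Service.find(integration_id)
batch = Project.where(id: min_id..max_id).without_integration(integration)

# Inserts one Service row per project (plus a data-fields row when present),
# then updates has_external_issue_tracker / has_external_wiki where relevant.
BulkCreateIntegrationService.new(integration, batch, 'project').execute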
diff --git a/app/services/bulk_update_integration_service.rb b/app/services/bulk_update_integration_service.rb
new file mode 100644
index 00000000000..74d77618f2c
--- /dev/null
+++ b/app/services/bulk_update_integration_service.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+class BulkUpdateIntegrationService
+ def initialize(integration, batch)
+ @integration = integration
+ @batch = batch
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def execute
+ Service.transaction do
+ batch.update_all(service_hash)
+
+ if integration.data_fields_present?
+ integration.data_fields.class.where(service_id: batch.select(:id)).update_all(data_fields_hash)
+ end
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ private
+
+ attr_reader :integration, :batch
+
+ def service_hash
+ integration.to_service_hash.tap { |json| json['inherit_from_id'] = integration.id }
+ end
+
+ def data_fields_hash
+ integration.to_data_fields_hash
+ end
+end
diff --git a/app/services/ci/archive_trace_service.rb b/app/services/ci/archive_trace_service.rb
index 073ef465e13..9b2c7788897 100644
--- a/app/services/ci/archive_trace_service.rb
+++ b/app/services/ci/archive_trace_service.rb
@@ -27,7 +27,7 @@ module Ci
rescue => e
# Tracks this error with application logs, Sentry, and Prometheus.
# If `archive!` keeps failing for over a week, that could incur data loss.
- # (See more https://docs.gitlab.com/ee/administration/job_traces.html#new-live-trace-architecture)
+ # (See more https://docs.gitlab.com/ee/administration/job_logs.html#new-incremental-logging-architecture)
# In order to avoid interrupting the system, we do not raise an exception here.
archive_error(e, job, worker_name)
end
diff --git a/app/services/ci/build_report_result_service.rb b/app/services/ci/build_report_result_service.rb
index ca66ad8249d..76ecf428f11 100644
--- a/app/services/ci/build_report_result_service.rb
+++ b/app/services/ci/build_report_result_service.rb
@@ -2,12 +2,20 @@
module Ci
class BuildReportResultService
+ include Gitlab::Utils::UsageData
+
+ EVENT_NAME = 'i_testing_test_case_parsed'
+
def execute(build)
return unless build.has_test_reports?
+ test_suite = generate_test_suite_report(build)
+
+ track_test_cases(build, test_suite)
+
build.report_results.create!(
project_id: build.project_id,
- data: tests_params(build)
+ data: tests_params(test_suite)
)
end
@@ -17,9 +25,7 @@ module Ci
build.collect_test_reports!(Gitlab::Ci::Reports::TestReports.new)
end
- def tests_params(build)
- test_suite = generate_test_suite_report(build)
-
+ def tests_params(test_suite)
{
tests: {
name: test_suite.name,
@@ -31,5 +37,20 @@ module Ci
}
}
end
+
+ def track_test_cases(build, test_suite)
+ return if Feature.disabled?(:track_unique_test_cases_parsed, build.project)
+
+ track_usage_event(EVENT_NAME, test_case_hashes(build, test_suite))
+ end
+
+ def test_case_hashes(build, test_suite)
+ [].tap do |hashes|
+ test_suite.each_test_case do |test_case|
+ key = "#{build.project_id}-#{test_case.key}"
+ hashes << Digest::SHA256.hexdigest(key)
+ end
+ end
+ end
end
end
diff --git a/app/services/ci/create_downstream_pipeline_service.rb b/app/services/ci/create_downstream_pipeline_service.rb
index 0394cfb6119..86d0cf079fc 100644
--- a/app/services/ci/create_downstream_pipeline_service.rb
+++ b/app/services/ci/create_downstream_pipeline_service.rb
@@ -33,7 +33,7 @@ module Ci
pipeline_params.fetch(:target_revision))
downstream_pipeline = service.execute(
- pipeline_params.fetch(:source), pipeline_params[:execute_params]) do |pipeline|
+ pipeline_params.fetch(:source), **pipeline_params[:execute_params]) do |pipeline|
pipeline.variables.build(@bridge.downstream_variables)
end
@@ -77,16 +77,9 @@ module Ci
# TODO: Remove this condition if favour of model validation
# https://gitlab.com/gitlab-org/gitlab/issues/38338
- if ::Gitlab::Ci::Features.child_of_child_pipeline_enabled?(project)
- if has_max_descendants_depth?
- @bridge.drop!(:reached_max_descendant_pipelines_depth)
- return false
- end
- else
- if @bridge.triggers_child_pipeline? && @bridge.pipeline.parent_pipeline.present?
- @bridge.drop!(:bridge_pipeline_is_child_pipeline)
- return false
- end
+ if has_max_descendants_depth?
+ @bridge.drop!(:reached_max_descendant_pipelines_depth)
+ return false
end
unless can_create_downstream_pipeline?(target_ref)
diff --git a/app/services/ci/create_job_artifacts_service.rb b/app/services/ci/create_job_artifacts_service.rb
index 1fe65898d55..5efb3805bf7 100644
--- a/app/services/ci/create_job_artifacts_service.rb
+++ b/app/services/ci/create_job_artifacts_service.rb
@@ -52,24 +52,15 @@ module Ci
attr_reader :job, :project
def validate_requirements(artifact_type:, filesize:)
- return forbidden_type_error(artifact_type) if forbidden_type?(artifact_type)
return too_large_error if too_large?(artifact_type, filesize)
success
end
- def forbidden_type?(type)
- lsif?(type) && !code_navigation_enabled?
- end
-
def too_large?(type, size)
size > max_size(type) if size
end
- def code_navigation_enabled?
- Feature.enabled?(:code_navigation, project, default_enabled: true)
- end
-
def lsif?(type)
type == LSIF_ARTIFACT_TYPE
end
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 70ad18e80eb..e7ede98fea4 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -70,7 +70,7 @@ module Ci
push_options: params[:push_options] || {},
chat_data: params[:chat_data],
bridge: bridge,
- **extra_options(options))
+ **extra_options(**options))
# Ensure we never persist the pipeline when dry_run: true
@pipeline.readonly! if command.dry_run?
@@ -82,8 +82,7 @@ module Ci
schedule_head_pipeline_update if pipeline.persisted?
# If pipeline is not persisted, try to recover IID
- pipeline.reset_project_iid unless pipeline.persisted? ||
- Feature.disabled?(:ci_pipeline_rewind_iid, project, default_enabled: true)
+ pipeline.reset_project_iid unless pipeline.persisted?
pipeline
end
diff --git a/app/services/ci/daily_build_group_report_result_service.rb b/app/services/ci/daily_build_group_report_result_service.rb
index 6cdf3c88f8c..c32fc27c274 100644
--- a/app/services/ci/daily_build_group_report_result_service.rb
+++ b/app/services/ci/daily_build_group_report_result_service.rb
@@ -3,8 +3,6 @@
module Ci
class DailyBuildGroupReportResultService
def execute(pipeline)
- return unless Feature.enabled?(:ci_daily_code_coverage, pipeline.project, default_enabled: true)
-
DailyBuildGroupReportResult.upsert_reports(coverage_reports(pipeline))
end
diff --git a/app/services/ci/delete_objects_service.rb b/app/services/ci/delete_objects_service.rb
new file mode 100644
index 00000000000..bac99abadc9
--- /dev/null
+++ b/app/services/ci/delete_objects_service.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Ci
+ class DeleteObjectsService
+ TransactionInProgressError = Class.new(StandardError)
+ TRANSACTION_MESSAGE = "can't perform network calls inside a database transaction"
+ BATCH_SIZE = 100
+ RETRY_IN = 10.minutes
+
+ def execute
+ objects = load_next_batch
+ destroy_everything(objects)
+ end
+
+ def remaining_batches_count(max_batch_count:)
+ Ci::DeletedObject
+ .ready_for_destruction(max_batch_count * BATCH_SIZE)
+ .size
+ .fdiv(BATCH_SIZE)
+ .ceil
+ end
+
+ private
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def load_next_batch
+ # `find_by_sql` performs a write in this case and we need to wrap it in
+ # a transaction to stick to the primary database.
+ Ci::DeletedObject.transaction do
+ Ci::DeletedObject.find_by_sql([
+ next_batch_sql, new_pick_up_at: RETRY_IN.from_now
+ ])
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def next_batch_sql
+ <<~SQL.squish
+ UPDATE "ci_deleted_objects"
+ SET "pick_up_at" = :new_pick_up_at
+ WHERE "ci_deleted_objects"."id" IN (#{locked_object_ids_sql})
+ RETURNING *
+ SQL
+ end
+
+ def locked_object_ids_sql
+ Ci::DeletedObject.lock_for_destruction(BATCH_SIZE).to_sql
+ end
+
+ def destroy_everything(objects)
+ raise TransactionInProgressError, TRANSACTION_MESSAGE if transaction_open?
+ return unless objects.any?
+
+ deleted = objects.select(&:delete_file_from_storage)
+ Ci::DeletedObject.id_in(deleted.map(&:id)).delete_all
+ end
+
+ def transaction_open?
+ Ci::DeletedObject.connection.transaction_open?
+ end
+ end
+end
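Ci::DeleteObjectsService treats ci_deleted_objects as a small work queue: load_next_batch claims rows by bumping pick_up_at through an UPDATE ... RETURNING over the locked-ids subquery, and destroy_everything then removes the files and the claimed rows. A sketch of how a calling worker might drain the queue; the caller is assumed, only the service is in this diff:

service = Ci::DeleteObjectsService.new

# Claims up to BATCH_SIZE rows (claimable again after RETRY_IN if this process
# dies mid-way), deletes the files from storage, then deletes the queue rows.
service.execute

# A scheduling worker could use this estimate to decide whether to re-enqueue
# itself for another pass.
service.remaining_batches_count(max_batch_count: 20)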
diff --git a/app/services/ci/destroy_expired_job_artifacts_service.rb b/app/services/ci/destroy_expired_job_artifacts_service.rb
index ca6e60f819a..438b5c7496d 100644
--- a/app/services/ci/destroy_expired_job_artifacts_service.rb
+++ b/app/services/ci/destroy_expired_job_artifacts_service.rb
@@ -39,6 +39,13 @@ module Ci
return false if artifacts.empty?
artifacts.each(&:destroy!)
+ run_after_destroy(artifacts)
+
+ true # This is required because of the design of the `loop_until` method.
end
+
+ def run_after_destroy(artifacts); end
end
end
+
+Ci::DestroyExpiredJobArtifactsService.prepend_if_ee('EE::Ci::DestroyExpiredJobArtifactsService')
diff --git a/app/services/ci/expire_pipeline_cache_service.rb b/app/services/ci/expire_pipeline_cache_service.rb
index 32abd1a7626..8343e0f8cd0 100644
--- a/app/services/ci/expire_pipeline_cache_service.rb
+++ b/app/services/ci/expire_pipeline_cache_service.rb
@@ -32,11 +32,18 @@ module Ci
Gitlab::Routing.url_helpers.project_new_merge_request_path(project, format: :json)
end
+ def pipelines_project_merge_request_path(merge_request)
+ Gitlab::Routing.url_helpers.pipelines_project_merge_request_path(merge_request.target_project, merge_request, format: :json)
+ end
+
+ def merge_request_widget_path(merge_request)
+ Gitlab::Routing.url_helpers.cached_widget_project_json_merge_request_path(merge_request.project, merge_request, format: :json)
+ end
+
def each_pipelines_merge_request_path(pipeline)
pipeline.all_merge_requests.each do |merge_request|
- path = Gitlab::Routing.url_helpers.pipelines_project_merge_request_path(merge_request.target_project, merge_request, format: :json)
-
- yield(path)
+ yield(pipelines_project_merge_request_path(merge_request))
+ yield(merge_request_widget_path(merge_request))
end
end
diff --git a/app/services/ci/list_config_variables_service.rb b/app/services/ci/list_config_variables_service.rb
new file mode 100644
index 00000000000..b5dc192b512
--- /dev/null
+++ b/app/services/ci/list_config_variables_service.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Ci
+ class ListConfigVariablesService < ::BaseService
+ def execute(sha)
+ config = project.ci_config_for(sha)
+ return {} unless config
+
+ result = Gitlab::Ci::YamlProcessor.new(config).execute
+ result.valid? ? result.variables_with_data : {}
+ end
+ end
+end
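A usage sketch for ListConfigVariablesService, assuming the standard BaseService signature of (project, user, params); the return value is whatever YamlProcessor#variables_with_data produces for a valid config, or an empty hash otherwise:

variables = Ci::ListConfigVariablesService
  .new(project, current_user)
  .execute(project.commit.sha)

variables # => {} when the CI config is missing or invalid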
diff --git a/app/services/ci/pipelines/create_artifact_service.rb b/app/services/ci/pipelines/create_artifact_service.rb
index b7d334e436d..bfaf317241a 100644
--- a/app/services/ci/pipelines/create_artifact_service.rb
+++ b/app/services/ci/pipelines/create_artifact_service.rb
@@ -3,7 +3,6 @@ module Ci
module Pipelines
class CreateArtifactService
def execute(pipeline)
- return unless ::Gitlab::Ci::Features.coverage_report_view?(pipeline.project)
return unless pipeline.can_generate_coverage_reports?
return if pipeline.has_coverage_reports?
diff --git a/app/services/ci/play_bridge_service.rb b/app/services/ci/play_bridge_service.rb
new file mode 100644
index 00000000000..70c4a8e6136
--- /dev/null
+++ b/app/services/ci/play_bridge_service.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Ci
+ class PlayBridgeService < ::BaseService
+ def execute(bridge)
+ raise Gitlab::Access::AccessDeniedError unless can?(current_user, :play_job, bridge)
+
+ bridge.tap do |bridge|
+ bridge.user = current_user
+ bridge.enqueue!
+ end
+ end
+ end
+end
diff --git a/app/services/ci/play_build_service.rb b/app/services/ci/play_build_service.rb
index 9f922ffde81..6adeca624a8 100644
--- a/app/services/ci/play_build_service.rb
+++ b/app/services/ci/play_build_service.rb
@@ -3,9 +3,7 @@
module Ci
class PlayBuildService < ::BaseService
def execute(build, job_variables_attributes = nil)
- unless can?(current_user, :update_build, build)
- raise Gitlab::Access::AccessDeniedError
- end
+ raise Gitlab::Access::AccessDeniedError unless can?(current_user, :play_job, build)
# Try to enqueue the build, otherwise create a duplicate.
#
diff --git a/app/services/ci/play_manual_stage_service.rb b/app/services/ci/play_manual_stage_service.rb
index 2497fc52e6b..c6fa7803e52 100644
--- a/app/services/ci/play_manual_stage_service.rb
+++ b/app/services/ci/play_manual_stage_service.rb
@@ -9,12 +9,12 @@ module Ci
end
def execute(stage)
- stage.builds.manual.each do |build|
- next unless build.playable?
+ stage.processables.manual.each do |processable|
+ next unless processable.playable?
- build.play(current_user)
+ processable.play(current_user)
rescue Gitlab::Access::AccessDeniedError
- logger.error(message: 'Unable to play manual action', build_id: build.id)
+ logger.error(message: 'Unable to play manual action', processable_id: processable.id)
end
end
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index 18bae26613f..e511e26adfe 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -31,14 +31,14 @@ module Ci
# rubocop: disable CodeReuse/ActiveRecord
def update_retried
# find the latest builds for each name
- latest_statuses = pipeline.statuses.latest
+ latest_statuses = pipeline.latest_statuses
.group(:name)
.having('count(*) > 1')
.pluck(Arel.sql('MAX(id)'), 'name')
# mark builds that are retried
if latest_statuses.any?
- pipeline.statuses.latest
+ pipeline.latest_statuses
.where(name: latest_statuses.map(&:second))
.where.not(id: latest_statuses.map(&:first))
.update_all(retried: true)
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index 6b2e6c245f3..f397ada0696 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -58,7 +58,7 @@ module Ci
build = project.builds.new(attributes)
build.assign_attributes(::Gitlab::Ci::Pipeline::Seed::Build.environment_attributes_for(build))
build.retried = false
- BulkInsertableAssociations.with_bulk_insert(enabled: ::Gitlab::Ci::Features.bulk_insert_on_create?(project)) do
+ BulkInsertableAssociations.with_bulk_insert do
build.save!
end
build
diff --git a/app/services/ci/update_build_queue_service.rb b/app/services/ci/update_build_queue_service.rb
index 31c7178c9e7..241eba733ea 100644
--- a/app/services/ci/update_build_queue_service.rb
+++ b/app/services/ci/update_build_queue_service.rb
@@ -9,9 +9,7 @@ module Ci
private
def tick_for(build, runners)
- if Feature.enabled?(:ci_update_queues_for_online_runners, build.project, default_enabled: true)
- runners = runners.with_recent_runner_queue
- end
+ runners = runners.with_recent_runner_queue
runners.each do |runner|
runner.pick_build!(build)
diff --git a/app/services/ci/update_build_state_service.rb b/app/services/ci/update_build_state_service.rb
index 61e4c77c1e5..22a27906700 100644
--- a/app/services/ci/update_build_state_service.rb
+++ b/app/services/ci/update_build_state_service.rb
@@ -2,7 +2,11 @@
module Ci
class UpdateBuildStateService
- Result = Struct.new(:status, keyword_init: true)
+ include ::Gitlab::Utils::StrongMemoize
+ include ::Gitlab::ExclusiveLeaseHelpers
+
+ Result = Struct.new(:status, :backoff, keyword_init: true)
+ InvalidTraceError = Class.new(StandardError)
ACCEPT_TIMEOUT = 5.minutes.freeze
@@ -17,43 +21,77 @@ module Ci
def execute
overwrite_trace! if has_trace?
- if accept_request?
- accept_build_state!
- else
- check_migration_state
- update_build_state!
+ unless accept_available?
+ return update_build_state!
+ end
+
+ ensure_pending_state!
+
+ in_build_trace_lock do
+ process_build_state!
end
end
private
- def accept_build_state!
- if Time.current - ensure_pending_state.created_at > ACCEPT_TIMEOUT
- metrics.increment_trace_operation(operation: :discarded)
+ def overwrite_trace!
+ metrics.increment_trace_operation(operation: :overwrite)
- return update_build_state!
+ build.trace.set(params[:trace]) if Gitlab::Ci::Features.trace_overwrite?
+ end
+
+ def ensure_pending_state!
+ pending_state.created_at
+ end
+
+ def process_build_state!
+ if live_chunks_pending?
+ if pending_state_outdated?
+ discard_build_trace!
+ update_build_state!
+ else
+ accept_build_state!
+ end
+ else
+ validate_build_trace!
+ update_build_state!
end
+ end
+ def accept_build_state!
build.trace_chunks.live.find_each do |chunk|
chunk.schedule_to_persist!
end
metrics.increment_trace_operation(operation: :accepted)
- Result.new(status: 202)
+ ::Gitlab::Ci::Runner::Backoff.new(pending_state.created_at).then do |backoff|
+ Result.new(status: 202, backoff: backoff.to_seconds)
+ end
end
- def overwrite_trace!
- metrics.increment_trace_operation(operation: :overwrite)
+ def validate_build_trace!
+ return unless has_chunks?
- build.trace.set(params[:trace]) if Gitlab::Ci::Features.trace_overwrite?
- end
+ unless live_chunks_pending?
+ metrics.increment_trace_operation(operation: :finalized)
+ metrics.observe_migration_duration(pending_state_seconds)
+ end
- def check_migration_state
- return unless accept_available?
+ ::Gitlab::Ci::Trace::Checksum.new(build).then do |checksum|
+ unless checksum.valid?
+ metrics.increment_trace_operation(operation: :invalid)
- if has_chunks? && !live_chunks_pending?
- metrics.increment_trace_operation(operation: :finalized)
+ next unless log_invalid_chunks?
+
+ ::Gitlab::ErrorTracking.log_exception(InvalidTraceError.new,
+ project_path: build.project.full_path,
+ build_id: build.id,
+ state_crc32: checksum.state_crc32,
+ chunks_crc32: checksum.chunks_crc32,
+ chunks_count: checksum.chunks_count
+ )
+ end
end
end
@@ -76,12 +114,32 @@ module Ci
end
end
+ def discard_build_trace!
+ metrics.increment_trace_operation(operation: :discarded)
+ end
+
def accept_available?
!build_running? && has_checksum? && chunks_migration_enabled?
end
- def accept_request?
- accept_available? && live_chunks_pending?
+ def live_chunks_pending?
+ build.trace_chunks.live.any?
+ end
+
+ def has_chunks?
+ build.trace_chunks.any?
+ end
+
+ def pending_state_outdated?
+ pending_state_duration > ACCEPT_TIMEOUT
+ end
+
+ def pending_state_duration
+ Time.current - pending_state.created_at
+ end
+
+ def pending_state_seconds
+ pending_state_duration.seconds
end
def build_state
@@ -96,18 +154,14 @@ module Ci
params.dig(:checksum).present?
end
- def has_chunks?
- build.trace_chunks.any?
- end
-
- def live_chunks_pending?
- build.trace_chunks.live.any?
- end
-
def build_running?
build_state == 'running'
end
+ def pending_state
+ strong_memoize(:pending_state) { ensure_pending_state }
+ end
+
def ensure_pending_state
Ci::BuildPendingState.create_or_find_by!(
build_id: build.id,
@@ -121,8 +175,38 @@ module Ci
build.pending_state
end
+ ##
+ # This method is releasing an exclusive lock on a build trace the moment we
+ # conclude that build status has been written and the build state update
+ # has been committed to the database.
+ #
+ # Because a build state machine schedules a bunch of workers to run after
+ # build status transition to complete, we do not want to keep the lease
+ # until all the workers are scheduled because it opens a possibility of
+ # race conditions happening.
+ #
+ # Instead of keeping the lease until the transition is fully done and
+ # workers are scheduled, we immediately release the lock after the database
+ # commit happens.
+ #
+ def in_build_trace_lock(&block)
+ build.trace.lock do |_, lease| # rubocop:disable CodeReuse/ActiveRecord
+ build.run_on_status_commit { lease.cancel }
+
+ yield
+ end
+ rescue ::Gitlab::Ci::Trace::LockedError
+ metrics.increment_trace_operation(operation: :locked)
+
+ accept_build_state!
+ end
+
def chunks_migration_enabled?
::Gitlab::Ci::Features.accept_trace?(build.project)
end
+
+ def log_invalid_chunks?
+ ::Gitlab::Ci::Features.log_invalid_trace_chunks?(build.project)
+ end
end
end
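The Result struct now also carries a backoff so the runner can be told how long to wait while live trace chunks are still being migrated. A small sketch of how a caller might read it; the initializer arguments are assumed, since they sit outside this hunk:

result = Ci::UpdateBuildStateService.new(build, params).execute

result.status   # HTTP status for the runner-facing API to return, e.g. 200 or 202
result.backoff  # seconds the runner should wait before retrying (set on the 202 path)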
diff --git a/app/services/clusters/kubernetes/create_or_update_service_account_service.rb b/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
index b1820474c9d..2725a3aeaa5 100644
--- a/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
+++ b/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
@@ -117,9 +117,11 @@ module Clusters
end
def role_binding_resource
+ role_name = Feature.enabled?(:kubernetes_cluster_namespace_role_admin) ? 'admin' : Clusters::Kubernetes::PROJECT_CLUSTER_ROLE_NAME
+
Gitlab::Kubernetes::RoleBinding.new(
name: role_binding_name,
- role_name: Clusters::Kubernetes::PROJECT_CLUSTER_ROLE_NAME,
+ role_name: role_name,
role_kind: :ClusterRole,
namespace: service_account_namespace,
service_account_name: service_account_name
diff --git a/app/services/concerns/admin/propagate_service.rb b/app/services/concerns/admin/propagate_service.rb
index 974408f678c..065ab6f7ff9 100644
--- a/app/services/concerns/admin/propagate_service.rb
+++ b/app/services/concerns/admin/propagate_service.rb
@@ -4,9 +4,7 @@ module Admin
module PropagateService
extend ActiveSupport::Concern
- BATCH_SIZE = 100
-
- delegate :data_fields_present?, to: :integration
+ BATCH_SIZE = 10_000
class_methods do
def propagate(integration)
@@ -23,51 +21,10 @@ module Admin
attr_reader :integration
def create_integration_for_projects_without_integration
- loop do
- batch_ids = Project.uncached { Project.ids_without_integration(integration, BATCH_SIZE) }
-
- bulk_create_from_integration(batch_ids, 'project') unless batch_ids.empty?
-
- break if batch_ids.size < BATCH_SIZE
- end
- end
-
- def bulk_create_from_integration(batch_ids, association)
- service_list = ServiceList.new(batch_ids, service_hash, association).to_array
-
- Service.transaction do
- results = bulk_insert(*service_list)
-
- if data_fields_present?
- data_list = DataList.new(results, data_fields_hash, integration.data_fields.class).to_array
-
- bulk_insert(*data_list)
- end
-
- run_callbacks(batch_ids) if association == 'project'
+ Project.without_integration(integration).each_batch(of: BATCH_SIZE) do |projects|
+ min_id, max_id = projects.pick("MIN(projects.id), MAX(projects.id)")
+ PropagateIntegrationProjectWorker.perform_async(integration.id, min_id, max_id)
end
end
-
- def bulk_insert(klass, columns, values_array)
- items_to_insert = values_array.map { |array| Hash[columns.zip(array)] }
-
- klass.insert_all(items_to_insert, returning: [:id])
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def run_callbacks(batch_ids)
- if integration.issue_tracker?
- Project.where(id: batch_ids).update_all(has_external_issue_tracker: true)
- end
-
- if integration.type == 'ExternalWikiService'
- Project.where(id: batch_ids).update_all(has_external_wiki: true)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def data_fields_hash
- @data_fields_hash ||= integration.to_data_fields_hash
- end
end
end
diff --git a/app/services/deployments/after_create_service.rb b/app/services/deployments/update_environment_service.rb
index 3560f9c983b..e9c2f41f626 100644
--- a/app/services/deployments/after_create_service.rb
+++ b/app/services/deployments/update_environment_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module Deployments
- class AfterCreateService
+ class UpdateEnvironmentService
attr_reader :deployment
attr_reader :deployable
@@ -64,4 +64,4 @@ module Deployments
end
end
-Deployments::AfterCreateService.prepend_if_ee('EE::Deployments::AfterCreateService')
+Deployments::UpdateEnvironmentService.prepend_if_ee('EE::Deployments::UpdateEnvironmentService')
diff --git a/app/services/design_management/copy_design_collection.rb b/app/services/design_management/copy_design_collection.rb
new file mode 100644
index 00000000000..66cf6112062
--- /dev/null
+++ b/app/services/design_management/copy_design_collection.rb
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+
+module DesignManagement
+ module CopyDesignCollection
+ end
+end
diff --git a/app/services/design_management/copy_design_collection/copy_service.rb b/app/services/design_management/copy_design_collection/copy_service.rb
new file mode 100644
index 00000000000..5099c2c5704
--- /dev/null
+++ b/app/services/design_management/copy_design_collection/copy_service.rb
@@ -0,0 +1,309 @@
+# frozen_string_literal: true
+
+# Service to copy a DesignCollection from one Issue to another.
+# Copies the DesignCollection's Designs, Versions, and Notes on Designs.
+module DesignManagement
+ module CopyDesignCollection
+ class CopyService < DesignService
+ # rubocop: disable CodeReuse/ActiveRecord
+ def initialize(project, user, params = {})
+ super
+
+ @target_issue = params.fetch(:target_issue)
+ @target_project = @target_issue.project
+ @target_repository = @target_project.design_repository
+ @target_design_collection = @target_issue.design_collection
+ @temporary_branch = "CopyDesignCollectionService_#{SecureRandom.hex}"
+ # The user who triggered the copy may not have permissions to push
+ # to the design repository.
+ @git_user = @target_project.default_owner
+
+ @designs = DesignManagement::Design.unscoped.where(issue: issue).order(:id).load
+ @versions = DesignManagement::Version.unscoped.where(issue: issue).order(:id).includes(:designs).load
+
+ @sha_attribute = Gitlab::Database::ShaAttribute.new
+ @shas = []
+ @event_enum_map = DesignManagement::DesignAction::EVENT_FOR_GITALY_ACTION.invert
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def execute
+ return error('User cannot copy design collection to issue') unless user_can_copy?
+ return error('Target design collection must first be queued') unless target_design_collection.copy_in_progress?
+ return error('Design collection has no designs') if designs.empty?
+ return error('Target design collection already has designs') unless target_design_collection.empty?
+
+ with_temporary_branch do
+ copy_commits!
+
+ ActiveRecord::Base.transaction do
+ design_ids = copy_designs!
+ version_ids = copy_versions!
+ copy_actions!(design_ids, version_ids)
+ link_lfs_files!
+ copy_notes!(design_ids)
+ finalize!
+ end
+ end
+
+ ServiceResponse.success
+ rescue => error
+ log_exception(error)
+
+ target_design_collection.error_copy!
+
+ error('Designs were unable to be copied successfully')
+ end
+
+ private
+
+ attr_reader :designs, :event_enum_map, :git_user, :sha_attribute, :shas,
+ :temporary_branch, :target_design_collection, :target_issue,
+ :target_repository, :target_project, :versions
+
+ alias_method :merge_branch, :target_branch
+
+ def log_exception(exception)
+ payload = {
+ issue_id: issue.id,
+ project_id: project.id,
+ target_issue_id: target_issue.id,
+ target_project: target_project.id
+ }
+
+ Gitlab::ErrorTracking.track_exception(exception, payload)
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def user_can_copy?
+ current_user.can?(:read_design, design_collection) &&
+ current_user.can?(:admin_issue, target_issue)
+ end
+
+ def with_temporary_branch(&block)
+ target_repository.create_if_not_exists
+
+ create_master_branch! if target_repository.empty?
+ create_temporary_branch!
+
+ yield
+ ensure
+ remove_temporary_branch!
+ end
+
+ # A project that does not have any designs will have a blank design
+ # repository. To create a temporary branch from `master` we need to
+ # create `master` first by adding a file to it.
+ def create_master_branch!
+ target_repository.create_file(
+ git_user,
+ ".CopyDesignCollectionService_#{Time.now.to_i}",
+ '.gitlab',
+ message: "Commit to create #{merge_branch} branch in CopyDesignCollectionService",
+ branch_name: merge_branch
+ )
+ end
+
+ def create_temporary_branch!
+ target_repository.add_branch(
+ git_user,
+ temporary_branch,
+ target_repository.root_ref
+ )
+ end
+
+ def remove_temporary_branch!
+ return unless target_repository.branch_exists?(temporary_branch)
+
+ target_repository.rm_branch(git_user, temporary_branch)
+ end
+
+ # Merge the temporary branch containing the commits to `master`
+ # and update the state of the target_design_collection.
+ def finalize!
+ source_sha = shas.last
+
+ target_repository.raw.merge(
+ git_user,
+ source_sha,
+ merge_branch,
+ 'CopyDesignCollectionService finalize merge'
+ ) { nil }
+
+ target_design_collection.end_copy!
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def copy_commits!
+ # Execute another query to include actions and their designs
+ DesignManagement::Version.unscoped.where(id: versions).order(:id).includes(actions: :design).find_each(batch_size: 100) do |version|
+ gitaly_actions = version.actions.map do |action|
+ design = action.design
+ # Map the raw Action#event enum value to a Gitaly "action" for the
+ # `Repository#multi_action` call.
+ gitaly_action_name = @event_enum_map[action.event_before_type_cast]
+ # `content` will be the LfsPointer file and not the design file,
+ # and can be nil for deletions.
+ content = blobs.dig(version.sha, design.filename)&.data
+ file_path = DesignManagement::Design.build_full_path(target_issue, design)
+
+ {
+ action: gitaly_action_name,
+ file_path: file_path,
+ content: content
+ }.compact
+ end
+
+ sha = target_repository.multi_action(
+ git_user,
+ branch_name: temporary_branch,
+ message: commit_message(version),
+ actions: gitaly_actions
+ )
+
+ shas << sha
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def copy_designs!
+ design_attributes = attributes_config[:design_attributes]
+
+ new_rows = designs.map do |design|
+ design.attributes.slice(*design_attributes).merge(
+ issue_id: target_issue.id,
+ project_id: target_project.id
+ )
+ end
+
+ # TODO Replace `Gitlab::Database.bulk_insert` with `BulkInsertSafe`
+ # once https://gitlab.com/gitlab-org/gitlab/-/issues/247718 is fixed.
+ ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ DesignManagement::Design.table_name,
+ new_rows,
+ return_ids: true
+ )
+ end
+
+ def copy_versions!
+ version_attributes = attributes_config[:version_attributes]
+ # `shas` are the list of Git commits made during the Git copy phase,
+ # and will be ordered 1:1 with old versions
+ shas_enum = shas.to_enum
+
+ new_rows = versions.map do |version|
+ version.attributes.slice(*version_attributes).merge(
+ issue_id: target_issue.id,
+ sha: sha_attribute.serialize(shas_enum.next)
+ )
+ end
+
+ # TODO Replace `Gitlab::Database.bulk_insert` with `BulkInsertSafe`
+ # once https://gitlab.com/gitlab-org/gitlab/-/issues/247718 is fixed.
+ ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ DesignManagement::Version.table_name,
+ new_rows,
+ return_ids: true
+ )
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def copy_actions!(new_design_ids, new_version_ids)
+ # Create a map of <Old design id> => <New design id>
+ design_id_map = new_design_ids.each_with_index.to_h do |design_id, i|
+ [designs[i].id, design_id]
+ end
+
+ # Create a map of <Old version id> => <New version id>
+ version_id_map = new_version_ids.each_with_index.to_h do |version_id, i|
+ [versions[i].id, version_id]
+ end
+
+ actions = DesignManagement::Action.unscoped.select(:design_id, :version_id, :event).where(design: designs, version: versions)
+
+ new_rows = actions.map do |action|
+ {
+ design_id: design_id_map[action.design_id],
+ version_id: version_id_map[action.version_id],
+ event: action.event_before_type_cast
+ }
+ end
+
+ # We cannot use `BulkInsertSafe` because of the uploader mounted in `Action`.
+ ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ DesignManagement::Action.table_name,
+ new_rows
+ )
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def commit_message(version)
+ "Copy commit #{version.sha} from issue #{issue.to_reference(full: true)}"
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def copy_notes!(design_ids)
+ new_designs = DesignManagement::Design.unscoped.find(design_ids)
+
+ # Execute another query to filter only designs with notes
+ DesignManagement::Design.unscoped.where(id: designs).joins(:notes).distinct.find_each(batch_size: 100) do |old_design|
+ new_design = new_designs.find { |d| d.filename == old_design.filename }
+
+ Notes::CopyService.new(current_user, old_design, new_design).execute
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def link_lfs_files!
+ oids = blobs.values.flat_map(&:values).map(&:lfs_oid)
+ repository_type = LfsObjectsProject.repository_types[:design]
+
+ new_rows = LfsObject.where(oid: oids).find_each(batch_size: 1000).map do |lfs_object|
+ {
+ project_id: target_project.id,
+ lfs_object_id: lfs_object.id,
+ repository_type: repository_type
+ }
+ end
+
+ # We cannot use `BulkInsertSafe` due to the LfsObjectsProject#update_project_statistics
+ # callback that fires after_commit.
+ ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ LfsObjectsProject.table_name,
+ new_rows,
+ on_conflict: :do_nothing # Upsert
+ )
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Blob data is used to find the oids for LfsObjects and to copy to Git.
+ # Blobs are reasonably small in memory, as their data are LFS Pointer files.
+ #
+ # Returns all blobs for the designs as a Hash of `{ Blob#commit_id => { Design#filename => Blob } }`
+ def blobs
+ @blobs ||= begin
+ items = versions.flat_map { |v| v.designs.map { |d| [v.sha, DesignManagement::Design.build_full_path(issue, d)] } }
+
+ repository.blobs_at(items).each_with_object({}) do |blob, h|
+ design = designs.find { |d| DesignManagement::Design.build_full_path(issue, d) == blob.path }
+
+ h[blob.commit_id] ||= {}
+ h[blob.commit_id][design.filename] = blob
+ end
+ end
+ end
+
+ def attributes_config
+ @attributes_config ||= YAML.load_file(attributes_config_file).symbolize_keys
+ end
+
+ def attributes_config_file
+ Rails.root.join('lib/gitlab/design_management/copy_design_collection_model_attributes.yml')
+ end
+ end
+ end
+end
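copy_actions! depends on the ids returned by the bulk inserts lining up positionally with the source records (both are ordered by id). A worked example of that mapping with illustrative values:

old_design_ids = designs.map(&:id)   # e.g. [11, 12, 13]
new_design_ids = [201, 202, 203]     # e.g. ids returned by copy_designs!

# Index i refers to the same design in both lists, so zip them into a lookup.
design_id_map = new_design_ids.each_with_index.to_h do |new_id, i|
  [old_design_ids[i], new_id]
end
# => { 11 => 201, 12 => 202, 13 => 203 }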
diff --git a/app/services/design_management/copy_design_collection/queue_service.rb b/app/services/design_management/copy_design_collection/queue_service.rb
new file mode 100644
index 00000000000..f76917dbe47
--- /dev/null
+++ b/app/services/design_management/copy_design_collection/queue_service.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+# Service for setting the initial copy_state on the target DesignCollection
+# and queuing a CopyDesignCollectionWorker.
+module DesignManagement
+ module CopyDesignCollection
+ class QueueService
+ def initialize(current_user, issue, target_issue)
+ @current_user = current_user
+ @issue = issue
+ @target_issue = target_issue
+ @target_design_collection = target_issue.design_collection
+ end
+
+ def execute
+ return error('User cannot copy designs to issue') unless user_can_copy?
+ return error('Target design collection copy state must be `ready`') unless target_design_collection.can_start_copy?
+
+ target_design_collection.start_copy!
+
+ DesignManagement::CopyDesignCollectionWorker.perform_async(current_user.id, issue.id, target_issue.id)
+
+ ServiceResponse.success
+ end
+
+ private
+
+ delegate :design_collection, to: :issue
+
+ attr_reader :current_user, :issue, :target_design_collection, :target_issue
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def user_can_copy?
+ current_user.can?(:read_design, issue) &&
+ current_user.can?(:admin_issue, target_issue)
+ end
+ end
+ end
+end
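A minimal usage sketch for the queue service; the constructor signature comes from the code above, the surrounding caller is assumed:

response = DesignManagement::CopyDesignCollection::QueueService
  .new(current_user, source_issue, target_issue)
  .execute

# true once the target collection is marked in-progress and
# DesignManagement::CopyDesignCollectionWorker has been enqueued.
response.success?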
diff --git a/app/services/design_management/delete_designs_service.rb b/app/services/design_management/delete_designs_service.rb
index 5d875c630a0..a90c34d4e34 100644
--- a/app/services/design_management/delete_designs_service.rb
+++ b/app/services/design_management/delete_designs_service.rb
@@ -16,6 +16,7 @@ module DesignManagement
version = delete_designs!
EventCreateService.new.destroy_designs(designs, current_user)
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_removed_action(author: current_user)
success(version: version)
end
diff --git a/app/services/design_management/design_service.rb b/app/services/design_management/design_service.rb
index 54e53609646..5aa2a2f73bc 100644
--- a/app/services/design_management/design_service.rb
+++ b/app/services/design_management/design_service.rb
@@ -19,6 +19,7 @@ module DesignManagement
def collection
issue.design_collection
end
+ alias_method :design_collection, :collection
def repository
collection.repository
diff --git a/app/services/design_management/generate_image_versions_service.rb b/app/services/design_management/generate_image_versions_service.rb
index 213aac164ff..e56d163c461 100644
--- a/app/services/design_management/generate_image_versions_service.rb
+++ b/app/services/design_management/generate_image_versions_service.rb
@@ -48,6 +48,9 @@ module DesignManagement
# Store and process the file
action.image_v432x230.store!(raw_file)
action.save!
+ rescue CarrierWave::IntegrityError => e
+ Gitlab::ErrorTracking.log_exception(e, project_id: project.id, design_id: action.design_id, version_id: action.version_id)
+ log_error(e.message)
rescue CarrierWave::UploadError => e
Gitlab::ErrorTracking.track_exception(e, project_id: project.id, design_id: action.design_id, version_id: action.version_id)
log_error(e.message)
diff --git a/app/services/design_management/runs_design_actions.rb b/app/services/design_management/runs_design_actions.rb
index 4bd6bb45658..ee6aa9286d3 100644
--- a/app/services/design_management/runs_design_actions.rb
+++ b/app/services/design_management/runs_design_actions.rb
@@ -4,14 +4,15 @@ module DesignManagement
module RunsDesignActions
NoActions = Class.new(StandardError)
- # this concern requires the following methods to be implemented:
+ # This concern requires the following methods to be implemented:
# current_user, target_branch, repository, commit_message
#
# Before calling `run_actions`, you should ensure the repository exists, by
# calling `repository.create_if_not_exists`.
#
# @raise [NoActions] if actions are empty
- def run_actions(actions)
+ # @return [DesignManagement::Version]
+ def run_actions(actions, skip_system_notes: false)
raise NoActions if actions.empty?
sha = repository.multi_action(current_user,
@@ -21,14 +22,14 @@ module DesignManagement
::DesignManagement::Version
.create_for_designs(actions, sha, current_user)
- .tap { |version| post_process(version) }
+ .tap { |version| post_process(version, skip_system_notes) }
end
private
- def post_process(version)
+ def post_process(version, skip_system_notes)
version.run_after_commit_or_now do
- ::DesignManagement::NewVersionWorker.perform_async(id)
+ ::DesignManagement::NewVersionWorker.perform_async(id, skip_system_notes)
end
end
end
diff --git a/app/services/design_management/save_designs_service.rb b/app/services/design_management/save_designs_service.rb
index 0446d2f1ee8..c26d2e7ab47 100644
--- a/app/services/design_management/save_designs_service.rb
+++ b/app/services/design_management/save_designs_service.rb
@@ -16,11 +16,15 @@ module DesignManagement
def execute
return error("Not allowed!") unless can_create_designs?
return error("Only #{MAX_FILES} files are allowed simultaneously") if files.size > MAX_FILES
+ return error("Duplicate filenames are not allowed!") if files.map(&:original_filename).uniq.length != files.length
+ return error("Design copy is in progress") if design_collection.copy_in_progress?
uploaded_designs, version = upload_designs!
skipped_designs = designs - uploaded_designs
create_events
+ design_collection.reset_copy!
+
success({ designs: uploaded_designs, version: version, skipped_designs: skipped_designs })
rescue ::ActiveRecord::RecordInvalid => e
error(e.message)
@@ -34,7 +38,10 @@ module DesignManagement
::DesignManagement::Version.with_lock(project.id, repository) do
actions = build_actions
- [actions.map(&:design), actions.presence && run_actions(actions)]
+ [
+ actions.map(&:design),
+ actions.presence && run_actions(actions)
+ ]
end
end
@@ -59,7 +66,7 @@ module DesignManagement
action = new_file?(design) ? :create : :update
on_success do
- ::Gitlab::UsageDataCounters::DesignsCounter.count(action)
+ track_usage_metrics(action)
end
DesignManagement::DesignAction.new(design, action, content)
@@ -121,6 +128,16 @@ module DesignManagement
end
end
end
+
+ def track_usage_metrics(action)
+ if action == :update
+ ::Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_modified_action(author: current_user)
+ else
+ ::Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_added_action(author: current_user)
+ end
+
+ ::Gitlab::UsageDataCounters::DesignsCounter.count(action)
+ end
end
end
diff --git a/app/services/feature_flags/base_service.rb b/app/services/feature_flags/base_service.rb
new file mode 100644
index 00000000000..9b27df90992
--- /dev/null
+++ b/app/services/feature_flags/base_service.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module FeatureFlags
+ class BaseService < ::BaseService
+ include Gitlab::Utils::StrongMemoize
+
+ AUDITABLE_ATTRIBUTES = %w(name description active).freeze
+
+ protected
+
+ def audit_event(feature_flag)
+ message = audit_message(feature_flag)
+
+ return if message.blank?
+
+ details =
+ {
+ custom_message: message,
+ target_id: feature_flag.id,
+ target_type: feature_flag.class.name,
+ target_details: feature_flag.name
+ }
+
+ ::AuditEventService.new(
+ current_user,
+ feature_flag.project,
+ details
+ )
+ end
+
+ def save_audit_event(audit_event)
+ return unless audit_event
+
+ audit_event.security_event
+ end
+
+ def created_scope_message(scope)
+ "Created rule <strong>#{scope.environment_scope}</strong> "\
+ "and set it as <strong>#{scope.active ? "active" : "inactive"}</strong> "\
+ "with strategies <strong>#{scope.strategies}</strong>."
+ end
+
+ def feature_flag_by_name
+ strong_memoize(:feature_flag_by_name) do
+ project.operations_feature_flags.find_by_name(params[:name])
+ end
+ end
+
+ def feature_flag_scope_by_environment_scope
+ strong_memoize(:feature_flag_scope_by_environment_scope) do
+ feature_flag_by_name.scopes.find_by_environment_scope(params[:environment_scope])
+ end
+ end
+ end
+end
diff --git a/app/services/feature_flags/create_service.rb b/app/services/feature_flags/create_service.rb
new file mode 100644
index 00000000000..b4ca90f7aae
--- /dev/null
+++ b/app/services/feature_flags/create_service.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module FeatureFlags
+ class CreateService < FeatureFlags::BaseService
+ def execute
+ return error('Access Denied', 403) unless can_create?
+ return error('Version is invalid', :bad_request) unless valid_version?
+ return error('New version feature flags are not enabled for this project', :bad_request) unless flag_version_enabled?
+
+ ActiveRecord::Base.transaction do
+ feature_flag = project.operations_feature_flags.new(params)
+
+ if feature_flag.save
+ save_audit_event(audit_event(feature_flag))
+
+ success(feature_flag: feature_flag)
+ else
+ error(feature_flag.errors.full_messages, 400)
+ end
+ end
+ end
+
+ private
+
+ def audit_message(feature_flag)
+ message_parts = ["Created feature flag <strong>#{feature_flag.name}</strong>",
+ "with description <strong>\"#{feature_flag.description}\"</strong>."]
+
+ message_parts += feature_flag.scopes.map do |scope|
+ created_scope_message(scope)
+ end
+
+ message_parts.join(" ")
+ end
+
+ def can_create?
+ Ability.allowed?(current_user, :create_feature_flag, project)
+ end
+
+ def valid_version?
+ !params.key?(:version) || Operations::FeatureFlag.versions.key?(params[:version])
+ end
+
+ def flag_version_enabled?
+ params[:version] != 'new_version_flag' || new_version_feature_flags_enabled?
+ end
+
+ def new_version_feature_flags_enabled?
+ ::Feature.enabled?(:feature_flags_new_version, project, default_enabled: true)
+ end
+ end
+end
diff --git a/app/services/feature_flags/destroy_service.rb b/app/services/feature_flags/destroy_service.rb
new file mode 100644
index 00000000000..c77e3e03ec3
--- /dev/null
+++ b/app/services/feature_flags/destroy_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module FeatureFlags
+ class DestroyService < FeatureFlags::BaseService
+ def execute(feature_flag)
+ destroy_feature_flag(feature_flag)
+ end
+
+ private
+
+ def destroy_feature_flag(feature_flag)
+ return error('Access Denied', 403) unless can_destroy?(feature_flag)
+
+ ActiveRecord::Base.transaction do
+ if feature_flag.destroy
+ save_audit_event(audit_event(feature_flag))
+
+ success(feature_flag: feature_flag)
+ else
+ error(feature_flag.errors.full_messages)
+ end
+ end
+ end
+
+ def audit_message(feature_flag)
+ "Deleted feature flag <strong>#{feature_flag.name}</strong>."
+ end
+
+ def can_destroy?(feature_flag)
+ Ability.allowed?(current_user, :destroy_feature_flag, feature_flag)
+ end
+ end
+end
diff --git a/app/services/feature_flags/disable_service.rb b/app/services/feature_flags/disable_service.rb
new file mode 100644
index 00000000000..8a443ac1795
--- /dev/null
+++ b/app/services/feature_flags/disable_service.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module FeatureFlags
+ class DisableService < BaseService
+ def execute
+ return error('Feature Flag not found', 404) unless feature_flag_by_name
+ return error('Feature Flag Scope not found', 404) unless feature_flag_scope_by_environment_scope
+ return error('Strategy not found', 404) unless strategy_exist_in_persisted_data?
+
+ ::FeatureFlags::UpdateService
+ .new(project, current_user, update_params)
+ .execute(feature_flag_by_name)
+ end
+
+ private
+
+ def update_params
+ if remaining_strategies.empty?
+ params_to_destroy_scope
+ else
+ params_to_update_scope
+ end
+ end
+
+ def remaining_strategies
+ strong_memoize(:remaining_strategies) do
+ feature_flag_scope_by_environment_scope.strategies.reject do |strategy|
+ strategy['name'] == params[:strategy]['name'] &&
+ strategy['parameters'] == params[:strategy]['parameters']
+ end
+ end
+ end
+
+ def strategy_exist_in_persisted_data?
+ feature_flag_scope_by_environment_scope.strategies != remaining_strategies
+ end
+
+ def params_to_destroy_scope
+ { scopes_attributes: [{ id: feature_flag_scope_by_environment_scope.id, _destroy: true }] }
+ end
+
+ def params_to_update_scope
+ { scopes_attributes: [{ id: feature_flag_scope_by_environment_scope.id, strategies: remaining_strategies }] }
+ end
+ end
+end
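
Note: the core of `remaining_strategies` is a plain array rejection; a standalone sketch with hypothetical strategy hashes (pure Ruby, no GitLab objects):

    persisted = [
      { 'name' => 'default',    'parameters' => {} },
      { 'name' => 'userWithId', 'parameters' => { 'userIds' => 'user1' } }
    ]
    to_disable = { 'name' => 'userWithId', 'parameters' => { 'userIds' => 'user1' } }

    remaining = persisted.reject do |strategy|
      strategy['name'] == to_disable['name'] &&
        strategy['parameters'] == to_disable['parameters']
    end
    remaining
    # => [{"name"=>"default", "parameters"=>{}}]
    #
    # remaining.empty?       -> destroy the whole scope (_destroy: true)
    # remaining == persisted -> nothing matched, so the service returns 'Strategy not found'
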
diff --git a/app/services/feature_flags/enable_service.rb b/app/services/feature_flags/enable_service.rb
new file mode 100644
index 00000000000..b4cbb32e003
--- /dev/null
+++ b/app/services/feature_flags/enable_service.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module FeatureFlags
+ class EnableService < BaseService
+ def execute
+ if feature_flag_by_name
+ update_feature_flag
+ else
+ create_feature_flag
+ end
+ end
+
+ private
+
+ def create_feature_flag
+ ::FeatureFlags::CreateService
+ .new(project, current_user, create_params)
+ .execute
+ end
+
+ def update_feature_flag
+ ::FeatureFlags::UpdateService
+ .new(project, current_user, update_params)
+ .execute(feature_flag_by_name)
+ end
+
+ def create_params
+ if params[:environment_scope] == '*'
+ params_to_create_flag_with_default_scope
+ else
+ params_to_create_flag_with_additional_scope
+ end
+ end
+
+ def update_params
+ if feature_flag_scope_by_environment_scope
+ params_to_update_scope
+ else
+ params_to_create_scope
+ end
+ end
+
+ def params_to_create_flag_with_default_scope
+ {
+ name: params[:name],
+ scopes_attributes: [
+ {
+ active: true,
+ environment_scope: '*',
+ strategies: [params[:strategy]]
+ }
+ ]
+ }
+ end
+
+ def params_to_create_flag_with_additional_scope
+ {
+ name: params[:name],
+ scopes_attributes: [
+ {
+ active: false,
+ environment_scope: '*'
+ },
+ {
+ active: true,
+ environment_scope: params[:environment_scope],
+ strategies: [params[:strategy]]
+ }
+ ]
+ }
+ end
+
+ def params_to_create_scope
+ {
+ scopes_attributes: [{
+ active: true,
+ environment_scope: params[:environment_scope],
+ strategies: [params[:strategy]]
+ }]
+ }
+ end
+
+ def params_to_update_scope
+ {
+ scopes_attributes: [{
+ id: feature_flag_scope_by_environment_scope.id,
+ active: true,
+ strategies: feature_flag_scope_by_environment_scope.strategies | [params[:strategy]]
+ }]
+ }
+ end
+ end
+end
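
Note: for illustration, the two create paths above produce params shaped roughly like this (hypothetical flag name, environment and strategy; shapes taken from the methods above):

    strategy = { 'name' => 'userWithId', 'parameters' => { 'userIds' => 'user1' } }

    # environment_scope == '*': a single default scope, enabled.
    default_scope_params = {
      name: 'my_flag',
      scopes_attributes: [{ active: true, environment_scope: '*', strategies: [strategy] }]
    }

    # any other environment_scope: an inactive default scope plus the requested scope.
    additional_scope_params = {
      name: 'my_flag',
      scopes_attributes: [
        { active: false, environment_scope: '*' },
        { active: true, environment_scope: 'production', strategies: [strategy] }
      ]
    }
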
diff --git a/app/services/feature_flags/update_service.rb b/app/services/feature_flags/update_service.rb
new file mode 100644
index 00000000000..c837e50b104
--- /dev/null
+++ b/app/services/feature_flags/update_service.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+module FeatureFlags
+ class UpdateService < FeatureFlags::BaseService
+ AUDITABLE_SCOPE_ATTRIBUTES_HUMAN_NAMES = {
+ 'active' => 'active state',
+ 'environment_scope' => 'environment scope',
+ 'strategies' => 'strategies'
+ }.freeze
+
+ def execute(feature_flag)
+ return error('Access Denied', 403) unless can_update?(feature_flag)
+
+ ActiveRecord::Base.transaction do
+ feature_flag.assign_attributes(params)
+
+ feature_flag.strategies.each do |strategy|
+ if strategy.name_changed? && strategy.name_was == ::Operations::FeatureFlags::Strategy::STRATEGY_GITLABUSERLIST
+ strategy.user_list = nil
+ end
+ end
+
+ audit_event = audit_event(feature_flag)
+
+ if feature_flag.save
+ save_audit_event(audit_event)
+
+ success(feature_flag: feature_flag)
+ else
+ error(feature_flag.errors.full_messages, :bad_request)
+ end
+ end
+ end
+
+ private
+
+ def audit_message(feature_flag)
+ changes = changed_attributes_messages(feature_flag)
+ changes += changed_scopes_messages(feature_flag)
+
+ return if changes.empty?
+
+ "Updated feature flag <strong>#{feature_flag.name}</strong>. " + changes.join(" ")
+ end
+
+ def changed_attributes_messages(feature_flag)
+ feature_flag.changes.slice(*AUDITABLE_ATTRIBUTES).map do |attribute_name, changes|
+ "Updated #{attribute_name} "\
+ "from <strong>\"#{changes.first}\"</strong> to "\
+ "<strong>\"#{changes.second}\"</strong>."
+ end
+ end
+
+ def changed_scopes_messages(feature_flag)
+ feature_flag.scopes.map do |scope|
+ if scope.new_record?
+ created_scope_message(scope)
+ elsif scope.marked_for_destruction?
+ deleted_scope_message(scope)
+ else
+ updated_scope_message(scope)
+ end
+ end.compact # updated_scope_message can return nil if nothing has been changed
+ end
+
+ def deleted_scope_message(scope)
+ "Deleted rule <strong>#{scope.environment_scope}</strong>."
+ end
+
+ def updated_scope_message(scope)
+ changes = scope.changes.slice(*AUDITABLE_SCOPE_ATTRIBUTES_HUMAN_NAMES.keys)
+ return if changes.empty?
+
+ message = "Updated rule <strong>#{scope.environment_scope}</strong> "
+ message += changes.map do |attribute_name, change|
+ name = AUDITABLE_SCOPE_ATTRIBUTES_HUMAN_NAMES[attribute_name]
+ "#{name} from <strong>#{change.first}</strong> to <strong>#{change.second}</strong>"
+ end.join(' ')
+
+ message + '.'
+ end
+
+ def can_update?(feature_flag)
+ Ability.allowed?(current_user, :update_feature_flag, feature_flag)
+ end
+ end
+end
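
Note: `changed_attributes_messages` works off an ActiveModel-style `changes` hash; a standalone sketch with a hypothetical hash (pure Ruby, destructuring instead of ActiveSupport's `.second`):

    changes   = { 'name' => ['old_flag', 'new_flag'], 'active' => [false, true] }
    auditable = %w(name description active)

    message = changes.slice(*auditable).map do |attribute, (before, after)|
      "Updated #{attribute} from <strong>\"#{before}\"</strong> to <strong>\"#{after}\"</strong>."
    end.join(' ')

    message
    # => "Updated name from <strong>\"old_flag\"</strong> to <strong>\"new_flag\"</strong>. " \
    #    "Updated active from <strong>\"false\"</strong> to <strong>\"true\"</strong>."
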
diff --git a/app/services/git/branch_hooks_service.rb b/app/services/git/branch_hooks_service.rb
index dcb32b4c84b..93a0d139001 100644
--- a/app/services/git/branch_hooks_service.rb
+++ b/app/services/git/branch_hooks_service.rb
@@ -76,12 +76,20 @@ module Git
def branch_change_hooks
enqueue_process_commit_messages
enqueue_jira_connect_sync_messages
+ enqueue_metrics_dashboard_sync
end
def branch_remove_hooks
project.repository.after_remove_branch(expire_cache: false)
end
+ def enqueue_metrics_dashboard_sync
+ return unless Feature.enabled?(:sync_metrics_dashboards, project)
+ return unless default_branch?
+
+ ::Metrics::Dashboard::SyncDashboardsWorker.perform_async(project.id)
+ end
+
# Schedules processing of commit messages
def enqueue_process_commit_messages
referencing_commits = limited_commits.select(&:matches_cross_reference_regex?)
diff --git a/app/services/git/wiki_push_service.rb b/app/services/git/wiki_push_service.rb
index fa3019ee9d6..87e2be858c0 100644
--- a/app/services/git/wiki_push_service.rb
+++ b/app/services/git/wiki_push_service.rb
@@ -34,9 +34,7 @@ module Git
def can_process_wiki_events?
# TODO: Support activity events for group wikis
# https://gitlab.com/gitlab-org/gitlab/-/issues/209306
- return false unless wiki.is_a?(ProjectWiki)
-
- Feature.enabled?(:wiki_events_on_git_push, wiki.container)
+ wiki.is_a?(ProjectWiki)
end
def push_changes
diff --git a/app/services/groups/create_service.rb b/app/services/groups/create_service.rb
index ce583095168..cf843d92862 100644
--- a/app/services/groups/create_service.rb
+++ b/app/services/groups/create_service.rb
@@ -15,6 +15,8 @@ module Groups
after_build_hook(@group, params)
+ inherit_group_shared_runners_settings
+
unless can_use_visibility_level? && can_create_group?
return @group
end
@@ -28,9 +30,12 @@ module Groups
@group.build_chat_team(name: response['name'], team_id: response['id'])
end
- if @group.save
- @group.add_owner(current_user)
- add_settings_record
+ Group.transaction do
+ if @group.save
+ @group.add_owner(current_user)
+ @group.create_namespace_settings
+ Service.create_from_active_default_integrations(@group, :group_id) if Feature.enabled?(:group_level_integrations)
+ end
end
@group
@@ -44,6 +49,7 @@ module Groups
def remove_unallowed_params
params.delete(:default_branch_protection) unless can?(current_user, :create_group_with_default_branch_protection)
+ params.delete(:allow_mfa_for_subgroups)
end
def create_chat_team?
@@ -84,8 +90,11 @@ module Groups
params[:visibility_level] = Gitlab::CurrentSettings.current_application_settings.default_group_visibility
end
- def add_settings_record
- @group.create_namespace_settings
+ def inherit_group_shared_runners_settings
+ return unless @group.parent
+
+ @group.shared_runners_enabled = @group.parent.shared_runners_enabled
+ @group.allow_descendants_override_disabled_shared_runners = @group.parent.allow_descendants_override_disabled_shared_runners
end
end
end
diff --git a/app/services/groups/import_export/import_service.rb b/app/services/groups/import_export/import_service.rb
index a5c776f8fc2..a0ddc50e5e0 100644
--- a/app/services/groups/import_export/import_service.rb
+++ b/app/services/groups/import_export/import_service.rb
@@ -13,7 +13,7 @@ module Groups
end
def async_execute
- group_import_state = GroupImportState.safe_find_or_create_by!(group: group)
+ group_import_state = GroupImportState.safe_find_or_create_by!(group: group, user: current_user)
jid = GroupImportWorker.perform_async(current_user.id, group.id)
if jid.present?
diff --git a/app/services/groups/transfer_service.rb b/app/services/groups/transfer_service.rb
index 2bd571f60af..aad574aeaf5 100644
--- a/app/services/groups/transfer_service.rb
+++ b/app/services/groups/transfer_service.rb
@@ -38,6 +38,7 @@ module Groups
# Overridden in EE
def post_update_hooks(updated_project_ids)
refresh_project_authorizations
+ refresh_descendant_groups if @new_parent_group
end
def ensure_allowed_transfer
@@ -101,8 +102,13 @@ module Groups
@group.visibility_level = @new_parent_group.visibility_level
end
+ update_two_factor_authentication if @new_parent_group
+
@group.parent = @new_parent_group
@group.clear_memoization(:self_and_ancestors_ids)
+
+ inherit_group_shared_runners_settings
+
@group.save!
end
@@ -126,8 +132,26 @@ module Groups
projects_to_update
.update_all(visibility_level: @new_parent_group.visibility_level)
end
+
+ def update_two_factor_authentication
+ return if namespace_parent_allows_two_factor_auth
+
+ @group.require_two_factor_authentication = false
+ end
+
+ def refresh_descendant_groups
+ return if namespace_parent_allows_two_factor_auth
+
+ if @group.descendants.where(require_two_factor_authentication: true).any?
+ DisallowTwoFactorForSubgroupsWorker.perform_async(@group.id)
+ end
+ end
# rubocop: enable CodeReuse/ActiveRecord
+ def namespace_parent_allows_two_factor_auth
+ @new_parent_group.namespace_settings.allow_mfa_for_subgroups
+ end
+
def ensure_ownership
return if @new_parent_group
return unless @group.owners.empty?
@@ -161,6 +185,17 @@ module Groups
group_contains_npm_packages: s_('TransferGroup|Group contains projects with NPM packages.')
}.freeze
end
+
+ def inherit_group_shared_runners_settings
+ parent_setting = @group.parent&.shared_runners_setting
+ return unless parent_setting
+
+ if @group.shared_runners_setting_higher_than?(parent_setting)
+ result = Groups::UpdateSharedRunnersService.new(@group, current_user, shared_runners_setting: parent_setting).execute
+
+ raise TransferError, result[:message] unless result[:status] == :success
+ end
+ end
end
end
diff --git a/app/services/groups/update_service.rb b/app/services/groups/update_service.rb
index 81393681dc0..84385f5da25 100644
--- a/app/services/groups/update_service.rb
+++ b/app/services/groups/update_service.rb
@@ -4,6 +4,8 @@ module Groups
class UpdateService < Groups::BaseService
include UpdateVisibilityLevel
+ SETTINGS_PARAMS = [:allow_mfa_for_subgroups].freeze
+
def execute
reject_parent_id!
remove_unallowed_params
@@ -19,8 +21,14 @@ module Groups
return false unless valid_path_change_with_npm_packages?
+ return false unless update_shared_runners
+
+ handle_changes
+
before_assignment_hook(group, params)
+ handle_namespace_settings
+
group.assign_attributes(params)
begin
@@ -38,6 +46,18 @@ module Groups
private
+ def handle_namespace_settings
+ settings_params = params.slice(*::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS)
+
+ return if settings_params.empty?
+
+ ::NamespaceSetting::NAMESPACE_SETTINGS_PARAMS.each do |nsp|
+ params.delete(nsp)
+ end
+
+ ::NamespaceSettings::UpdateService.new(current_user, group, settings_params).execute
+ end
+
def valid_path_change_with_npm_packages?
return true unless group.packages_feature_enabled?
return true if params[:path].blank?
@@ -73,6 +93,18 @@ module Groups
# don't enqueue immediately to prevent todos removal in case of a mistake
TodosDestroyer::GroupPrivateWorker.perform_in(Todo::WAIT_FOR_DELETE, group.id)
end
+
+ update_two_factor_requirement_for_subgroups
+ end
+
+ def update_two_factor_requirement_for_subgroups
+ settings = group.namespace_settings
+ return if settings.allow_mfa_for_subgroups
+
+ if settings.previous_changes.include?(:allow_mfa_for_subgroups)
+ # enqueue members update in batches
+ DisallowTwoFactorForSubgroupsWorker.perform_async(group.id)
+ end
end
def reject_parent_id!
@@ -85,6 +117,21 @@ module Groups
params.delete(:default_branch_protection) unless can?(current_user, :update_default_branch_protection, group)
end
+ def handle_changes
+ handle_settings_update
+ end
+
+ def handle_settings_update
+ settings_params = params.slice(*allowed_settings_params)
+ allowed_settings_params.each { |param| params.delete(param) }
+
+ ::NamespaceSettings::UpdateService.new(current_user, group, settings_params).execute
+ end
+
+ def allowed_settings_params
+ SETTINGS_PARAMS
+ end
+
def valid_share_with_group_lock_change?
return true unless changing_share_with_group_lock?
return true if can?(current_user, :change_share_with_group_lock, group)
@@ -98,6 +145,17 @@ module Groups
params[:share_with_group_lock] != group.share_with_group_lock
end
+
+ def update_shared_runners
+ return true if params[:shared_runners_setting].nil?
+
+ result = Groups::UpdateSharedRunnersService.new(group, current_user, shared_runners_setting: params.delete(:shared_runners_setting)).execute
+
+ return true if result[:status] == :success
+
+ group.errors.add(:update_shared_runners, result[:message])
+ false
+ end
end
end
diff --git a/app/services/groups/update_shared_runners_service.rb b/app/services/groups/update_shared_runners_service.rb
index 63f57104510..639c5bf6ae0 100644
--- a/app/services/groups/update_shared_runners_service.rb
+++ b/app/services/groups/update_shared_runners_service.rb
@@ -7,44 +7,24 @@ module Groups
validate_params
- enable_or_disable_shared_runners!
- allow_or_disallow_descendants_override_disabled_shared_runners!
+ update_shared_runners
success
- rescue Group::UpdateSharedRunnersError => error
+ rescue ActiveRecord::RecordInvalid, ArgumentError => error
error(error.message)
end
private
def validate_params
- if Gitlab::Utils.to_boolean(params[:shared_runners_enabled]) && !params[:allow_descendants_override_disabled_shared_runners].nil?
- raise Group::UpdateSharedRunnersError, 'Cannot set shared_runners_enabled to true and allow_descendants_override_disabled_shared_runners'
+ unless Namespace::SHARED_RUNNERS_SETTINGS.include?(params[:shared_runners_setting])
+ raise ArgumentError, "state must be one of: #{Namespace::SHARED_RUNNERS_SETTINGS.join(', ')}"
end
end
- def enable_or_disable_shared_runners!
- return if params[:shared_runners_enabled].nil?
-
- if Gitlab::Utils.to_boolean(params[:shared_runners_enabled])
- group.enable_shared_runners!
- else
- group.disable_shared_runners!
- end
- end
-
- def allow_or_disallow_descendants_override_disabled_shared_runners!
- return if params[:allow_descendants_override_disabled_shared_runners].nil?
-
- # Needs to reset group because if both params are present could result in error
- group.reset
-
- if Gitlab::Utils.to_boolean(params[:allow_descendants_override_disabled_shared_runners])
- group.allow_descendants_override_disabled_shared_runners!
- else
- group.disallow_descendants_override_disabled_shared_runners!
- end
+ def update_shared_runners
+ group.update_shared_runners_setting!(params[:shared_runners_setting])
end
end
end
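
Note: a standalone sketch of the new validation, with a hypothetical settings list standing in for Namespace::SHARED_RUNNERS_SETTINGS:

    SHARED_RUNNERS_SETTINGS = %w(disabled_and_unoverridable disabled_with_override enabled).freeze

    validate = ->(setting) do
      unless SHARED_RUNNERS_SETTINGS.include?(setting)
        raise ArgumentError, "state must be one of: #{SHARED_RUNNERS_SETTINGS.join(', ')}"
      end

      setting
    end

    validate.call('enabled')   # => "enabled"
    validate.call('sometimes') # raises ArgumentError: state must be one of: ...
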
diff --git a/app/services/incident_management/incidents/create_service.rb b/app/services/incident_management/incidents/create_service.rb
index 5b925e0f440..cff288d602b 100644
--- a/app/services/incident_management/incidents/create_service.rb
+++ b/app/services/incident_management/incidents/create_service.rb
@@ -24,7 +24,7 @@ module IncidentManagement
return error(issue.errors.full_messages.to_sentence, issue) unless issue.valid?
- issue.update_severity(severity)
+ update_severity_for(issue)
success(issue)
end
@@ -40,6 +40,10 @@ module IncidentManagement
def error(message, issue = nil)
ServiceResponse.error(payload: { issue: issue }, message: message)
end
+
+ def update_severity_for(issue)
+ ::IncidentManagement::Incidents::UpdateSeverityService.new(issue, current_user, severity).execute
+ end
end
end
end
diff --git a/app/services/incident_management/incidents/update_severity_service.rb b/app/services/incident_management/incidents/update_severity_service.rb
new file mode 100644
index 00000000000..5b150f3f02e
--- /dev/null
+++ b/app/services/incident_management/incidents/update_severity_service.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module IncidentManagement
+ module Incidents
+ class UpdateSeverityService < BaseService
+ def initialize(issuable, current_user, severity)
+ super(issuable.project, current_user)
+
+ @issuable = issuable
+ @severity = severity.to_s.downcase
+ @severity = IssuableSeverity::DEFAULT unless IssuableSeverity.severities.key?(@severity)
+ end
+
+ def execute
+ return unless issuable.incident?
+
+ update_severity!
+ add_system_note
+ end
+
+ private
+
+ attr_reader :issuable, :severity
+
+ def issuable_severity
+ issuable.issuable_severity || issuable.build_issuable_severity(issue_id: issuable.id)
+ end
+
+ def update_severity!
+ issuable_severity.update!(severity: severity)
+ end
+
+ def add_system_note
+ ::IncidentManagement::AddSeveritySystemNoteWorker.perform_async(issuable.id, current_user.id)
+ end
+ end
+ end
+end
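
Note: a standalone sketch of the severity normalisation in the initializer, with hypothetical constants mirroring IssuableSeverity (the real enum and default live in the model):

    SEVERITIES = { 'low' => 1, 'medium' => 2, 'high' => 3, 'critical' => 4 }.freeze
    DEFAULT_SEVERITY = 'unknown'

    normalize = ->(input) do
      severity = input.to_s.downcase
      SEVERITIES.key?(severity) ? severity : DEFAULT_SEVERITY
    end

    normalize.call('HIGH')  # => "high"
    normalize.call(nil)     # => "unknown"
    normalize.call('bogus') # => "unknown"
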
diff --git a/app/services/incident_management/pager_duty/process_webhook_service.rb b/app/services/incident_management/pager_duty/process_webhook_service.rb
index fd8252f75fb..027425e4aaa 100644
--- a/app/services/incident_management/pager_duty/process_webhook_service.rb
+++ b/app/services/incident_management/pager_duty/process_webhook_service.rb
@@ -34,7 +34,7 @@ module IncidentManagement
strong_memoize(:pager_duty_processable_events) do
::PagerDuty::WebhookPayloadParser
.call(params.to_h)
- .filter { |msg| msg['event'].in?(PAGER_DUTY_PROCESSABLE_EVENT_TYPES) }
+ .filter { |msg| msg['event'].to_s.in?(PAGER_DUTY_PROCESSABLE_EVENT_TYPES) }
end
end
diff --git a/app/services/issuable/clone/attributes_rewriter.rb b/app/services/issuable/clone/attributes_rewriter.rb
index c84074039ea..3861d88bce9 100644
--- a/app/services/issuable/clone/attributes_rewriter.rb
+++ b/app/services/issuable/clone/attributes_rewriter.rb
@@ -18,7 +18,6 @@ module Issuable
new_entity.update(update_attributes)
copy_resource_label_events
- copy_resource_weight_events
copy_resource_milestone_events
copy_resource_state_events
end
@@ -55,16 +54,6 @@ module Issuable
end
end
- def copy_resource_weight_events
- return unless both_respond_to?(:resource_weight_events)
-
- copy_events(ResourceWeightEvent.table_name, original_entity.resource_weight_events) do |event|
- event.attributes
- .except('id', 'reference', 'reference_html')
- .merge('issue_id' => new_entity.id)
- end
- end
-
def copy_resource_milestone_events
return unless milestone_events_supported?
@@ -128,3 +117,5 @@ module Issuable
end
end
end
+
+Issuable::Clone::AttributesRewriter.prepend_if_ee('EE::Issuable::Clone::AttributesRewriter')
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 1672ba2830a..60e5293e218 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -322,7 +322,7 @@ class IssuableBaseService < BaseService
def change_severity(issuable)
if severity = params.delete(:severity)
- issuable.update_severity(severity)
+ ::IncidentManagement::Incidents::UpdateSeverityService.new(issuable, current_user, severity).execute
end
end
@@ -366,6 +366,7 @@ class IssuableBaseService < BaseService
}
associations[:total_time_spent] = issuable.total_time_spent if issuable.respond_to?(:total_time_spent)
associations[:description] = issuable.description
+ associations[:reviewers] = issuable.reviewers.to_a if issuable.allows_reviewers?
associations
end
diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb
index 0ed2b08b7b1..978ea6fe9bc 100644
--- a/app/services/issues/base_service.rb
+++ b/app/services/issues/base_service.rb
@@ -34,6 +34,18 @@ module Issues
private
+ def filter_params(merge_request)
+ super
+
+ moved_issue = params.delete(:moved_issue)
+
+ # Setting created_at, updated_at and iid is allowed only for admins and owners or
+ # when moving an issue as we preserve the original issue attributes except id and iid.
+ params.delete(:iid) unless current_user.can?(:set_issue_iid, project)
+ params.delete(:created_at) unless moved_issue || current_user.can?(:set_issue_created_at, project)
+ params.delete(:updated_at) unless moved_issue || current_user.can?(:set_issue_updated_at, project)
+ end
+
def create_assignee_note(issue, old_assignees)
SystemNoteService.change_issuable_assignees(
issue, issue.project, current_user, old_assignees)
diff --git a/app/services/issues/build_service.rb b/app/services/issues/build_service.rb
index 2de6ed9fa1c..3145739fe91 100644
--- a/app/services/issues/build_service.rb
+++ b/app/services/issues/build_service.rb
@@ -64,20 +64,26 @@ module Issues
private
- def whitelisted_issue_params
- base_params = [:title, :description, :confidential]
- admin_params = [:milestone_id, :issue_type]
+ def allowed_issue_base_params
+ [:title, :description, :confidential, :issue_type]
+ end
+
+ def allowed_issue_admin_params
+ [:milestone_id]
+ end
+ def allowed_issue_params
if can?(current_user, :admin_issue, project)
- params.slice(*(base_params + admin_params))
+ params.slice(*(allowed_issue_base_params + allowed_issue_admin_params))
else
- params.slice(*base_params)
+ params.slice(*allowed_issue_base_params)
end
end
def build_issue_params
- { author: current_user }.merge(issue_params_with_info_from_discussions)
- .merge(whitelisted_issue_params)
+ { author: current_user }
+ .merge(issue_params_with_info_from_discussions)
+ .merge(allowed_issue_params)
end
end
end
diff --git a/app/services/issues/move_service.rb b/app/services/issues/move_service.rb
index 60e0d1eec3d..90ccbd8ed21 100644
--- a/app/services/issues/move_service.rb
+++ b/app/services/issues/move_service.rb
@@ -23,11 +23,15 @@ module Issues
# to receive service desk emails on the new moved issue.
update_service_desk_sent_notifications
+ queue_copy_designs
+
new_entity
end
private
+ attr_reader :target_project
+
def update_service_desk_sent_notifications
return unless original_entity.from_service_desk?
@@ -46,9 +50,10 @@ module Issues
new_params = {
id: nil,
iid: nil,
- project: @target_project,
+ project: target_project,
author: original_entity.author,
- assignee_ids: original_entity.assignee_ids
+ assignee_ids: original_entity.assignee_ids,
+ moved_issue: true
}
new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params)
@@ -58,6 +63,18 @@ module Issues
CreateService.new(@target_project, @current_user, new_params).execute(skip_system_notes: true)
end
+ def queue_copy_designs
+ return unless original_entity.designs.present?
+
+ response = DesignManagement::CopyDesignCollection::QueueService.new(
+ current_user,
+ original_entity,
+ new_entity
+ ).execute
+
+ log_error(response.message) if response.error?
+ end
+
def mark_as_moved
original_entity.update(moved_to: new_entity)
end
@@ -75,7 +92,7 @@ module Issues
end
def add_note_from
- SystemNoteService.noteable_moved(new_entity, @target_project,
+ SystemNoteService.noteable_moved(new_entity, target_project,
original_entity, current_user,
direction: :from)
end
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index e2b1b5400c7..12dbff57ec5 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -5,6 +5,8 @@ module Issues
def execute(issue)
return issue unless can?(current_user, :reopen_issue, issue)
+ before_reopen(issue)
+
if issue.reopen
event_service.reopen_issue(issue, current_user)
create_note(issue, 'reopened')
@@ -21,8 +23,14 @@ module Issues
private
+ def before_reopen(issue)
+ # Overridden in EE
+ end
+
def create_note(issue, state = issue.state)
SystemNoteService.change_status(issue, issue.project, current_user, state, nil)
end
end
end
+
+Issues::ReopenService.prepend_if_ee('EE::Issues::ReopenService')
diff --git a/app/services/jira/requests/base.rb b/app/services/jira/requests/base.rb
index 7c6db372257..4ed8df0f235 100644
--- a/app/services/jira/requests/base.rb
+++ b/app/services/jira/requests/base.rb
@@ -40,7 +40,12 @@ module Jira
build_service_response(response)
rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, Errno::ECONNREFUSED, URI::InvalidURIError, JIRA::HTTPError, OpenSSL::SSL::SSLError => error
error_message = "Jira request error: #{error.message}"
- log_error("Error sending message", client_url: client.options[:site], error: error_message)
+ log_error("Error sending message", client_url: client.options[:site],
+ error: {
+ exception_class: error.class.name,
+ exception_message: error.message,
+ exception_backtrace: Gitlab::BacktraceCleaner.clean_backtrace(error.backtrace)
+ })
ServiceResponse.error(message: error_message)
end
diff --git a/app/services/lfs/push_service.rb b/app/services/lfs/push_service.rb
index 6e1a11ebff8..9b947fbed07 100644
--- a/app/services/lfs/push_service.rb
+++ b/app/services/lfs/push_service.rb
@@ -12,7 +12,7 @@ module Lfs
def execute
lfs_objects_relation.each_batch(of: BATCH_SIZE) do |objects|
- push_objects(objects)
+ push_objects!(objects)
end
success
@@ -30,8 +30,8 @@ module Lfs
project.lfs_objects_for_repository_types(nil, :project)
end
- def push_objects(objects)
- rsp = lfs_client.batch('upload', objects)
+ def push_objects!(objects)
+ rsp = lfs_client.batch!('upload', objects)
objects = objects.index_by(&:oid)
rsp.fetch('objects', []).each do |spec|
@@ -53,14 +53,14 @@ module Lfs
return
end
- lfs_client.upload(object, upload, authenticated: authenticated)
+ lfs_client.upload!(object, upload, authenticated: authenticated)
end
def verify_object!(object, spec)
- # TODO: the remote has requested that we make another call to verify that
- # the object has been sent correctly.
- # https://gitlab.com/gitlab-org/gitlab/-/issues/250654
- log_error("LFS upload verification requested, but not supported for #{object.oid}")
+ authenticated = spec['authenticated']
+ verify = spec.dig('actions', 'verify')
+
+ lfs_client.verify!(object, verify, authenticated: authenticated)
end
def url
diff --git a/app/services/members/create_service.rb b/app/services/members/create_service.rb
index 610288c5e76..088e6f031c8 100644
--- a/app/services/members/create_service.rb
+++ b/app/services/members/create_service.rb
@@ -7,7 +7,7 @@ module Members
def execute(source)
return error(s_('AddMember|No users specified.')) if params[:user_ids].blank?
- user_ids = params[:user_ids].split(',').uniq
+ user_ids = params[:user_ids].split(',').uniq.flatten
return error(s_("AddMember|Too many users specified (limit is %{user_limit})") % { user_limit: user_limit }) if
user_limit && user_ids.size > user_limit
diff --git a/app/services/members/invitation_reminder_email_service.rb b/app/services/members/invitation_reminder_email_service.rb
new file mode 100644
index 00000000000..e589cdc2fa3
--- /dev/null
+++ b/app/services/members/invitation_reminder_email_service.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module Members
+ class InvitationReminderEmailService
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :invitation
+
+ MAX_INVITATION_LIFESPAN = 14.0
+ REMINDER_RATIO = [2, 5, 10].freeze
+
+ def initialize(invitation)
+ @invitation = invitation
+ end
+
+ def execute
+ return unless experiment_enabled?
+
+ reminder_index = days_on_which_to_send_reminders.index(days_after_invitation_sent)
+ return unless reminder_index
+
+ invitation.send_invitation_reminder(reminder_index)
+ end
+
+ private
+
+ def experiment_enabled?
+ Gitlab::Experimentation.enabled_for_attribute?(:invitation_reminders, invitation.invite_email)
+ end
+
+ def days_after_invitation_sent
+ (Date.today - invitation.created_at.to_date).to_i
+ end
+
+ def days_on_which_to_send_reminders
+ # Don't send any reminders if the invitation has expired or expires today
+ return [] if invitation.expires_at && invitation.expires_at <= Date.today
+
+ # Calculate the number of days on which to send reminders based on the MAX_INVITATION_LIFESPAN and the REMINDER_RATIO
+ REMINDER_RATIO.map { |number_of_days| ((number_of_days * invitation_lifespan_in_days) / MAX_INVITATION_LIFESPAN).ceil }.uniq
+ end
+
+ def invitation_lifespan_in_days
+ # When the invitation lifespan is more than 14 days or does not expire, send the reminders within 14 days
+ strong_memoize(:invitation_lifespan_in_days) do
+ if invitation.expires_at
+ [(invitation.expires_at - invitation.created_at.to_date).to_i, MAX_INVITATION_LIFESPAN].min
+ else
+ MAX_INVITATION_LIFESPAN
+ end
+ end
+ end
+ end
+end
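
Note: a worked example of the reminder schedule computed by `days_on_which_to_send_reminders` (pure Ruby; constants copied from the service above):

    MAX_INVITATION_LIFESPAN = 14.0
    REMINDER_RATIO = [2, 5, 10].freeze

    schedule = ->(lifespan_in_days) do
      REMINDER_RATIO.map { |n| ((n * lifespan_in_days) / MAX_INVITATION_LIFESPAN).ceil }.uniq
    end

    schedule.call(14) # => [2, 5, 10]  full two-week invitation
    schedule.call(7)  # => [1, 3, 5]   one-week invitation: reminders are compressed
    schedule.call(2)  # => [1, 2]      ratios collapse, so `.uniq` drops the duplicate day
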
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index abc3f99797d..aa591312c6a 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -110,6 +110,10 @@ module MergeRequests
return
end
+ unless merge_request.allows_multiple_reviewers?
+ params[:reviewer_ids] = params[:reviewer_ids].first(1)
+ end
+
reviewer_ids = params[:reviewer_ids].select { |reviewer_id| user_can_read?(merge_request, reviewer_id) }
if params[:reviewer_ids].map(&:to_s) == [IssuableFinder::Params::NONE]
@@ -130,6 +134,11 @@ module MergeRequests
merge_request, merge_request.project, current_user, old_assignees)
end
+ def create_reviewer_note(merge_request, old_reviewers)
+ SystemNoteService.change_issuable_reviewers(
+ merge_request, merge_request.project, current_user, old_reviewers)
+ end
+
def create_pipeline_for(merge_request, user)
MergeRequests::CreatePipelineService.new(project, user).execute(merge_request)
end
diff --git a/app/services/merge_requests/cleanup_refs_service.rb b/app/services/merge_requests/cleanup_refs_service.rb
index 0f03f5f09b4..d003124a112 100644
--- a/app/services/merge_requests/cleanup_refs_service.rb
+++ b/app/services/merge_requests/cleanup_refs_service.rb
@@ -17,7 +17,7 @@ module MergeRequests
@repository = merge_request.project.repository
@ref_path = merge_request.ref_path
@merge_ref_path = merge_request.merge_ref_path
- @ref_head_sha = @repository.commit(merge_request.ref_path).id
+ @ref_head_sha = @repository.commit(merge_request.ref_path)&.id
@merge_ref_sha = merge_request.merge_ref_head&.id
end
diff --git a/app/services/merge_requests/export_csv_service.rb b/app/services/merge_requests/export_csv_service.rb
new file mode 100644
index 00000000000..1e7f0c8e722
--- /dev/null
+++ b/app/services/merge_requests/export_csv_service.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ class ExportCsvService
+ include Gitlab::Routing.url_helpers
+ include GitlabRoutingHelper
+
+ # Target attachment size before base64 encoding
+ TARGET_FILESIZE = 15.megabytes
+
+ def initialize(merge_requests, project)
+ @project = project
+ @merge_requests = merge_requests
+ end
+
+ def csv_data
+ csv_builder.render(TARGET_FILESIZE)
+ end
+
+ def email(user)
+ Notify.merge_requests_csv_email(user, @project, csv_data, csv_builder.status).deliver_now
+ end
+
+ private
+
+ def csv_builder
+ @csv_builder ||= CsvBuilder.new(@merge_requests.with_csv_entity_associations, header_to_value_hash)
+ end
+
+ def header_to_value_hash
+ {
+ 'MR IID' => 'iid',
+ 'URL' => -> (merge_request) { merge_request_url(merge_request) },
+ 'Title' => 'title',
+ 'State' => 'state',
+ 'Description' => 'description',
+ 'Source Branch' => 'source_branch',
+ 'Target Branch' => 'target_branch',
+ 'Source Project ID' => 'source_project_id',
+ 'Target Project ID' => 'target_project_id',
+ 'Author' => -> (merge_request) { merge_request.author.name },
+ 'Author Username' => -> (merge_request) { merge_request.author.username },
+ 'Assignees' => -> (merge_request) { merge_request.assignees.map(&:name).join(', ') },
+ 'Assignee Usernames' => -> (merge_request) { merge_request.assignees.map(&:username).join(', ') },
+ 'Approvers' => -> (merge_request) { merge_request.approved_by_users.map(&:name).join(', ') },
+ 'Approver Usernames' => -> (merge_request) { merge_request.approved_by_users.map(&:username).join(', ') },
+ 'Merged User' => -> (merge_request) { merge_request.metrics&.merged_by&.name.to_s },
+ 'Merged Username' => -> (merge_request) { merge_request.metrics&.merged_by&.username.to_s },
+ 'Milestone ID' => -> (merge_request) { merge_request&.milestone&.id || '' },
+ 'Created At (UTC)' => -> (merge_request) { merge_request.created_at.utc },
+ 'Updated At (UTC)' => -> (merge_request) { merge_request.updated_at.utc }
+ }
+ end
+ end
+end
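
Note: a minimal standalone sketch of the header-to-value mapping idea, using the CSV stdlib and a Struct in place of MergeRequest (the real CsvBuilder adds batching, the size limit and a truncation status):

    require 'csv'

    MR = Struct.new(:iid, :title, :author_name, keyword_init: true)
    rows = [MR.new(iid: 1, title: 'Fix login', author_name: 'Alice')]

    headers = {
      'MR IID' => :iid,
      'Title'  => :title,
      'Author' => ->(mr) { mr.author_name }
    }

    csv = CSV.generate do |out|
      out << headers.keys
      rows.each do |mr|
        out << headers.values.map { |v| v.respond_to?(:call) ? v.call(mr) : mr.public_send(v) }
      end
    end
    puts csv
    # MR IID,Title,Author
    # 1,Fix login,Alice
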
diff --git a/app/services/merge_requests/ff_merge_service.rb b/app/services/merge_requests/ff_merge_service.rb
index 79011094e88..c5640047899 100644
--- a/app/services/merge_requests/ff_merge_service.rb
+++ b/app/services/merge_requests/ff_merge_service.rb
@@ -27,7 +27,7 @@ module MergeRequests
rescue StandardError => e
raise MergeError, "Something went wrong during merge: #{e.message}"
ensure
- merge_request.update(in_progress_merge_commit_sha: nil)
+ merge_request.update_and_mark_in_progress_merge_commit_sha(nil)
end
end
end
diff --git a/app/services/merge_requests/merge_service.rb b/app/services/merge_requests/merge_service.rb
index 437e87dadf7..ba22b458777 100644
--- a/app/services/merge_requests/merge_service.rb
+++ b/app/services/merge_requests/merge_service.rb
@@ -84,7 +84,7 @@ module MergeRequests
merge_request.update!(merge_commit_sha: commit_id)
ensure
- merge_request.update_column(:in_progress_merge_commit_sha, nil)
+ merge_request.update_and_mark_in_progress_merge_commit_sha(nil)
end
def try_merge
diff --git a/app/services/merge_requests/merge_to_ref_service.rb b/app/services/merge_requests/merge_to_ref_service.rb
index 1876b1096fe..c0115e94903 100644
--- a/app/services/merge_requests/merge_to_ref_service.rb
+++ b/app/services/merge_requests/merge_to_ref_service.rb
@@ -58,8 +58,15 @@ module MergeRequests
params[:first_parent_ref] || merge_request.target_branch_ref
end
+ ##
+ # The `allow_conflicts` parameter is a flag indicating whether merge conflicts
+ # should be merged into the diff. Defaults to false.
+ def allow_conflicts
+ params[:allow_conflicts] || false
+ end
+
def commit
- repository.merge_to_ref(current_user, source, merge_request, target_ref, commit_message, first_parent_ref)
+ repository.merge_to_ref(current_user, source, merge_request, target_ref, commit_message, first_parent_ref, allow_conflicts)
rescue Gitlab::Git::PreReceiveError, Gitlab::Git::CommandError => error
raise MergeError, error.message
end
diff --git a/app/services/merge_requests/mergeability_check_service.rb b/app/services/merge_requests/mergeability_check_service.rb
index a3c39fa2e32..627c747203c 100644
--- a/app/services/merge_requests/mergeability_check_service.rb
+++ b/app/services/merge_requests/mergeability_check_service.rb
@@ -88,7 +88,7 @@ module MergeRequests
sleep_sec: retry_lease ? 1.second : 0
}
- in_lock(lease_key, lease_opts, &block)
+ in_lock(lease_key, **lease_opts, &block)
end
def payload
@@ -115,18 +115,20 @@ module MergeRequests
def update_merge_status
return unless merge_request.recheck_merge_status?
+ return merge_request.mark_as_unmergeable if merge_request.broken?
- if can_git_merge? && merge_to_ref
+ merge_to_ref_success = merge_to_ref
+
+ update_diff_discussion_positions! if merge_to_ref_success
+
+ if merge_to_ref_success && can_git_merge?
merge_request.mark_as_mergeable
- update_diff_discussion_positions!
else
merge_request.mark_as_unmergeable
end
end
def update_diff_discussion_positions!
- return if Feature.disabled?(:merge_ref_head_comments, merge_request.target_project, default_enabled: true)
-
Discussions::CaptureDiffNotePositionsService.new(merge_request).execute
end
@@ -151,13 +153,14 @@ module MergeRequests
end
def can_git_merge?
- !merge_request.broken? && repository.can_be_merged?(merge_request.diff_head_sha, merge_request.target_branch)
+ repository.can_be_merged?(merge_request.diff_head_sha, merge_request.target_branch)
end
def merge_to_ref
return true unless merge_ref_auto_sync_enabled?
- result = MergeRequests::MergeToRefService.new(project, merge_request.author).execute(merge_request)
+ params = { allow_conflicts: Feature.enabled?(:display_merge_conflicts_in_diff, project) }
+ result = MergeRequests::MergeToRefService.new(project, merge_request.author, params).execute(merge_request)
result[:status] == :success
end
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index 405b8fe9c9e..e5d0b216d6c 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -85,18 +85,13 @@ module MergeRequests
return if merge_requests.empty?
- commit_analyze_enabled = Feature.enabled?(:branch_push_merge_commit_analyze, @project, default_enabled: true)
- if commit_analyze_enabled
- analyzer = Gitlab::BranchPushMergeCommitAnalyzer.new(
- @commits.reverse,
- relevant_commit_ids: merge_requests.map(&:diff_head_sha)
- )
- end
+ analyzer = Gitlab::BranchPushMergeCommitAnalyzer.new(
+ @commits.reverse,
+ relevant_commit_ids: merge_requests.map(&:diff_head_sha)
+ )
merge_requests.each do |merge_request|
- if commit_analyze_enabled
- merge_request.merge_commit_sha = analyzer.get_merge_commit(merge_request.diff_head_sha)
- end
+ merge_request.merge_commit_sha = analyzer.get_merge_commit(merge_request.diff_head_sha)
MergeRequests::PostMergeService
.new(merge_request.target_project, @current_user)
@@ -184,7 +179,7 @@ module MergeRequests
def abort_auto_merge_with_todo(merge_request, reason)
response = abort_auto_merge(merge_request, reason)
- response = ServiceResponse.new(response)
+ response = ServiceResponse.new(**response)
return unless response.success?
todo_service.merge_request_became_unmergeable(merge_request)
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index 1468bfd6bb6..8c069ea5bb0 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -112,6 +112,8 @@ module MergeRequests
end
def handle_reviewers_change(merge_request, old_reviewers)
+ create_reviewer_note(merge_request, old_reviewers)
+ notification_service.async.changed_reviewer_of_merge_request(merge_request, current_user, old_reviewers)
todo_service.reassigned_reviewable(merge_request, current_user, old_reviewers)
end
diff --git a/app/services/metrics/dashboard/custom_dashboard_service.rb b/app/services/metrics/dashboard/custom_dashboard_service.rb
index f0f19bf2ba3..bde8e86851a 100644
--- a/app/services/metrics/dashboard/custom_dashboard_service.rb
+++ b/app/services/metrics/dashboard/custom_dashboard_service.rb
@@ -42,6 +42,12 @@ module Metrics
def cache_key
"project_#{project.id}_metrics_dashboard_#{dashboard_path}"
end
+
+ def sequence
+ [
+ ::Gitlab::Metrics::Dashboard::Stages::CustomDashboardMetricsInserter
+ ] + super
+ end
end
end
end
diff --git a/app/services/metrics/dashboard/dynamic_embed_service.rb b/app/services/metrics/dashboard/dynamic_embed_service.rb
index 0b198ecbbe9..a94538668c1 100644
--- a/app/services/metrics/dashboard/dynamic_embed_service.rb
+++ b/app/services/metrics/dashboard/dynamic_embed_service.rb
@@ -18,7 +18,7 @@ module Metrics
# Determines whether the provided params are sufficient
# to uniquely identify a panel from a yml-defined dashboard.
#
- # See https://docs.gitlab.com/ee/operations/metrics/dashboards/index.html#defining-custom-dashboards-per-project
+ # See https://docs.gitlab.com/ee/operations/metrics/dashboards/index.html
# for additional info on defining custom dashboards.
def valid_params?(params)
[
diff --git a/app/services/namespace_settings/update_service.rb b/app/services/namespace_settings/update_service.rb
new file mode 100644
index 00000000000..3c9b7b637ac
--- /dev/null
+++ b/app/services/namespace_settings/update_service.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module NamespaceSettings
+ class UpdateService
+ include ::Gitlab::Allowable
+
+ attr_reader :current_user, :group, :settings_params
+
+ def initialize(current_user, group, settings)
+ @current_user = current_user
+ @group = group
+ @settings_params = settings
+ end
+
+ def execute
+ if group.namespace_settings
+ group.namespace_settings.attributes = settings_params
+ else
+ group.build_namespace_settings(settings_params)
+ end
+ end
+ end
+end
+
+NamespaceSettings::UpdateService.prepend_if_ee('EE::NamespaceSettings::UpdateService')
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index e26f662a697..48f44affb23 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -70,7 +70,7 @@ module Notes
Gitlab::Tracking.event('Notes::CreateService', 'execute', tracking_data_for(note))
end
- if Feature.enabled?(:merge_ref_head_comments, project, default_enabled: true) && note.for_merge_request? && note.diff_note? && note.start_of_discussion?
+ if note.for_merge_request? && note.diff_note? && note.start_of_discussion?
Discussions::CaptureDiffNotePositionService.new(note.noteable, note.diff_file&.paths).execute(note.discussion)
end
end
diff --git a/app/services/notification_recipients/build_service.rb b/app/services/notification_recipients/build_service.rb
index 0fe0d26d7b2..040ecc29d3a 100644
--- a/app/services/notification_recipients/build_service.rb
+++ b/app/services/notification_recipients/build_service.rb
@@ -13,8 +13,8 @@ module NotificationRecipients
NotificationRecipient.new(user, *args).notifiable?
end
- def self.build_recipients(*args)
- ::NotificationRecipients::Builder::Default.new(*args).notification_recipients
+ def self.build_recipients(target, current_user, **args)
+ ::NotificationRecipients::Builder::Default.new(target, current_user, **args).notification_recipients
end
def self.build_new_note_recipients(*args)
@@ -25,8 +25,8 @@ module NotificationRecipients
::NotificationRecipients::Builder::MergeRequestUnmergeable.new(*args).notification_recipients
end
- def self.build_project_maintainers_recipients(*args)
- ::NotificationRecipients::Builder::ProjectMaintainers.new(*args).notification_recipients
+ def self.build_project_maintainers_recipients(target, **args)
+ ::NotificationRecipients::Builder::ProjectMaintainers.new(target, **args).notification_recipients
end
def self.build_new_release_recipients(*args)
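
Note: the move from `*args` to explicit keyword splats here (and the `**lease_opts` / `**response` splats earlier in this diff) looks motivated by Ruby 2.7's keyword-argument separation; a small standalone illustration with hypothetical method names:

    def legacy(target, *args)
      args      # options arrive as a trailing Hash inside the array
    end

    def explicit(target, **options)
      options   # options arrive as real keyword arguments
    end

    legacy(:merge_request, action: 'change_reviewer')   # => [{:action=>"change_reviewer"}]
    explicit(:merge_request, action: 'change_reviewer') # => {:action=>"change_reviewer"}

    # Forwarding with `new(target, **options)` keeps keywords as keywords end-to-end
    # and avoids the Ruby 2.7 "last argument as keyword parameters" deprecation warning.
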
diff --git a/app/services/notification_recipients/builder/default.rb b/app/services/notification_recipients/builder/default.rb
index 790ce57452c..19527ba84e6 100644
--- a/app/services/notification_recipients/builder/default.rb
+++ b/app/services/notification_recipients/builder/default.rb
@@ -34,6 +34,9 @@ module NotificationRecipients
when :reassign_merge_request, :reassign_issue
add_recipients(previous_assignees, :mention, nil)
add_recipients(target.assignees, :mention, NotificationReason::ASSIGNED)
+ when :change_reviewer_merge_request
+ add_recipients(previous_assignees, :mention, nil)
+ add_recipients(target.reviewers, :mention, NotificationReason::REVIEW_REQUESTED)
end
add_subscribed_users
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index 731d72c41d4..7853ad11c64 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -238,6 +238,33 @@ class NotificationService
end
end
+ # When we change reviewer in a merge_request we should send an email to:
+ #
+ # * merge_request old reviewers if their notification level is not Disabled
+ # * merge_request new reviewers if their notification level is not Disabled
+ # * users with custom level checked with "change reviewer merge request"
+ #
+ def changed_reviewer_of_merge_request(merge_request, current_user, previous_reviewers = [])
+ recipients = NotificationRecipients::BuildService.build_recipients(
+ merge_request,
+ current_user,
+ action: "change_reviewer",
+ previous_assignees: previous_reviewers
+ )
+
+ previous_reviewer_ids = previous_reviewers.map(&:id)
+
+ recipients.each do |recipient|
+ mailer.changed_reviewer_of_merge_request_email(
+ recipient.user.id,
+ merge_request.id,
+ previous_reviewer_ids,
+ current_user.id,
+ recipient.reason
+ ).deliver_later
+ end
+ end
+
# When we add labels to a merge request we should send an email to:
#
# * watchers of the mr's labels
@@ -408,6 +435,10 @@ class NotificationService
mailer.member_invited_email(group_member.real_source_type, group_member.id, token).deliver_later
end
+ def invite_member_reminder(group_member, token, reminder_index)
+ mailer.member_invited_reminder_email(group_member.real_source_type, group_member.id, token, reminder_index).deliver_later
+ end
+
def accept_group_invite(group_member)
mailer.member_invite_accepted_email(group_member.real_source_type, group_member.id).deliver_later
end
diff --git a/app/services/packages/composer/composer_json_service.rb b/app/services/packages/composer/composer_json_service.rb
index 6ffb5a77da3..98aabd84d3d 100644
--- a/app/services/packages/composer/composer_json_service.rb
+++ b/app/services/packages/composer/composer_json_service.rb
@@ -3,6 +3,8 @@
module Packages
module Composer
class ComposerJsonService
+ InvalidJson = Class.new(StandardError)
+
def initialize(project, target)
@project, @target = project, target
end
@@ -20,11 +22,11 @@ module Packages
Gitlab::Json.parse(composer_file.data)
rescue JSON::ParserError
- raise 'Could not parse composer.json file. Invalid JSON.'
+ raise InvalidJson, 'Could not parse composer.json file. Invalid JSON.'
end
def composer_file_not_found!
- raise 'The file composer.json was not found.'
+ raise InvalidJson, 'The file composer.json was not found.'
end
end
end
diff --git a/app/services/packages/create_event_service.rb b/app/services/packages/create_event_service.rb
new file mode 100644
index 00000000000..d009cba2812
--- /dev/null
+++ b/app/services/packages/create_event_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Packages
+ class CreateEventService < BaseService
+ def execute
+ event_scope = scope.is_a?(::Packages::Package) ? scope.package_type : scope
+
+ ::Packages::Event.create!(
+ event_type: event_name,
+ originator: current_user&.id,
+ originator_type: originator_type,
+ event_scope: event_scope
+ )
+ end
+
+ private
+
+ def scope
+ params[:scope]
+ end
+
+ def event_name
+ params[:event_name]
+ end
+
+ def originator_type
+ case current_user
+ when User
+ :user
+ when DeployToken
+ :deploy_token
+ else
+ :guest
+ end
+ end
+ end
+end
diff --git a/app/services/packages/create_package_service.rb b/app/services/packages/create_package_service.rb
index 397a5f74e0a..e3b0ad218e2 100644
--- a/app/services/packages/create_package_service.rb
+++ b/app/services/packages/create_package_service.rb
@@ -10,6 +10,7 @@ module Packages
.with_package_type(package_type)
.safe_find_or_create_by!(name: name, version: version) do |pkg|
pkg.creator = package_creator
+ yield pkg if block_given?
end
end
diff --git a/app/services/packages/generic/create_package_file_service.rb b/app/services/packages/generic/create_package_file_service.rb
new file mode 100644
index 00000000000..4d49c63799f
--- /dev/null
+++ b/app/services/packages/generic/create_package_file_service.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Packages
+ module Generic
+ class CreatePackageFileService < BaseService
+ def execute
+ ::Packages::Package.transaction do
+ create_package_file(find_or_create_package)
+ end
+ end
+
+ private
+
+ def find_or_create_package
+ package_params = {
+ name: params[:package_name],
+ version: params[:package_version],
+ build: params[:build]
+ }
+
+ ::Packages::Generic::FindOrCreatePackageService
+ .new(project, current_user, package_params)
+ .execute
+ end
+
+ def create_package_file(package)
+ file_params = {
+ file: params[:file],
+ size: params[:file].size,
+ file_sha256: params[:file].sha256,
+ file_name: params[:file_name]
+ }
+
+ ::Packages::CreatePackageFileService.new(package, file_params).execute
+ end
+ end
+ end
+end
diff --git a/app/services/packages/generic/find_or_create_package_service.rb b/app/services/packages/generic/find_or_create_package_service.rb
new file mode 100644
index 00000000000..8a8459d167e
--- /dev/null
+++ b/app/services/packages/generic/find_or_create_package_service.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Packages
+ module Generic
+ class FindOrCreatePackageService < ::Packages::CreatePackageService
+ def execute
+ find_or_create_package!(::Packages::Package.package_types['generic']) do |package|
+ if params[:build].present?
+ package.build_info = Packages::BuildInfo.new(pipeline: params[:build].pipeline)
+ end
+ end
+ end
+ end
+ end
+end
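
Putting the two new generic-package services together, a hedged sketch: `project`, `current_user` and `file` (the uploaded artifact, assumed to respond to #size and #sha256) are placeholders, and the name/version values are illustrative only:

    ::Packages::Generic::CreatePackageFileService.new(project, current_user,
      package_name: 'my-release',
      package_version: '1.0.0',
      file: file,
      file_name: 'release.tar.gz',
      build: nil
    ).execute
    # Finds or creates the generic package (attaching build info when a build is given),
    # then stores the file via ::Packages::CreatePackageFileService inside one transaction.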
diff --git a/app/services/personal_access_tokens/revoke_service.rb b/app/services/personal_access_tokens/revoke_service.rb
index 16ba42bd317..17405002d8d 100644
--- a/app/services/personal_access_tokens/revoke_service.rb
+++ b/app/services/personal_access_tokens/revoke_service.rb
@@ -2,11 +2,12 @@
module PersonalAccessTokens
class RevokeService
- attr_reader :token, :current_user
+ attr_reader :token, :current_user, :group
- def initialize(current_user = nil, params = { token: nil })
+ def initialize(current_user = nil, params = { token: nil, group: nil })
@current_user = current_user
@token = params[:token]
+ @group = params[:group]
end
def execute
@@ -34,3 +35,5 @@ module PersonalAccessTokens
end
end
end
+
+PersonalAccessTokens::RevokeService.prepend_if_ee('EE::PersonalAccessTokens::RevokeService')
diff --git a/app/services/pod_logs/base_service.rb b/app/services/pod_logs/base_service.rb
index 8936f9b67a5..e4b6ad31e33 100644
--- a/app/services/pod_logs/base_service.rb
+++ b/app/services/pod_logs/base_service.rb
@@ -10,6 +10,8 @@ module PodLogs
CACHE_KEY_GET_POD_LOG = 'get_pod_log'
K8S_NAME_MAX_LENGTH = 253
+ self.reactive_cache_work_type = :external_dependency
+
def id
cluster.id
end
diff --git a/app/services/pod_logs/elasticsearch_service.rb b/app/services/pod_logs/elasticsearch_service.rb
index f79562c8ab3..58d1bfbf835 100644
--- a/app/services/pod_logs/elasticsearch_service.rb
+++ b/app/services/pod_logs/elasticsearch_service.rb
@@ -11,7 +11,6 @@ module PodLogs
:pod_logs,
:filter_return_keys
- self.reactive_cache_work_type = :external_dependency
self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
private
diff --git a/app/services/pod_logs/kubernetes_service.rb b/app/services/pod_logs/kubernetes_service.rb
index b573ceae1aa..03b84f98973 100644
--- a/app/services/pod_logs/kubernetes_service.rb
+++ b/app/services/pod_logs/kubernetes_service.rb
@@ -17,7 +17,6 @@ module PodLogs
:split_logs,
:filter_return_keys
- self.reactive_cache_work_type = :external_dependency
self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
private
diff --git a/app/services/projects/alerting/notify_service.rb b/app/services/projects/alerting/notify_service.rb
index bfce5f1ad63..affac45fc3d 100644
--- a/app/services/projects/alerting/notify_service.rb
+++ b/app/services/projects/alerting/notify_service.rb
@@ -7,43 +7,34 @@ module Projects
include ::IncidentManagement::Settings
def execute(token)
+ return bad_request unless valid_payload_size?
return forbidden unless alerts_service_activated?
return unauthorized unless valid_token?(token)
- alert = process_alert
+ process_alert
return bad_request unless alert.persisted?
- process_incident_issues(alert) if process_issues?
+ process_incident_issues if process_issues?
send_alert_email if send_email?
ServiceResponse.success
- rescue Gitlab::Alerting::NotificationPayloadParser::BadPayloadError
- bad_request
end
private
delegate :alerts_service, :alerts_service_activated?, to: :project
- def am_alert_params
- strong_memoize(:am_alert_params) do
- Gitlab::AlertManagement::AlertParams.from_generic_alert(project: project, payload: params.to_h)
- end
- end
-
def process_alert
- existing_alert = find_alert_by_fingerprint(am_alert_params[:fingerprint])
-
- if existing_alert
- process_existing_alert(existing_alert)
+ if alert.persisted?
+ process_existing_alert
else
create_alert
end
end
- def process_existing_alert(alert)
- if am_alert_params[:ended_at].present?
- process_resolved_alert(alert)
+ def process_existing_alert
+ if incoming_payload.ends_at.present?
+ process_resolved_alert
else
alert.register_new_event!
end
@@ -51,10 +42,10 @@ module Projects
alert
end
- def process_resolved_alert(alert)
+ def process_resolved_alert
return unless auto_close_incident?
- if alert.resolve(am_alert_params[:ended_at])
+ if alert.resolve(incoming_payload.ends_at)
close_issue(alert.issue)
end
@@ -72,20 +63,16 @@ module Projects
end
def create_alert
- alert = AlertManagement::Alert.create(am_alert_params.except(:ended_at))
- alert.execute_services if alert.persisted?
- SystemNoteService.create_new_alert(alert, 'Generic Alert Endpoint')
-
- alert
- end
-
- def find_alert_by_fingerprint(fingerprint)
- return unless fingerprint
+ return unless alert.save
- AlertManagement::Alert.not_resolved.for_fingerprint(project, fingerprint).first
+ alert.execute_services
+ SystemNoteService.create_new_alert(
+ alert,
+ alert.monitoring_tool || 'Generic Alert Endpoint'
+ )
end
- def process_incident_issues(alert)
+ def process_incident_issues
return if alert.issue
::IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id)
@@ -94,11 +81,33 @@ module Projects
def send_alert_email
notification_service
.async
- .prometheus_alerts_fired(project, [parsed_payload])
+ .prometheus_alerts_fired(project, [alert.attributes])
+ end
+
+ def alert
+ strong_memoize(:alert) do
+ existing_alert || new_alert
+ end
+ end
+
+ def existing_alert
+ return unless incoming_payload.gitlab_fingerprint
+
+ AlertManagement::Alert.not_resolved.for_fingerprint(project, incoming_payload.gitlab_fingerprint).first
+ end
+
+ def new_alert
+ AlertManagement::Alert.new(**incoming_payload.alert_params, ended_at: nil)
+ end
+
+ def incoming_payload
+ strong_memoize(:incoming_payload) do
+ Gitlab::AlertManagement::Payload.parse(project, params.to_h)
+ end
end
- def parsed_payload
- Gitlab::Alerting::NotificationPayloadParser.call(params.to_h, project)
+ def valid_payload_size?
+ Gitlab::Utils::DeepSize.new(params).valid?
end
def valid_token?(token)
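
To summarise the refactor above: the parsed payload is now memoized and drives both the fingerprint lookup and alert construction. A condensed sketch using the same calls as the diff:

    payload  = Gitlab::AlertManagement::Payload.parse(project, params.to_h)
    existing = AlertManagement::Alert.not_resolved
                 .for_fingerprint(project, payload.gitlab_fingerprint)
                 .first
    alert    = existing || AlertManagement::Alert.new(**payload.alert_params, ended_at: nil)
    # An existing alert registers a new event (or is resolved when ends_at is present);
    # a new alert is saved, executes services and receives a system note.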
diff --git a/app/services/projects/container_repository/cleanup_tags_service.rb b/app/services/projects/container_repository/cleanup_tags_service.rb
index 31500043544..b8047a1ad71 100644
--- a/app/services/projects/container_repository/cleanup_tags_service.rb
+++ b/app/services/projects/container_repository/cleanup_tags_service.rb
@@ -4,7 +4,6 @@ module Projects
module ContainerRepository
class CleanupTagsService < BaseService
def execute(container_repository)
- return error('feature disabled') unless can_use?
return error('access denied') unless can_destroy?
return error('invalid regex') unless valid_regex?
@@ -74,10 +73,6 @@ module Projects
can?(current_user, :destroy_container_image, project)
end
- def can_use?
- Feature.enabled?(:container_registry_cleanup, project, default_enabled: true)
- end
-
def valid_regex?
%w(name_regex_delete name_regex name_regex_keep).each do |param_name|
regex = params[param_name]
diff --git a/app/services/projects/container_repository/delete_tags_service.rb b/app/services/projects/container_repository/delete_tags_service.rb
index fa3de4ae2ac..9fc3ec0aafb 100644
--- a/app/services/projects/container_repository/delete_tags_service.rb
+++ b/app/services/projects/container_repository/delete_tags_service.rb
@@ -26,9 +26,7 @@ module Projects
end
def delete_service
- fast_delete_enabled = Feature.enabled?(:container_registry_fast_tag_delete, default_enabled: true)
-
- if fast_delete_enabled && @container_repository.client.supports_tag_delete?
+ if @container_repository.client.supports_tag_delete?
::Projects::ContainerRepository::Gitlab::DeleteTagsService.new(@container_repository, @tag_names)
else
::Projects::ContainerRepository::ThirdParty::DeleteTagsService.new(@container_repository, @tag_names)
diff --git a/app/services/projects/create_from_template_service.rb b/app/services/projects/create_from_template_service.rb
index a207fd2c574..45b52a1861c 100644
--- a/app/services/projects/create_from_template_service.rb
+++ b/app/services/projects/create_from_template_service.rb
@@ -14,10 +14,16 @@ module Projects
def execute
return project unless validate_template!
- file = built_in_template&.file
+ file = built_in_template&.file || sample_data_template&.file
override_params = params.dup
- params[:file] = file
+
+ if built_in_template
+ params[:file] = built_in_template.file
+ elsif sample_data_template
+ params[:file] = sample_data_template.file
+ params[:sample_data] = true
+ end
GitlabProjectsImportService.new(current_user, params, override_params).execute
ensure
@@ -27,7 +33,7 @@ module Projects
private
def validate_template!
- return true if built_in_template
+ return true if built_in_template || sample_data_template
project.errors.add(:template_name, _("'%{template_name}' is unknown or invalid" % { template_name: template_name }))
false
@@ -39,6 +45,12 @@ module Projects
end
end
+ def sample_data_template
+ strong_memoize(:sample_data_template) do
+ Gitlab::SampleDataTemplate.find(template_name)
+ end
+ end
+
def project
@project ||= ::Project.new(namespace_id: params[:namespace_id])
end
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 68b40fdd8f1..8f18a23aa0f 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -19,6 +19,10 @@ module Projects
@project = Project.new(params)
+ # If a project is newly created it should have shared runners settings
+ # based on whether its group has them enabled. This acts as the "default value".
+ @project.shared_runners_enabled = false if !params.key?(:shared_runners_enabled) && @project.group && @project.group.shared_runners_setting != 'enabled'
+
# Make sure that the user is allowed to use the specified visibility level
if project_visibility.restricted?
deny_visibility_level(@project, project_visibility.visibility_level)
@@ -143,7 +147,7 @@ module Projects
def create_readme
commit_attrs = {
- branch_name: Gitlab::CurrentSettings.default_branch_name.presence || 'master',
+ branch_name: @project.default_branch || 'master',
commit_message: 'Initial commit',
file_path: 'README.md',
file_content: "# #{@project.name}\n\n#{@project.description}"
@@ -161,10 +165,9 @@ module Projects
@project.create_or_update_import_data(data: @import_data[:data], credentials: @import_data[:credentials]) if @import_data
if @project.save
- unless @project.gitlab_project_import?
- create_services_from_active_instances_or_templates(@project)
- @project.create_labels
- end
+ Service.create_from_active_default_integrations(@project, :project_id, with_templates: true)
+
+ @project.create_labels unless @project.gitlab_project_import?
unless @project.import?
raise 'Failed to create repository' unless @project.create_repository
@@ -228,15 +231,6 @@ module Projects
private
- # rubocop: disable CodeReuse/ActiveRecord
- def create_services_from_active_instances_or_templates(project)
- Service.active.where(instance: true).or(Service.active.where(template: true)).group_by(&:type).each do |type, records|
- service = records.find(&:instance?) || records.find(&:template?)
- Service.build_from_integration(project.id, service).save!
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
def project_namespace
@project_namespace ||= Namespace.find_by_id(@params[:namespace_id]) || current_user.namespace
end
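
A behaviour sketch of the new shared-runners default (the `user` and `group` variables and attribute values are assumptions, not application code):

    project = Projects::CreateService.new(user, name: 'demo', namespace_id: group.id).execute
    # When the caller does not pass :shared_runners_enabled and the parent group's
    # shared_runners_setting is anything other than 'enabled', the project starts
    # with shared runners disabled.
    project.shared_runners_enabled # => false in that case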
diff --git a/app/services/projects/gitlab_projects_import_service.rb b/app/services/projects/gitlab_projects_import_service.rb
index 2e192942b9c..27cce15f97d 100644
--- a/app/services/projects/gitlab_projects_import_service.rb
+++ b/app/services/projects/gitlab_projects_import_service.rb
@@ -66,6 +66,7 @@ module Projects
end
if template_file
+ data[:sample_data] = params.delete(:sample_data) if params.key?(:sample_data)
params[:import_type] = 'gitlab_project'
end
diff --git a/app/services/projects/operations/update_service.rb b/app/services/projects/operations/update_service.rb
index 7af489c3751..7dfe7fffa1b 100644
--- a/app/services/projects/operations/update_service.rb
+++ b/app/services/projects/operations/update_service.rb
@@ -18,6 +18,7 @@ module Projects
.merge(grafana_integration_params)
.merge(prometheus_integration_params)
.merge(incident_management_setting_params)
+ .merge(tracing_setting_params)
end
def alerting_setting_params
@@ -121,6 +122,15 @@ module Projects
{ incident_management_setting_attributes: attrs }
end
+
+ def tracing_setting_params
+ attr = params[:tracing_setting_attributes]
+ return {} unless attr
+
+ destroy = attr[:external_url].blank?
+
+ { tracing_setting_attributes: attr.merge(_destroy: destroy) }
+ end
end
end
end
diff --git a/app/services/projects/overwrite_project_service.rb b/app/services/projects/overwrite_project_service.rb
index 958a00afbb8..e681b5643ee 100644
--- a/app/services/projects/overwrite_project_service.rb
+++ b/app/services/projects/overwrite_project_service.rb
@@ -32,12 +32,12 @@ module Projects
def move_before_destroy_relationships(source_project)
options = { remove_remaining_elements: false }
- ::Projects::MoveUsersStarProjectsService.new(@project, @current_user).execute(source_project, options)
- ::Projects::MoveAccessService.new(@project, @current_user).execute(source_project, options)
- ::Projects::MoveDeployKeysProjectsService.new(@project, @current_user).execute(source_project, options)
- ::Projects::MoveNotificationSettingsService.new(@project, @current_user).execute(source_project, options)
- ::Projects::MoveForksService.new(@project, @current_user).execute(source_project, options)
- ::Projects::MoveLfsObjectsProjectsService.new(@project, @current_user).execute(source_project, options)
+ ::Projects::MoveUsersStarProjectsService.new(@project, @current_user).execute(source_project, **options)
+ ::Projects::MoveAccessService.new(@project, @current_user).execute(source_project, **options)
+ ::Projects::MoveDeployKeysProjectsService.new(@project, @current_user).execute(source_project, **options)
+ ::Projects::MoveNotificationSettingsService.new(@project, @current_user).execute(source_project, **options)
+ ::Projects::MoveForksService.new(@project, @current_user).execute(source_project, **options)
+ ::Projects::MoveLfsObjectsProjectsService.new(@project, @current_user).execute(source_project, **options)
add_source_project_to_fork_network(source_project)
end
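
The switch to `**options` matters under Ruby 2.7+ keyword-argument separation; a self-contained sketch (not the real service interface):

    def move(source, remove_remaining_elements: true)
      [source, remove_remaining_elements]
    end

    options = { remove_remaining_elements: false }
    move('project', **options) # => ["project", false]
    # move('project', options) warns on Ruby 2.7 and raises ArgumentError on 3.0+,
    # because the Hash is treated as a second positional argument.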
diff --git a/app/services/projects/prometheus/alerts/notify_service.rb b/app/services/projects/prometheus/alerts/notify_service.rb
index d32ead76d00..c002aca32db 100644
--- a/app/services/projects/prometheus/alerts/notify_service.rb
+++ b/app/services/projects/prometheus/alerts/notify_service.rb
@@ -125,7 +125,7 @@ module Projects
notification_service
.async
- .prometheus_alerts_fired(project, firings)
+ .prometheus_alerts_fired(project, alerts_attributes)
end
def process_prometheus_alerts
@@ -136,6 +136,18 @@ module Projects
end
end
+ def alerts_attributes
+ firings.map do |payload|
+ alert_params = Gitlab::AlertManagement::Payload.parse(
+ project,
+ payload,
+ monitoring_tool: Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus]
+ ).alert_params
+
+ AlertManagement::Alert.new(alert_params).attributes
+ end
+ end
+
def bad_request
ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
end
diff --git a/app/services/projects/transfer_service.rb b/app/services/projects/transfer_service.rb
index dba5177718d..013861631a1 100644
--- a/app/services/projects/transfer_service.rb
+++ b/app/services/projects/transfer_service.rb
@@ -88,6 +88,10 @@ module Projects
# Move uploads
move_project_uploads(project)
+ # If a project is being transferred to another group it means it can already
+ # have shared runners enabled but we need to check whether the new group allows that.
+ project.shared_runners_enabled = false if project.group && project.group.shared_runners_setting == 'disabled_and_unoverridable'
+
project.old_path_with_namespace = @old_path
update_repository_configuration(@new_path)
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index ea37f2e4ec0..64b9eca9014 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -97,6 +97,7 @@ module Projects
build.artifacts_file.use_file do |artifacts_path|
SafeZip::Extract.new(artifacts_path)
.extract(directories: [PUBLIC_DIR], to: temp_path)
+ create_pages_deployment(artifacts_path)
end
rescue SafeZip::Extract::Error => e
raise FailedToExtractError, e.message
@@ -118,6 +119,21 @@ module Projects
FileUtils.rm_r(previous_public_path, force: true)
end
+ def create_pages_deployment(artifacts_path)
+ return unless Feature.enabled?(:zip_pages_deployments, project)
+
+ File.open(artifacts_path) do |file|
+ deployment = project.pages_deployments.create!(file: file)
+ project.pages_metadatum.update!(pages_deployment: deployment)
+ end
+
+ # TODO: schedule old deployment removal https://gitlab.com/gitlab-org/gitlab/-/issues/235730
+ rescue => e
+ # we don't want to break the current Pages deployment process if something goes wrong
+ # TODO: remove this rescue as part of https://gitlab.com/gitlab-org/gitlab/-/issues/245308
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
+ end
+
def latest?
# check if sha for the ref is still the most recent one
# this helps in case multiple deployments happen
diff --git a/app/services/projects/update_remote_mirror_service.rb b/app/services/projects/update_remote_mirror_service.rb
index 5c41f00aac2..6115db54829 100644
--- a/app/services/projects/update_remote_mirror_service.rb
+++ b/app/services/projects/update_remote_mirror_service.rb
@@ -2,12 +2,14 @@
module Projects
class UpdateRemoteMirrorService < BaseService
+ include Gitlab::Utils::StrongMemoize
+
MAX_TRIES = 3
def execute(remote_mirror, tries)
return success unless remote_mirror.enabled?
- if Gitlab::UrlBlocker.blocked_url?(CGI.unescape(Gitlab::UrlSanitizer.sanitize(remote_mirror.url)))
+ if Gitlab::UrlBlocker.blocked_url?(normalized_url(remote_mirror.url))
return error("The remote mirror URL is invalid.")
end
@@ -27,6 +29,12 @@ module Projects
private
+ def normalized_url(url)
+ strong_memoize(:normalized_url) do
+ CGI.unescape(Gitlab::UrlSanitizer.sanitize(url))
+ end
+ end
+
def update_mirror(remote_mirror)
remote_mirror.update_start!
remote_mirror.ensure_remote!
@@ -47,7 +55,6 @@ module Projects
end
def send_lfs_objects!(remote_mirror)
- return unless Feature.enabled?(:push_mirror_syncs_lfs, project)
return unless project.lfs_enabled?
# TODO: Support LFS sync over SSH
diff --git a/app/services/quick_actions/target_service.rb b/app/services/quick_actions/target_service.rb
index 4273acfbf8b..a465632ccfb 100644
--- a/app/services/quick_actions/target_service.rb
+++ b/app/services/quick_actions/target_service.rb
@@ -17,12 +17,16 @@ module QuickActions
# rubocop: disable CodeReuse/ActiveRecord
def issue(type_id)
+ return project.issues.build if type_id.nil?
+
IssuesFinder.new(current_user, project_id: project.id).find_by(iid: type_id) || project.issues.build
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def merge_request(type_id)
+ return project.merge_requests.build if type_id.nil?
+
MergeRequestsFinder.new(current_user, project_id: project.id).find_by(iid: type_id) || project.merge_requests.build
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/releases/base_service.rb b/app/services/releases/base_service.rb
new file mode 100644
index 00000000000..15d040287a3
--- /dev/null
+++ b/app/services/releases/base_service.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+module Releases
+ class BaseService
+ include BaseServiceUtility
+ include Gitlab::Utils::StrongMemoize
+
+ attr_accessor :project, :current_user, :params
+
+ def initialize(project, user = nil, params = {})
+ @project, @current_user, @params = project, user, params.dup
+ end
+
+ delegate :repository, to: :project
+
+ def tag_name
+ params[:tag]
+ end
+
+ def ref
+ params[:ref]
+ end
+
+ def name
+ params[:name] || tag_name
+ end
+
+ def description
+ params[:description]
+ end
+
+ def released_at
+ params[:released_at]
+ end
+
+ def release
+ strong_memoize(:release) do
+ project.releases.find_by_tag(tag_name)
+ end
+ end
+
+ def existing_tag
+ strong_memoize(:existing_tag) do
+ repository.find_tag(tag_name)
+ end
+ end
+
+ def tag_exist?
+ existing_tag.present?
+ end
+
+ def repository
+ strong_memoize(:repository) do
+ project.repository
+ end
+ end
+
+ def milestones
+ return [] unless param_for_milestone_titles_provided?
+
+ strong_memoize(:milestones) do
+ MilestonesFinder.new(
+ project: project,
+ current_user: current_user,
+ project_ids: Array(project.id),
+ group_ids: Array(project_group_id),
+ state: 'all',
+ title: params[:milestones]
+ ).execute
+ end
+ end
+
+ def inexistent_milestones
+ return [] unless param_for_milestone_titles_provided?
+
+ existing_milestone_titles = milestones.map(&:title)
+ Array(params[:milestones]) - existing_milestone_titles
+ end
+
+ def param_for_milestone_titles_provided?
+ params.key?(:milestones)
+ end
+
+ # overridden in EE
+ def project_group_id; end
+ end
+end
+
+Releases::BaseService.prepend_if_ee('EE::Releases::BaseService')
diff --git a/app/services/releases/concerns.rb b/app/services/releases/concerns.rb
deleted file mode 100644
index a0ebaea77c8..00000000000
--- a/app/services/releases/concerns.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-module Releases
- module Concerns
- extend ActiveSupport::Concern
- include Gitlab::Utils::StrongMemoize
-
- included do
- def tag_name
- params[:tag]
- end
-
- def ref
- params[:ref]
- end
-
- def name
- params[:name] || tag_name
- end
-
- def description
- params[:description]
- end
-
- def released_at
- params[:released_at]
- end
-
- def release
- strong_memoize(:release) do
- project.releases.find_by_tag(tag_name)
- end
- end
-
- def existing_tag
- strong_memoize(:existing_tag) do
- repository.find_tag(tag_name)
- end
- end
-
- def tag_exist?
- existing_tag.present?
- end
-
- def repository
- strong_memoize(:repository) do
- project.repository
- end
- end
-
- def milestones
- return [] unless param_for_milestone_titles_provided?
-
- strong_memoize(:milestones) do
- MilestonesFinder.new(
- project: project,
- current_user: current_user,
- project_ids: Array(project.id),
- state: 'all',
- title: params[:milestones]
- ).execute
- end
- end
-
- def inexistent_milestones
- return [] unless param_for_milestone_titles_provided?
-
- existing_milestone_titles = milestones.map(&:title)
- Array(params[:milestones]) - existing_milestone_titles
- end
-
- def param_for_milestone_titles_provided?
- params.key?(:milestones)
- end
- end
- end
-end
diff --git a/app/services/releases/create_evidence_service.rb b/app/services/releases/create_evidence_service.rb
index 9c370722d2c..78b6d77c2cb 100644
--- a/app/services/releases/create_evidence_service.rb
+++ b/app/services/releases/create_evidence_service.rb
@@ -12,7 +12,7 @@ module Releases
summary = ::Evidences::EvidenceSerializer.new.represent(evidence, evidence_options) # rubocop: disable CodeReuse/Serializer
evidence.summary = summary
- # TODO: fix the sha generating https://gitlab.com/gitlab-org/gitlab/-/issues/209000
+ # TODO: fix the sha generation https://gitlab.com/groups/gitlab-org/-/epics/3683
evidence.summary_sha = Gitlab::CryptoHelper.sha256(summary)
evidence.save!
diff --git a/app/services/releases/create_service.rb b/app/services/releases/create_service.rb
index 92a0b875dd4..887c2d355ee 100644
--- a/app/services/releases/create_service.rb
+++ b/app/services/releases/create_service.rb
@@ -1,9 +1,7 @@
# frozen_string_literal: true
module Releases
- class CreateService < BaseService
- include Releases::Concerns
-
+ class CreateService < Releases::BaseService
def execute
return error('Access Denied', 403) unless allowed?
return error('Release already exists', 409) if release
diff --git a/app/services/releases/destroy_service.rb b/app/services/releases/destroy_service.rb
index f9f6101abdd..8abf9308689 100644
--- a/app/services/releases/destroy_service.rb
+++ b/app/services/releases/destroy_service.rb
@@ -1,9 +1,7 @@
# frozen_string_literal: true
module Releases
- class DestroyService < BaseService
- include Releases::Concerns
-
+ class DestroyService < Releases::BaseService
def execute
return error('Release does not exist', 404) unless release
return error('Access Denied', 403) unless allowed?
diff --git a/app/services/releases/update_service.rb b/app/services/releases/update_service.rb
index a452f7aa17a..4786d35f31e 100644
--- a/app/services/releases/update_service.rb
+++ b/app/services/releases/update_service.rb
@@ -1,9 +1,7 @@
# frozen_string_literal: true
module Releases
- class UpdateService < BaseService
- include Releases::Concerns
-
+ class UpdateService < Releases::BaseService
def execute
return error('Tag does not exist', 404) unless existing_tag
return error('Release does not exist', 404) unless release
@@ -16,10 +14,18 @@ module Releases
params[:milestones] = milestones
end
- if release.update(params)
- success(tag: existing_tag, release: release, milestones_updated: milestones_updated?(previous_milestones))
- else
- error(release.errors.messages || '400 Bad request', 400)
+ # a transaction is needed because Rails applies `save!` to milestone_releases
+ # during assign_attributes rather than at the actual save,
+ # which can raise a validation error
+ # see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/43385
+ ActiveRecord::Base.transaction do
+ if release.update(params)
+ success(tag: existing_tag, release: release, milestones_updated: milestones_updated?(previous_milestones))
+ else
+ error(release.errors.messages || '400 Bad request', 400)
+ end
+ rescue ActiveRecord::RecordInvalid => e
+ error(e.message || '400 Bad request', 400)
end
end
diff --git a/app/services/repository_archive_clean_up_service.rb b/app/services/repository_archive_clean_up_service.rb
index 99a9c834352..0fb7dfdb85f 100644
--- a/app/services/repository_archive_clean_up_service.rb
+++ b/app/services/repository_archive_clean_up_service.rb
@@ -1,8 +1,23 @@
# frozen_string_literal: true
+# RepositoryArchiveCleanUpService removes cached repository archives
+# that are generated on-the-fly by Gitaly. These files are stored in the
+# following form (as defined in lib/gitlab/git/repository.rb) and served
+# by GitLab Workhorse:
+#
+# /path/to/repository/downloads/project-N/sha/@v2/archive.format
+#
+# Legacy paths omit the @v2 prefix.
+#
+# For example:
+#
+# /var/opt/gitlab/gitlab-rails/shared/cache/archive/project-1/master/@v2/archive.zip
class RepositoryArchiveCleanUpService
LAST_MODIFIED_TIME_IN_MINUTES = 120
+ # For `/path/project-N/sha/@v2/archive.zip`, `find /path -maxdepth 4` will find this file
+ MAX_ARCHIVE_DEPTH = 4
+
attr_reader :mmin, :path
def initialize(mmin = LAST_MODIFIED_TIME_IN_MINUTES)
@@ -22,12 +37,15 @@ class RepositoryArchiveCleanUpService
private
def clean_up_old_archives
- run(%W(find #{path} -mindepth 1 -maxdepth 3 -type f \( -name \*.tar -o -name \*.bz2 -o -name \*.tar.gz -o -name \*.zip \) -mmin +#{mmin} -delete))
+ run(%W(find #{path} -mindepth 1 -maxdepth #{MAX_ARCHIVE_DEPTH} -type f \( -name \*.tar -o -name \*.bz2 -o -name \*.tar.gz -o -name \*.zip \) -mmin +#{mmin} -delete))
end
def clean_up_empty_directories
- run(%W(find #{path} -mindepth 2 -maxdepth 2 -type d -empty -delete))
- run(%W(find #{path} -mindepth 1 -maxdepth 1 -type d -empty -delete))
+ (1...MAX_ARCHIVE_DEPTH).reverse_each { |depth| clean_up_empty_directories_with_depth(depth) }
+ end
+
+ def clean_up_empty_directories_with_depth(depth)
+ run(%W(find #{path} -mindepth #{depth} -maxdepth #{depth} -type d -empty -delete))
end
def run(cmd)
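
A standalone sketch of the deepest-first directory pruning introduced above; the path is illustrative:

    path = '/var/opt/gitlab/gitlab-rails/shared/cache/archive'

    (1...4).reverse_each do |depth|
      puts %W(find #{path} -mindepth #{depth} -maxdepth #{depth} -type d -empty -delete).join(' ')
    end
    # => find .../archive -mindepth 3 -maxdepth 3 -type d -empty -delete
    # => find .../archive -mindepth 2 -maxdepth 2 -type d -empty -delete
    # => find .../archive -mindepth 1 -maxdepth 1 -type d -empty -delete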
diff --git a/app/services/resource_access_tokens/create_service.rb b/app/services/resource_access_tokens/create_service.rb
index c253154c1b7..cdeb57627a8 100644
--- a/app/services/resource_access_tokens/create_service.rb
+++ b/app/services/resource_access_tokens/create_service.rb
@@ -10,7 +10,6 @@ module ResourceAccessTokens
end
def execute
- return unless feature_enabled?
return error("User does not have permission to create #{resource_type} Access Token") unless has_permission_to_create?
user = create_user
@@ -31,21 +30,8 @@ module ResourceAccessTokens
attr_reader :resource_type, :resource
- def feature_enabled?
- return false if ::Gitlab.com?
-
- ::Feature.enabled?(:resource_access_token, resource, default_enabled: true)
- end
-
def has_permission_to_create?
- case resource_type
- when 'project'
- can?(current_user, :admin_project, resource)
- when 'group'
- can?(current_user, :admin_group, resource)
- else
- false
- end
+ %w(project group).include?(resource_type) && can?(current_user, :admin_resource_access_tokens, resource)
end
def create_user
@@ -103,7 +89,7 @@ module ResourceAccessTokens
end
def provision_access(resource, user)
- resource.add_maintainer(user)
+ resource.add_user(user, :maintainer, expires_at: params[:expires_at])
end
def error(message)
diff --git a/app/services/resource_access_tokens/revoke_service.rb b/app/services/resource_access_tokens/revoke_service.rb
index efeb0bfb8d5..ece928dac31 100644
--- a/app/services/resource_access_tokens/revoke_service.rb
+++ b/app/services/resource_access_tokens/revoke_service.rb
@@ -14,18 +14,15 @@ module ResourceAccessTokens
end
def execute
+ return error("#{current_user.name} cannot delete #{bot_user.name}") unless can_destroy_bot_member?
return error("Failed to find bot user") unless find_member
- PersonalAccessToken.transaction do
- access_token.revoke!
+ access_token.revoke!
- raise RevokeAccessTokenError, "Failed to remove #{bot_user.name} member from: #{resource.name}" unless remove_member
+ destroy_bot_user
- raise RevokeAccessTokenError, "Migration to ghost user failed" unless migrate_to_ghost_user
- end
-
- success("Revoked access token: #{access_token.name}")
- rescue ActiveRecord::ActiveRecordError, RevokeAccessTokenError => error
+ success("Access token #{access_token.name} has been revoked and the bot user has been scheduled for deletion.")
+ rescue StandardError => error
log_error("Failed to revoke access token for #{bot_user.name}: #{error.message}")
error(error.message)
end
@@ -34,12 +31,18 @@ module ResourceAccessTokens
attr_reader :current_user, :access_token, :bot_user, :resource
- def remove_member
- ::Members::DestroyService.new(current_user).execute(find_member, destroy_bot: true)
+ def destroy_bot_user
+ DeleteUserWorker.perform_async(current_user.id, bot_user.id, skip_authorization: true)
end
- def migrate_to_ghost_user
- ::Users::MigrateToGhostUserService.new(bot_user).execute
+ def can_destroy_bot_member?
+ if resource.is_a?(Project)
+ can?(current_user, :admin_project_member, @resource)
+ elsif resource.is_a?(Group)
+ can?(current_user, :admin_group_member, @resource)
+ else
+ false
+ end
end
def find_member
diff --git a/app/services/search/global_service.rb b/app/services/search/global_service.rb
index fab02697cf0..5f80b07aa59 100644
--- a/app/services/search/global_service.rb
+++ b/app/services/search/global_service.rb
@@ -4,6 +4,8 @@ module Search
class GlobalService
include Gitlab::Utils::StrongMemoize
+ ALLOWED_SCOPES = %w(issues merge_requests milestones users).freeze
+
attr_accessor :current_user, :params
def initialize(user, params)
@@ -14,7 +16,8 @@ module Search
Gitlab::SearchResults.new(current_user,
params[:search],
projects,
- filters: { state: params[:state] })
+ sort: params[:sort],
+ filters: { state: params[:state], confidential: params[:confidential] })
end
def projects
@@ -22,10 +25,7 @@ module Search
end
def allowed_scopes
- strong_memoize(:allowed_scopes) do
- allowed_scopes = %w[issues merge_requests milestones]
- allowed_scopes << 'users' if Feature.enabled?(:users_search, default_enabled: true)
- end
+ ALLOWED_SCOPES
end
def scope
diff --git a/app/services/search/group_service.rb b/app/services/search/group_service.rb
index 68778aa2768..e17522dcd68 100644
--- a/app/services/search/group_service.rb
+++ b/app/services/search/group_service.rb
@@ -16,7 +16,8 @@ module Search
params[:search],
projects,
group: group,
- filters: { state: params[:state] }
+ sort: params[:sort],
+ filters: { state: params[:state], confidential: params[:confidential] }
)
end
diff --git a/app/services/search/project_service.rb b/app/services/search/project_service.rb
index 5eba909c23b..d32534303be 100644
--- a/app/services/search/project_service.rb
+++ b/app/services/search/project_service.rb
@@ -2,6 +2,10 @@
module Search
class ProjectService
+ include Gitlab::Utils::StrongMemoize
+
+ ALLOWED_SCOPES = %w(notes issues merge_requests milestones wiki_blobs commits users).freeze
+
attr_accessor :project, :current_user, :params
def initialize(project, user, params)
@@ -13,15 +17,18 @@ module Search
params[:search],
project: project,
repository_ref: params[:repository_ref],
- filters: { state: params[:state] })
+ sort: params[:sort],
+ filters: { confidential: params[:confidential], state: params[:state] }
+ )
end
- def scope
- @scope ||= begin
- allowed_scopes = %w[notes issues merge_requests milestones wiki_blobs commits]
- allowed_scopes << 'users' if Feature.enabled?(:users_search, default_enabled: true)
+ def allowed_scopes
+ ALLOWED_SCOPES
+ end
- allowed_scopes.delete(params[:scope]) { 'blobs' }
+ def scope
+ strong_memoize(:scope) do
+ allowed_scopes.include?(params[:scope]) ? params[:scope] : 'blobs'
end
end
end
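
Scope resolution after this change, assuming `project` and `user` are in scope (values illustrative):

    Search::ProjectService.new(project, user, scope: 'commits').scope # => "commits"
    Search::ProjectService.new(project, user, scope: 'epics').scope   # => "blobs" (fallback)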
diff --git a/app/services/search_service.rb b/app/services/search_service.rb
index 278cf389e07..3ccd67c8d30 100644
--- a/app/services/search_service.rb
+++ b/app/services/search_service.rb
@@ -65,6 +65,10 @@ class SearchService
@search_objects ||= redact_unauthorized_results(search_results.objects(scope, page: params[:page], per_page: per_page, preload_method: preload_method))
end
+ def search_highlight
+ search_results.highlight_map(scope)
+ end
+
private
def per_page
diff --git a/app/services/snippets/base_service.rb b/app/services/snippets/base_service.rb
index 53a04e5a398..278857b7933 100644
--- a/app/services/snippets/base_service.rb
+++ b/app/services/snippets/base_service.rb
@@ -4,6 +4,9 @@ module Snippets
class BaseService < ::BaseService
include SpamCheckMethods
+ UPDATE_COMMIT_MSG = 'Update snippet'
+ INITIAL_COMMIT_MSG = 'Initial commit'
+
CreateRepositoryError = Class.new(StandardError)
attr_reader :uploaded_assets, :snippet_actions
@@ -85,5 +88,20 @@ module Snippets
def restricted_files_actions
nil
end
+
+ def commit_attrs(snippet, msg)
+ {
+ branch_name: snippet.default_branch,
+ message: msg
+ }
+ end
+
+ def delete_repository(snippet)
+ snippet.repository.remove
+ snippet.snippet_repository&.delete
+
+ # Purge any existing value for repository_exists?
+ snippet.repository.expire_exists_cache
+ end
end
end
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index 5c9b2eb1aea..d7181883c39 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -59,7 +59,7 @@ module Snippets
log_error(e.message)
# If the commit action failed we need to remove the repository if exists
- @snippet.repository.remove if @snippet.repository_exists?
+ delete_repository(@snippet) if @snippet.repository_exists?
# If the snippet was created, we need to remove it as we
# would do like if it had had any validation error
@@ -81,12 +81,9 @@ module Snippets
end
def create_commit
- commit_attrs = {
- branch_name: @snippet.default_branch,
- message: 'Initial commit'
- }
+ attrs = commit_attrs(@snippet, INITIAL_COMMIT_MSG)
- @snippet.snippet_repository.multi_files_action(current_user, files_to_commit(@snippet), commit_attrs)
+ @snippet.snippet_repository.multi_files_action(current_user, files_to_commit(@snippet), **attrs)
end
def move_temporary_files
diff --git a/app/services/snippets/repository_validation_service.rb b/app/services/snippets/repository_validation_service.rb
index 5bf5e692ef4..7e9b2eded16 100644
--- a/app/services/snippets/repository_validation_service.rb
+++ b/app/services/snippets/repository_validation_service.rb
@@ -52,7 +52,7 @@ module Snippets
def check_file_count!
file_count = repository.ls_files(snippet.default_branch).size
- limit = Snippet.max_file_limit(current_user)
+ limit = Snippet.max_file_limit
if file_count > limit
raise RepositoryValidationError, _('Repository files count over the limit')
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
index a0e9ab6ffda..0115cd19287 100644
--- a/app/services/snippets/update_service.rb
+++ b/app/services/snippets/update_service.rb
@@ -37,7 +37,10 @@ module Snippets
# is implemented.
# Once we can perform different operations through this service
# we won't need to keep track of the `content` and `file_name` fields
- if snippet_actions.any?
+ #
+ # If the repository does not exist we don't need to update `params`
+ # because we need to commit the information from the database
+ if snippet_actions.any? && snippet.repository_exists?
params[:content] = snippet_actions[0].content if snippet_actions[0].content
params[:file_name] = snippet_actions[0].file_path
end
@@ -52,7 +55,11 @@ module Snippets
# the repository we can just return
return true unless committable_attributes?
- create_repository_for(snippet)
+ unless snippet.repository_exists?
+ create_repository_for(snippet)
+ create_first_commit_using_db_data(snippet)
+ end
+
create_commit(snippet)
true
@@ -72,13 +79,7 @@ module Snippets
# If the commit action failed we remove it because
# we don't want to leave empty repositories
# around, to allow cloning them.
- if repository_empty?(snippet)
- snippet.repository.remove
- snippet.snippet_repository&.delete
- end
-
- # Purge any existing value for repository_exists?
- snippet.repository.expire_exists_cache
+ delete_repository(snippet) if repository_empty?(snippet)
false
end
@@ -89,15 +90,25 @@ module Snippets
raise CreateRepositoryError, 'Repository could not be created' unless snippet.repository_exists?
end
+ # If the user provides `snippet_actions` and the repository
+ # does not exist, we first need to commit the snippet info stored
+ # in the database, because the content inside `snippet_actions`
+ # assumes that the file is already in the repository.
+ def create_first_commit_using_db_data(snippet)
+ return if snippet_actions.empty?
+
+ attrs = commit_attrs(snippet, INITIAL_COMMIT_MSG)
+ actions = [{ file_path: snippet.file_name, content: snippet.content }]
+
+ snippet.snippet_repository.multi_files_action(current_user, actions, **attrs)
+ end
+
def create_commit(snippet)
raise UpdateError unless snippet.snippet_repository
- commit_attrs = {
- branch_name: snippet.default_branch,
- message: 'Update snippet'
- }
+ attrs = commit_attrs(snippet, UPDATE_COMMIT_MSG)
- snippet.snippet_repository.multi_files_action(current_user, files_to_commit(snippet), commit_attrs)
+ snippet.snippet_repository.multi_files_action(current_user, files_to_commit(snippet), **attrs)
end
# Because we are removing repositories we don't want to remove
diff --git a/app/services/spam/spam_action_service.rb b/app/services/spam/spam_action_service.rb
index b745b67f566..b3d617256ff 100644
--- a/app/services/spam/spam_action_service.rb
+++ b/app/services/spam/spam_action_service.rb
@@ -45,7 +45,7 @@ module Spam
attr_reader :user, :context
def allowlisted?(user)
- user.respond_to?(:gitlab_employee) && user.gitlab_employee?
+ user.try(:gitlab_employee?) || user.try(:gitlab_bot?) || user.try(:gitlab_service_user?)
end
def perform_spam_service_check(api)
diff --git a/app/services/static_site_editor/config_service.rb b/app/services/static_site_editor/config_service.rb
index 987ee071976..7b3115468a5 100644
--- a/app/services/static_site_editor/config_service.rb
+++ b/app/services/static_site_editor/config_service.rb
@@ -4,18 +4,38 @@ module StaticSiteEditor
class ConfigService < ::BaseContainerService
ValidationError = Class.new(StandardError)
- def execute
+ def initialize(container:, current_user: nil, params: {})
+ super
+
@project = container
+ @repository = project.repository
+ @ref = params.fetch(:ref)
+ end
+
+ def execute
check_access!
+ file_config = load_file_config!
+ file_data = file_config.to_hash_with_defaults
+ generated_data = load_generated_config.data
+
+ check_for_duplicate_keys!(generated_data, file_data)
+ data = merged_data(generated_data, file_data)
+
ServiceResponse.success(payload: data)
rescue ValidationError => e
ServiceResponse.error(message: e.message)
+ rescue => e
+ Gitlab::ErrorTracking.track_and_raise_exception(e)
end
private
- attr_reader :project
+ attr_reader :project, :repository, :ref
+
+ def static_site_editor_config_file
+ '.gitlab/static-site-editor.yml'
+ end
def check_access!
unless can?(current_user, :download_code, project)
@@ -23,27 +43,43 @@ module StaticSiteEditor
end
end
- def data
- check_for_duplicate_keys!
- generated_data.merge(file_data)
+ def load_file_config!
+ yaml = yaml_from_repo.presence || '{}'
+ file_config = Gitlab::StaticSiteEditor::Config::FileConfig.new(yaml)
+
+ unless file_config.valid?
+ raise ValidationError, file_config.errors.first
+ end
+
+ file_config
+ rescue Gitlab::StaticSiteEditor::Config::FileConfig::ConfigError => e
+ raise ValidationError, e.message
end
- def generated_data
- @generated_data ||= Gitlab::StaticSiteEditor::Config::GeneratedConfig.new(
- project.repository,
- params.fetch(:ref),
+ def load_generated_config
+ Gitlab::StaticSiteEditor::Config::GeneratedConfig.new(
+ repository,
+ ref,
params.fetch(:path),
params[:return_url]
- ).data
- end
-
- def file_data
- @file_data ||= Gitlab::StaticSiteEditor::Config::FileConfig.new.data
+ )
end
- def check_for_duplicate_keys!
+ def check_for_duplicate_keys!(generated_data, file_data)
duplicate_keys = generated_data.keys & file_data.keys
raise ValidationError.new("Duplicate key(s) '#{duplicate_keys}' found.") if duplicate_keys.present?
end
+
+ def merged_data(generated_data, file_data)
+ generated_data.merge(file_data)
+ end
+
+ def yaml_from_repo
+ repository.blob_data_at(ref, static_site_editor_config_file)
+ rescue GRPC::NotFound
+ # Return nil in the case of a GRPC::NotFound exception, so the default config will be used.
+ # Any other unexpected exception will be tracked and re-raised.
+ nil
+ end
end
end
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index df042fdc393..1a4374f2e94 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -41,6 +41,10 @@ module SystemNoteService
::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).change_issuable_assignees(old_assignees)
end
+ def change_issuable_reviewers(issuable, project, author, old_reviewers)
+ ::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).change_issuable_reviewers(old_reviewers)
+ end
+
def relate_issue(noteable, noteable_ref, user)
::SystemNotes::IssuablesService.new(noteable: noteable, project: noteable.project, author: user).relate_issue(noteable_ref)
end
@@ -308,6 +312,10 @@ module SystemNoteService
::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project).create_new_alert(monitoring_tool)
end
+ def change_incident_severity(incident, author)
+ ::SystemNotes::IncidentService.new(noteable: incident, project: incident.project, author: author).change_incident_severity
+ end
+
private
def merge_requests_service(noteable, project, author)
diff --git a/app/services/system_notes/incident_service.rb b/app/services/system_notes/incident_service.rb
new file mode 100644
index 00000000000..4628662f0e9
--- /dev/null
+++ b/app/services/system_notes/incident_service.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module SystemNotes
+ class IncidentService < ::SystemNotes::BaseService
+ # Called when the severity of an Incident has changed
+ #
+ # Example Note text:
+ #
+ # "changed the severity to Medium - S3"
+ #
+ # Returns the created Note object
+ def change_incident_severity
+ severity = noteable.severity
+
+ if severity_label = IssuableSeverity::SEVERITY_LABELS[severity.to_sym]
+ body = "changed the severity to **#{severity_label}**"
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'severity'))
+ else
+ Gitlab::AppLogger.error(
+ message: 'Cannot create a system note for severity change',
+ noteable_class: noteable.class.to_s,
+ noteable_id: noteable.id,
+ severity: severity
+ )
+ end
+ end
+ end
+end
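
A usage sketch, assuming `incident` is an incident issue with severity :medium and `author` is the acting user:

    SystemNoteService.change_incident_severity(incident, author)
    # Creates a system note such as "changed the severity to **Medium - S3**"
    # (per the example in the doc comment); unknown severities are only logged.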
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index 2252503d97e..7a73af0a81a 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -13,6 +13,8 @@ module SystemNotes
def relate_issue(noteable_ref)
body = "marked this issue as related to #{noteable_ref.to_reference(noteable.project)}"
+ issue_activity_counter.track_issue_related_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'relate'))
end
@@ -27,6 +29,8 @@ module SystemNotes
def unrelate_issue(noteable_ref)
body = "removed the relation with #{noteable_ref.to_reference(noteable.project)}"
+ issue_activity_counter.track_issue_unrelated_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'unrelate'))
end
@@ -81,6 +85,32 @@ module SystemNotes
create_note(NoteSummary.new(noteable, project, author, body, action: 'assignee'))
end
+ # Called when the reviewers of an issuable are changed or removed
+ #
+ # reviewers - Users being requested to review, or nil
+ #
+ # Example Note text:
+ #
+ # "requested review from @user1 and @user2"
+ #
+ # "requested review from @user1, @user2 and @user3 and removed review request for @user4 and @user5"
+ #
+ # Returns the created Note object
+ def change_issuable_reviewers(old_reviewers)
+ unassigned_users = old_reviewers - noteable.reviewers
+ added_users = noteable.reviewers - old_reviewers
+ text_parts = []
+
+ Gitlab::I18n.with_default_locale do
+ text_parts << "requested review from #{added_users.map(&:to_reference).to_sentence}" if added_users.any?
+ text_parts << "removed review request for #{unassigned_users.map(&:to_reference).to_sentence}" if unassigned_users.any?
+ end
+
+ body = text_parts.join(' and ')
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'reviewer'))
+ end
+
# Called when the title of a Noteable is changed
#
# old_title - Previous String title
@@ -148,6 +178,8 @@ module SystemNotes
if noteable.is_a?(ExternalIssue)
noteable.project.external_issue_tracker.create_cross_reference_note(noteable, mentioner, author)
else
+ issue_activity_counter.track_issue_cross_referenced_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, noteable.project, author, body, action: 'cross_reference'))
end
end
@@ -182,6 +214,8 @@ module SystemNotes
status_label = new_task.complete? ? Taskable::COMPLETED : Taskable::INCOMPLETE
body = "marked the task **#{new_task.source}** as #{status_label}"
+ issue_activity_counter.track_issue_description_changed_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'task'))
end
@@ -203,6 +237,8 @@ module SystemNotes
cross_reference = noteable_ref.to_reference(project)
body = "moved #{direction} #{cross_reference}"
+ issue_activity_counter.track_issue_moved_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'moved'))
end
@@ -242,19 +278,7 @@ module SystemNotes
#
# Returns the created Note object
def change_status(status, source = nil)
- body = status.dup
- body << " via #{source.gfm_reference(project)}" if source
-
- action = status == 'reopened' ? 'opened' : status
-
- # A state event which results in a synthetic note will be
- # created by EventCreateService if change event tracking
- # is enabled.
- if state_change_tracking_enabled?
- create_resource_state_event(status: status, mentionable_source: source)
- else
- create_note(NoteSummary.new(noteable, project, author, body, action: action))
- end
+ create_resource_state_event(status: status, mentionable_source: source)
end
# Check if a cross reference to a noteable from a mentioner already exists
@@ -285,6 +309,9 @@ module SystemNotes
# Returns the created Note object
def mark_duplicate_issue(canonical_issue)
body = "marked this issue as a duplicate of #{canonical_issue.to_reference(project)}"
+
+ issue_activity_counter.track_issue_marked_as_duplicate_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'duplicate'))
end
@@ -308,27 +335,23 @@ module SystemNotes
action = noteable.discussion_locked? ? 'locked' : 'unlocked'
body = "#{action} this #{noteable.class.to_s.titleize.downcase}"
+ if noteable.is_a?(Issue)
+ if action == 'locked'
+ issue_activity_counter.track_issue_locked_action(author: author)
+ else
+ issue_activity_counter.track_issue_unlocked_action(author: author)
+ end
+ end
+
create_note(NoteSummary.new(noteable, project, author, body, action: action))
end
def close_after_error_tracking_resolve
- if state_change_tracking_enabled?
- create_resource_state_event(status: 'closed', close_after_error_tracking_resolve: true)
- else
- body = 'resolved the corresponding error and closed the issue.'
-
- create_note(NoteSummary.new(noteable, project, author, body, action: 'closed'))
- end
+ create_resource_state_event(status: 'closed', close_after_error_tracking_resolve: true)
end
def auto_resolve_prometheus_alert
- if state_change_tracking_enabled?
- create_resource_state_event(status: 'closed', close_auto_resolve_prometheus_alert: true)
- else
- body = 'automatically closed this issue because the alert resolved.'
-
- create_note(NoteSummary.new(noteable, project, author, body, action: 'closed'))
- end
+ create_resource_state_event(status: 'closed', close_auto_resolve_prometheus_alert: true)
end
private
@@ -361,11 +384,6 @@ module SystemNotes
.execute(params)
end
- def state_change_tracking_enabled?
- noteable.respond_to?(:resource_state_events) &&
- ::Feature.enabled?(:track_resource_state_change_events, noteable.project, default_enabled: true)
- end
-
def issue_activity_counter
Gitlab::UsageDataCounters::IssueActivityUniqueCounter
end
diff --git a/app/services/system_notes/time_tracking_service.rb b/app/services/system_notes/time_tracking_service.rb
index 8de42bd3225..650e40680b1 100644
--- a/app/services/system_notes/time_tracking_service.rb
+++ b/app/services/system_notes/time_tracking_service.rb
@@ -16,6 +16,8 @@ module SystemNotes
def change_due_date(due_date)
body = due_date ? "changed due date to #{due_date.to_s(:long)}" : 'removed due date'
+ issue_activity_counter.track_issue_due_date_changed_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'due_date'))
end
@@ -38,6 +40,8 @@ module SystemNotes
"changed time estimate to #{parsed_time}"
end
+ issue_activity_counter.track_issue_time_estimate_changed_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
end
@@ -67,7 +71,15 @@ module SystemNotes
body = text_parts.join(' ')
end
+ issue_activity_counter.track_issue_time_spent_changed_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
end
+
+ private
+
+ def issue_activity_counter
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter
+ end
end
end
diff --git a/app/services/todos/destroy/entity_leave_service.rb b/app/services/todos/destroy/entity_leave_service.rb
index 0c0548a17a1..97c56b84434 100644
--- a/app/services/todos/destroy/entity_leave_service.rb
+++ b/app/services/todos/destroy/entity_leave_service.rb
@@ -7,16 +7,14 @@ module Todos
attr_reader :user, :entity
- # rubocop: disable CodeReuse/ActiveRecord
def initialize(user_id, entity_id, entity_type)
unless %w(Group Project).include?(entity_type)
raise ArgumentError.new("#{entity_type} is not an entity user can leave")
end
- @user = User.find_by(id: user_id)
- @entity = entity_type.constantize.find_by(id: entity_id)
+ @user = UserFinder.new(user_id).find_by_id
+ @entity = entity_type.constantize.find_by(id: entity_id) # rubocop: disable CodeReuse/ActiveRecord
end
- # rubocop: enable CodeReuse/ActiveRecord
def execute
return unless entity && user
@@ -42,34 +40,37 @@ module Todos
end
end
- # rubocop: disable CodeReuse/ActiveRecord
def remove_confidential_issue_todos
- Todo.where(
- target_id: confidential_issues.select(:id), target_type: Issue.name, user_id: user.id
- ).delete_all
+ Todo
+ .for_target(confidential_issues.select(:id))
+ .for_type(Issue.name)
+ .for_user(user)
+ .delete_all
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def remove_project_todos
# Issues are viewable by guests (even in private projects), so remove those todos
# from projects without guest access
- Todo.where(project_id: non_authorized_guest_projects, user_id: user.id)
+ Todo
+ .for_project(non_authorized_guest_projects)
+ .for_user(user)
.delete_all
# MRs require reporter access, so remove those todos that are not authorized
- Todo.where(project_id: non_authorized_reporter_projects, target_type: MergeRequest.name, user_id: user.id)
+ Todo
+ .for_project(non_authorized_reporter_projects)
+ .for_type(MergeRequest.name)
+ .for_user(user)
.delete_all
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def remove_group_todos
- Todo.where(group_id: non_authorized_groups, user_id: user.id).delete_all
+ Todo
+ .for_group(non_authorized_groups)
+ .for_user(user)
+ .delete_all
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def projects
condition = case entity
when Project
@@ -78,55 +79,40 @@ module Todos
{ namespace_id: non_authorized_reporter_groups }
end
- Project.where(condition)
+ Project.where(condition) # rubocop: disable CodeReuse/ActiveRecord
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def authorized_reporter_projects
user.authorized_projects(Gitlab::Access::REPORTER).select(:id)
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def authorized_guest_projects
user.authorized_projects(Gitlab::Access::GUEST).select(:id)
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def non_authorized_reporter_projects
- projects.where('id NOT IN (?)', authorized_reporter_projects)
+ projects.id_not_in(authorized_reporter_projects)
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def non_authorized_guest_projects
- projects.where('id NOT IN (?)', authorized_guest_projects)
+ projects.id_not_in(authorized_guest_projects)
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def authorized_reporter_groups
GroupsFinder.new(user, min_access_level: Gitlab::Access::REPORTER).execute.select(:id)
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def non_authorized_groups
return [] unless entity.is_a?(Namespace)
entity.self_and_descendants.select(:id)
- .where('id NOT IN (?)', GroupsFinder.new(user).execute.select(:id))
+ .id_not_in(GroupsFinder.new(user).execute.select(:id))
end
- # rubocop: enable CodeReuse/ActiveRecord
- # rubocop: disable CodeReuse/ActiveRecord
def non_authorized_reporter_groups
entity.self_and_descendants.select(:id)
- .where('id NOT IN (?)', authorized_reporter_groups)
+ .id_not_in(authorized_reporter_groups)
end
- # rubocop: enable CodeReuse/ActiveRecord
def user_has_reporter_access?
return unless entity.is_a?(Namespace)
@@ -134,16 +120,16 @@ module Todos
entity.member?(User.find(user.id), Gitlab::Access::REPORTER)
end
- # rubocop: disable CodeReuse/ActiveRecord
def confidential_issues
- assigned_ids = IssueAssignee.select(:issue_id).where(user_id: user.id)
+ assigned_ids = IssueAssignee.select(:issue_id).for_assignee(user)
- Issue.where(project_id: projects, confidential: true)
- .where('project_id NOT IN(?)', authorized_reporter_projects)
- .where('author_id != ?', user.id)
- .where('id NOT IN (?)', assigned_ids)
+ Issue
+ .in_projects(projects)
+ .confidential_only
+ .not_in_projects(authorized_reporter_projects)
+ .not_authored_by(user)
+ .id_not_in(assigned_ids)
end
- # rubocop: enable CodeReuse/ActiveRecord
end
end
end
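The refactor above replaces raw Todo.where(...) calls with named scopes such as for_user, for_project, for_group, for_type, for_target and id_not_in. A minimal sketch of how scopes like these could be declared on the model (the bodies below are assumptions for illustration, not GitLab's actual Todo model code):

    # Sketch only: ActiveRecord scopes equivalent to the where(...) calls they replace.
    class Todo < ApplicationRecord
      scope :for_user,    ->(user)       { where(user_id: user) }
      scope :for_project, ->(projects)   { where(project_id: projects) }
      scope :for_group,   ->(groups)     { where(group_id: groups) }
      scope :for_type,    ->(type)       { where(target_type: type) }
      scope :for_target,  ->(target_ids) { where(target_id: target_ids) }
      # id_not_in excludes a set of ids (or a subquery of ids)
      scope :id_not_in,   ->(ids)        { where.not(id: ids) }
    end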
diff --git a/app/services/users/approve_service.rb b/app/services/users/approve_service.rb
new file mode 100644
index 00000000000..228cfbd6947
--- /dev/null
+++ b/app/services/users/approve_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Users
+ class ApproveService < BaseService
+ def initialize(current_user)
+ @current_user = current_user
+ end
+
+ def execute(user)
+ return error(_('You are not allowed to approve a user')) unless allowed?
+ return error(_('The user you are trying to approve is not pending an approval')) unless approval_required?(user)
+
+ if user.activate
+ # Resends confirmation email if the user isn't confirmed yet.
+ # Please see Devise's implementation of `resend_confirmation_instructions` for details.
+ user.resend_confirmation_instructions
+ user.accept_pending_invitations! if user.active_for_authentication?
+
+ success
+ else
+ error(user.errors.full_messages.uniq.join('. '))
+ end
+ end
+
+ private
+
+ attr_reader :current_user
+
+ def allowed?
+ can?(current_user, :approve_user)
+ end
+
+ def approval_required?(user)
+ user.blocked_pending_approval?
+ end
+ end
+end
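A usage sketch for the new service (admin and pending_user are placeholders; the caller needs the :approve_user ability and the target user must be in the blocked_pending_approval state):

    # Illustrative only: approve a sign-up that is pending admin approval.
    result = Users::ApproveService.new(admin).execute(pending_user)

    if result[:status] == :success
      # the user was activated, confirmation instructions were re-sent if
      # needed, and any pending invitations were accepted
    else
      # result[:message] explains why approval was refused or failed
      Rails.logger.warn(result[:message])
    end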
diff --git a/app/services/users/block_service.rb b/app/services/users/block_service.rb
index 041db731875..8513664ee85 100644
--- a/app/services/users/block_service.rb
+++ b/app/services/users/block_service.rb
@@ -7,6 +7,8 @@ module Users
end
def execute(user)
+ return error('An internal user cannot be blocked', 403) if user.internal?
+
if user.block
after_block_hook(user)
success
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index 2fc46f033dd..e3f02bf85f0 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -104,7 +104,6 @@ module Users
def build_user_params(skip_authorization:)
if current_user&.admin?
user_params = params.slice(*admin_create_params)
- user_params[:created_by_id] = current_user&.id
if params[:reset_password]
user_params.merge!(force_random_password: true, password_expires_at: nil)
@@ -125,6 +124,8 @@ module Users
end
end
+ user_params[:created_by_id] = current_user&.id
+
if user_default_internal_regex_enabled? && !user_params.key?(:external)
user_params[:external] = user_external?
end
diff --git a/app/services/users/destroy_service.rb b/app/services/users/destroy_service.rb
index 436d4fb3985..613d2e4ad82 100644
--- a/app/services/users/destroy_service.rb
+++ b/app/services/users/destroy_service.rb
@@ -26,7 +26,7 @@ module Users
def execute(user, options = {})
delete_solo_owned_groups = options.fetch(:delete_solo_owned_groups, options[:hard_delete])
- unless Ability.allowed?(current_user, :destroy_user, user)
+ unless Ability.allowed?(current_user, :destroy_user, user) || options[:skip_authorization]
raise Gitlab::Access::AccessDeniedError, "#{current_user} tried to destroy user #{user}!"
end
diff --git a/app/services/users/validate_otp_service.rb b/app/services/users/validate_otp_service.rb
new file mode 100644
index 00000000000..a9ce7959aea
--- /dev/null
+++ b/app/services/users/validate_otp_service.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Users
+ class ValidateOtpService < BaseService
+ def initialize(current_user)
+ @current_user = current_user
+ @strategy = if Feature.enabled?(:forti_authenticator, current_user)
+ ::Gitlab::Auth::Otp::Strategies::FortiAuthenticator.new(current_user)
+ else
+ ::Gitlab::Auth::Otp::Strategies::Devise.new(current_user)
+ end
+ end
+
+ def execute(otp_code)
+ strategy.validate(otp_code)
+ rescue StandardError => ex
+ Gitlab::ErrorTracking.log_exception(ex)
+ error(message: ex.message)
+ end
+
+ private
+
+ attr_reader :strategy
+ end
+end
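A usage sketch (otp_attempt is a placeholder for the code submitted by the user; which strategy runs depends on the forti_authenticator feature flag, as shown above):

    # Illustrative only: check a one-time password during sign-in.
    result = Users::ValidateOtpService.new(current_user).execute(otp_attempt)

    if result[:status] == :success
      # the OTP was accepted by the selected strategy
    else
      # the code was rejected, or an exception was logged and returned as an error
    end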
diff --git a/app/services/web_hooks/destroy_service.rb b/app/services/web_hooks/destroy_service.rb
new file mode 100644
index 00000000000..58117985b56
--- /dev/null
+++ b/app/services/web_hooks/destroy_service.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+module WebHooks
+ class DestroyService
+ include BaseServiceUtility
+
+ BATCH_SIZE = 1000
+ LOG_COUNT_THRESHOLD = 10000
+
+ DestroyError = Class.new(StandardError)
+
+ attr_accessor :current_user, :web_hook
+
+ def initialize(current_user)
+ @current_user = current_user
+ end
+
+ def execute(web_hook)
+ @web_hook = web_hook
+
+ async = false
+ # For a better user experience, the web hook should be destroyed
+ # right away without waiting for Sidekiq. However, if there are a
+ # lot of web hook logs, cleaning them up takes longer, so schedule
+ # a Sidekiq job to do it instead.
+ if needs_async_destroy?
+ Gitlab::AppLogger.info("User #{current_user&.id} scheduled a deletion of hook ID #{web_hook.id}")
+ async_destroy(web_hook)
+ async = true
+ else
+ sync_destroy(web_hook)
+ end
+
+ success({ async: async })
+ end
+
+ def sync_destroy(web_hook)
+ @web_hook = web_hook
+
+ delete_web_hook_logs
+ result = web_hook.destroy
+
+ if result
+ success({ async: false })
+ else
+ error("Unable to destroy #{web_hook.model_name.human}")
+ end
+ end
+
+ private
+
+ def async_destroy(web_hook)
+ WebHooks::DestroyWorker.perform_async(current_user.id, web_hook.id)
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def needs_async_destroy?
+ web_hook.web_hook_logs.limit(LOG_COUNT_THRESHOLD).count == LOG_COUNT_THRESHOLD
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def delete_web_hook_logs
+ loop do
+ count = delete_web_hook_logs_in_batches
+ break if count < BATCH_SIZE
+ end
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def delete_web_hook_logs_in_batches
+ # We can't use EachBatch because that does an ORDER BY id, which can
+ # easily time out. We don't actually care about ordering when
+ # we are deleting these rows.
+ web_hook.web_hook_logs.limit(BATCH_SIZE).delete_all
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+end
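A usage sketch (project_hook and current_user are placeholders; the async flag in the result reflects which branch the service took):

    # Illustrative only: destroy a hook, deleting its logs synchronously when
    # there are few of them and via WebHooks::DestroyWorker when there are many.
    result = WebHooks::DestroyService.new(current_user).execute(project_hook)

    if result[:async]
      # cleanup was scheduled in Sidekiq; the hook still exists until the worker runs
    else
      # the logs were removed in batches of BATCH_SIZE and the hook is already gone
    end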
diff --git a/app/services/webauthn/authenticate_service.rb b/app/services/webauthn/authenticate_service.rb
index a4513c62c2d..a575a853995 100644
--- a/app/services/webauthn/authenticate_service.rb
+++ b/app/services/webauthn/authenticate_service.rb
@@ -11,12 +11,6 @@ module Webauthn
def execute
parsed_device_response = Gitlab::Json.parse(@device_response)
- # appid is set for legacy U2F devices, will be used in a future iteration
- # rp_id = @app_id
- # unless parsed_device_response['clientExtensionResults'] && parsed_device_response['clientExtensionResults']['appid']
- # rp_id = URI(@app_id).host
- # end
-
webauthn_credential = WebAuthn::Credential.from_get(parsed_device_response)
encoded_raw_id = Base64.strict_encode64(webauthn_credential.raw_id)
stored_webauthn_credential = @user.webauthn_registrations.find_by_credential_xid(encoded_raw_id)
@@ -52,10 +46,14 @@ module Webauthn
# Verifies that webauthn_credential matches stored_credential with the given challenge
#
def verify_webauthn_credential(webauthn_credential, stored_credential, challenge, encoder)
+ # We need to adjust the relying party ID (RP ID) we verify against if the registration in question
+ # is a migrated U2F registration. This is because the appid of U2F and the RP ID of WebAuthn differ.
+ rp_id = webauthn_credential.client_extension_outputs['appid'] ? WebAuthn.configuration.origin : URI(WebAuthn.configuration.origin).host
webauthn_credential.response.verify(
encoder.decode(challenge),
public_key: encoder.decode(stored_credential.public_key),
- sign_count: stored_credential.counter)
+ sign_count: stored_credential.counter,
+ rp_id: rp_id)
end
end
end
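To make the rp_id selection above concrete, a worked example (the origin value is an assumption for illustration):

    require 'uri'

    # Illustrative only: rp_id selection for origin "https://gitlab.example.com".
    origin = "https://gitlab.example.com"
    rp_id_for_u2f_migrated = origin            # appid extension present in client outputs
    rp_id_for_webauthn     = URI(origin).host  # => "gitlab.example.com"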