Diffstat (limited to 'app/services')
-rw-r--r--  app/services/admin/propagate_integration_service.rb  47
-rw-r--r--  app/services/alert_management/http_integrations/create_service.rb  60
-rw-r--r--  app/services/alert_management/http_integrations/destroy_service.rb  44
-rw-r--r--  app/services/alert_management/http_integrations/update_service.rb  48
-rw-r--r--  app/services/alert_management/process_prometheus_alert_service.rb  15
-rw-r--r--  app/services/alert_management/sync_alert_service_data_service.rb  56
-rw-r--r--  app/services/audit_event_service.rb  8
-rw-r--r--  app/services/bulk_create_integration_service.rb  10
-rw-r--r--  app/services/bulk_import_service.rb  63
-rw-r--r--  app/services/bulk_update_integration_service.rb  4
-rw-r--r--  app/services/ci/append_build_trace_service.rb  65
-rw-r--r--  app/services/ci/build_report_result_service.rb  2
-rw-r--r--  app/services/ci/compare_reports_base_service.rb  8
-rw-r--r--  app/services/ci/compare_test_reports_service.rb  14
-rw-r--r--  app/services/ci/create_pipeline_service.rb  1
-rw-r--r--  app/services/ci/daily_build_group_report_result_service.rb  3
-rw-r--r--  app/services/ci/destroy_expired_job_artifacts_service.rb  73
-rw-r--r--  app/services/ci/list_config_variables_service.rb  5
-rw-r--r--  app/services/ci/parse_dotenv_artifact_service.rb  2
-rw-r--r--  app/services/ci/test_cases_service.rb  44
-rw-r--r--  app/services/ci/update_build_state_service.rb  10
-rw-r--r--  app/services/clusters/aws/authorize_role_service.rb  7
-rw-r--r--  app/services/clusters/aws/fetch_credentials_service.rb  7
-rw-r--r--  app/services/clusters/kubernetes.rb  2
-rw-r--r--  app/services/clusters/kubernetes/create_or_update_service_account_service.rb  12
-rw-r--r--  app/services/concerns/admin/propagate_service.rb  13
-rw-r--r--  app/services/concerns/integrations/project_test_data.rb  7
-rw-r--r--  app/services/concerns/users/participable_service.rb  3
-rw-r--r--  app/services/container_expiration_policies/cleanup_service.rb  53
-rw-r--r--  app/services/container_expiration_policy_service.rb  10
-rw-r--r--  app/services/dependency_proxy/base_service.rb  17
-rw-r--r--  app/services/dependency_proxy/download_blob_service.rb  48
-rw-r--r--  app/services/dependency_proxy/find_or_create_blob_service.rb  45
-rw-r--r--  app/services/dependency_proxy/pull_manifest_service.rb  29
-rw-r--r--  app/services/dependency_proxy/request_token_service.rb  29
-rw-r--r--  app/services/deploy_keys/collect_keys_service.rb  27
-rw-r--r--  app/services/design_management/copy_design_collection/copy_service.rb  30
-rw-r--r--  app/services/discussions/capture_diff_note_position_service.rb  3
-rw-r--r--  app/services/feature_flags/update_service.rb  4
-rw-r--r--  app/services/git/branch_hooks_service.rb  1
-rw-r--r--  app/services/groups/create_service.rb  2
-rw-r--r--  app/services/import/github_service.rb  23
-rw-r--r--  app/services/integrations/test/project_service.rb  2
-rw-r--r--  app/services/issuable/common_system_notes_service.rb  6
-rw-r--r--  app/services/issuable/import_csv/base_service.rb  84
-rw-r--r--  app/services/issues/import_csv_service.rb  61
-rw-r--r--  app/services/issues/reopen_service.rb  8
-rw-r--r--  app/services/jira_connect/sync_service.rb  4
-rw-r--r--  app/services/jira_connect_subscriptions/create_service.rb  17
-rw-r--r--  app/services/jira_import/users_importer.rb  8
-rw-r--r--  app/services/jira_import/users_mapper_service.rb  54
-rw-r--r--  app/services/labels/promote_service.rb  62
-rw-r--r--  app/services/members/invite_service.rb  103
-rw-r--r--  app/services/merge_requests/cleanup_refs_service.rb  16
-rw-r--r--  app/services/merge_requests/refresh_service.rb  6
-rw-r--r--  app/services/merge_requests/reopen_service.rb  2
-rw-r--r--  app/services/notes/create_service.rb  6
-rw-r--r--  app/services/notes/destroy_service.rb  7
-rw-r--r--  app/services/notes/update_service.rb  6
-rw-r--r--  app/services/notification_service.rb  12
-rw-r--r--  app/services/packages/composer/create_package_service.rb  4
-rw-r--r--  app/services/packages/composer/version_parser_service.rb  2
-rw-r--r--  app/services/packages/create_event_service.rb  28
-rw-r--r--  app/services/packages/create_package_file_service.rb  9
-rw-r--r--  app/services/packages/debian/extract_deb_metadata_service.rb  41
-rw-r--r--  app/services/packages/debian/parse_debian822_service.rb  66
-rw-r--r--  app/services/packages/generic/create_package_file_service.rb  3
-rw-r--r--  app/services/packages/generic/find_or_create_package_service.rb  2
-rw-r--r--  app/services/packages/maven/create_package_service.rb  7
-rw-r--r--  app/services/packages/maven/find_or_create_package_service.rb  5
-rw-r--r--  app/services/packages/npm/create_package_service.rb  5
-rw-r--r--  app/services/pages/destroy_deployments_service.rb  16
-rw-r--r--  app/services/personal_access_tokens/create_service.rb  27
-rw-r--r--  app/services/personal_access_tokens/revoke_service.rb  11
-rw-r--r--  app/services/post_receive_service.rb  20
-rw-r--r--  app/services/projects/alerting/notify_service.rb  27
-rw-r--r--  app/services/projects/cleanup_service.rb  22
-rw-r--r--  app/services/projects/container_repository/delete_tags_service.rb  6
-rw-r--r--  app/services/projects/container_repository/gitlab/delete_tags_service.rb  11
-rw-r--r--  app/services/projects/hashed_storage/base_repository_service.rb  11
-rw-r--r--  app/services/projects/lfs_pointers/lfs_download_service.rb  2
-rw-r--r--  app/services/projects/prometheus/alerts/notify_service.rb  23
-rw-r--r--  app/services/projects/update_pages_service.rb  34
-rw-r--r--  app/services/projects/update_repository_storage_service.rb  16
-rw-r--r--  app/services/releases/base_service.rb  4
-rw-r--r--  app/services/releases/create_service.rb  2
-rw-r--r--  app/services/releases/update_service.rb  17
-rw-r--r--  app/services/resource_access_tokens/create_service.rb  19
-rw-r--r--  app/services/search/global_service.rb  1
-rw-r--r--  app/services/search/group_service.rb  1
-rw-r--r--  app/services/search/project_service.rb  1
-rw-r--r--  app/services/search_service.rb  8
-rw-r--r--  app/services/snippets/create_service.rb  2
-rw-r--r--  app/services/snippets/update_service.rb  2
-rw-r--r--  app/services/system_note_service.rb  8
-rw-r--r--  app/services/system_notes/merge_requests_service.rb  10
-rw-r--r--  app/services/test_hooks/project_service.rb  2
-rw-r--r--  app/services/users/approve_service.rb  8
-rw-r--r--  app/services/users/set_status_service.rb  5
-rw-r--r--  app/services/web_hook_service.rb  1
100 files changed, 1539 insertions(+), 368 deletions(-)
diff --git a/app/services/admin/propagate_integration_service.rb b/app/services/admin/propagate_integration_service.rb
index 96a6d861e47..ddd5add42bd 100644
--- a/app/services/admin/propagate_integration_service.rb
+++ b/app/services/admin/propagate_integration_service.rb
@@ -5,12 +5,12 @@ module Admin
include PropagateService
def propagate
- update_inherited_integrations
-
if integration.instance?
- create_integration_for_groups_without_integration if Feature.enabled?(:group_level_integrations)
+ update_inherited_integrations
+ create_integration_for_groups_without_integration if Feature.enabled?(:group_level_integrations, default_enabled: true)
create_integration_for_projects_without_integration
else
+ update_inherited_descendant_integrations
create_integration_for_groups_without_integration_belonging_to_group
create_integration_for_projects_without_integration_belonging_to_group
end
@@ -18,34 +18,39 @@ module Admin
private
- # rubocop: disable Cop/InBatches
def update_inherited_integrations
- Service.by_type(integration.type).inherit_from_id(integration.id).in_batches(of: BATCH_SIZE) do |services|
- min_id, max_id = services.pick("MIN(services.id), MAX(services.id)")
- PropagateIntegrationInheritWorker.perform_async(integration.id, min_id, max_id)
- end
+ propagate_integrations(
+ Service.by_type(integration.type).inherit_from_id(integration.id),
+ PropagateIntegrationInheritWorker
+ )
+ end
+
+ def update_inherited_descendant_integrations
+ propagate_integrations(
+ Service.inherited_descendants_from_self_or_ancestors_from(integration),
+ PropagateIntegrationInheritDescendantWorker
+ )
end
- # rubocop: enable Cop/InBatches
def create_integration_for_groups_without_integration
- Group.without_integration(integration).each_batch(of: BATCH_SIZE) do |groups|
- min_id, max_id = groups.pick("MIN(namespaces.id), MAX(namespaces.id)")
- PropagateIntegrationGroupWorker.perform_async(integration.id, min_id, max_id)
- end
+ propagate_integrations(
+ Group.without_integration(integration),
+ PropagateIntegrationGroupWorker
+ )
end
def create_integration_for_groups_without_integration_belonging_to_group
- integration.group.descendants.without_integration(integration).each_batch(of: BATCH_SIZE) do |groups|
- min_id, max_id = groups.pick("MIN(namespaces.id), MAX(namespaces.id)")
- PropagateIntegrationGroupWorker.perform_async(integration.id, min_id, max_id)
- end
+ propagate_integrations(
+ integration.group.descendants.without_integration(integration),
+ PropagateIntegrationGroupWorker
+ )
end
def create_integration_for_projects_without_integration_belonging_to_group
- Project.without_integration(integration).in_namespace(integration.group.self_and_descendants).each_batch(of: BATCH_SIZE) do |projects|
- min_id, max_id = projects.pick("MIN(projects.id), MAX(projects.id)")
- PropagateIntegrationProjectWorker.perform_async(integration.id, min_id, max_id)
- end
+ propagate_integrations(
+ Project.without_integration(integration).in_namespace(integration.group.self_and_descendants),
+ PropagateIntegrationProjectWorker
+ )
end
end
end
diff --git a/app/services/alert_management/http_integrations/create_service.rb b/app/services/alert_management/http_integrations/create_service.rb
new file mode 100644
index 00000000000..576e38c23aa
--- /dev/null
+++ b/app/services/alert_management/http_integrations/create_service.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module AlertManagement
+ module HttpIntegrations
+ class CreateService
+ # @param project [Project]
+ # @param current_user [User]
+ # @param params [Hash]
+ def initialize(project, current_user, params)
+ @project = project
+ @current_user = current_user
+ @params = params
+ end
+
+ def execute
+ return error_no_permissions unless allowed?
+ return error_multiple_integrations unless creation_allowed?
+
+ integration = project.alert_management_http_integrations.create(params)
+ return error_in_create(integration) unless integration.valid?
+
+ success(integration)
+ end
+
+ private
+
+ attr_reader :project, :current_user, :params
+
+ def allowed?
+ current_user&.can?(:admin_operations, project)
+ end
+
+ def creation_allowed?
+ project.alert_management_http_integrations.empty?
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def success(integration)
+ ServiceResponse.success(payload: { integration: integration })
+ end
+
+ def error_no_permissions
+ error(_('You have insufficient permissions to create an HTTP integration for this project'))
+ end
+
+ def error_multiple_integrations
+ error(_('Multiple HTTP integrations are not supported for this project'))
+ end
+
+ def error_in_create(integration)
+ error(integration.errors.full_messages.to_sentence)
+ end
+ end
+ end
+end
+
+::AlertManagement::HttpIntegrations::CreateService.prepend_if_ee('::EE::AlertManagement::HttpIntegrations::CreateService')
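For reference, a caller (for example the GraphQL mutation that wraps this service) would drive it roughly as in the sketch below; `project`, `current_user` and the attribute values are illustrative, only the ServiceResponse contract comes from the service itself.

# Hypothetical caller; assumes an existing Project and User.
response = AlertManagement::HttpIntegrations::CreateService
  .new(project, current_user, { name: 'Production alerts', active: true })
  .execute

if response.success?
  integration = response.payload[:integration]
  # use integration.name, integration.token, ...
else
  Rails.logger.warn(response.message) # e.g. the multiple-integrations error
end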
diff --git a/app/services/alert_management/http_integrations/destroy_service.rb b/app/services/alert_management/http_integrations/destroy_service.rb
new file mode 100644
index 00000000000..aeb3f6cb807
--- /dev/null
+++ b/app/services/alert_management/http_integrations/destroy_service.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module AlertManagement
+ module HttpIntegrations
+ class DestroyService
+ # @param integration [AlertManagement::HttpIntegration]
+ # @param current_user [User]
+ def initialize(integration, current_user)
+ @integration = integration
+ @current_user = current_user
+ end
+
+ def execute
+ return error_no_permissions unless allowed?
+
+ if integration.destroy
+ success
+ else
+ error(integration.errors.full_messages.to_sentence)
+ end
+ end
+
+ private
+
+ attr_reader :integration, :current_user
+
+ def allowed?
+ current_user&.can?(:admin_operations, integration)
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def success
+ ServiceResponse.success(payload: { integration: integration })
+ end
+
+ def error_no_permissions
+ error(_('You have insufficient permissions to remove this HTTP integration'))
+ end
+ end
+ end
+end
diff --git a/app/services/alert_management/http_integrations/update_service.rb b/app/services/alert_management/http_integrations/update_service.rb
new file mode 100644
index 00000000000..220c4e759f0
--- /dev/null
+++ b/app/services/alert_management/http_integrations/update_service.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module AlertManagement
+ module HttpIntegrations
+ class UpdateService
+ # @param integration [AlertManagement::HttpIntegration]
+ # @param current_user [User]
+ # @param params [Hash]
+ def initialize(integration, current_user, params)
+ @integration = integration
+ @current_user = current_user
+ @params = params
+ end
+
+ def execute
+ return error_no_permissions unless allowed?
+
+ params[:token] = nil if params.delete(:regenerate_token)
+
+ if integration.update(params)
+ success
+ else
+ error(integration.errors.full_messages.to_sentence)
+ end
+ end
+
+ private
+
+ attr_reader :integration, :current_user, :params
+
+ def allowed?
+ current_user&.can?(:admin_operations, integration)
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+
+ def success
+ ServiceResponse.success(payload: { integration: integration.reset })
+ end
+
+ def error_no_permissions
+ error(_('You have insufficient permissions to update this HTTP integration'))
+ end
+ end
+ end
+end
diff --git a/app/services/alert_management/process_prometheus_alert_service.rb b/app/services/alert_management/process_prometheus_alert_service.rb
index 5c7698f724a..28ce5401a6c 100644
--- a/app/services/alert_management/process_prometheus_alert_service.rb
+++ b/app/services/alert_management/process_prometheus_alert_service.rb
@@ -9,6 +9,10 @@ module AlertManagement
return bad_request unless incoming_payload.has_required_attributes?
process_alert_management_alert
+ return bad_request unless alert.persisted?
+
+ process_incident_issues if process_issues?
+ send_alert_email if send_email?
ServiceResponse.success
end
@@ -30,8 +34,6 @@ module AlertManagement
else
create_alert_management_alert
end
-
- process_incident_issues if process_issues?
end
def reset_alert_management_alert_status
@@ -85,12 +87,17 @@ module AlertManagement
end
def process_incident_issues
- return unless alert.persisted?
- return if alert.issue
+ return if alert.issue || alert.resolved?
IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id)
end
+ def send_alert_email
+ notification_service
+ .async
+ .prometheus_alerts_fired(project, [alert])
+ end
+
def logger
@logger ||= Gitlab::AppLogger
end
diff --git a/app/services/alert_management/sync_alert_service_data_service.rb b/app/services/alert_management/sync_alert_service_data_service.rb
new file mode 100644
index 00000000000..1ba197065c5
--- /dev/null
+++ b/app/services/alert_management/sync_alert_service_data_service.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+module AlertManagement
+ class SyncAlertServiceDataService
+ # @param alert_service [AlertsService]
+ def initialize(alert_service)
+ @alert_service = alert_service
+ end
+
+ def execute
+ http_integration = find_http_integration
+
+ result = if http_integration
+ update_integration_data(http_integration)
+ else
+ create_integration
+ end
+
+ result ? ServiceResponse.success : ServiceResponse.error(message: 'Update failed')
+ end
+
+ private
+
+ attr_reader :alert_service
+
+ def find_http_integration
+ AlertManagement::HttpIntegrationsFinder.new(
+ alert_service.project,
+ endpoint_identifier: ::AlertManagement::HttpIntegration::LEGACY_IDENTIFIER
+ )
+ .execute
+ .first
+ end
+
+ def create_integration
+ new_integration = AlertManagement::HttpIntegration.create(
+ project_id: alert_service.project_id,
+ name: 'HTTP endpoint',
+ endpoint_identifier: AlertManagement::HttpIntegration::LEGACY_IDENTIFIER,
+ active: alert_service.active,
+ encrypted_token: alert_service.data.encrypted_token,
+ encrypted_token_iv: alert_service.data.encrypted_token_iv
+ )
+
+ new_integration.persisted?
+ end
+
+ def update_integration_data(http_integration)
+ http_integration.update(
+ active: alert_service.active,
+ encrypted_token: alert_service.data.encrypted_token,
+ encrypted_token_iv: alert_service.data.encrypted_token_iv
+ )
+ end
+ end
+end
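A hedged sketch of how this sync might be driven while migrating legacy AlertsService records over to HTTP integrations; the enumeration is an assumption, the service itself only needs a single AlertsService.

# Illustrative backfill loop over the legacy per-project records.
AlertsService.find_each do |alert_service|
  result = AlertManagement::SyncAlertServiceDataService.new(alert_service).execute

  Rails.logger.warn("Sync failed for project #{alert_service.project_id}") if result.error?
end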
diff --git a/app/services/audit_event_service.rb b/app/services/audit_event_service.rb
index 3c21844ec62..d1558c60c3d 100644
--- a/app/services/audit_event_service.rb
+++ b/app/services/audit_event_service.rb
@@ -16,7 +16,7 @@ class AuditEventService
@author = build_author(author)
@entity = entity
@details = details
- @ip_address = (@details[:ip_address].presence || @author.current_sign_in_ip)
+ @ip_address = resolve_ip_address(@details, @author)
end
# Builds the @details attribute for authentication
@@ -64,6 +64,12 @@ class AuditEventService
end
end
+ def resolve_ip_address(details, author)
+ details[:ip_address].presence ||
+ Gitlab::RequestContext.instance.client_ip ||
+ author.current_sign_in_ip
+ end
+
def base_payload
{
author_id: @author.id,
diff --git a/app/services/bulk_create_integration_service.rb b/app/services/bulk_create_integration_service.rb
index 23b89b0d8a9..61c5565db60 100644
--- a/app/services/bulk_create_integration_service.rb
+++ b/app/services/bulk_create_integration_service.rb
@@ -11,6 +11,8 @@ class BulkCreateIntegrationService
service_list = ServiceList.new(batch, service_hash, association).to_array
Service.transaction do
+ run_callbacks(batch) if association == 'project'
+
results = bulk_insert(*service_list)
if integration.data_fields_present?
@@ -18,8 +20,6 @@ class BulkCreateIntegrationService
bulk_insert(*data_list)
end
-
- run_callbacks(batch) if association == 'project'
end
end
@@ -35,11 +35,11 @@ class BulkCreateIntegrationService
# rubocop: disable CodeReuse/ActiveRecord
def run_callbacks(batch)
- if integration.issue_tracker?
+ if integration.external_issue_tracker?
Project.where(id: batch.select(:id)).update_all(has_external_issue_tracker: true)
end
- if integration.type == 'ExternalWikiService'
+ if integration.external_wiki?
Project.where(id: batch.select(:id)).update_all(has_external_wiki: true)
end
end
@@ -49,7 +49,7 @@ class BulkCreateIntegrationService
if integration.template?
integration.to_service_hash
else
- integration.to_service_hash.tap { |json| json['inherit_from_id'] = integration.id }
+ integration.to_service_hash.tap { |json| json['inherit_from_id'] = integration.inherit_from_id || integration.id }
end
end
diff --git a/app/services/bulk_import_service.rb b/app/services/bulk_import_service.rb
new file mode 100644
index 00000000000..bebf9153ce7
--- /dev/null
+++ b/app/services/bulk_import_service.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+# Entry point of the BulkImport feature.
+# This service receives a Gitlab Instance connection params
+# and a list of groups to be imported.
+#
+# Process topography:
+#
+# sync | async
+# |
+# User +--> P1 +----> Pn +---+
+# | ^ | Enqueue new job
+# | +-----+
+#
+# P1 (sync)
+#
+# - Create a BulkImport record
+# - Create a BulkImport::Entity for each group to be imported
+# - Enqueue a BulkImportWorker job (P2) to import the given groups (entities)
+#
+# Pn (async)
+#
+# - For each group to be imported (BulkImport::Entity.with_status(:created))
+# - Import the group data
+# - Create entities for each subgroup of the imported group
+# - Enqueue a BulkImportService job (Pn) to import the new entities (subgroups)
+#
+class BulkImportService
+ attr_reader :current_user, :params, :credentials
+
+ def initialize(current_user, params, credentials)
+ @current_user = current_user
+ @params = params
+ @credentials = credentials
+ end
+
+ def execute
+ bulk_import = create_bulk_import
+
+ BulkImportWorker.perform_async(bulk_import.id)
+ end
+
+ private
+
+ def create_bulk_import
+ BulkImport.transaction do
+ bulk_import = BulkImport.create!(user: current_user, source_type: 'gitlab')
+ bulk_import.create_configuration!(credentials.slice(:url, :access_token))
+
+ params.each do |entity|
+ BulkImports::Entity.create!(
+ bulk_import: bulk_import,
+ source_type: entity[:source_type],
+ source_full_path: entity[:source_full_path],
+ destination_name: entity[:destination_name],
+ destination_namespace: entity[:destination_namespace]
+ )
+ end
+
+ bulk_import
+ end
+ end
+end
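To make the P1 step above concrete, an invocation might look like the sketch below; the entity hash keys mirror the attributes persisted in create_bulk_import, while the concrete values and the access token are made up.

# Hypothetical invocation; one BulkImports::Entity is created per hash.
BulkImportService.new(
  current_user,
  [
    {
      source_type: 'group_entity',
      source_full_path: 'source-group',
      destination_name: 'imported-group',
      destination_namespace: 'target-namespace'
    }
  ],
  { url: 'https://source.gitlab.example.com', access_token: '<access-token>' }
).execute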
diff --git a/app/services/bulk_update_integration_service.rb b/app/services/bulk_update_integration_service.rb
index 74d77618f2c..5ddfdd359c2 100644
--- a/app/services/bulk_update_integration_service.rb
+++ b/app/services/bulk_update_integration_service.rb
@@ -9,7 +9,7 @@ class BulkUpdateIntegrationService
# rubocop: disable CodeReuse/ActiveRecord
def execute
Service.transaction do
- batch.update_all(service_hash)
+ Service.where(id: batch.select(:id)).update_all(service_hash)
if integration.data_fields_present?
integration.data_fields.class.where(service_id: batch.select(:id)).update_all(data_fields_hash)
@@ -23,7 +23,7 @@ class BulkUpdateIntegrationService
attr_reader :integration, :batch
def service_hash
- integration.to_service_hash.tap { |json| json['inherit_from_id'] = integration.id }
+ integration.to_service_hash.tap { |json| json['inherit_from_id'] = integration.inherit_from_id || integration.id }
end
def data_fields_hash
diff --git a/app/services/ci/append_build_trace_service.rb b/app/services/ci/append_build_trace_service.rb
new file mode 100644
index 00000000000..602f8c5030d
--- /dev/null
+++ b/app/services/ci/append_build_trace_service.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module Ci
+ class AppendBuildTraceService
+ Result = Struct.new(:status, :stream_size, keyword_init: true)
+ TraceRangeError = Class.new(StandardError)
+
+ attr_reader :build, :params
+
+ def initialize(build, params)
+ @build = build
+ @params = params
+ end
+
+ def execute(body_data)
+ # TODO:
+ # it seems that `Content-Range` as formatted by runner is wrong,
+ # the `byte_end` should point to the final byte, but it points to byte+1,
+ # which means we have to calculate the end of the body ourselves,
+ # as we cannot use `content_length[1]`
+ # Issue: https://gitlab.com/gitlab-org/gitlab-runner/issues/3275
+
+ content_range = stream_range.split('-')
+ body_start = content_range[0].to_i
+ body_end = body_start + body_data.bytesize
+
+ stream_size = build.trace.append(body_data, body_start)
+
+ unless stream_size == body_end
+ log_range_error(stream_size, body_end)
+
+ return Result.new(status: 416, stream_size: stream_size)
+ end
+
+ Result.new(status: 202, stream_size: stream_size)
+ end
+
+ private
+
+ def stream_range
+ params.fetch(:content_range)
+ end
+
+ def log_range_error(stream_size, body_end)
+ extra = {
+ build_id: build.id,
+ body_end: body_end,
+ stream_size: stream_size,
+ stream_class: stream_size.class,
+ stream_range: stream_range
+ }
+
+ build.trace_chunks.last.try do |chunk|
+ extra.merge!(
+ chunk_index: chunk.chunk_index,
+ chunk_store: chunk.data_store,
+ chunks_count: build.trace_chunks.count
+ )
+ end
+
+ ::Gitlab::ErrorTracking
+ .log_exception(TraceRangeError.new, extra)
+ end
+ end
+end
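The Content-Range handling is easiest to follow with concrete numbers; this toy calculation (all values assumed) mirrors how the service decides between 202 and 416.

# Toy illustration of the range check; values are made up.
body_data  = 'partial trace chunk'           # 19 bytes
body_start = 100                             # from "Content-Range: 100-119"
body_end   = body_start + body_data.bytesize # 119, computed locally

stream_size = 119 # what build.trace.append returned after writing the chunk
status = stream_size == body_end ? 202 : 416 # a mismatch tells the runner to resync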
diff --git a/app/services/ci/build_report_result_service.rb b/app/services/ci/build_report_result_service.rb
index 76ecf428f11..f138aa91236 100644
--- a/app/services/ci/build_report_result_service.rb
+++ b/app/services/ci/build_report_result_service.rb
@@ -39,8 +39,6 @@ module Ci
end
def track_test_cases(build, test_suite)
- return if Feature.disabled?(:track_unique_test_cases_parsed, build.project)
-
track_usage_event(EVENT_NAME, test_case_hashes(build, test_suite))
end
diff --git a/app/services/ci/compare_reports_base_service.rb b/app/services/ci/compare_reports_base_service.rb
index 2e84f914db3..9aba3a50ec1 100644
--- a/app/services/ci/compare_reports_base_service.rb
+++ b/app/services/ci/compare_reports_base_service.rb
@@ -8,7 +8,9 @@ module Ci
# issue: https://gitlab.com/gitlab-org/gitlab/issues/34224
class CompareReportsBaseService < ::BaseService
def execute(base_pipeline, head_pipeline)
- comparer = build_comparer(base_pipeline, head_pipeline)
+ base_report = get_report(base_pipeline)
+ head_report = get_report(head_pipeline)
+ comparer = build_comparer(base_report, head_report)
{
status: :parsed,
@@ -31,8 +33,8 @@ module Ci
protected
- def build_comparer(base_pipeline, head_pipeline)
- comparer_class.new(get_report(base_pipeline), get_report(head_pipeline))
+ def build_comparer(base_report, head_report)
+ comparer_class.new(base_report, head_report)
end
private
diff --git a/app/services/ci/compare_test_reports_service.rb b/app/services/ci/compare_test_reports_service.rb
index 382d5b8995f..ed85ca8274c 100644
--- a/app/services/ci/compare_test_reports_service.rb
+++ b/app/services/ci/compare_test_reports_service.rb
@@ -13,5 +13,19 @@ module Ci
def get_report(pipeline)
pipeline&.test_reports
end
+
+ def build_comparer(base_report, head_report)
+ # We need to load the test failure history on the test comparer because we display
+ # this on the MR widget
+ super.tap do |test_reports_comparer|
+ ::Gitlab::Ci::Reports::TestFailureHistory.new(failed_test_cases(test_reports_comparer), project).load!
+ end
+ end
+
+ def failed_test_cases(test_reports_comparer)
+ test_reports_comparer.suite_comparers.flat_map do |suite_comparer|
+ suite_comparer.limited_tests.new_failures + suite_comparer.limited_tests.existing_failures
+ end
+ end
end
end
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index e7ede98fea4..e3bab2de44e 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -14,6 +14,7 @@ module Ci
Gitlab::Ci::Pipeline::Chain::Config::Process,
Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs,
Gitlab::Ci::Pipeline::Chain::Skip,
+ Gitlab::Ci::Pipeline::Chain::SeedBlock,
Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules,
Gitlab::Ci::Pipeline::Chain::Seed,
Gitlab::Ci::Pipeline::Chain::Limit::Size,
diff --git a/app/services/ci/daily_build_group_report_result_service.rb b/app/services/ci/daily_build_group_report_result_service.rb
index c32fc27c274..bc966fb9634 100644
--- a/app/services/ci/daily_build_group_report_result_service.rb
+++ b/app/services/ci/daily_build_group_report_result_service.rb
@@ -13,7 +13,8 @@ module Ci
project_id: pipeline.project_id,
ref_path: pipeline.source_ref_path,
date: pipeline.created_at.to_date,
- last_pipeline_id: pipeline.id
+ last_pipeline_id: pipeline.id,
+ default_branch: pipeline.default_branch?
}
aggregate(pipeline.builds.with_coverage).map do |group_name, group|
diff --git a/app/services/ci/destroy_expired_job_artifacts_service.rb b/app/services/ci/destroy_expired_job_artifacts_service.rb
index 438b5c7496d..6e7caba8545 100644
--- a/app/services/ci/destroy_expired_job_artifacts_service.rb
+++ b/app/services/ci/destroy_expired_job_artifacts_service.rb
@@ -4,47 +4,90 @@ module Ci
class DestroyExpiredJobArtifactsService
include ::Gitlab::ExclusiveLeaseHelpers
include ::Gitlab::LoopHelpers
+ include ::Gitlab::Utils::StrongMemoize
BATCH_SIZE = 100
- LOOP_TIMEOUT = 45.minutes
+ LOOP_TIMEOUT = 5.minutes
LOOP_LIMIT = 1000
EXCLUSIVE_LOCK_KEY = 'expired_job_artifacts:destroy:lock'
- LOCK_TIMEOUT = 50.minutes
+ LOCK_TIMEOUT = 6.minutes
##
# Destroy expired job artifacts on GitLab instance
#
- # This destroy process cannot run for more than 45 minutes. This is for
+ # This destroy process cannot run for more than 6 minutes. This is for
# preventing multiple `ExpireBuildArtifactsWorker` CRON jobs run concurrently,
- # which is scheduled at every hour.
+ # which is scheduled every 7 minutes.
def execute
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
loop_until(timeout: LOOP_TIMEOUT, limit: LOOP_LIMIT) do
- destroy_batch(Ci::JobArtifact) || destroy_batch(Ci::PipelineArtifact)
+ destroy_artifacts_batch
end
end
end
private
- def destroy_batch(klass)
- artifact_batch = if klass == Ci::JobArtifact
- klass.expired(BATCH_SIZE).unlocked
- else
- klass.expired(BATCH_SIZE)
- end
+ def destroy_artifacts_batch
+ destroy_job_artifacts_batch || destroy_pipeline_artifacts_batch
+ end
+
+ def destroy_job_artifacts_batch
+ artifacts = Ci::JobArtifact
+ .expired(BATCH_SIZE)
+ .unlocked
+ .with_destroy_preloads
+ .to_a
+
+ return false if artifacts.empty?
- artifacts = artifact_batch.to_a
+ parallel_destroy_batch(artifacts)
+ true
+ end
+ # TODO: Make sure this can also be parallelized
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/270973
+ def destroy_pipeline_artifacts_batch
+ artifacts = Ci::PipelineArtifact.expired(BATCH_SIZE).to_a
return false if artifacts.empty?
artifacts.each(&:destroy!)
- run_after_destroy(artifacts)
- true # This is required because of the design of `loop_until` method.
+ true
+ end
+
+ def parallel_destroy_batch(job_artifacts)
+ Ci::DeletedObject.transaction do
+ Ci::DeletedObject.bulk_import(job_artifacts)
+ Ci::JobArtifact.id_in(job_artifacts.map(&:id)).delete_all
+ destroy_related_records_for(job_artifacts)
+ end
+
+ # This is executed outside of the transaction because it depends on Redis
+ update_statistics_for(job_artifacts)
+ destroyed_artifacts_counter.increment({}, job_artifacts.size)
+ end
+
+ # This method is implemented in EE and it must do only database work
+ def destroy_related_records_for(job_artifacts); end
+
+ def update_statistics_for(job_artifacts)
+ artifacts_by_project = job_artifacts.group_by(&:project)
+ artifacts_by_project.each do |project, artifacts|
+ delta = -artifacts.sum { |artifact| artifact.size.to_i }
+ ProjectStatistics.increment_statistic(
+ project, Ci::JobArtifact.project_statistics_name, delta)
+ end
end
- def run_after_destroy(artifacts); end
+ def destroyed_artifacts_counter
+ strong_memoize(:destroyed_artifacts_counter) do
+ name = :destroyed_job_artifacts_count_total
+ comment = 'Counter of destroyed expired job artifacts'
+
+ ::Gitlab::Metrics.counter(name, comment)
+ end
+ end
end
end
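The surrounding locking and looping come from Gitlab::ExclusiveLeaseHelpers and Gitlab::LoopHelpers; as a rough conceptual model only (not the helpers' real implementation), the loop behaves like this:

# Rough sketch: keep destroying batches until a batch finds nothing,
# the timeout elapses, or the iteration limit is reached.
deadline   = Time.current + LOOP_TIMEOUT
iterations = 0

while Time.current < deadline && iterations < LOOP_LIMIT
  break unless destroy_artifacts_batch # each batch returns false when nothing was deleted

  iterations += 1
end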
diff --git a/app/services/ci/list_config_variables_service.rb b/app/services/ci/list_config_variables_service.rb
index b5dc192b512..4a5b3a92a2c 100644
--- a/app/services/ci/list_config_variables_service.rb
+++ b/app/services/ci/list_config_variables_service.rb
@@ -6,7 +6,10 @@ module Ci
config = project.ci_config_for(sha)
return {} unless config
- result = Gitlab::Ci::YamlProcessor.new(config).execute
+ result = Gitlab::Ci::YamlProcessor.new(config, project: project,
+ user: current_user,
+ sha: sha).execute
+
result.valid? ? result.variables_with_data : {}
end
end
diff --git a/app/services/ci/parse_dotenv_artifact_service.rb b/app/services/ci/parse_dotenv_artifact_service.rb
index 71b306864b2..2ee9be476bb 100644
--- a/app/services/ci/parse_dotenv_artifact_service.rb
+++ b/app/services/ci/parse_dotenv_artifact_service.rb
@@ -3,7 +3,7 @@
module Ci
class ParseDotenvArtifactService < ::BaseService
MAX_ACCEPTABLE_DOTENV_SIZE = 5.kilobytes
- MAX_ACCEPTABLE_VARIABLES_COUNT = 10
+ MAX_ACCEPTABLE_VARIABLES_COUNT = 20
SizeLimitError = Class.new(StandardError)
ParserError = Class.new(StandardError)
diff --git a/app/services/ci/test_cases_service.rb b/app/services/ci/test_cases_service.rb
new file mode 100644
index 00000000000..3139b567571
--- /dev/null
+++ b/app/services/ci/test_cases_service.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Ci
+ class TestCasesService
+ MAX_TRACKABLE_FAILURES = 200
+
+ def execute(build)
+ return unless Feature.enabled?(:test_failure_history, build.project)
+ return unless build.has_test_reports?
+ return unless build.project.default_branch_or_master == build.ref
+
+ test_suite = generate_test_suite_report(build)
+
+ track_failures(build, test_suite)
+ end
+
+ private
+
+ def generate_test_suite_report(build)
+ build.collect_test_reports!(Gitlab::Ci::Reports::TestReports.new)
+ end
+
+ def track_failures(build, test_suite)
+ return if test_suite.failed_count > MAX_TRACKABLE_FAILURES
+
+ test_suite.failed.keys.each_slice(100) do |keys|
+ Ci::TestCase.transaction do
+ test_cases = Ci::TestCase.find_or_create_by_batch(build.project, keys)
+ Ci::TestCaseFailure.insert_all(test_case_failures(test_cases, build))
+ end
+ end
+ end
+
+ def test_case_failures(test_cases, build)
+ test_cases.map do |test_case|
+ {
+ test_case_id: test_case.id,
+ build_id: build.id,
+ failed_at: build.finished_at
+ }
+ end
+ end
+ end
+end
diff --git a/app/services/ci/update_build_state_service.rb b/app/services/ci/update_build_state_service.rb
index 22a27906700..fb67b0d2355 100644
--- a/app/services/ci/update_build_state_service.rb
+++ b/app/services/ci/update_build_state_service.rb
@@ -163,16 +163,18 @@ module Ci
end
def ensure_pending_state
- Ci::BuildPendingState.create_or_find_by!(
+ build_state = Ci::BuildPendingState.safe_find_or_create_by(
build_id: build.id,
state: params.fetch(:state),
trace_checksum: params.fetch(:checksum),
failure_reason: params.dig(:failure_reason)
)
- rescue ActiveRecord::RecordNotFound
- metrics.increment_trace_operation(operation: :conflict)
- build.pending_state
+ unless build_state.present?
+ metrics.increment_trace_operation(operation: :conflict)
+ end
+
+ build_state || build.pending_state
end
##
diff --git a/app/services/clusters/aws/authorize_role_service.rb b/app/services/clusters/aws/authorize_role_service.rb
index 2712a4b05bb..188c4aebc5f 100644
--- a/app/services/clusters/aws/authorize_role_service.rb
+++ b/app/services/clusters/aws/authorize_role_service.rb
@@ -17,7 +17,8 @@ module Clusters
def initialize(user, params:)
@user = user
- @params = params
+ @role_arn = params[:role_arn]
+ @region = params[:region]
end
def execute
@@ -33,14 +34,14 @@ module Clusters
private
- attr_reader :role, :params
+ attr_reader :role, :role_arn, :region
def ensure_role_exists!
@role = ::Aws::Role.find_by_user_id!(user.id)
end
def update_role_arn!
- role.update!(params)
+ role.update!(role_arn: role_arn, region: region)
end
def credentials
diff --git a/app/services/clusters/aws/fetch_credentials_service.rb b/app/services/clusters/aws/fetch_credentials_service.rb
index 33efc4cc120..96abbb43969 100644
--- a/app/services/clusters/aws/fetch_credentials_service.rb
+++ b/app/services/clusters/aws/fetch_credentials_service.rb
@@ -10,6 +10,7 @@ module Clusters
def initialize(provision_role, provider: nil)
@provision_role = provision_role
@provider = provider
+ @region = provider&.region || provision_role&.region || Clusters::Providers::Aws::DEFAULT_REGION
end
def execute
@@ -26,7 +27,7 @@ module Clusters
private
- attr_reader :provider
+ attr_reader :provider, :region
def client
::Aws::STS::Client.new(credentials: gitlab_credentials, region: region)
@@ -44,10 +45,6 @@ module Clusters
Gitlab::CurrentSettings.eks_secret_access_key
end
- def region
- provider&.region || Clusters::Providers::Aws::DEFAULT_REGION
- end
-
##
# If we haven't created a provider record yet,
# we restrict ourselves to read only access so
diff --git a/app/services/clusters/kubernetes.rb b/app/services/clusters/kubernetes.rb
index aafea64c820..819ac4c8464 100644
--- a/app/services/clusters/kubernetes.rb
+++ b/app/services/clusters/kubernetes.rb
@@ -7,7 +7,7 @@ module Clusters
GITLAB_ADMIN_TOKEN_NAME = 'gitlab-token'
GITLAB_CLUSTER_ROLE_BINDING_NAME = 'gitlab-admin'
GITLAB_CLUSTER_ROLE_NAME = 'cluster-admin'
- PROJECT_CLUSTER_ROLE_NAME = 'edit'
+ PROJECT_CLUSTER_ROLE_NAME = 'admin'
GITLAB_KNATIVE_SERVING_ROLE_NAME = 'gitlab-knative-serving-role'
GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME = 'gitlab-knative-serving-rolebinding'
GITLAB_CROSSPLANE_DATABASE_ROLE_NAME = 'gitlab-crossplane-database-role'
diff --git a/app/services/clusters/kubernetes/create_or_update_service_account_service.rb b/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
index 2725a3aeaa5..eabc428d0d2 100644
--- a/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
+++ b/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
@@ -69,7 +69,13 @@ module Clusters
def create_role_or_cluster_role_binding
if namespace_creator
- kubeclient.create_or_update_role_binding(role_binding_resource)
+ begin
+ kubeclient.delete_role_binding(role_binding_name, service_account_namespace)
+ rescue Kubeclient::ResourceNotFoundError
+ # Do nothing as we will create a new role binding below
+ end
+
+ kubeclient.update_role_binding(role_binding_resource)
else
kubeclient.create_or_update_cluster_role_binding(cluster_role_binding_resource)
end
@@ -117,11 +123,9 @@ module Clusters
end
def role_binding_resource
- role_name = Feature.enabled?(:kubernetes_cluster_namespace_role_admin) ? 'admin' : Clusters::Kubernetes::PROJECT_CLUSTER_ROLE_NAME
-
Gitlab::Kubernetes::RoleBinding.new(
name: role_binding_name,
- role_name: role_name,
+ role_name: Clusters::Kubernetes::PROJECT_CLUSTER_ROLE_NAME,
role_kind: :ClusterRole,
namespace: service_account_namespace,
service_account_name: service_account_name
diff --git a/app/services/concerns/admin/propagate_service.rb b/app/services/concerns/admin/propagate_service.rb
index 065ab6f7ff9..03e422aec54 100644
--- a/app/services/concerns/admin/propagate_service.rb
+++ b/app/services/concerns/admin/propagate_service.rb
@@ -21,9 +21,16 @@ module Admin
attr_reader :integration
def create_integration_for_projects_without_integration
- Project.without_integration(integration).each_batch(of: BATCH_SIZE) do |projects|
- min_id, max_id = projects.pick("MIN(projects.id), MAX(projects.id)")
- PropagateIntegrationProjectWorker.perform_async(integration.id, min_id, max_id)
+ propagate_integrations(
+ Project.without_integration(integration),
+ PropagateIntegrationProjectWorker
+ )
+ end
+
+ def propagate_integrations(relation, worker_class)
+ relation.each_batch(of: BATCH_SIZE) do |records|
+ min_id, max_id = records.pick("MIN(#{relation.table_name}.id), MAX(#{relation.table_name}.id)")
+ worker_class.perform_async(integration.id, min_id, max_id)
end
end
end
diff --git a/app/services/concerns/integrations/project_test_data.rb b/app/services/concerns/integrations/project_test_data.rb
index 4d551430315..72c12cfb394 100644
--- a/app/services/concerns/integrations/project_test_data.rb
+++ b/app/services/concerns/integrations/project_test_data.rb
@@ -58,5 +58,12 @@ module Integrations
Gitlab::DataBuilder::Deployment.build(deployment)
end
+
+ def releases_events_data
+ release = project.releases.first
+ return { error: s_('TestHooks|Ensure the project has releases.') } unless release.present?
+
+ release.to_hook_data('create')
+ end
end
end
diff --git a/app/services/concerns/users/participable_service.rb b/app/services/concerns/users/participable_service.rb
index 6fde9abfdb0..fac8e91d216 100644
--- a/app/services/concerns/users/participable_service.rb
+++ b/app/services/concerns/users/participable_service.rb
@@ -45,7 +45,8 @@ module Users
type: user.class.name,
username: user.username,
name: user.name,
- avatar_url: user.avatar_url
+ avatar_url: user.avatar_url,
+ availability: user&.status&.availability
}
end
diff --git a/app/services/container_expiration_policies/cleanup_service.rb b/app/services/container_expiration_policies/cleanup_service.rb
new file mode 100644
index 00000000000..f2bc2beab63
--- /dev/null
+++ b/app/services/container_expiration_policies/cleanup_service.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module ContainerExpirationPolicies
+ class CleanupService
+ attr_reader :repository
+
+ def initialize(repository)
+ @repository = repository
+ end
+
+ def execute
+ return ServiceResponse.error(message: 'no repository') unless repository
+
+ repository.start_expiration_policy!
+
+ result = Projects::ContainerRepository::CleanupTagsService
+ .new(project, nil, policy_params.merge('container_expiration_policy' => true))
+ .execute(repository)
+
+ if result[:status] == :success
+ repository.update!(
+ expiration_policy_cleanup_status: :cleanup_unscheduled,
+ expiration_policy_started_at: nil
+ )
+ success(:finished)
+ else
+ repository.cleanup_unfinished!
+
+ success(:unfinished)
+ end
+ end
+
+ private
+
+ def success(cleanup_status)
+ ServiceResponse.success(message: "cleanup #{cleanup_status}", payload: { cleanup_status: cleanup_status, container_repository_id: repository.id })
+ end
+
+ def policy_params
+ return {} unless policy
+
+ policy.policy_params
+ end
+
+ def policy
+ project.container_expiration_policy
+ end
+
+ def project
+ repository&.project
+ end
+ end
+end
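For orientation, the throttled cleanup worker that pairs with this service would call it roughly as below; the `repository` variable and the handling of the two outcomes are assumptions.

# Illustrative caller; `repository` is a ContainerRepository due for cleanup.
response = ContainerExpirationPolicies::CleanupService.new(repository).execute

case response.payload[:cleanup_status]
when :finished
  # tags were removed and the repository leaves the cleanup queue
when :unfinished
  # the run hit its limits; the repository stays flagged for another pass
end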
diff --git a/app/services/container_expiration_policy_service.rb b/app/services/container_expiration_policy_service.rb
index 80f32298323..cf5d702a9ef 100644
--- a/app/services/container_expiration_policy_service.rb
+++ b/app/services/container_expiration_policy_service.rb
@@ -4,20 +4,14 @@ class ContainerExpirationPolicyService < BaseService
InvalidPolicyError = Class.new(StandardError)
def execute(container_expiration_policy)
- unless container_expiration_policy.valid?
- container_expiration_policy.disable!
- raise InvalidPolicyError
- end
-
container_expiration_policy.schedule_next_run!
container_expiration_policy.container_repositories.find_each do |container_repository|
CleanupContainerRepositoryWorker.perform_async(
nil,
container_repository.id,
- container_expiration_policy.attributes
- .except('created_at', 'updated_at')
- .merge(container_expiration_policy: true)
+ container_expiration_policy.policy_params
+ .merge(container_expiration_policy: true)
)
end
end
diff --git a/app/services/dependency_proxy/base_service.rb b/app/services/dependency_proxy/base_service.rb
new file mode 100644
index 00000000000..1b2d4b14a27
--- /dev/null
+++ b/app/services/dependency_proxy/base_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module DependencyProxy
+ class BaseService < ::BaseService
+ private
+
+ def registry
+ DependencyProxy::Registry
+ end
+
+ def auth_headers
+ {
+ Authorization: "Bearer #{@token}"
+ }
+ end
+ end
+end
diff --git a/app/services/dependency_proxy/download_blob_service.rb b/app/services/dependency_proxy/download_blob_service.rb
new file mode 100644
index 00000000000..3c690683bf6
--- /dev/null
+++ b/app/services/dependency_proxy/download_blob_service.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module DependencyProxy
+ class DownloadBlobService < DependencyProxy::BaseService
+ class DownloadError < StandardError
+ attr_reader :http_status
+
+ def initialize(message, http_status)
+ @http_status = http_status
+
+ super(message)
+ end
+ end
+
+ def initialize(image, blob_sha, token)
+ @image = image
+ @blob_sha = blob_sha
+ @token = token
+ @temp_file = Tempfile.new
+ end
+
+ def execute
+ File.open(@temp_file.path, "wb") do |file|
+ Gitlab::HTTP.get(blob_url, headers: auth_headers, stream_body: true) do |fragment|
+ if [301, 302, 307].include?(fragment.code)
+ # do nothing
+ elsif fragment.code == 200
+ file.write(fragment)
+ else
+ raise DownloadError.new('Non-success response code on downloading blob fragment', fragment.code)
+ end
+ end
+ end
+
+ success(file: @temp_file)
+ rescue DownloadError => exception
+ error(exception.message, exception.http_status)
+ rescue Timeout::Error => exception
+ error(exception.message, 599)
+ end
+
+ private
+
+ def blob_url
+ registry.blob_url(@image, @blob_sha)
+ end
+ end
+end
diff --git a/app/services/dependency_proxy/find_or_create_blob_service.rb b/app/services/dependency_proxy/find_or_create_blob_service.rb
new file mode 100644
index 00000000000..bd06f9d7628
--- /dev/null
+++ b/app/services/dependency_proxy/find_or_create_blob_service.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module DependencyProxy
+ class FindOrCreateBlobService < DependencyProxy::BaseService
+ def initialize(group, image, token, blob_sha)
+ @group = group
+ @image = image
+ @token = token
+ @blob_sha = blob_sha
+ end
+
+ def execute
+ file_name = @blob_sha.sub('sha256:', '') + '.gz'
+ blob = @group.dependency_proxy_blobs.find_or_build(file_name)
+
+ unless blob.persisted?
+ result = DependencyProxy::DownloadBlobService
+ .new(@image, @blob_sha, @token).execute
+
+ if result[:status] == :error
+ log_failure(result)
+
+ return error('Failed to download the blob', result[:http_status])
+ end
+
+ blob.file = result[:file]
+ blob.size = result[:file].size
+ blob.save!
+ end
+
+ success(blob: blob)
+ end
+
+ private
+
+ def log_failure(result)
+ log_error(
+ "Dependency proxy: Failed to download the blob." \
+ "Blob sha: #{@blob_sha}." \
+ "Error message: #{result[:message][0, 100]}" \
+ "HTTP status: #{result[:http_status]}"
+ )
+ end
+ end
+end
diff --git a/app/services/dependency_proxy/pull_manifest_service.rb b/app/services/dependency_proxy/pull_manifest_service.rb
new file mode 100644
index 00000000000..fc54ef85c96
--- /dev/null
+++ b/app/services/dependency_proxy/pull_manifest_service.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module DependencyProxy
+ class PullManifestService < DependencyProxy::BaseService
+ def initialize(image, tag, token)
+ @image = image
+ @tag = tag
+ @token = token
+ end
+
+ def execute
+ response = Gitlab::HTTP.get(manifest_url, headers: auth_headers)
+
+ if response.success?
+ success(manifest: response.body)
+ else
+ error(response.body, response.code)
+ end
+ rescue Timeout::Error => exception
+ error(exception.message, 599)
+ end
+
+ private
+
+ def manifest_url
+ registry.manifest_url(@image, @tag)
+ end
+ end
+end
diff --git a/app/services/dependency_proxy/request_token_service.rb b/app/services/dependency_proxy/request_token_service.rb
new file mode 100644
index 00000000000..4ca7239b9f6
--- /dev/null
+++ b/app/services/dependency_proxy/request_token_service.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module DependencyProxy
+ class RequestTokenService < DependencyProxy::BaseService
+ def initialize(image)
+ @image = image
+ end
+
+ def execute
+ response = Gitlab::HTTP.get(auth_url)
+
+ if response.success?
+ success(token: Gitlab::Json.parse(response.body)['token'])
+ else
+ error('Expected 200 response code for an access token', response.code)
+ end
+ rescue Timeout::Error => exception
+ error(exception.message, 599)
+ rescue JSON::ParserError
+ error('Failed to parse a response body for an access token', 500)
+ end
+
+ private
+
+ def auth_url
+ registry.auth_url(@image)
+ end
+ end
+end
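Taken together, the DependencyProxy services are meant to be chained when proxying a pull. A hedged sketch of that flow follows; `group`, the image, tag and digest are illustrative, and each result is assumed to be a hash carrying a :status key plus the payload shown in the services above.

# Sketch of a proxied pull; error handling omitted for brevity.
token_result    = DependencyProxy::RequestTokenService.new('library/alpine').execute
token           = token_result[:token]

manifest_result = DependencyProxy::PullManifestService.new('library/alpine', 'latest', token).execute
manifest        = manifest_result[:manifest]

blob_result     = DependencyProxy::FindOrCreateBlobService
  .new(group, 'library/alpine', token, 'sha256:<digest>').execute
blob            = blob_result[:blob]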
diff --git a/app/services/deploy_keys/collect_keys_service.rb b/app/services/deploy_keys/collect_keys_service.rb
deleted file mode 100644
index 2ef49bf0f30..00000000000
--- a/app/services/deploy_keys/collect_keys_service.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-module DeployKeys
- class CollectKeysService
- def initialize(project, current_user)
- @project = project
- @current_user = current_user
- end
-
- def execute
- return [] unless current_user && project && user_can_read_project
-
- project.deploy_keys_projects
- .with_deploy_keys
- .with_write_access
- .map(&:deploy_key)
- end
-
- private
-
- def user_can_read_project
- Ability.allowed?(current_user, :read_project, project)
- end
-
- attr_reader :project, :current_user
- end
-end
diff --git a/app/services/design_management/copy_design_collection/copy_service.rb b/app/services/design_management/copy_design_collection/copy_service.rb
index 5099c2c5704..c0b32e1e9ae 100644
--- a/app/services/design_management/copy_design_collection/copy_service.rb
+++ b/app/services/design_management/copy_design_collection/copy_service.rb
@@ -172,20 +172,26 @@ module DesignManagement
def copy_designs!
design_attributes = attributes_config[:design_attributes]
- new_rows = designs.map do |design|
- design.attributes.slice(*design_attributes).merge(
- issue_id: target_issue.id,
- project_id: target_project.id
+ ::DesignManagement::Design.with_project_iid_supply(target_project) do |supply|
+ new_rows = designs.each_with_index.map do |design, i|
+ design.attributes.slice(*design_attributes).merge(
+ issue_id: target_issue.id,
+ project_id: target_project.id,
+ iid: supply.next_value
+ )
+ end
+
+ # TODO Replace `Gitlab::Database.bulk_insert` with `BulkInsertSafe`
+ # once https://gitlab.com/gitlab-org/gitlab/-/issues/247718 is fixed.
+ # When this is fixed, we can remove the call to
+ # `with_project_iid_supply` above, since the objects will be instantiated
+ # and callbacks (including `ensure_project_iid!`) will fire.
+ ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ DesignManagement::Design.table_name,
+ new_rows,
+ return_ids: true
)
end
-
- # TODO Replace `Gitlab::Database.bulk_insert` with `BulkInsertSafe`
- # once https://gitlab.com/gitlab-org/gitlab/-/issues/247718 is fixed.
- ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
- DesignManagement::Design.table_name,
- new_rows,
- return_ids: true
- )
end
def copy_versions!
diff --git a/app/services/discussions/capture_diff_note_position_service.rb b/app/services/discussions/capture_diff_note_position_service.rb
index 87aa27e455f..a95e149a6c9 100644
--- a/app/services/discussions/capture_diff_note_position_service.rb
+++ b/app/services/discussions/capture_diff_note_position_service.rb
@@ -9,8 +9,7 @@ module Discussions
def execute(discussion)
# The service has been implemented for text only
- # The impact of image notes on this service is being investigated in
- # https://gitlab.com/gitlab-org/gitlab/-/issues/213989
+ # We don't need to capture positions for images
return unless discussion.on_text?
result = tracer&.trace(discussion.position)
diff --git a/app/services/feature_flags/update_service.rb b/app/services/feature_flags/update_service.rb
index c837e50b104..ed5e2e794b4 100644
--- a/app/services/feature_flags/update_service.rb
+++ b/app/services/feature_flags/update_service.rb
@@ -22,6 +22,10 @@ module FeatureFlags
audit_event = audit_event(feature_flag)
+ if feature_flag.active_changed?
+ feature_flag.execute_hooks(current_user)
+ end
+
if feature_flag.save
save_audit_event(audit_event)
diff --git a/app/services/git/branch_hooks_service.rb b/app/services/git/branch_hooks_service.rb
index 93a0d139001..d00ca83441a 100644
--- a/app/services/git/branch_hooks_service.rb
+++ b/app/services/git/branch_hooks_service.rb
@@ -84,7 +84,6 @@ module Git
end
def enqueue_metrics_dashboard_sync
- return unless Feature.enabled?(:sync_metrics_dashboards, project)
return unless default_branch?
::Metrics::Dashboard::SyncDashboardsWorker.perform_async(project.id)
diff --git a/app/services/groups/create_service.rb b/app/services/groups/create_service.rb
index cf843d92862..016c31cbccc 100644
--- a/app/services/groups/create_service.rb
+++ b/app/services/groups/create_service.rb
@@ -34,7 +34,7 @@ module Groups
if @group.save
@group.add_owner(current_user)
@group.create_namespace_settings
- Service.create_from_active_default_integrations(@group, :group_id) if Feature.enabled?(:group_level_integrations)
+ Service.create_from_active_default_integrations(@group, :group_id) if Feature.enabled?(:group_level_integrations, default_enabled: true)
end
end
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index a2923b1e4f9..948dba2d206 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -6,6 +6,10 @@ module Import
attr_reader :params, :current_user
def execute(access_params, provider)
+ if blocked_url?
+ return log_and_return_error("Invalid URL: #{url}", :bad_request)
+ end
+
unless authorized?
return error(_('This namespace has already been taken! Please choose another one.'), :unprocessable_entity)
end
@@ -56,6 +60,25 @@ module Import
can?(current_user, :create_projects, target_namespace)
end
+ def url
+ @url ||= params[:github_hostname]
+ end
+
+ def allow_local_requests?
+ Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
+ end
+
+ def blocked_url?
+ Gitlab::UrlBlocker.blocked_url?(
+ url,
+ {
+ allow_localhost: allow_local_requests?,
+ allow_local_network: allow_local_requests?,
+ schemes: %w(http https)
+ }
+ )
+ end
+
private
def log_error(exception)
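The blocked_url? guard added above is the usual SSRF check; a minimal example with an assumed private address and local requests disallowed:

# Assumed example: a github_hostname pointing into the local network is
# refused unless local requests from webhooks/services are allowed.
Gitlab::UrlBlocker.blocked_url?(
  'https://10.0.0.5',
  allow_localhost: false,
  allow_local_network: false,
  schemes: %w(http https)
)
# => true, so execute returns the "Invalid URL" error with :bad_request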
diff --git a/app/services/integrations/test/project_service.rb b/app/services/integrations/test/project_service.rb
index 941d70c2cc4..39471d373f9 100644
--- a/app/services/integrations/test/project_service.rb
+++ b/app/services/integrations/test/project_service.rb
@@ -35,6 +35,8 @@ module Integrations
wiki_page_events_data
when 'deployment'
deployment_events_data
+ when 'release'
+ releases_events_data
else
push_events_data
end
diff --git a/app/services/issuable/common_system_notes_service.rb b/app/services/issuable/common_system_notes_service.rb
index fbc72dc867a..fd2dc3787c2 100644
--- a/app/services/issuable/common_system_notes_service.rb
+++ b/app/services/issuable/common_system_notes_service.rb
@@ -51,11 +51,11 @@ module Issuable
end
end
- def create_wip_note(old_title)
+ def create_draft_note(old_title)
return unless issuable.is_a?(MergeRequest)
if MergeRequest.work_in_progress?(old_title) != issuable.work_in_progress?
- SystemNoteService.handle_merge_request_wip(issuable, issuable.project, current_user)
+ SystemNoteService.handle_merge_request_draft(issuable, issuable.project, current_user)
end
end
@@ -69,7 +69,7 @@ module Issuable
end
def create_title_change_note(old_title)
- create_wip_note(old_title)
+ create_draft_note(old_title)
if issuable.wipless_title_changed(old_title)
SystemNoteService.change_title(issuable, issuable.project, current_user, old_title)
diff --git a/app/services/issuable/import_csv/base_service.rb b/app/services/issuable/import_csv/base_service.rb
new file mode 100644
index 00000000000..bf5f643a51b
--- /dev/null
+++ b/app/services/issuable/import_csv/base_service.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module Issuable
+ module ImportCsv
+ class BaseService
+ def initialize(user, project, csv_io)
+ @user = user
+ @project = project
+ @csv_io = csv_io
+ @results = { success: 0, error_lines: [], parse_error: false }
+ end
+
+ def execute
+ process_csv
+ email_results_to_user
+
+ @results
+ end
+
+ private
+
+ def process_csv
+ with_csv_lines.each do |row, line_no|
+ issuable_attributes = {
+ title: row[:title],
+ description: row[:description]
+ }
+
+ if create_issuable(issuable_attributes).persisted?
+ @results[:success] += 1
+ else
+ @results[:error_lines].push(line_no)
+ end
+ end
+ rescue ArgumentError, CSV::MalformedCSVError
+ @results[:parse_error] = true
+ end
+
+ def with_csv_lines
+ csv_data = @csv_io.open(&:read).force_encoding(Encoding::UTF_8)
+ verify_headers!(csv_data)
+
+ csv_parsing_params = {
+ col_sep: detect_col_sep(csv_data.lines.first),
+ headers: true,
+ header_converters: :symbol
+ }
+
+ CSV.new(csv_data, csv_parsing_params).each.with_index(2)
+ end
+
+ def verify_headers!(data)
+ headers = data.lines.first.downcase
+ return if headers.include?('title') && headers.include?('description')
+
+ raise CSV::MalformedCSVError
+ end
+
+ def detect_col_sep(header)
+ if header.include?(",")
+ ","
+ elsif header.include?(";")
+ ";"
+ elsif header.include?("\t")
+ "\t"
+ else
+ raise CSV::MalformedCSVError
+ end
+ end
+
+ def create_issuable(attributes)
+ create_issuable_class.new(@project, @user, attributes).execute
+ end
+
+ def email_results_to_user
+ # defined in ImportCsvService
+ end
+
+ def create_issuable_class
+ # defined in ImportCsvService
+ end
+ end
+ end
+end
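A self-contained toy run of the parsing rules above (sample data assumed): the header must mention title and description, the column separator is sniffed from the header line, and data rows are numbered from 2 so error_lines match the line numbers users see in their file.

require 'csv'

csv_data = "title;description\nBug A;Crash on save\nBug B;Slow page load\n"

header  = csv_data.lines.first.downcase
valid   = header.include?('title') && header.include?('description')
col_sep = [',', ';', "\t"].find { |sep| header.include?(sep) } # mirrors detect_col_sep

if valid && col_sep
  CSV.new(csv_data, col_sep: col_sep, headers: true, header_converters: :symbol)
     .each.with_index(2) { |row, line_no| puts "line #{line_no}: #{row[:title]}" }
end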
diff --git a/app/services/issues/import_csv_service.rb b/app/services/issues/import_csv_service.rb
index 60790ba3547..bce3ecc8bef 100644
--- a/app/services/issues/import_csv_service.rb
+++ b/app/services/issues/import_csv_service.rb
@@ -1,64 +1,25 @@
# frozen_string_literal: true
module Issues
- class ImportCsvService
- def initialize(user, project, csv_io)
- @user = user
- @project = project
- @csv_io = csv_io
- @results = { success: 0, error_lines: [], parse_error: false }
- end
-
+ class ImportCsvService < Issuable::ImportCsv::BaseService
def execute
- process_csv
- email_results_to_user
-
- @results
- end
-
- private
-
- def process_csv
- csv_data = @csv_io.open(&:read).force_encoding(Encoding::UTF_8)
+ record_import_attempt
- csv_parsing_params = {
- col_sep: detect_col_sep(csv_data.lines.first),
- headers: true,
- header_converters: :symbol
- }
-
- CSV.new(csv_data, csv_parsing_params).each.with_index(2) do |row, line_no|
- issue_attributes = {
- title: row[:title],
- description: row[:description]
- }
-
- issue = Issues::CreateService.new(@project, @user, issue_attributes).execute
-
- if issue.persisted?
- @results[:success] += 1
- else
- @results[:error_lines].push(line_no)
- end
- end
- rescue ArgumentError, CSV::MalformedCSVError
- @results[:parse_error] = true
+ super
end
def email_results_to_user
Notify.import_issues_csv_email(@user.id, @project.id, @results).deliver_later
end
- def detect_col_sep(header)
- if header.include?(",")
- ","
- elsif header.include?(";")
- ";"
- elsif header.include?("\t")
- "\t"
- else
- raise CSV::MalformedCSVError
- end
+ private
+
+ def create_issuable_class
+ Issues::CreateService
+ end
+
+ def record_import_attempt
+ Issues::CsvImport.create!(user: @user, project: @project)
end
end
end
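
A rough usage sketch of the refactored service (the call site, current_user, project, and csv_io names are illustrative, not part of this diff): the base class returns the @results hash, so callers can inspect the outcome directly.

    # Hypothetical invocation; csv_io is any object responding to #open (e.g. an uploaded file).
    results = Issues::ImportCsvService.new(current_user, project, csv_io).execute
    results[:success]      # number of issues created
    results[:error_lines]  # CSV line numbers (counting from 2) that failed to import
    results[:parse_error]  # true when the headers or column separator could not be parsed
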
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index 12dbff57ec5..e2b1b5400c7 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -5,8 +5,6 @@ module Issues
def execute(issue)
return issue unless can?(current_user, :reopen_issue, issue)
- before_reopen(issue)
-
if issue.reopen
event_service.reopen_issue(issue, current_user)
create_note(issue, 'reopened')
@@ -23,14 +21,8 @@ module Issues
private
- def before_reopen(issue)
- # Overriden in EE
- end
-
def create_note(issue, state = issue.state)
SystemNoteService.change_status(issue, issue.project, current_user, state, nil)
end
end
end
-
-Issues::ReopenService.prepend_if_ee('EE::Issues::ReopenService')
diff --git a/app/services/jira_connect/sync_service.rb b/app/services/jira_connect/sync_service.rb
index 07a648bb8c9..f8855fb6deb 100644
--- a/app/services/jira_connect/sync_service.rb
+++ b/app/services/jira_connect/sync_service.rb
@@ -6,11 +6,11 @@ module JiraConnect
self.project = project
end
- def execute(commits: nil, branches: nil, merge_requests: nil)
+ def execute(commits: nil, branches: nil, merge_requests: nil, update_sequence_id: nil)
JiraConnectInstallation.for_project(project).each do |installation|
client = Atlassian::JiraConnect::Client.new(installation.base_url, installation.shared_secret)
- response = client.store_dev_info(project: project, commits: commits, branches: branches, merge_requests: merge_requests)
+ response = client.store_dev_info(project: project, commits: commits, branches: branches, merge_requests: merge_requests, update_sequence_id: update_sequence_id)
log_response(response)
end
diff --git a/app/services/jira_connect_subscriptions/create_service.rb b/app/services/jira_connect_subscriptions/create_service.rb
index 8e794d3acf7..b169d97615d 100644
--- a/app/services/jira_connect_subscriptions/create_service.rb
+++ b/app/services/jira_connect_subscriptions/create_service.rb
@@ -3,6 +3,8 @@
module JiraConnectSubscriptions
class CreateService < ::JiraConnectSubscriptions::BaseService
include Gitlab::Utils::StrongMemoize
+ MERGE_REQUEST_SYNC_BATCH_SIZE = 20
+ MERGE_REQUEST_SYNC_BATCH_DELAY = 1.minute.freeze
def execute
unless namespace && can?(current_user, :create_jira_connect_subscription, namespace)
@@ -18,6 +20,8 @@ module JiraConnectSubscriptions
subscription = JiraConnectSubscription.new(installation: jira_connect_installation, namespace: namespace)
if subscription.save
+ schedule_sync_project_jobs
+
success
else
error(subscription.errors.full_messages.join(', '), 422)
@@ -29,5 +33,18 @@ module JiraConnectSubscriptions
Namespace.find_by_full_path(params[:namespace_path])
end
end
+
+ def schedule_sync_project_jobs
+ return unless Feature.enabled?(:jira_connect_full_namespace_sync)
+
+ namespace.all_projects.each_batch(of: MERGE_REQUEST_SYNC_BATCH_SIZE) do |projects, index|
+ JiraConnect::SyncProjectWorker.bulk_perform_in_with_contexts(
+ index * MERGE_REQUEST_SYNC_BATCH_DELAY,
+ projects,
+ arguments_proc: -> (project) { [project.id, Atlassian::JiraConnect::Client.generate_update_sequence_id] },
+ context_proc: -> (project) { { project: project } }
+ )
+ end
+ end
end
end
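
Each batch of up to MERGE_REQUEST_SYNC_BATCH_SIZE projects is enqueued with a growing delay, so large namespaces are synced gradually rather than all at once. An illustrative sketch, assuming each_batch yields consecutive batch indexes starting at 1; each enqueued job receives [project.id, update_sequence_id] via arguments_proc:

    delay_for_batch = ->(index) { index * JiraConnectSubscriptions::CreateService::MERGE_REQUEST_SYNC_BATCH_DELAY }
    delay_for_batch.call(1) # first batch of projects is delayed by 1 minute
    delay_for_batch.call(2) # second batch by 2 minutes, and so on
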
diff --git a/app/services/jira_import/users_importer.rb b/app/services/jira_import/users_importer.rb
index 9babd468d56..438a74343a5 100644
--- a/app/services/jira_import/users_importer.rb
+++ b/app/services/jira_import/users_importer.rb
@@ -2,8 +2,6 @@
module JiraImport
class UsersImporter
- attr_reader :user, :project, :start_at
-
def initialize(user, project, start_at)
@project = project
@start_at = start_at
@@ -23,6 +21,8 @@ module JiraImport
private
+ attr_reader :user, :project, :start_at
+
def mapped_users
users_mapper_service.execute
end
@@ -44,9 +44,9 @@ module JiraImport
# TODO: use deployment_type enum from jira service when https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37003 is merged
case deployment_type.upcase
when JiraService::DEPLOYMENT_TYPES[:server]
- ServerUsersMapperService.new(project.jira_service, start_at)
+ ServerUsersMapperService.new(user, project, start_at)
when JiraService::DEPLOYMENT_TYPES[:cloud]
- CloudUsersMapperService.new(project.jira_service, start_at)
+ CloudUsersMapperService.new(user, project, start_at)
else
raise ArgumentError
end
diff --git a/app/services/jira_import/users_mapper_service.rb b/app/services/jira_import/users_mapper_service.rb
index 480c034f952..6c8610bfbf3 100644
--- a/app/services/jira_import/users_mapper_service.rb
+++ b/app/services/jira_import/users_mapper_service.rb
@@ -2,30 +2,37 @@
module JiraImport
class UsersMapperService
+ include Gitlab::Utils::StrongMemoize
+
# MAX_USERS must match the pageSize value in app/assets/javascripts/jira_import/utils/constants.js
MAX_USERS = 50
- attr_reader :jira_service, :start_at
-
- def initialize(jira_service, start_at)
- @jira_service = jira_service
+ # This class is called from UsersImporter, so only small batches of users are expected.
+ # If a large batch of users ever needs to be mapped here, the implementation
+ # must be changed to handle the matching in batches.
+ def initialize(current_user, project, start_at)
+ @current_user = current_user
+ @project = project
+ @jira_service = project.jira_service
@start_at = start_at
end
def execute
- users.to_a.map do |jira_user|
+ jira_users.to_a.map do |jira_user|
{
jira_account_id: jira_user_id(jira_user),
jira_display_name: jira_user_name(jira_user),
jira_email: jira_user['emailAddress']
- }.merge(match_user(jira_user))
+ }.merge(gitlab_id: find_gitlab_id(jira_user))
end
end
private
- def users
- @users ||= client.get(url)
+ attr_reader :current_user, :project, :jira_service, :start_at
+
+ def jira_users
+ @jira_users ||= client.get(url)
end
def client
@@ -44,10 +51,33 @@ module JiraImport
raise NotImplementedError
end
- # TODO: Matching user by email and displayName will be done as the part
- # of follow-up issue: https://gitlab.com/gitlab-org/gitlab/-/issues/219023
- def match_user(jira_user)
- { gitlab_id: nil, gitlab_username: nil, gitlab_name: nil }
+ def matched_users
+ strong_memoize(:matched_users) do
+ jira_emails = jira_users.map { |u| u['emailAddress']&.downcase }.compact
+ jira_names = jira_users.map { |u| jira_user_name(u)&.downcase }.compact
+
+ relations = []
+ relations << User.by_username(jira_names).select("users.id, users.name, users.username, users.email as user_email")
+ relations << User.by_name(jira_names).select("users.id, users.name, users.username, users.email as user_email")
+ relations << User.by_user_email(jira_emails).select("users.id, users.name, users.username, users.email as user_email")
+ relations << User.by_emails(jira_emails).select("users.id, users.name, users.username, emails.email as user_email")
+
+ User.from_union(relations).id_in(project_member_ids).select("users.id as user_id, users.name as name, users.username as username, user_email")
+ end
+ end
+
+ def find_gitlab_id(jira_user)
+ user = matched_users.find do |matched_user|
+ matched_user.user_email&.downcase == jira_user['emailAddress']&.downcase ||
+ matched_user.name&.downcase == jira_user_name(jira_user)&.downcase ||
+ matched_user.username&.downcase == jira_user_name(jira_user)&.downcase
+ end
+
+ user&.user_id
+ end
+
+ def project_member_ids
+ @project_member_ids ||= MembersFinder.new(project, current_user).execute.select(:user_id)
end
end
end
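
With the matching in place, execute now returns one entry per Jira user, carrying a GitLab user id whenever an email, name, or username matches a project member. An illustrative result (the concrete subclass, ids, and values are made up):

    JiraImport::ServerUsersMapperService.new(current_user, project, 0).execute
    # => [
    #      { jira_account_id: "abc123", jira_display_name: "Jane Doe",
    #        jira_email: "jane@example.com", gitlab_id: 42 },
    #      { jira_account_id: "def456", jira_display_name: "No Match",
    #        jira_email: nil, gitlab_id: nil }
    #    ]
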
diff --git a/app/services/labels/promote_service.rb b/app/services/labels/promote_service.rb
index 9ed10f6a11b..fdf2cf13f92 100644
--- a/app/services/labels/promote_service.rb
+++ b/app/services/labels/promote_service.rb
@@ -10,81 +10,79 @@ module Labels
label.is_a?(ProjectLabel)
Label.transaction do
- new_label = clone_label_to_group_label(label)
+ # use the existing group label if it exists
+ group_label = find_or_create_group_label(label)
- label_ids_for_merge(new_label).find_in_batches(batch_size: BATCH_SIZE) do |batched_ids|
- update_old_label_relations(new_label, batched_ids)
+ label_ids_for_merge(group_label).find_in_batches(batch_size: BATCH_SIZE) do |batched_ids|
+ update_old_label_relations(group_label, batched_ids)
destroy_project_labels(batched_ids)
end
- # We skipped validations during creation. Let's run them now, after deleting conflicting labels
- raise ActiveRecord::RecordInvalid.new(new_label) unless new_label.valid?
-
- new_label
+ group_label
end
end
# rubocop: enable CodeReuse/ActiveRecord
private
- def update_old_label_relations(new_label, old_label_ids)
- update_issuables(new_label, old_label_ids)
- update_resource_label_events(new_label, old_label_ids)
- update_issue_board_lists(new_label, old_label_ids)
- update_priorities(new_label, old_label_ids)
- subscribe_users(new_label, old_label_ids)
+ def update_old_label_relations(group_label, old_label_ids)
+ update_issuables(group_label, old_label_ids)
+ update_resource_label_events(group_label, old_label_ids)
+ update_issue_board_lists(group_label, old_label_ids)
+ update_priorities(group_label, old_label_ids)
+ subscribe_users(group_label, old_label_ids)
end
# rubocop: disable CodeReuse/ActiveRecord
- def subscribe_users(new_label, label_ids)
+ def subscribe_users(group_label, label_ids)
# users can be subscribed to multiple labels that will be merged into the group one
# we want to keep only one subscription / user
ids_to_update = Subscription.where(subscribable_id: label_ids, subscribable_type: 'Label')
.group(:user_id)
.pluck('MAX(id)')
- Subscription.where(id: ids_to_update).update_all(subscribable_id: new_label.id)
+ Subscription.where(id: ids_to_update).update_all(subscribable_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
- def label_ids_for_merge(new_label)
+ def label_ids_for_merge(group_label)
LabelsFinder
- .new(current_user, title: new_label.title, group_id: project.group.id)
+ .new(current_user, title: group_label.title, group_id: project.group.id)
.execute(skip_authorization: true)
- .where.not(id: new_label)
+ .where.not(id: group_label)
.select(:id) # Can't use pluck() to avoid object-creation because of the batching
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
- def update_issuables(new_label, label_ids)
+ def update_issuables(group_label, label_ids)
LabelLink
.where(label: label_ids)
- .update_all(label_id: new_label.id)
+ .update_all(label_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
- def update_resource_label_events(new_label, label_ids)
+ def update_resource_label_events(group_label, label_ids)
ResourceLabelEvent
.where(label: label_ids)
- .update_all(label_id: new_label.id)
+ .update_all(label_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
- def update_issue_board_lists(new_label, label_ids)
+ def update_issue_board_lists(group_label, label_ids)
List
.where(label: label_ids)
- .update_all(label_id: new_label.id)
+ .update_all(label_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
- def update_priorities(new_label, label_ids)
+ def update_priorities(group_label, label_ids)
LabelPriority
.where(label: label_ids)
- .update_all(label_id: new_label.id)
+ .update_all(label_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -92,18 +90,12 @@ module Labels
def destroy_project_labels(label_ids)
Label.where(id: label_ids).destroy_all # rubocop: disable Cop/DestroyAll
end
- # rubocop: enable CodeReuse/ActiveRecord
- def clone_label_to_group_label(label)
+ def find_or_create_group_label(label)
params = label.attributes.slice('title', 'description', 'color')
- # Since the title of the new label has to be the same as the previous labels
- # and we're merging old labels in batches we'll skip validation to omit 2-step
- # merge process and do it in one batch
- # We'll be forcing validation at the end of the transaction to ensure everything
- # was merged correctly
- new_label = GroupLabel.new(params.merge(group: project.group))
- new_label.save(validate: false)
+ new_label = GroupLabel.create_with(params).find_or_initialize_by(group_id: project.group.id, title: label.title)
+ new_label.save! unless new_label.persisted?
new_label
end
end
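
The create_with/find_or_initialize_by pair gives find-or-create semantics on (group_id, title): an existing group label is reused untouched, and the project label's attributes only seed a brand-new record. A small illustration of that ActiveRecord pattern (label attributes and the group variable are examples):

    params = { 'title' => 'bug', 'description' => 'Something broken', 'color' => '#FF0000' }
    label = GroupLabel.create_with(params).find_or_initialize_by(group_id: group.id, title: 'bug')
    label.persisted? # true if the group already had a "bug" label; its description/color are kept
    label.save!      # only creates (with validations) when the label is new
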
diff --git a/app/services/members/invite_service.rb b/app/services/members/invite_service.rb
new file mode 100644
index 00000000000..cfab5c3ef9d
--- /dev/null
+++ b/app/services/members/invite_service.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+module Members
+ class InviteService < Members::BaseService
+ DEFAULT_LIMIT = 100
+
+ attr_reader :errors
+
+ def initialize(current_user, params)
+ @current_user, @params = current_user, params.dup
+ @errors = {}
+ end
+
+ def execute(source)
+ return error(s_('Email cannot be blank')) if params[:email].blank?
+
+ emails = params[:email].split(',').uniq.flatten
+ return error(s_("Too many users specified (limit is %{user_limit})") % { user_limit: user_limit }) if
+ user_limit && emails.size > user_limit
+
+ emails.each do |email|
+ next if existing_member?(source, email)
+
+ next if existing_invite?(source, email)
+
+ if existing_user?(email)
+ add_existing_user_as_member(current_user, source, params, email)
+ next
+ end
+
+ invite_new_member_and_user(current_user, source, params, email)
+ end
+
+ return success unless errors.any?
+
+ error(errors)
+ end
+
+ private
+
+ def invite_new_member_and_user(current_user, source, params, email)
+ new_member = (source.class.name + 'Member').constantize.create(source_id: source.id,
+ user_id: nil,
+ access_level: params[:access_level],
+ invite_email: email,
+ created_by_id: current_user.id,
+ expires_at: params[:expires_at],
+ requested_at: Time.current.utc)
+
+ unless new_member.valid? && new_member.persisted?
+ errors[params[:email]] = new_member.errors.full_messages.to_sentence
+ end
+ end
+
+ def add_existing_user_as_member(current_user, source, params, email)
+ new_member = create_member(current_user, existing_user(email), source, params.merge({ invite_email: email }))
+
+ unless new_member.valid? && new_member.persisted?
+ errors[email] = new_member.errors.full_messages.to_sentence
+ end
+ end
+
+ def create_member(current_user, user, source, params)
+ source.add_user(user, params[:access_level], current_user: current_user, expires_at: params[:expires_at])
+ end
+
+ def user_limit
+ limit = params.fetch(:limit, DEFAULT_LIMIT)
+
+ limit && limit < 0 ? nil : limit
+ end
+
+ def existing_member?(source, email)
+ existing_member = source.members.with_user_by_email(email).exists?
+
+ if existing_member
+ errors[email] = "Already a member of #{source.name}"
+ return true
+ end
+
+ false
+ end
+
+ def existing_invite?(source, email)
+ existing_invite = source.members.search_invite_email(email).exists?
+
+ if existing_invite
+ errors[email] = "Member already invited to #{source.name}"
+ return true
+ end
+
+ false
+ end
+
+ def existing_user(email)
+ User.find_by_email(email)
+ end
+
+ def existing_user?(email)
+ existing_user(email).present?
+ end
+ end
+end
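
A hedged usage sketch of the new service (emails, access level, and the project are illustrative; the response shape assumes the usual BaseService success/error hash): execute takes the source group or project, and errors are keyed by email.

    service = Members::InviteService.new(current_user,
                                         email: 'new@example.com,existing@example.com',
                                         access_level: Gitlab::Access::DEVELOPER,
                                         expires_at: 30.days.from_now)
    result = service.execute(project)
    result[:status]  # :success, or :error with the per-email messages from service.errors
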
diff --git a/app/services/merge_requests/cleanup_refs_service.rb b/app/services/merge_requests/cleanup_refs_service.rb
index d003124a112..23ac8e393f4 100644
--- a/app/services/merge_requests/cleanup_refs_service.rb
+++ b/app/services/merge_requests/cleanup_refs_service.rb
@@ -9,7 +9,7 @@ module MergeRequests
attr_reader :merge_request
def self.schedule(merge_request)
- MergeRequestCleanupRefsWorker.perform_in(TIME_THRESHOLD, merge_request.id)
+ merge_request.create_cleanup_schedule(scheduled_at: TIME_THRESHOLD.from_now)
end
def initialize(merge_request)
@@ -22,6 +22,7 @@ module MergeRequests
end
def execute
+ return error("Merge request is not scheduled to be cleaned up yet.") unless scheduled?
return error("Merge request has not been closed nor merged for #{TIME_THRESHOLD.inspect}.") unless eligible?
# Ensure that commit shas of refs are kept around so we won't lose them when GC runs.
@@ -30,7 +31,10 @@ module MergeRequests
return error('Failed to create keep around refs.') unless kept_around?
return error('Failed to cache merge ref sha.') unless cache_merge_ref_sha
- delete_refs
+ delete_refs if repository.exists?
+
+ return error('Failed to update schedule.') unless update_schedule
+
success
end
@@ -38,6 +42,10 @@ module MergeRequests
attr_reader :repository, :ref_path, :merge_ref_path, :ref_head_sha, :merge_ref_sha
+ def scheduled?
+ merge_request.cleanup_schedule.present? && merge_request.cleanup_schedule.scheduled_at <= Time.current
+ end
+
def eligible?
return met_time_threshold?(merge_request.metrics&.latest_closed_at) if merge_request.closed?
@@ -71,5 +79,9 @@ module MergeRequests
def delete_refs
repository.delete_refs(ref_path, merge_ref_path)
end
+
+ def update_schedule
+ merge_request.cleanup_schedule.update(completed_at: Time.current)
+ end
end
end
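
The service is now driven by a persisted cleanup schedule instead of a delayed Sidekiq job. A sketch of the intended flow, mirroring the code above (the surrounding worker wiring is out of scope for this diff):

    # When a merge request is closed or merged:
    MergeRequests::CleanupRefsService.schedule(merge_request)
    # creates merge_request.cleanup_schedule with scheduled_at set TIME_THRESHOLD from now

    # Later, once scheduled_at has passed, a worker can run:
    MergeRequests::CleanupRefsService.new(merge_request).execute
    # deletes the refs (if the repository exists) and stamps cleanup_schedule.completed_at
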
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index e5d0b216d6c..ed977a5a872 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -42,7 +42,7 @@ module MergeRequests
end
notify_about_push(mr)
- mark_mr_as_wip_from_commits(mr)
+ mark_mr_as_draft_from_commits(mr)
execute_mr_web_hooks(mr)
end
@@ -246,7 +246,7 @@ module MergeRequests
notification_service.push_to_merge_request(merge_request, @current_user, new_commits: new_commits, existing_commits: existing_commits)
end
- def mark_mr_as_wip_from_commits(merge_request)
+ def mark_mr_as_draft_from_commits(merge_request)
return unless @commits.present?
commit_shas = merge_request.commit_shas
@@ -257,7 +257,7 @@ module MergeRequests
if wip_commit && !merge_request.work_in_progress?
merge_request.update(title: merge_request.wip_title)
- SystemNoteService.add_merge_request_wip_from_commit(
+ SystemNoteService.add_merge_request_draft_from_commit(
merge_request,
merge_request.project,
@current_user,
diff --git a/app/services/merge_requests/reopen_service.rb b/app/services/merge_requests/reopen_service.rb
index f87005bcb6c..bcedbc61c65 100644
--- a/app/services/merge_requests/reopen_service.rb
+++ b/app/services/merge_requests/reopen_service.rb
@@ -15,6 +15,8 @@ module MergeRequests
invalidate_cache_counts(merge_request, users: merge_request.assignees)
merge_request.update_project_counter_caches
merge_request.cache_merge_request_closes_issues!(current_user)
+ merge_request.cleanup_schedule&.destroy
+ merge_request.update_column(:merge_ref_sha, nil)
end
merge_request
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index 48f44affb23..b2826b5c905 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -73,6 +73,8 @@ module Notes
if note.for_merge_request? && note.diff_note? && note.start_of_discussion?
Discussions::CaptureDiffNotePositionService.new(note.noteable, note.diff_file&.paths).execute(note.discussion)
end
+
+ track_note_creation_usage_for_issues(note) if note.for_issue?
end
def do_commands(note, update_params, message, only_commands)
@@ -113,5 +115,9 @@ module Notes
track_usage_event(:incident_management_incident_comment, user.id)
end
+
+ def track_note_creation_usage_for_issues(note)
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_added_action(author: note.author)
+ end
end
end
diff --git a/app/services/notes/destroy_service.rb b/app/services/notes/destroy_service.rb
index ee8a680fcb4..2b6ec47eaef 100644
--- a/app/services/notes/destroy_service.rb
+++ b/app/services/notes/destroy_service.rb
@@ -8,6 +8,13 @@ module Notes
end
clear_noteable_diffs_cache(note)
+ track_note_removal_usage_for_issues(note) if note.for_issue?
+ end
+
+ private
+
+ def track_note_removal_usage_for_issues(note)
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_removed_action(author: note.author)
end
end
end
diff --git a/app/services/notes/update_service.rb b/app/services/notes/update_service.rb
index 193d3080078..37872f7fbdb 100644
--- a/app/services/notes/update_service.rb
+++ b/app/services/notes/update_service.rb
@@ -14,6 +14,8 @@ module Notes
note.save
end
+ track_note_edit_usage_for_issues(note) if note.for_issue?
+
only_commands = false
quick_actions_service = QuickActionsService.new(project, current_user)
@@ -89,6 +91,10 @@ module Notes
Note.id_in(note.discussion.notes.map(&:id)).update_all(confidential: params[:confidential])
end
+
+ def track_note_edit_usage_for_issues(note)
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_comment_edited_action(author: note.author)
+ end
end
end
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index 7853ad11c64..85113d3ca22 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -370,6 +370,16 @@ class NotificationService
end
end
+ def new_instance_access_request(user)
+ recipients = User.instance_access_request_approvers_to_be_notified # https://gitlab.com/gitlab-org/gitlab/-/issues/277016 will change this
+
+ return true if recipients.empty?
+
+ recipients.each do |recipient|
+ mailer.instance_access_request_email(user, recipient).deliver_later
+ end
+ end
+
# Members
def new_access_request(member)
return true unless member.notifiable?(:subscription)
@@ -601,7 +611,7 @@ class NotificationService
return if project.emails_disabled?
owners_and_maintainers_without_invites(project).to_a.product(alerts).each do |recipient, alert|
- mailer.prometheus_alert_fired_email(project.id, recipient.user.id, alert).deliver_later
+ mailer.prometheus_alert_fired_email(project, recipient.user, alert).deliver_later
end
end
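
For reference, a minimal sketch of calling the new instance access request notification (pending_user stands for whoever signed up and awaits approval; recipient selection happens inside the method):

    NotificationService.new.new_instance_access_request(pending_user)
    # emails each configured approver, or returns true immediately when there are none
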
diff --git a/app/services/packages/composer/create_package_service.rb b/app/services/packages/composer/create_package_service.rb
index 7e16fc78599..2d2f1568187 100644
--- a/app/services/packages/composer/create_package_service.rb
+++ b/app/services/packages/composer/create_package_service.rb
@@ -10,11 +10,11 @@ module Packages
composer_json
::Packages::Package.transaction do
- ::Packages::Composer::Metadatum.upsert(
+ ::Packages::Composer::Metadatum.upsert({
package_id: created_package.id,
target_sha: target,
composer_json: composer_json
- )
+ })
end
end
diff --git a/app/services/packages/composer/version_parser_service.rb b/app/services/packages/composer/version_parser_service.rb
index 76dfd7a14bd..811cac0b3b7 100644
--- a/app/services/packages/composer/version_parser_service.rb
+++ b/app/services/packages/composer/version_parser_service.rb
@@ -9,7 +9,7 @@ module Packages
def execute
if @tag_name.present?
- @tag_name.match(Gitlab::Regex.composer_package_version_regex).captures[0]
+ @tag_name.delete_prefix('v')
elsif @branch_name.present?
branch_sufix_or_prefix(@branch_name.match(Gitlab::Regex.composer_package_version_regex))
end
diff --git a/app/services/packages/create_event_service.rb b/app/services/packages/create_event_service.rb
index d009cba2812..8350ff993bf 100644
--- a/app/services/packages/create_event_service.rb
+++ b/app/services/packages/create_event_service.rb
@@ -3,18 +3,30 @@
module Packages
class CreateEventService < BaseService
def execute
- event_scope = scope.is_a?(::Packages::Package) ? scope.package_type : scope
-
- ::Packages::Event.create!(
- event_type: event_name,
- originator: current_user&.id,
- originator_type: originator_type,
- event_scope: event_scope
- )
+ if Feature.enabled?(:collect_package_events_redis) && redis_event_name
+ ::Gitlab::UsageDataCounters::HLLRedisCounter.track_event(current_user.id, redis_event_name)
+ end
+
+ if Feature.enabled?(:collect_package_events)
+ ::Packages::Event.create!(
+ event_type: event_name,
+ originator: current_user&.id,
+ originator_type: originator_type,
+ event_scope: event_scope
+ )
+ end
end
private
+ def redis_event_name
+ @redis_event_name ||= ::Packages::Event.allowed_event_name(event_scope, event_name, originator_type)
+ end
+
+ def event_scope
+ @event_scope ||= scope.is_a?(::Packages::Package) ? scope.package_type : scope
+ end
+
def scope
params[:scope]
end
diff --git a/app/services/packages/create_package_file_service.rb b/app/services/packages/create_package_file_service.rb
index 0ebceeee779..5723b0b4717 100644
--- a/app/services/packages/create_package_file_service.rb
+++ b/app/services/packages/create_package_file_service.rb
@@ -9,7 +9,7 @@ module Packages
end
def execute
- package.package_files.create!(
+ package_file = package.package_files.build(
file: params[:file],
size: params[:size],
file_name: params[:file_name],
@@ -17,6 +17,13 @@ module Packages
file_sha256: params[:file_sha256],
file_md5: params[:file_md5]
)
+
+ if params[:build].present?
+ package_file.package_file_build_infos << package_file.package_file_build_infos.build(pipeline: params[:build].pipeline)
+ end
+
+ package_file.save!
+ package_file
end
end
end
diff --git a/app/services/packages/debian/extract_deb_metadata_service.rb b/app/services/packages/debian/extract_deb_metadata_service.rb
new file mode 100644
index 00000000000..eb106f4cd30
--- /dev/null
+++ b/app/services/packages/debian/extract_deb_metadata_service.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Packages
+ module Debian
+ # Returns .deb file metadata
+ class ExtractDebMetadataService
+ CommandFailedError = Class.new(StandardError)
+
+ def initialize(file_path)
+ @file_path = file_path
+ end
+
+ def execute
+ unless success?
+ raise CommandFailedError, "The `#{cmd}` command failed (status: #{result.status}) with the following error:\n#{result.stderr}"
+ end
+
+ sections = ParseDebian822Service.new(result.stdout).execute
+
+ sections.each_value.first
+ end
+
+ private
+
+ def cmd
+ @cmd ||= begin
+ dpkg_deb_path = Gitlab.config.packages.dpkg_deb_path
+ [dpkg_deb_path, '--field', @file_path]
+ end
+ end
+
+ def result
+ @result ||= Gitlab::Popen.popen_with_detail(cmd)
+ end
+
+ def success?
+ result.status&.exitstatus == 0
+ end
+ end
+ end
+end
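
A hedged usage sketch (the file path is illustrative, and dpkg_deb_path must point at a working dpkg-deb binary): the service shells out to `dpkg-deb --field` and returns the fields of the first control section as a hash keyed by symbols.

    fields = Packages::Debian::ExtractDebMetadataService.new('/tmp/libsample0_1.2.3_amd64.deb').execute
    fields[:Package]      # e.g. "libsample0"
    fields[:Architecture] # e.g. "amd64"
    # CommandFailedError is raised when dpkg-deb exits with a non-zero status.
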
diff --git a/app/services/packages/debian/parse_debian822_service.rb b/app/services/packages/debian/parse_debian822_service.rb
new file mode 100644
index 00000000000..665929d2324
--- /dev/null
+++ b/app/services/packages/debian/parse_debian822_service.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+module Packages
+ module Debian
+ # Parse String as Debian RFC822 control data format
+ # https://manpages.debian.org/unstable/dpkg-dev/deb822.5
+ class ParseDebian822Service
+ InvalidDebian822Error = Class.new(StandardError)
+
+ def initialize(input)
+ @input = input
+ end
+
+ def execute
+ output = {}
+ @input.each_line('', chomp: true) do |block|
+ section = {}
+ section_name, field = nil
+ block.each_line(chomp: true) do |line|
+ next if comment_line?(line)
+
+ if continuation_line?(line)
+ raise InvalidDebian822Error, "Parse error. Unexpected continuation line" if field.nil?
+
+ section[field] += "\n"
+ section[field] += line[1..] unless paragraph_separator?(line)
+ elsif match = match_section_line(line)
+ section_name = match[:name] if section_name.nil?
+ field = match[:field].to_sym
+
+ raise InvalidDebian822Error, "Duplicate field '#{field}' in section '#{section_name}'" if section.include?(field)
+
+ section[field] = match[:value]
+ else
+ raise InvalidDebian822Error, "Parse error on line #{line}"
+ end
+ end
+
+ raise InvalidDebian822Error, "Duplicate section '#{section_name}'" if output[section_name]
+
+ output[section_name] = section
+ end
+
+ output
+ end
+
+ private
+
+ def comment_line?(line)
+ line.match?(/^#/)
+ end
+
+ def continuation_line?(line)
+ line.match?(/^ /)
+ end
+
+ def paragraph_separator?(line)
+ line == ' .'
+ end
+
+ def match_section_line(line)
+ line.match(/(?<name>(?<field>^\S+):\s*(?<value>.*))/)
+ end
+ end
+ end
+end
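
To make the parser's output concrete, a small illustrative example (the input is a typical control paragraph; note that the section key is the full first line of the paragraph, per match[:name], and continuation lines are folded into the previous field):

    input = "Package: libsample0\nSection: libs\nDescription: Sample shared library\n Some multiline text\n"
    Packages::Debian::ParseDebian822Service.new(input).execute
    # => { "Package: libsample0" => {
    #        Package: "libsample0",
    #        Section: "libs",
    #        Description: "Sample shared library\nSome multiline text"
    #      } }
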
diff --git a/app/services/packages/generic/create_package_file_service.rb b/app/services/packages/generic/create_package_file_service.rb
index 4d49c63799f..f25e8b0ae56 100644
--- a/app/services/packages/generic/create_package_file_service.rb
+++ b/app/services/packages/generic/create_package_file_service.rb
@@ -28,7 +28,8 @@ module Packages
file: params[:file],
size: params[:file].size,
file_sha256: params[:file].sha256,
- file_name: params[:file_name]
+ file_name: params[:file_name],
+ build: params[:build]
}
::Packages::CreatePackageFileService.new(package, file_params).execute
diff --git a/app/services/packages/generic/find_or_create_package_service.rb b/app/services/packages/generic/find_or_create_package_service.rb
index 8a8459d167e..97f774a836b 100644
--- a/app/services/packages/generic/find_or_create_package_service.rb
+++ b/app/services/packages/generic/find_or_create_package_service.rb
@@ -6,7 +6,7 @@ module Packages
def execute
find_or_create_package!(::Packages::Package.package_types['generic']) do |package|
if params[:build].present?
- package.build_info = Packages::BuildInfo.new(pipeline: params[:build].pipeline)
+ package.build_infos.new(pipeline: params[:build].pipeline)
end
end
end
diff --git a/app/services/packages/maven/create_package_service.rb b/app/services/packages/maven/create_package_service.rb
index 3df17021499..540c7b1d4da 100644
--- a/app/services/packages/maven/create_package_service.rb
+++ b/app/services/packages/maven/create_package_service.rb
@@ -6,7 +6,7 @@ module Packages
app_group, _, app_name = params[:name].rpartition('/')
app_group.tr!('/', '.')
- package = create_package!(:maven,
+ create_package!(:maven,
maven_metadatum_attributes: {
path: params[:path],
app_group: app_group,
@@ -14,11 +14,6 @@ module Packages
app_version: params[:version]
}
)
-
- build = params[:build]
- package.create_build_info!(pipeline: build.pipeline) if build.present?
-
- package
end
end
end
diff --git a/app/services/packages/maven/find_or_create_package_service.rb b/app/services/packages/maven/find_or_create_package_service.rb
index 505f45a7b21..a2a61ff8d93 100644
--- a/app/services/packages/maven/find_or_create_package_service.rb
+++ b/app/services/packages/maven/find_or_create_package_service.rb
@@ -38,8 +38,7 @@ module Packages
package_params = {
name: package_name,
path: params[:path],
- version: version,
- build: params[:build]
+ version: version
}
package =
@@ -47,6 +46,8 @@ module Packages
.execute
end
+ package.build_infos.create!(pipeline: params[:build].pipeline) if params[:build].present?
+
package
end
end
diff --git a/app/services/packages/npm/create_package_service.rb b/app/services/packages/npm/create_package_service.rb
index 7f868b71734..c4b75348bba 100644
--- a/app/services/packages/npm/create_package_service.rb
+++ b/app/services/packages/npm/create_package_service.rb
@@ -18,7 +18,7 @@ module Packages
package = create_package!(:npm, name: name, version: version)
if build.present?
- package.create_build_info!(pipeline: build.pipeline)
+ package.build_infos.create!(pipeline: build.pipeline)
end
::Packages::CreatePackageFileService.new(package, file_params).execute
@@ -75,7 +75,8 @@ module Packages
file: CarrierWaveStringFile.new(Base64.decode64(attachment['data'])),
size: attachment['length'],
file_sha1: version_data[:dist][:shasum],
- file_name: package_file_name
+ file_name: package_file_name,
+ build: params[:build]
}
end
diff --git a/app/services/pages/destroy_deployments_service.rb b/app/services/pages/destroy_deployments_service.rb
new file mode 100644
index 00000000000..45d906bec7a
--- /dev/null
+++ b/app/services/pages/destroy_deployments_service.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module Pages
+ class DestroyDeploymentsService
+ def initialize(project, last_deployment_id = nil)
+ @project = project
+ @last_deployment_id = last_deployment_id
+ end
+
+ def execute
+ deployments_to_destroy = @project.pages_deployments
+ deployments_to_destroy = deployments_to_destroy.older_than(@last_deployment_id) if @last_deployment_id
+ deployments_to_destroy.find_each(&:destroy) # rubocop: disable CodeReuse/ActiveRecord
+ end
+ end
+end
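
Usage is a one-liner; a sketch (the deployment variable is illustrative): passing the latest deployment id keeps it and destroys everything older, while omitting it destroys all of a project's pages deployments.

    # Keep the deployment currently being served, remove the rest:
    Pages::DestroyDeploymentsService.new(project, latest_deployment.id).execute

    # Remove every pages deployment for the project:
    Pages::DestroyDeploymentsService.new(project).execute
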
diff --git a/app/services/personal_access_tokens/create_service.rb b/app/services/personal_access_tokens/create_service.rb
index ff9bb7d6802..93a0135669f 100644
--- a/app/services/personal_access_tokens/create_service.rb
+++ b/app/services/personal_access_tokens/create_service.rb
@@ -2,23 +2,30 @@
module PersonalAccessTokens
class CreateService < BaseService
- def initialize(current_user, params = {})
+ def initialize(current_user:, target_user:, params: {})
@current_user = current_user
+ @target_user = target_user
@params = params.dup
+ @ip_address = @params.delete(:ip_address)
end
def execute
- personal_access_token = current_user.personal_access_tokens.create(params.slice(*allowed_params))
+ return ServiceResponse.error(message: 'Not permitted to create') unless creation_permitted?
- if personal_access_token.persisted?
- ServiceResponse.success(payload: { personal_access_token: personal_access_token })
+ token = target_user.personal_access_tokens.create(params.slice(*allowed_params))
+
+ if token.persisted?
+ log_event(token)
+ ServiceResponse.success(payload: { personal_access_token: token })
else
- ServiceResponse.error(message: personal_access_token.errors.full_messages.to_sentence)
+ ServiceResponse.error(message: token.errors.full_messages.to_sentence, payload: { personal_access_token: token })
end
end
private
+ attr_reader :target_user, :ip_address
+
def allowed_params
[
:name,
@@ -27,5 +34,15 @@ module PersonalAccessTokens
:expires_at
]
end
+
+ def creation_permitted?
+ Ability.allowed?(current_user, :create_user_personal_access_token, target_user)
+ end
+
+ def log_event(token)
+ log_info("PAT CREATION: created_by: '#{current_user.username}', created_for: '#{token.user.username}', token_id: '#{token.id}'")
+ end
end
end
+
+PersonalAccessTokens::CreateService.prepend_if_ee('EE::PersonalAccessTokens::CreateService')
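
With the keyword-argument constructor, an administrator can now create a token on behalf of another user. A hedged sketch (admin and user are illustrative; params beyond name and expires_at are elided in this hunk, so only those are shown):

    response = PersonalAccessTokens::CreateService.new(
      current_user: admin,
      target_user: user,
      params: { name: 'automation token', expires_at: 30.days.from_now }
    ).execute

    response.success?                           # false with "Not permitted to create" unless allowed
    response.payload[:personal_access_token]    # the created (or invalid) token record
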
diff --git a/app/services/personal_access_tokens/revoke_service.rb b/app/services/personal_access_tokens/revoke_service.rb
index 17405002d8d..34d542acab1 100644
--- a/app/services/personal_access_tokens/revoke_service.rb
+++ b/app/services/personal_access_tokens/revoke_service.rb
@@ -4,16 +4,17 @@ module PersonalAccessTokens
class RevokeService
attr_reader :token, :current_user, :group
- def initialize(current_user = nil, params = { token: nil, group: nil })
+ def initialize(current_user = nil, token: nil, group: nil )
@current_user = current_user
- @token = params[:token]
- @group = params[:group]
+ @token = token
+ @group = group
end
def execute
return ServiceResponse.error(message: 'Not permitted to revoke') unless revocation_permitted?
if token.revoke!
+ log_event
ServiceResponse.success(message: success_message)
else
ServiceResponse.error(message: error_message)
@@ -33,6 +34,10 @@ module PersonalAccessTokens
def revocation_permitted?
Ability.allowed?(current_user, :revoke_token, token)
end
+
+ def log_event
+ Gitlab::AppLogger.info("PAT REVOCATION: revoked_by: '#{current_user.username}', revoked_for: '#{token.user.username}', token_id: '#{token.id}'")
+ end
end
end
diff --git a/app/services/post_receive_service.rb b/app/services/post_receive_service.rb
index 69c9868c75c..79b613f6a88 100644
--- a/app/services/post_receive_service.rb
+++ b/app/services/post_receive_service.rb
@@ -29,9 +29,7 @@ class PostReceiveService
response.add_alert_message(message)
end
- broadcast_message = BroadcastMessage.current_banner_messages&.last&.message
response.add_alert_message(broadcast_message)
-
response.add_merge_request_urls(merge_request_urls)
# Neither User nor Project are guaranteed to be returned; an orphaned write deploy
@@ -74,6 +72,24 @@ class PostReceiveService
::MergeRequests::GetUrlsService.new(project).execute(params[:changes])
end
+
+ private
+
+ def broadcast_message
+ banner = nil
+
+ if project
+ scoped_messages = BroadcastMessage.current_banner_messages(project.full_path).select do |message|
+ message.target_path.present? && message.matches_current_path(project.full_path)
+ end
+
+ banner = scoped_messages.last
+ end
+
+ banner ||= BroadcastMessage.current_banner_messages.last
+
+ banner&.message
+ end
end
PostReceiveService.prepend_if_ee('EE::PostReceiveService')
diff --git a/app/services/projects/alerting/notify_service.rb b/app/services/projects/alerting/notify_service.rb
index affac45fc3d..ab8f53a3757 100644
--- a/app/services/projects/alerting/notify_service.rb
+++ b/app/services/projects/alerting/notify_service.rb
@@ -6,9 +6,11 @@ module Projects
include Gitlab::Utils::StrongMemoize
include ::IncidentManagement::Settings
- def execute(token)
+ def execute(token, integration = nil)
+ @integration = integration
+
return bad_request unless valid_payload_size?
- return forbidden unless alerts_service_activated?
+ return forbidden unless active_integration?
return unauthorized unless valid_token?(token)
process_alert
@@ -22,7 +24,7 @@ module Projects
private
- delegate :alerts_service, :alerts_service_activated?, to: :project
+ attr_reader :integration
def process_alert
if alert.persisted?
@@ -66,14 +68,11 @@ module Projects
return unless alert.save
alert.execute_services
- SystemNoteService.create_new_alert(
- alert,
- alert.monitoring_tool || 'Generic Alert Endpoint'
- )
+ SystemNoteService.create_new_alert(alert, notification_source)
end
def process_incident_issues
- return if alert.issue
+ return if alert.issue || alert.resolved?
::IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id)
end
@@ -81,7 +80,7 @@ module Projects
def send_alert_email
notification_service
.async
- .prometheus_alerts_fired(project, [alert.attributes])
+ .prometheus_alerts_fired(project, [alert])
end
def alert
@@ -106,12 +105,20 @@ module Projects
end
end
+ def notification_source
+ alert.monitoring_tool || integration&.name || 'Generic Alert Endpoint'
+ end
+
def valid_payload_size?
Gitlab::Utils::DeepSize.new(params).valid?
end
+ def active_integration?
+ integration&.active?
+ end
+
def valid_token?(token)
- token == alerts_service.token
+ token == integration.token
end
def bad_request
diff --git a/app/services/projects/cleanup_service.rb b/app/services/projects/cleanup_service.rb
index 4ced9feff00..6e3b320afbe 100644
--- a/app/services/projects/cleanup_service.rb
+++ b/app/services/projects/cleanup_service.rb
@@ -11,6 +11,24 @@ module Projects
include Gitlab::Utils::StrongMemoize
+ class << self
+ def enqueue(project, current_user, bfg_object_map)
+ Projects::UpdateService.new(project, current_user, bfg_object_map: bfg_object_map).execute.tap do |result|
+ next unless result[:status] == :success
+
+ project.set_repository_read_only!
+ RepositoryCleanupWorker.perform_async(project.id, current_user.id)
+ end
+ rescue Project::RepositoryReadOnlyError => err
+ { status: :error, message: (_('Failed to make repository read-only. %{reason}') % { reason: err.message }) }
+ end
+
+ def cleanup_after(project)
+ project.bfg_object_map.remove!
+ project.set_repository_writable!
+ end
+ end
+
# Attempt to clean up the project following the push. Warning: this is
# destructive!
#
@@ -22,14 +40,14 @@ module Projects
apply_bfg_object_map!
# Remove older objects that are no longer referenced
- GitGarbageCollectWorker.new.perform(project.id, :gc, "project_cleanup:gc:#{project.id}")
+ GitGarbageCollectWorker.new.perform(project.id, :prune, "project_cleanup:gc:#{project.id}")
# The cache may now be inaccurate, and holding onto it could prevent
# bugs assuming the presence of some object from manifesting for some
# time. Better to feel the pain immediately.
project.repository.expire_all_method_caches
- project.bfg_object_map.remove!
+ self.class.cleanup_after(project)
end
private
diff --git a/app/services/projects/container_repository/delete_tags_service.rb b/app/services/projects/container_repository/delete_tags_service.rb
index 9fc3ec0aafb..505ddaf50e3 100644
--- a/app/services/projects/container_repository/delete_tags_service.rb
+++ b/app/services/projects/container_repository/delete_tags_service.rb
@@ -36,11 +36,11 @@ module Projects
def log_response(response)
log_data = LOG_DATA_BASE.merge(
container_repository_id: @container_repository.id,
- message: 'deleted tags'
- )
+ message: 'deleted tags',
+ deleted_tags_count: response[:deleted]&.size
+ ).compact
if response[:status] == :success
- log_data[:deleted_tags_count] = response[:deleted].size
log_info(log_data)
else
log_data[:message] = response[:message]
diff --git a/app/services/projects/container_repository/gitlab/delete_tags_service.rb b/app/services/projects/container_repository/gitlab/delete_tags_service.rb
index cee94b994a3..e4e22dd9543 100644
--- a/app/services/projects/container_repository/gitlab/delete_tags_service.rb
+++ b/app/services/projects/container_repository/gitlab/delete_tags_service.rb
@@ -14,6 +14,7 @@ module Projects
def initialize(container_repository, tag_names)
@container_repository = container_repository
@tag_names = tag_names
+ @deleted_tags = []
end
# Delete tags by name with a single DELETE request. This is only supported
@@ -25,7 +26,7 @@ module Projects
delete_tags
rescue TimeoutError => e
::Gitlab::ErrorTracking.track_exception(e, tags_count: @tag_names&.size, container_repository_id: @container_repository&.id)
- error('timeout while deleting tags')
+ error('timeout while deleting tags', nil, pass_back: { deleted: @deleted_tags })
end
private
@@ -33,13 +34,15 @@ module Projects
def delete_tags
start_time = Time.zone.now
- deleted_tags = @tag_names.select do |name|
+ @tag_names.each do |name|
raise TimeoutError if timeout?(start_time)
- @container_repository.delete_tag_by_name(name)
+ if @container_repository.delete_tag_by_name(name)
+ @deleted_tags.append(name)
+ end
end
- deleted_tags.any? ? success(deleted: deleted_tags) : error('could not delete tags')
+ @deleted_tags.any? ? success(deleted: @deleted_tags) : error('could not delete tags')
end
def timeout?(start_time)
diff --git a/app/services/projects/hashed_storage/base_repository_service.rb b/app/services/projects/hashed_storage/base_repository_service.rb
index 065bf8725be..349d4d367be 100644
--- a/app/services/projects/hashed_storage/base_repository_service.rb
+++ b/app/services/projects/hashed_storage/base_repository_service.rb
@@ -79,13 +79,12 @@ module Projects
end
def try_to_set_repository_read_only!
- # Mitigate any push operation to start during migration
- unless project.set_repository_read_only!
- migration_error = "Target repository '#{old_disk_path}' cannot be made read-only as there is a git transfer in progress"
- logger.error migration_error
+ project.set_repository_read_only!
+ rescue Project::RepositoryReadOnlyError => err
+ migration_error = "Target repository '#{old_disk_path}' cannot be made read-only: #{err.message}"
+ logger.error migration_error
- raise RepositoryInUseError, migration_error
- end
+ raise RepositoryInUseError, migration_error
end
def wiki_path_suffix
diff --git a/app/services/projects/lfs_pointers/lfs_download_service.rb b/app/services/projects/lfs_pointers/lfs_download_service.rb
index d6e5b825e13..525f8a25d04 100644
--- a/app/services/projects/lfs_pointers/lfs_download_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_download_service.rb
@@ -22,7 +22,7 @@ module Projects
def execute
return unless project&.lfs_enabled? && lfs_download_object
return error("LFS file with oid #{lfs_oid} has invalid attributes") unless lfs_download_object.valid?
- return link_existing_lfs_object! if Feature.enabled?(:lfs_link_existing_object, project, default_enabled: true) && lfs_size > LARGE_FILE_SIZE && lfs_object
+ return link_existing_lfs_object! if lfs_size > LARGE_FILE_SIZE && lfs_object
wrap_download_errors do
download_lfs_file!
diff --git a/app/services/projects/prometheus/alerts/notify_service.rb b/app/services/projects/prometheus/alerts/notify_service.rb
index c002aca32db..8ad4f59373d 100644
--- a/app/services/projects/prometheus/alerts/notify_service.rb
+++ b/app/services/projects/prometheus/alerts/notify_service.rb
@@ -17,13 +17,12 @@ module Projects
SUPPORTED_VERSION = '4'
- def execute(token)
+ def execute(token, _integration = nil)
return bad_request unless valid_payload_size?
return unprocessable_entity unless self.class.processable?(params)
return unauthorized unless valid_alert_manager_token?(token)
process_prometheus_alerts
- send_alert_email if send_email?
ServiceResponse.success
end
@@ -120,14 +119,6 @@ module Projects
ActiveSupport::SecurityUtils.secure_compare(expected, actual)
end
- def send_alert_email
- return unless firings.any?
-
- notification_service
- .async
- .prometheus_alerts_fired(project, alerts_attributes)
- end
-
def process_prometheus_alerts
alerts.each do |alert|
AlertManagement::ProcessPrometheusAlertService
@@ -136,18 +127,6 @@ module Projects
end
end
- def alerts_attributes
- firings.map do |payload|
- alert_params = Gitlab::AlertManagement::Payload.parse(
- project,
- payload,
- monitoring_tool: Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus]
- ).alert_params
-
- AlertManagement::Alert.new(alert_params).attributes
- end
- end
-
def bad_request
ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
end
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index 64b9eca9014..b9c579a130f 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -12,6 +12,11 @@ module Projects
# as it shares the namespace with groups
TMP_EXTRACT_PATH = '@pages.tmp'
+ # An old deployment can be cached by the pages daemon,
+ # so we need to give the daemon some time to update its cache.
+ # 10 minutes is enough, but 30 feels safer.
+ OLD_DEPLOYMENTS_DESTRUCTION_DELAY = 30.minutes.freeze
+
attr_reader :build
def initialize(project, build)
@@ -97,7 +102,7 @@ module Projects
build.artifacts_file.use_file do |artifacts_path|
SafeZip::Extract.new(artifacts_path)
.extract(directories: [PUBLIC_DIR], to: temp_path)
- create_pages_deployment(artifacts_path)
+ create_pages_deployment(artifacts_path, build)
end
rescue SafeZip::Extract::Error => e
raise FailedToExtractError, e.message
@@ -119,19 +124,28 @@ module Projects
FileUtils.rm_r(previous_public_path, force: true)
end
- def create_pages_deployment(artifacts_path)
- return unless Feature.enabled?(:zip_pages_deployments, project)
+ def create_pages_deployment(artifacts_path, build)
+ return unless Feature.enabled?(:zip_pages_deployments, project, default_enabled: true)
+
+ # We're using the full archive and the pages daemon needs to read it,
+ # so we want the total count of entries, not only the "public/" directory,
+ # because it better approximates the work needed before the site can be served.
+ entries_count = build.artifacts_metadata_entry("", recursive: true).entries.count
+ sha256 = build.job_artifacts_archive.file_sha256
+ deployment = nil
File.open(artifacts_path) do |file|
- deployment = project.pages_deployments.create!(file: file)
- project.pages_metadatum.update!(pages_deployment: deployment)
+ deployment = project.pages_deployments.create!(file: file,
+ file_count: entries_count,
+ file_sha256: sha256)
+ project.update_pages_deployment!(deployment)
end
- # TODO: schedule old deployment removal https://gitlab.com/gitlab-org/gitlab/-/issues/235730
- rescue => e
- # we don't want to break current pages deployment process if something goes wrong
- # TODO: remove this rescue as part of https://gitlab.com/gitlab-org/gitlab/-/issues/245308
- Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
+ DestroyPagesDeploymentsWorker.perform_in(
+ OLD_DEPLOYMENTS_DESTRUCTION_DELAY,
+ project.id,
+ deployment.id
+ )
end
def latest?
diff --git a/app/services/projects/update_repository_storage_service.rb b/app/services/projects/update_repository_storage_service.rb
index a479d53a43a..e0d2398bc66 100644
--- a/app/services/projects/update_repository_storage_service.rb
+++ b/app/services/projects/update_repository_storage_service.rb
@@ -54,7 +54,7 @@ module Projects
end
def mirror_repositories
- mirror_repository
+ mirror_repository if project.repository_exists?
if project.wiki.repository_exists?
mirror_repository(type: Gitlab::GlRepository::WIKI)
@@ -92,12 +92,14 @@ module Projects
end
def remove_old_paths
- Gitlab::Git::Repository.new(
- source_storage_name,
- "#{project.disk_path}.git",
- nil,
- nil
- ).remove
+ if project.repository_exists?
+ Gitlab::Git::Repository.new(
+ source_storage_name,
+ "#{project.disk_path}.git",
+ nil,
+ nil
+ ).remove
+ end
if project.wiki.repository_exists?
Gitlab::Git::Repository.new(
diff --git a/app/services/releases/base_service.rb b/app/services/releases/base_service.rb
index 15d040287a3..38ef80ced56 100644
--- a/app/services/releases/base_service.rb
+++ b/app/services/releases/base_service.rb
@@ -81,6 +81,10 @@ module Releases
params.key?(:milestones)
end
+ def execute_hooks(release, action = 'create')
+ release.execute_hooks(action)
+ end
+
# overridden in EE
def project_group_id; end
end
diff --git a/app/services/releases/create_service.rb b/app/services/releases/create_service.rb
index 887c2d355ee..deefe559d5d 100644
--- a/app/services/releases/create_service.rb
+++ b/app/services/releases/create_service.rb
@@ -52,6 +52,8 @@ module Releases
notify_create_release(release)
+ execute_hooks(release, 'create')
+
create_evidence!(release, evidence_pipeline)
success(tag: tag, release: release)
diff --git a/app/services/releases/update_service.rb b/app/services/releases/update_service.rb
index 4786d35f31e..4e78120ac05 100644
--- a/app/services/releases/update_service.rb
+++ b/app/services/releases/update_service.rb
@@ -3,11 +3,9 @@
module Releases
class UpdateService < Releases::BaseService
def execute
- return error('Tag does not exist', 404) unless existing_tag
- return error('Release does not exist', 404) unless release
- return error('Access Denied', 403) unless allowed?
- return error('params is empty', 400) if empty_params?
- return error("Milestone(s) not found: #{inexistent_milestones.join(', ')}", 400) if inexistent_milestones.any?
+ if error = validate
+ return error
+ end
if param_for_milestone_titles_provided?
previous_milestones = release.milestones.map(&:title)
@@ -20,6 +18,7 @@ module Releases
# see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/43385
ActiveRecord::Base.transaction do
if release.update(params)
+ execute_hooks(release, 'update')
success(tag: existing_tag, release: release, milestones_updated: milestones_updated?(previous_milestones))
else
error(release.errors.messages || '400 Bad request', 400)
@@ -31,6 +30,14 @@ module Releases
private
+ def validate
+ return error('Tag does not exist', 404) unless existing_tag
+ return error('Release does not exist', 404) unless release
+ return error('Access Denied', 403) unless allowed?
+ return error('params is empty', 400) if empty_params?
+ return error("Milestone(s) not found: #{inexistent_milestones.join(', ')}", 400) if inexistent_milestones.any?
+ end
+
def allowed?
Ability.allowed?(current_user, :update_release, release)
end
diff --git a/app/services/resource_access_tokens/create_service.rb b/app/services/resource_access_tokens/create_service.rb
index cdeb57627a8..70e09be9407 100644
--- a/app/services/resource_access_tokens/create_service.rb
+++ b/app/services/resource_access_tokens/create_service.rb
@@ -15,13 +15,20 @@ module ResourceAccessTokens
user = create_user
return error(user.errors.full_messages.to_sentence) unless user.persisted?
- return error("Failed to provide maintainer access") unless provision_access(resource, user)
+
+ member = create_membership(resource, user)
+
+ unless member.persisted?
+ delete_failed_user(user)
+ return error("Could not provision maintainer access to project access token")
+ end
token_response = create_personal_access_token(user)
if token_response.success?
success(token_response.payload[:personal_access_token])
else
+ delete_failed_user(user)
error(token_response.message)
end
end
@@ -43,6 +50,10 @@ module ResourceAccessTokens
Users::CreateService.new(current_user, default_user_params).execute(skip_authorization: true)
end
+ def delete_failed_user(user)
+ DeleteUserWorker.perform_async(current_user.id, user.id, hard_delete: true, skip_authorization: true)
+ end
+
def default_user_params
{
name: params[:name] || "#{resource.name.to_s.humanize} bot",
@@ -72,7 +83,9 @@ module ResourceAccessTokens
end
def create_personal_access_token(user)
- PersonalAccessTokens::CreateService.new(user, personal_access_token_params).execute
+ PersonalAccessTokens::CreateService.new(
+ current_user: user, target_user: user, params: personal_access_token_params
+ ).execute
end
def personal_access_token_params
@@ -88,7 +101,7 @@ module ResourceAccessTokens
Gitlab::Auth.resource_bot_scopes
end
- def provision_access(resource, user)
+ def create_membership(resource, user)
resource.add_user(user, :maintainer, expires_at: params[:expires_at])
end
diff --git a/app/services/search/global_service.rb b/app/services/search/global_service.rb
index 5f80b07aa59..9038650adb7 100644
--- a/app/services/search/global_service.rb
+++ b/app/services/search/global_service.rb
@@ -16,6 +16,7 @@ module Search
Gitlab::SearchResults.new(current_user,
params[:search],
projects,
+ order_by: params[:order_by],
sort: params[:sort],
filters: { state: params[:state], confidential: params[:confidential] })
end
diff --git a/app/services/search/group_service.rb b/app/services/search/group_service.rb
index e17522dcd68..4b2d8499582 100644
--- a/app/services/search/group_service.rb
+++ b/app/services/search/group_service.rb
@@ -16,6 +16,7 @@ module Search
params[:search],
projects,
group: group,
+ order_by: params[:order_by],
sort: params[:sort],
filters: { state: params[:state], confidential: params[:confidential] }
)
diff --git a/app/services/search/project_service.rb b/app/services/search/project_service.rb
index d32534303be..e5fc5a7a438 100644
--- a/app/services/search/project_service.rb
+++ b/app/services/search/project_service.rb
@@ -17,6 +17,7 @@ module Search
params[:search],
project: project,
repository_ref: params[:repository_ref],
+ order_by: params[:order_by],
sort: params[:sort],
filters: { confidential: params[:confidential], state: params[:state] }
)
diff --git a/app/services/search_service.rb b/app/services/search_service.rb
index 3ccd67c8d30..84d7e33c3d0 100644
--- a/app/services/search_service.rb
+++ b/app/services/search_service.rb
@@ -62,7 +62,9 @@ class SearchService
end
def search_objects(preload_method = nil)
- @search_objects ||= redact_unauthorized_results(search_results.objects(scope, page: params[:page], per_page: per_page, preload_method: preload_method))
+ @search_objects ||= redact_unauthorized_results(
+ search_results.objects(scope, page: page, per_page: per_page, preload_method: preload_method)
+ )
end
def search_highlight
@@ -71,6 +73,10 @@ class SearchService
private
+ def page
+ [1, params[:page].to_i].max
+ end
+
def per_page
per_page_param = params[:per_page].to_i
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index d7181883c39..0881be73eaf 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -56,7 +56,7 @@ module Snippets
snippet_saved
rescue => e # Rescuing all because we can receive Creation exceptions, GRPC exceptions, Git exceptions, ...
- log_error(e.message)
+ Gitlab::ErrorTracking.log_exception(e, service: 'Snippets::CreateService')
# If the commit action failed we need to remove the repository if exists
delete_repository(@snippet) if @snippet.repository_exists?
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
index 0115cd19287..b982ff98747 100644
--- a/app/services/snippets/update_service.rb
+++ b/app/services/snippets/update_service.rb
@@ -74,7 +74,7 @@ module Snippets
add_snippet_repository_error(snippet: snippet, error: e)
- log_error(e.message)
+ Gitlab::ErrorTracking.log_exception(e, service: 'Snippets::UpdateService')
# If the commit action failed we remove it because
# we don't want to leave empty repositories
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 1a4374f2e94..eacc88f98a3 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -130,12 +130,12 @@ module SystemNoteService
::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).abort_merge_when_pipeline_succeeds(reason)
end
- def handle_merge_request_wip(noteable, project, author)
- ::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).handle_merge_request_wip
+ def handle_merge_request_draft(noteable, project, author)
+ ::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).handle_merge_request_draft
end
- def add_merge_request_wip_from_commit(noteable, project, author, commit)
- ::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).add_merge_request_wip_from_commit(commit)
+ def add_merge_request_draft_from_commit(noteable, project, author, commit)
+ ::SystemNotes::MergeRequestsService.new(noteable: noteable, project: project, author: author).add_merge_request_draft_from_commit(commit)
end
def resolve_all_discussions(merge_request, project, author)
diff --git a/app/services/system_notes/merge_requests_service.rb b/app/services/system_notes/merge_requests_service.rb
index 9b5c9ba20b2..a51e2053394 100644
--- a/app/services/system_notes/merge_requests_service.rb
+++ b/app/services/system_notes/merge_requests_service.rb
@@ -26,16 +26,16 @@ module SystemNotes
create_note(NoteSummary.new(noteable, project, author, body, action: 'merge'))
end
- def handle_merge_request_wip
- prefix = noteable.work_in_progress? ? "marked" : "unmarked"
+ def handle_merge_request_draft
+ action = noteable.work_in_progress? ? "draft" : "ready"
- body = "#{prefix} as a **Work In Progress**"
+ body = "marked this merge request as **#{action}**"
create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
end
- def add_merge_request_wip_from_commit(commit)
- body = "marked as a **Work In Progress** from #{commit.to_reference(project)}"
+ def add_merge_request_draft_from_commit(commit)
+ body = "marked this merge request as **draft** from #{commit.to_reference(project)}"
create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
end
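
The system note wording changes from "marked/unmarked as a **Work In Progress**" to "marked this merge request as **draft**" or "**ready**". A standalone sketch of the new bodies (the boolean argument stands in for noteable.work_in_progress?):

# Sketch only: reproduces the new note bodies.
def draft_note_body(work_in_progress)
  action = work_in_progress ? "draft" : "ready"
  "marked this merge request as **#{action}**"
end

draft_note_body(true)  # => "marked this merge request as **draft**"
draft_note_body(false) # => "marked this merge request as **ready**"
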
diff --git a/app/services/test_hooks/project_service.rb b/app/services/test_hooks/project_service.rb
index 4e554dce357..dcd92ac2b8c 100644
--- a/app/services/test_hooks/project_service.rb
+++ b/app/services/test_hooks/project_service.rb
@@ -30,6 +30,8 @@ module TestHooks
pipeline_events_data
when 'wiki_page_events'
wiki_page_events_data
+ when 'releases_events'
+ releases_events_data
end
end
end
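
The trigger dispatch in TestHooks::ProjectService gains a releases_events branch, so release webhooks can be exercised with sample data like the other triggers. A standalone sketch of the dispatch shape (the payload hashes are stand-ins for the real *_events_data helpers):

# Sketch only: same trigger-to-data dispatch, with stub payloads so it runs
# on its own.
def sample_data_for(trigger)
  case trigger
  when 'pipeline_events'  then { object_kind: 'pipeline' }
  when 'wiki_page_events' then { object_kind: 'wiki_page' }
  when 'releases_events'  then { object_kind: 'release' } # new branch
  end
end

sample_data_for('releases_events') # => {:object_kind=>"release"}
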
diff --git a/app/services/users/approve_service.rb b/app/services/users/approve_service.rb
index 228cfbd6947..27668e9430e 100644
--- a/app/services/users/approve_service.rb
+++ b/app/services/users/approve_service.rb
@@ -15,7 +15,9 @@ module Users
# Please see Devise's implementation of `resend_confirmation_instructions` for detail.
user.resend_confirmation_instructions
user.accept_pending_invitations! if user.active_for_authentication?
+ DeviseMailer.user_admin_approval(user).deliver_later
+ after_approve_hook(user)
success
else
error(user.errors.full_messages.uniq.join('. '))
@@ -26,6 +28,10 @@ module Users
attr_reader :current_user
+ def after_approve_hook(user)
+ # overridden by EE module
+ end
+
def allowed?
can?(current_user, :approve_user)
end
@@ -35,3 +41,5 @@ module Users
end
end
end
+
+Users::ApproveService.prepend_if_ee('EE::Users::ApproveService')
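
Approval now also sends the Devise approval email and calls after_approve_hook, a CE no-op that prepend_if_ee lets EE::Users::ApproveService override. A hedged sketch of the general shape such an override could take (the audit side effect below is invented for illustration; only the hook name and the prepend come from this change):

# Sketch only: an EE-style override of the new hook.
module EE
  module Users
    module ApproveService
      def after_approve_hook(user)
        super
        log_audit_event(user)
      end

      private

      def log_audit_event(user)
        # hypothetical EE-only side effect, e.g. recording an audit event
      end
    end
  end
end
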
diff --git a/app/services/users/set_status_service.rb b/app/services/users/set_status_service.rb
index 89008368d5f..356c8782af1 100644
--- a/app/services/users/set_status_service.rb
+++ b/app/services/users/set_status_service.rb
@@ -14,7 +14,7 @@ module Users
def execute
return false unless can?(current_user, :update_user_status, target_user)
- if params[:emoji].present? || params[:message].present?
+ if params[:emoji].present? || params[:message].present? || params[:availability].present?
set_status
else
remove_status
@@ -25,6 +25,9 @@ module Users
def set_status
params[:emoji] = UserStatus::DEFAULT_EMOJI if params[:emoji].blank?
+ params.delete(:availability) if params[:availability].blank?
+ return false if params[:availability].present? && UserStatus.availabilities.keys.exclude?(params[:availability])
+
user_status.update(params)
end
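
Availability is now part of the status update: a present value keeps the status from being removed, a blank value is stripped from params, and an unknown value makes set_status return false. A standalone sketch of that guard (the hash below is a stand-in for the UserStatus.availabilities enum):

# Sketch only: same guard, with a plain hash standing in for the enum.
AVAILABILITIES = { 'not_set' => 0, 'busy' => 1 }.freeze

def availability_allowed?(value)
  value.nil? || value.empty? || AVAILABILITIES.key?(value)
end

availability_allowed?('busy')    # => true
availability_allowed?('on_call') # => false, the update is rejected
availability_allowed?(nil)       # => true, the key is simply dropped
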
diff --git a/app/services/web_hook_service.rb b/app/services/web_hook_service.rb
index d6cb0729d6f..aef07b13cae 100644
--- a/app/services/web_hook_service.rb
+++ b/app/services/web_hook_service.rb
@@ -120,6 +120,7 @@ class WebHookService
@headers ||= begin
{
'Content-Type' => 'application/json',
+ 'User-Agent' => "GitLab/#{Gitlab::VERSION}",
GITLAB_EVENT_HEADER => self.class.hook_to_event(hook_name)
}.tap do |hash|
hash['X-Gitlab-Token'] = Gitlab::Utils.remove_line_breaks(hook.token) if hook.token.present?
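
Outgoing hook requests now advertise a GitLab/<version> User-Agent next to the existing Content-Type and event headers. A standalone sketch of the resulting header hash (the version string, event name, and token below are placeholders):

# Sketch only: the header hash a hook request now carries.
gitlab_version = '13.6.0'      # placeholder for Gitlab::VERSION
hook_token     = 'example-secret'

headers = {
  'Content-Type'   => 'application/json',
  'User-Agent'     => "GitLab/#{gitlab_version}", # new in this change
  'X-Gitlab-Event' => 'Push Hook'
}
headers['X-Gitlab-Token'] = hook_token unless hook_token.to_s.empty?

puts headers.inspect
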