path: root/app/services
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/admin/propagate_integration_service.rb | 88
-rw-r--r--  app/services/admin/propagate_service_template.rb | 19
-rw-r--r--  app/services/alert_management/create_alert_issue_service.rb | 3
-rw-r--r--  app/services/alert_management/process_prometheus_alert_service.rb | 89
-rw-r--r--  app/services/audit_event_service.rb | 38
-rw-r--r--  app/services/auto_merge/base_service.rb | 15
-rw-r--r--  app/services/auto_merge/merge_when_pipeline_succeeds_service.rb | 2
-rw-r--r--  app/services/boards/destroy_service.rb | 8
-rw-r--r--  app/services/boards/issues/move_service.rb | 4
-rw-r--r--  app/services/branches/delete_service.rb | 2
-rw-r--r--  app/services/ci/archive_trace_service.rb | 1
-rw-r--r--  app/services/ci/cancel_user_pipelines_service.rb | 4
-rw-r--r--  app/services/ci/create_downstream_pipeline_service.rb (renamed from app/services/ci/create_cross_project_pipeline_service.rb) | 28
-rw-r--r--  app/services/ci/create_job_artifacts_service.rb | 4
-rw-r--r--  app/services/ci/create_web_ide_terminal_service.rb | 2
-rw-r--r--  app/services/ci/destroy_expired_job_artifacts_service.rb | 10
-rw-r--r--  app/services/ci/destroy_pipeline_service.rb | 4
-rw-r--r--  app/services/ci/generate_coverage_reports_service.rb | 2
-rw-r--r--  app/services/ci/parse_dotenv_artifact_service.rb | 2
-rw-r--r--  app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb | 9
-rw-r--r--  app/services/ci/pipelines/create_artifact_service.rb | 33
-rw-r--r--  app/services/ci/process_pipeline_service.rb | 10
-rw-r--r--  app/services/ci/retry_build_service.rb | 25
-rw-r--r--  app/services/ci/retry_pipeline_service.rb | 2
-rw-r--r--  app/services/ci/update_build_state_service.rb | 128
-rw-r--r--  app/services/ci/update_ci_ref_status_service.rb | 66
-rw-r--r--  app/services/clusters/aws/provision_service.rb | 1
-rw-r--r--  app/services/concerns/admin/propagate_service.rb | 73
-rw-r--r--  app/services/concerns/incident_management/settings.rb | 4
-rw-r--r--  app/services/concerns/incident_management/usage_data.rb | 18
-rw-r--r--  app/services/concerns/measurable.rb | 2
-rw-r--r--  app/services/concerns/merge_requests/removes_refs.rb | 9
-rw-r--r--  app/services/design_management/move_designs_service.rb | 1
-rw-r--r--  app/services/event_create_service.rb | 28
-rw-r--r--  app/services/git/branch_hooks_service.rb | 12
-rw-r--r--  app/services/git/wiki_push_service.rb | 21
-rw-r--r--  app/services/git/wiki_push_service/change.rb | 6
-rw-r--r--  app/services/ide/base_config_service.rb (renamed from app/services/ci/web_ide_config_service.rb) | 20
-rw-r--r--  app/services/ide/schemas_config_service.rb | 49
-rw-r--r--  app/services/ide/terminal_config_service.rb | 13
-rw-r--r--  app/services/incident_management/incidents/create_service.rb | 15
-rw-r--r--  app/services/issuable/common_system_notes_service.rb | 14
-rw-r--r--  app/services/issuable_base_service.rb | 52
-rw-r--r--  app/services/issuable_links/create_service.rb | 119
-rw-r--r--  app/services/issuable_links/destroy_service.rb | 34
-rw-r--r--  app/services/issuable_links/list_service.rb | 27
-rw-r--r--  app/services/issue_links/create_service.rb | 46
-rw-r--r--  app/services/issue_links/destroy_service.rb | 28
-rw-r--r--  app/services/issue_links/list_service.rb | 18
-rw-r--r--  app/services/issue_rebalancing_service.rb | 71
-rw-r--r--  app/services/issues/base_service.rb | 31
-rw-r--r--  app/services/issues/close_service.rb | 1
-rw-r--r--  app/services/issues/create_service.rb | 16
-rw-r--r--  app/services/issues/duplicate_service.rb | 9
-rw-r--r--  app/services/issues/move_service.rb | 14
-rw-r--r--  app/services/issues/related_branches_service.rb | 2
-rw-r--r--  app/services/issues/reopen_service.rb | 1
-rw-r--r--  app/services/issues/update_service.rb | 4
-rw-r--r--  app/services/issues/zoom_link_service.rb | 1
-rw-r--r--  app/services/jira_connect/sync_service.rb | 43
-rw-r--r--  app/services/jira_connect_subscriptions/base_service.rb | 11
-rw-r--r--  app/services/jira_connect_subscriptions/create_service.rb | 33
-rw-r--r--  app/services/lfs/push_service.rb | 80
-rw-r--r--  app/services/merge_requests/base_service.rb | 32
-rw-r--r--  app/services/merge_requests/cleanup_refs_service.rb | 75
-rw-r--r--  app/services/merge_requests/close_service.rb | 3
-rw-r--r--  app/services/merge_requests/create_pipeline_service.rb | 12
-rw-r--r--  app/services/merge_requests/merge_service.rb | 5
-rw-r--r--  app/services/merge_requests/post_merge_service.rb | 3
-rw-r--r--  app/services/merge_requests/refresh_service.rb | 130
-rw-r--r--  app/services/merge_requests/update_service.rb | 9
-rw-r--r--  app/services/metrics/dashboard/custom_metric_embed_service.rb | 6
-rw-r--r--  app/services/notes/create_service.rb | 9
-rw-r--r--  app/services/notification_service.rb | 6
-rw-r--r--  app/services/packages/composer/create_package_service.rb | 7
-rw-r--r--  app/services/packages/conan/create_package_service.rb | 5
-rw-r--r--  app/services/packages/create_package_service.rb | 37
-rw-r--r--  app/services/packages/maven/create_package_service.rb | 7
-rw-r--r--  app/services/packages/npm/create_package_service.rb | 17
-rw-r--r--  app/services/packages/nuget/create_dependency_service.rb | 2
-rw-r--r--  app/services/packages/nuget/create_package_service.rb | 4
-rw-r--r--  app/services/packages/pypi/create_package_service.rb | 17
-rw-r--r--  app/services/packages/update_tags_service.rb | 2
-rw-r--r--  app/services/pages/delete_service.rb | 3
-rw-r--r--  app/services/product_analytics/build_activity_graph_service.rb | 13
-rw-r--r--  app/services/product_analytics/build_graph_service.rb | 10
-rw-r--r--  app/services/projects/after_rename_service.rb | 19
-rw-r--r--  app/services/projects/alerting/notify_service.rb | 31
-rw-r--r--  app/services/projects/container_repository/gitlab/delete_tags_service.rb | 35
-rw-r--r--  app/services/projects/container_repository/third_party/delete_tags_service.rb | 2
-rw-r--r--  app/services/projects/create_service.rb | 9
-rw-r--r--  app/services/projects/destroy_service.rb | 36
-rw-r--r--  app/services/projects/download_service.rb | 2
-rw-r--r--  app/services/projects/fork_service.rb | 4
-rw-r--r--  app/services/projects/lfs_pointers/lfs_download_service.rb | 39
-rw-r--r--  app/services/projects/open_issues_count_service.rb | 6
-rw-r--r--  app/services/projects/propagate_service_template.rb | 83
-rw-r--r--  app/services/projects/transfer_service.rb | 12
-rw-r--r--  app/services/projects/unlink_fork_service.rb | 31
-rw-r--r--  app/services/projects/update_pages_configuration_service.rb | 3
-rw-r--r--  app/services/projects/update_pages_service.rb | 2
-rw-r--r--  app/services/projects/update_remote_mirror_service.rb | 20
-rw-r--r--  app/services/projects/update_service.rb | 6
-rw-r--r--  app/services/prometheus/proxy_service.rb | 4
-rw-r--r--  app/services/quick_actions/interpret_service.rb | 1
-rw-r--r--  app/services/resource_events/change_state_service.rb | 18
-rw-r--r--  app/services/search/global_service.rb | 8
-rw-r--r--  app/services/search/group_service.rb | 7
-rw-r--r--  app/services/search/project_service.rb | 5
-rw-r--r--  app/services/snippets/base_service.rb | 2
-rw-r--r--  app/services/snippets/create_service.rb | 2
-rw-r--r--  app/services/snippets/destroy_service.rb | 2
-rw-r--r--  app/services/snippets/repository_validation_service.rb | 4
-rw-r--r--  app/services/snippets/update_service.rb | 2
-rw-r--r--  app/services/static_site_editor/config_service.rb | 49
-rw-r--r--  app/services/submit_usage_ping_service.rb | 4
-rw-r--r--  app/services/system_note_service.rb | 12
-rw-r--r--  app/services/system_notes/alert_management_service.rb | 15
-rw-r--r--  app/services/system_notes/issuables_service.rb | 62
-rw-r--r--  app/services/todo_service.rb | 29
-rw-r--r--  app/services/two_factor/base_service.rb | 14
-rw-r--r--  app/services/two_factor/destroy_service.rb | 24
-rw-r--r--  app/services/users/build_service.rb | 1
-rw-r--r--  app/services/users/signup_service.rb | 2
-rw-r--r--  app/services/webauthn/authenticate_service.rb | 61
-rw-r--r--  app/services/webauthn/register_service.rb | 34
-rw-r--r--  app/services/wiki_pages/destroy_service.rb | 9
-rw-r--r--  app/services/wiki_pages/update_service.rb | 8
128 files changed, 2042 insertions, 616 deletions
diff --git a/app/services/admin/propagate_integration_service.rb b/app/services/admin/propagate_integration_service.rb
index 9a5ce58ee2c..34d6008cb6a 100644
--- a/app/services/admin/propagate_integration_service.rb
+++ b/app/services/admin/propagate_integration_service.rb
@@ -2,46 +2,24 @@
module Admin
class PropagateIntegrationService
- BATCH_SIZE = 100
-
- delegate :data_fields_present?, to: :integration
-
- def self.propagate(integration:, overwrite:)
- new(integration, overwrite).propagate
- end
-
- def initialize(integration, overwrite)
- @integration = integration
- @overwrite = overwrite
- end
+ include PropagateService
def propagate
- if overwrite
- update_integration_for_all_projects
- else
- update_integration_for_inherited_projects
- end
+ update_inherited_integrations
+ create_integration_for_groups_without_integration if Feature.enabled?(:group_level_integrations)
create_integration_for_projects_without_integration
end
private
- attr_reader :integration, :overwrite
-
# rubocop: disable Cop/InBatches
# rubocop: disable CodeReuse/ActiveRecord
- def update_integration_for_inherited_projects
+ def update_inherited_integrations
Service.where(type: integration.type, inherit_from_id: integration.id).in_batches(of: BATCH_SIZE) do |batch|
bulk_update_from_integration(batch)
end
end
-
- def update_integration_for_all_projects
- Service.where(type: integration.type).in_batches(of: BATCH_SIZE) do |batch|
- bulk_update_from_integration(batch)
- end
- end
# rubocop: enable Cop/InBatches
# rubocop: enable CodeReuse/ActiveRecord
@@ -62,61 +40,33 @@ module Admin
end
# rubocop: enable CodeReuse/ActiveRecord
- def create_integration_for_projects_without_integration
+ def create_integration_for_groups_without_integration
loop do
- batch = Project.uncached { Project.ids_without_integration(integration, BATCH_SIZE) }
+ batch = Group.uncached { group_ids_without_integration(integration, BATCH_SIZE) }
- bulk_create_from_integration(batch) unless batch.empty?
+ bulk_create_from_integration(batch, 'group') unless batch.empty?
break if batch.size < BATCH_SIZE
end
end
- def bulk_create_from_integration(batch)
- service_list = ServiceList.new(batch, service_hash, { 'inherit_from_id' => integration.id }).to_array
-
- Project.transaction do
- results = bulk_insert(*service_list)
-
- if data_fields_present?
- data_list = DataList.new(results, data_fields_hash, integration.data_fields.class).to_array
-
- bulk_insert(*data_list)
- end
-
- run_callbacks(batch)
- end
- end
-
- def bulk_insert(klass, columns, values_array)
- items_to_insert = values_array.map { |array| Hash[columns.zip(array)] }
-
- klass.insert_all(items_to_insert, returning: [:id])
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def run_callbacks(batch)
- if integration.issue_tracker?
- Project.where(id: batch).update_all(has_external_issue_tracker: true)
- end
-
- if active_external_wiki?
- Project.where(id: batch).update_all(has_external_wiki: true)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def active_external_wiki?
- integration.type == 'ExternalWikiService'
- end
-
def service_hash
@service_hash ||= integration.to_service_hash
.tap { |json| json['inherit_from_id'] = integration.id }
end
- def data_fields_hash
- @data_fields_hash ||= integration.to_data_fields_hash
+ # rubocop:disable CodeReuse/ActiveRecord
+ def group_ids_without_integration(integration, limit)
+ services = Service
+ .select('1')
+ .where('services.group_id = namespaces.id')
+ .where(type: integration.type)
+
+ Group
+ .where('NOT EXISTS (?)', services)
+ .limit(limit)
+ .pluck(:id)
end
+ # rubocop:enable CodeReuse/ActiveRecord
end
end
diff --git a/app/services/admin/propagate_service_template.rb b/app/services/admin/propagate_service_template.rb
new file mode 100644
index 00000000000..cd0d2d5d03f
--- /dev/null
+++ b/app/services/admin/propagate_service_template.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Admin
+ class PropagateServiceTemplate
+ include PropagateService
+
+ def propagate
+ return unless integration.active?
+
+ create_integration_for_projects_without_integration
+ end
+
+ private
+
+ def service_hash
+ @service_hash ||= integration.to_service_hash
+ end
+ end
+end
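
Both propagation services now share the Admin::PropagateService concern (added later in this diff under app/services/concerns/admin/propagate_service.rb), which supplies BATCH_SIZE, the class-level propagate entry point and the bulk-insert helpers; each class keeps only the behaviour that differs. A minimal usage sketch, assuming the caller already holds the Service record (the lookups below are illustrative only):

    # Propagate an instance-level integration to groups and projects
    # that do not yet have one of their own.
    integration = Service.find(integration_id)                       # hypothetical lookup
    Admin::PropagateIntegrationService.propagate(integration)

    # Propagate an admin-defined service template to projects without
    # the integration; the service returns early unless the template is active.
    template = Service.find_by(type: 'JiraService', template: true)  # hypothetical lookup
    Admin::PropagateServiceTemplate.propagate(template)
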
diff --git a/app/services/alert_management/create_alert_issue_service.rb b/app/services/alert_management/create_alert_issue_service.rb
index f16b106b748..58c7402c6c1 100644
--- a/app/services/alert_management/create_alert_issue_service.rb
+++ b/app/services/alert_management/create_alert_issue_service.rb
@@ -41,7 +41,8 @@ module AlertManagement
project,
user,
title: alert_presenter.title,
- description: alert_presenter.issue_description
+ description: alert_presenter.issue_description,
+ severity: alert.severity
).execute
end
diff --git a/app/services/alert_management/process_prometheus_alert_service.rb b/app/services/alert_management/process_prometheus_alert_service.rb
index c233ea4e2e5..95ae84a85a4 100644
--- a/app/services/alert_management/process_prometheus_alert_service.rb
+++ b/app/services/alert_management/process_prometheus_alert_service.rb
@@ -3,9 +3,10 @@
module AlertManagement
class ProcessPrometheusAlertService < BaseService
include Gitlab::Utils::StrongMemoize
+ include ::IncidentManagement::Settings
def execute
- return bad_request unless parsed_alert.valid?
+ return bad_request unless incoming_payload.has_required_attributes?
process_alert_management_alert
@@ -14,71 +15,62 @@ module AlertManagement
private
- delegate :firing?, :resolved?, :gitlab_fingerprint, :ends_at, to: :parsed_alert
-
- def parsed_alert
- strong_memoize(:parsed_alert) do
- Gitlab::Alerting::Alert.new(project: project, payload: params)
- end
- end
-
def process_alert_management_alert
- process_firing_alert_management_alert if firing?
- process_resolved_alert_management_alert if resolved?
+ if incoming_payload.resolved?
+ process_resolved_alert_management_alert
+ else
+ process_firing_alert_management_alert
+ end
end
def process_firing_alert_management_alert
- if am_alert.present?
- am_alert.register_new_event!
+ if alert.persisted?
+ alert.register_new_event!
reset_alert_management_alert_status
else
create_alert_management_alert
end
- process_incident_alert
+ process_incident_issues if process_issues?
end
def reset_alert_management_alert_status
- return if am_alert.trigger
+ return if alert.trigger
logger.warn(
message: 'Unable to update AlertManagement::Alert status to triggered',
project_id: project.id,
- alert_id: am_alert.id
+ alert_id: alert.id
)
end
def create_alert_management_alert
- new_alert = AlertManagement::Alert.new(am_alert_params.merge(ended_at: nil))
- if new_alert.save
- new_alert.execute_services
- @am_alert = new_alert
+ if alert.save
+ alert.execute_services
+ SystemNoteService.create_new_alert(alert, Gitlab::AlertManagement::AlertParams::MONITORING_TOOLS[:prometheus])
return
end
logger.warn(
message: 'Unable to create AlertManagement::Alert',
project_id: project.id,
- alert_errors: new_alert.errors.messages
+ alert_errors: alert.errors.messages
)
end
- def am_alert_params
- Gitlab::AlertManagement::AlertParams.from_prometheus_alert(project: project, parsed_alert: parsed_alert)
- end
-
def process_resolved_alert_management_alert
- return if am_alert.blank?
+ return unless alert.persisted?
+ return unless auto_close_incident?
- if am_alert.resolve(ends_at)
- close_issue(am_alert.issue)
+ if alert.resolve(incoming_payload.ends_at)
+ close_issue(alert.issue)
return
end
logger.warn(
message: 'Unable to update AlertManagement::Alert status to resolved',
project_id: project.id,
- alert_id: am_alert.id
+ alert_id: alert.id
)
end
@@ -92,20 +84,45 @@ module AlertManagement
SystemNoteService.auto_resolve_prometheus_alert(issue, project, User.alert_bot) if issue.reset.closed?
end
- def process_incident_alert
- return unless am_alert
- return if am_alert.issue
+ def process_incident_issues
+ return unless alert.persisted?
+ return if alert.issue
- IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, am_alert.id)
+ IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id)
end
def logger
@logger ||= Gitlab::AppLogger
end
- def am_alert
- strong_memoize(:am_alert) do
- AlertManagement::Alert.not_resolved.for_fingerprint(project, gitlab_fingerprint).first
+ def alert
+ strong_memoize(:alert) do
+ existing_alert || new_alert
+ end
+ end
+
+ def existing_alert
+ strong_memoize(:existing_alert) do
+ AlertManagement::Alert.not_resolved.for_fingerprint(project, incoming_payload.gitlab_fingerprint).first
+ end
+ end
+
+ def new_alert
+ strong_memoize(:new_alert) do
+ AlertManagement::Alert.new(
+ **incoming_payload.alert_params,
+ ended_at: nil
+ )
+ end
+ end
+
+ def incoming_payload
+ strong_memoize(:incoming_payload) do
+ Gitlab::AlertManagement::Payload.parse(
+ project,
+ params,
+ monitoring_tool: Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus]
+ )
end
end
diff --git a/app/services/audit_event_service.rb b/app/services/audit_event_service.rb
index fef733a7d09..d7630dbdac9 100644
--- a/app/services/audit_event_service.rb
+++ b/app/services/audit_event_service.rb
@@ -25,6 +25,8 @@ class AuditEventService
#
# @return [AuditEventService]
def for_authentication
+ mark_as_authentication_event!
+
@details = {
with: @details[:with],
target_id: @author.id,
@@ -37,9 +39,10 @@ class AuditEventService
# Writes event to a file and creates an event record in DB
#
- # @return [SecurityEvent] persited if saves and non-persisted if fails
+ # @return [AuditEvent] persisted if it saves and non-persisted if it fails
def security_event
log_security_event_to_file
+ log_authentication_event_to_database
log_security_event_to_database
end
@@ -50,6 +53,7 @@ class AuditEventService
private
+ attr_accessor :authentication_event
attr_reader :ip_address
def build_author(author)
@@ -70,6 +74,22 @@ class AuditEventService
}
end
+ def authentication_event_payload
+ {
+ # @author can be a User or various Gitlab::Audit authors.
+ # Only capture real users for successful authentication events.
+ user: author_if_user,
+ user_name: @author.name,
+ ip_address: ip_address,
+ result: AuthenticationEvent.results[:success],
+ provider: @details[:with]
+ }
+ end
+
+ def author_if_user
+ @author if @author.is_a?(User)
+ end
+
def file_logger
@file_logger ||= Gitlab::AuditJsonLogger.build
end
@@ -78,10 +98,24 @@ class AuditEventService
@details.merge(@details.slice(:from, :to).transform_values(&:to_s))
end
+ def mark_as_authentication_event!
+ self.authentication_event = true
+ end
+
+ def authentication_event?
+ authentication_event
+ end
+
def log_security_event_to_database
return if Gitlab::Database.read_only?
- SecurityEvent.create(base_payload.merge(details: @details))
+ AuditEvent.create(base_payload.merge(details: @details))
+ end
+
+ def log_authentication_event_to_database
+ return unless Gitlab::Database.read_write? && authentication_event?
+
+ AuthenticationEvent.create(authentication_event_payload)
end
end
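
With the new flag, one call chain can record a successful sign-in both in the audit log and in the new authentication_events table. A rough usage sketch, assuming the caller passes the authentication method in the `with:` detail (the exact call sites are outside this diff):

    # for_authentication marks the event; security_event then writes the
    # JSON file log, an AuditEvent row and, because the event was marked,
    # an AuthenticationEvent row (skipped on read-only databases).
    AuditEventService
      .new(user, user, with: 'standard')
      .for_authentication
      .security_event
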
diff --git a/app/services/auto_merge/base_service.rb b/app/services/auto_merge/base_service.rb
index 5c63dc34cb1..41236286d23 100644
--- a/app/services/auto_merge/base_service.rb
+++ b/app/services/auto_merge/base_service.rb
@@ -60,6 +60,21 @@ module AutoMerge
end
end
+ ##
+ # NOTE: This method is to be removed when `disallow_to_create_merge_request_pipelines_in_target_project`
+ # feature flag is removed.
+ def self.can_add_to_merge_train?(merge_request)
+ if Gitlab::Ci::Features.disallow_to_create_merge_request_pipelines_in_target_project?(merge_request.target_project)
+ merge_request.for_same_project?
+ else
+ true
+ end
+ end
+
+ def can_add_to_merge_train?(merge_request)
+ self.class.can_add_to_merge_train?(merge_request)
+ end
+
private
# Overridden in child classes
diff --git a/app/services/auto_merge/merge_when_pipeline_succeeds_service.rb b/app/services/auto_merge/merge_when_pipeline_succeeds_service.rb
index 7e0298432ac..d18f2935d92 100644
--- a/app/services/auto_merge/merge_when_pipeline_succeeds_service.rb
+++ b/app/services/auto_merge/merge_when_pipeline_succeeds_service.rb
@@ -38,8 +38,6 @@ module AutoMerge
private
def notify(merge_request)
- return unless Feature.enabled?(:mwps_notification, project)
-
notification_service.async.merge_when_pipeline_succeeds(merge_request, current_user) if merge_request.saved_change_to_auto_merge_enabled?
end
end
diff --git a/app/services/boards/destroy_service.rb b/app/services/boards/destroy_service.rb
index ea0c1394aa3..8f3d4b58b7b 100644
--- a/app/services/boards/destroy_service.rb
+++ b/app/services/boards/destroy_service.rb
@@ -3,9 +3,13 @@
module Boards
class DestroyService < Boards::BaseService
def execute(board)
- return false if parent.boards.size == 1
+ if parent.boards.size == 1
+ return ServiceResponse.error(message: "The board could not be deleted, because the parent doesn't have any other boards.")
+ end
- board.destroy
+ board.destroy!
+
+ ServiceResponse.success
end
end
end
diff --git a/app/services/boards/issues/move_service.rb b/app/services/boards/issues/move_service.rb
index 14e8683ebdf..56a7e228b10 100644
--- a/app/services/boards/issues/move_service.rb
+++ b/app/services/boards/issues/move_service.rb
@@ -71,12 +71,16 @@ module Boards
# rubocop: disable CodeReuse/ActiveRecord
def moving_from_list
+ return unless params[:from_list_id].present?
+
@moving_from_list ||= board.lists.find_by(id: params[:from_list_id])
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def moving_to_list
+ return unless params[:to_list_id].present?
+
@moving_to_list ||= board.lists.find_by(id: params[:to_list_id])
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/branches/delete_service.rb b/app/services/branches/delete_service.rb
index 9bd5b343448..6efbdd161a1 100644
--- a/app/services/branches/delete_service.rb
+++ b/app/services/branches/delete_service.rb
@@ -26,7 +26,7 @@ module Branches
message: 'Failed to remove branch',
http_status: 400)
end
- rescue Gitlab::Git::PreReceiveError => ex
+ rescue Gitlab::Git::PreReceiveError, Gitlab::Git::CommandError => ex
ServiceResponse.error(message: ex.message, http_status: 400)
end
diff --git a/app/services/ci/archive_trace_service.rb b/app/services/ci/archive_trace_service.rb
index f143736ddc1..073ef465e13 100644
--- a/app/services/ci/archive_trace_service.rb
+++ b/app/services/ci/archive_trace_service.rb
@@ -13,6 +13,7 @@ module Ci
end
job.trace.archive!
+ job.remove_pending_state!
# TODO: Remove this logging once we confirmed new live trace architecture is functional.
# See https://gitlab.com/gitlab-com/gl-infra/infrastructure/issues/4667.
diff --git a/app/services/ci/cancel_user_pipelines_service.rb b/app/services/ci/cancel_user_pipelines_service.rb
index bcafb6b4a35..3a8b5e91088 100644
--- a/app/services/ci/cancel_user_pipelines_service.rb
+++ b/app/services/ci/cancel_user_pipelines_service.rb
@@ -7,6 +7,10 @@ module Ci
# https://gitlab.com/gitlab-org/gitlab/issues/32332
def execute(user)
user.pipelines.cancelable.find_each(&:cancel_running)
+
+ ServiceResponse.success(message: 'Pipeline canceled')
+ rescue ActiveRecord::StaleObjectError
+ ServiceResponse.error(message: 'Error canceling pipeline')
end
# rubocop: enable CodeReuse/ActiveRecord
end
diff --git a/app/services/ci/create_cross_project_pipeline_service.rb b/app/services/ci/create_downstream_pipeline_service.rb
index 23207d809d4..0394cfb6119 100644
--- a/app/services/ci/create_cross_project_pipeline_service.rb
+++ b/app/services/ci/create_downstream_pipeline_service.rb
@@ -1,12 +1,16 @@
# frozen_string_literal: true
module Ci
- # TODO: rename this (and worker) to CreateDownstreamPipelineService
- class CreateCrossProjectPipelineService < ::BaseService
+ # Takes a Ci::Bridge job as input and creates a downstream pipeline
+ # (either multi-project or child pipeline) according to the Ci::Bridge
+ # specifications.
+ class CreateDownstreamPipelineService < ::BaseService
include Gitlab::Utils::StrongMemoize
DuplicateDownstreamPipelineError = Class.new(StandardError)
+ MAX_DESCENDANTS_DEPTH = 2
+
def execute(bridge)
@bridge = bridge
@@ -73,9 +77,16 @@ module Ci
# TODO: Remove this condition if favour of model validation
# https://gitlab.com/gitlab-org/gitlab/issues/38338
- if @bridge.triggers_child_pipeline? && @bridge.pipeline.parent_pipeline.present?
- @bridge.drop!(:bridge_pipeline_is_child_pipeline)
- return false
+ if ::Gitlab::Ci::Features.child_of_child_pipeline_enabled?(project)
+ if has_max_descendants_depth?
+ @bridge.drop!(:reached_max_descendant_pipelines_depth)
+ return false
+ end
+ else
+ if @bridge.triggers_child_pipeline? && @bridge.pipeline.parent_pipeline.present?
+ @bridge.drop!(:bridge_pipeline_is_child_pipeline)
+ return false
+ end
end
unless can_create_downstream_pipeline?(target_ref)
@@ -106,5 +117,12 @@ module Ci
@bridge.downstream_project
end
end
+
+ def has_max_descendants_depth?
+ return false unless @bridge.triggers_child_pipeline?
+
+ ancestors_of_new_child = @bridge.pipeline.base_and_ancestors(same_project: true)
+ ancestors_of_new_child.count > MAX_DESCENDANTS_DEPTH
+ end
end
end
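
Because base_and_ancestors(same_project: true) is assumed to count the triggering pipeline itself, MAX_DESCENDANTS_DEPTH = 2 still lets a parent and a child spawn further child pipelines, while a grandchild cannot. A worked illustration with hypothetical pipeline variables:

    top_level.base_and_ancestors(same_project: true).count   # => 1, its bridge may trigger a child
    child.base_and_ancestors(same_project: true).count       # => 2, its bridge may trigger a grandchild
    grandchild.base_and_ancestors(same_project: true).count  # => 3, and 3 > MAX_DESCENDANTS_DEPTH, so the
                                                             #    bridge is dropped with
                                                             #    :reached_max_descendant_pipelines_depth
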
diff --git a/app/services/ci/create_job_artifacts_service.rb b/app/services/ci/create_job_artifacts_service.rb
index cd3807e0495..1fe65898d55 100644
--- a/app/services/ci/create_job_artifacts_service.rb
+++ b/app/services/ci/create_job_artifacts_service.rb
@@ -2,6 +2,8 @@
module Ci
class CreateJobArtifactsService < ::BaseService
+ include Gitlab::Utils::UsageData
+
ArtifactsExistError = Class.new(StandardError)
LSIF_ARTIFACT_TYPE = 'lsif'
@@ -25,7 +27,7 @@ module Ci
if lsif?(artifact_type)
headers[:ProcessLsif] = true
- headers[:ProcessLsifReferences] = Feature.enabled?(:code_navigation_references, project, default_enabled: true)
+ track_usage_event('i_source_code_code_intelligence', project.id)
end
success(headers: headers)
diff --git a/app/services/ci/create_web_ide_terminal_service.rb b/app/services/ci/create_web_ide_terminal_service.rb
index 4f1bf0447d2..a78281aed16 100644
--- a/app/services/ci/create_web_ide_terminal_service.rb
+++ b/app/services/ci/create_web_ide_terminal_service.rb
@@ -70,7 +70,7 @@ module Ci
end
def load_terminal_config!
- result = ::Ci::WebIdeConfigService.new(project, current_user, sha: sha).execute
+ result = ::Ide::TerminalConfigService.new(project, current_user, sha: sha).execute
raise TerminalCreationError, result[:message] if result[:status] != :success
@terminal = result[:terminal]
diff --git a/app/services/ci/destroy_expired_job_artifacts_service.rb b/app/services/ci/destroy_expired_job_artifacts_service.rb
index 1fa8926faa1..ca6e60f819a 100644
--- a/app/services/ci/destroy_expired_job_artifacts_service.rb
+++ b/app/services/ci/destroy_expired_job_artifacts_service.rb
@@ -20,18 +20,18 @@ module Ci
def execute
in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
loop_until(timeout: LOOP_TIMEOUT, limit: LOOP_LIMIT) do
- destroy_batch
+ destroy_batch(Ci::JobArtifact) || destroy_batch(Ci::PipelineArtifact)
end
end
end
private
- def destroy_batch
- artifact_batch = if Gitlab::Ci::Features.destroy_only_unlocked_expired_artifacts_enabled?
- Ci::JobArtifact.expired(BATCH_SIZE).unlocked
+ def destroy_batch(klass)
+ artifact_batch = if klass == Ci::JobArtifact
+ klass.expired(BATCH_SIZE).unlocked
else
- Ci::JobArtifact.expired(BATCH_SIZE)
+ klass.expired(BATCH_SIZE)
end
artifacts = artifact_batch.to_a
diff --git a/app/services/ci/destroy_pipeline_service.rb b/app/services/ci/destroy_pipeline_service.rb
index 9aea20c45f7..1d9533ed76f 100644
--- a/app/services/ci/destroy_pipeline_service.rb
+++ b/app/services/ci/destroy_pipeline_service.rb
@@ -8,6 +8,10 @@ module Ci
Ci::ExpirePipelineCacheService.new.execute(pipeline, delete: true)
pipeline.destroy!
+
+ ServiceResponse.success(message: 'Pipeline destroyed')
+ rescue ActiveRecord::RecordNotFound
+ ServiceResponse.error(message: 'Pipeline not found')
end
end
end
diff --git a/app/services/ci/generate_coverage_reports_service.rb b/app/services/ci/generate_coverage_reports_service.rb
index ebd1eaf0bad..063fb966183 100644
--- a/app/services/ci/generate_coverage_reports_service.rb
+++ b/app/services/ci/generate_coverage_reports_service.rb
@@ -12,7 +12,7 @@ module Ci
{
status: :parsed,
key: key(base_pipeline, head_pipeline),
- data: head_pipeline.coverage_reports.pick(merge_request.new_paths)
+ data: head_pipeline.pipeline_artifacts.find_with_code_coverage.present.for_files(merge_request.new_paths)
}
rescue => e
Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
diff --git a/app/services/ci/parse_dotenv_artifact_service.rb b/app/services/ci/parse_dotenv_artifact_service.rb
index fcbdc94c097..71b306864b2 100644
--- a/app/services/ci/parse_dotenv_artifact_service.rb
+++ b/app/services/ci/parse_dotenv_artifact_service.rb
@@ -54,7 +54,7 @@ module Ci
end
def scan_line!(line)
- result = line.scan(/^(.*)=(.*)$/).last
+ result = line.scan(/^(.*?)=(.*)$/).last
raise ParserError, 'Invalid Format' if result.nil?
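
Making the first capture lazy matters when the value itself contains '=': the greedy version pushed everything up to the last '=' into the key. A quick comparison using the same scan call as scan_line!:

    line = 'DATABASE_URL=postgres://user:pass@host/db?opt=1'

    line.scan(/^(.*)=(.*)$/).last   # => ["DATABASE_URL=postgres://user:pass@host/db?opt", "1"]
    line.scan(/^(.*?)=(.*)$/).last  # => ["DATABASE_URL", "postgres://user:pass@host/db?opt=1"]
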
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
index d0aa8b04775..aeabbb99468 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
@@ -77,15 +77,8 @@ module Ci
private
def status_for_array(statuses, dag:)
- # TODO: This is hack to support
- # the same exact behaviour for Atomic and Legacy processing
- # that DAG is blocked from executing if dependent is not "complete"
- if dag && statuses.any? { |status| Ci::HasStatus::COMPLETED_STATUSES.exclude?(status[:status]) }
- return 'pending'
- end
-
result = Gitlab::Ci::Status::Composite
- .new(statuses)
+ .new(statuses, dag: dag)
.status
result || 'success'
end
diff --git a/app/services/ci/pipelines/create_artifact_service.rb b/app/services/ci/pipelines/create_artifact_service.rb
new file mode 100644
index 00000000000..b7d334e436d
--- /dev/null
+++ b/app/services/ci/pipelines/create_artifact_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+module Ci
+ module Pipelines
+ class CreateArtifactService
+ def execute(pipeline)
+ return unless ::Gitlab::Ci::Features.coverage_report_view?(pipeline.project)
+ return unless pipeline.can_generate_coverage_reports?
+ return if pipeline.has_coverage_reports?
+
+ file = build_carrierwave_file(pipeline)
+
+ pipeline.pipeline_artifacts.create!(
+ project_id: pipeline.project_id,
+ file_type: :code_coverage,
+ file_format: :raw,
+ size: file["tempfile"].size,
+ file: file,
+ expire_at: Ci::PipelineArtifact::EXPIRATION_DATE.from_now
+ )
+ end
+
+ private
+
+ def build_carrierwave_file(pipeline)
+ CarrierWaveStringFile.new_file(
+ file_content: pipeline.coverage_reports.to_json,
+ filename: Ci::PipelineArtifact::DEFAULT_FILE_NAMES.fetch(:code_coverage),
+ content_type: 'application/json'
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index d84ef5fbb93..18bae26613f 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -37,10 +37,12 @@ module Ci
.pluck(Arel.sql('MAX(id)'), 'name')
# mark builds that are retried
- pipeline.statuses.latest
- .where(name: latest_statuses.map(&:second))
- .where.not(id: latest_statuses.map(&:first))
- .update_all(retried: true) if latest_statuses.any?
+ if latest_statuses.any?
+ pipeline.statuses.latest
+ .where(name: latest_statuses.map(&:second))
+ .where.not(id: latest_statuses.map(&:first))
+ .update_all(retried: true)
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index 60b3d28b0c5..6b2e6c245f3 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -2,17 +2,20 @@
module Ci
class RetryBuildService < ::BaseService
- CLONE_ACCESSORS = %i[pipeline project ref tag options name
- allow_failure stage stage_id stage_idx trigger_request
- yaml_variables when environment coverage_regex
- description tag_list protected needs_attributes
- resource_group scheduling_type].freeze
+ def self.clone_accessors
+ %i[pipeline project ref tag options name
+ allow_failure stage stage_id stage_idx trigger_request
+ yaml_variables when environment coverage_regex
+ description tag_list protected needs_attributes
+ resource_group scheduling_type].freeze
+ end
def execute(build)
build.ensure_scheduling_type!
reprocess!(build).tap do |new_build|
- build.pipeline.mark_as_processable_after_stage(build.stage_idx)
+ mark_subsequent_stages_as_processable(build)
+ build.pipeline.reset_ancestor_bridges!
Gitlab::OptimisticLocking.retry_lock(new_build, &:enqueue)
@@ -28,7 +31,7 @@ module Ci
raise Gitlab::Access::AccessDeniedError
end
- attributes = CLONE_ACCESSORS.map do |attribute|
+ attributes = self.class.clone_accessors.map do |attribute|
[attribute, build.public_send(attribute)] # rubocop:disable GitlabSecurity/PublicSend
end.to_h
@@ -60,5 +63,13 @@ module Ci
end
build
end
+
+ def mark_subsequent_stages_as_processable(build)
+ build.pipeline.processables.skipped.after_stage(build.stage_idx).find_each do |processable|
+ Gitlab::OptimisticLocking.retry_lock(processable, &:process)
+ end
+ end
end
end
+
+Ci::RetryBuildService.prepend_if_ee('EE::Ci::RetryBuildService')
diff --git a/app/services/ci/retry_pipeline_service.rb b/app/services/ci/retry_pipeline_service.rb
index 2f52f0a39c1..45244d16393 100644
--- a/app/services/ci/retry_pipeline_service.rb
+++ b/app/services/ci/retry_pipeline_service.rb
@@ -26,6 +26,8 @@ module Ci
retry_optimistic_lock(skipped) { |build| build.process }
end
+ pipeline.reset_ancestor_bridges!
+
MergeRequests::AddTodoWhenBuildFailsService
.new(project, current_user)
.close_all(pipeline)
diff --git a/app/services/ci/update_build_state_service.rb b/app/services/ci/update_build_state_service.rb
new file mode 100644
index 00000000000..61e4c77c1e5
--- /dev/null
+++ b/app/services/ci/update_build_state_service.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+module Ci
+ class UpdateBuildStateService
+ Result = Struct.new(:status, keyword_init: true)
+
+ ACCEPT_TIMEOUT = 5.minutes.freeze
+
+ attr_reader :build, :params, :metrics
+
+ def initialize(build, params, metrics = ::Gitlab::Ci::Trace::Metrics.new)
+ @build = build
+ @params = params
+ @metrics = metrics
+ end
+
+ def execute
+ overwrite_trace! if has_trace?
+
+ if accept_request?
+ accept_build_state!
+ else
+ check_migration_state
+ update_build_state!
+ end
+ end
+
+ private
+
+ def accept_build_state!
+ if Time.current - ensure_pending_state.created_at > ACCEPT_TIMEOUT
+ metrics.increment_trace_operation(operation: :discarded)
+
+ return update_build_state!
+ end
+
+ build.trace_chunks.live.find_each do |chunk|
+ chunk.schedule_to_persist!
+ end
+
+ metrics.increment_trace_operation(operation: :accepted)
+
+ Result.new(status: 202)
+ end
+
+ def overwrite_trace!
+ metrics.increment_trace_operation(operation: :overwrite)
+
+ build.trace.set(params[:trace]) if Gitlab::Ci::Features.trace_overwrite?
+ end
+
+ def check_migration_state
+ return unless accept_available?
+
+ if has_chunks? && !live_chunks_pending?
+ metrics.increment_trace_operation(operation: :finalized)
+ end
+ end
+
+ def update_build_state!
+ case build_state
+ when 'running'
+ build.touch if build.needs_touch?
+
+ Result.new(status: 200)
+ when 'success'
+ build.success!
+
+ Result.new(status: 200)
+ when 'failed'
+ build.drop!(params[:failure_reason] || :unknown_failure)
+
+ Result.new(status: 200)
+ else
+ Result.new(status: 400)
+ end
+ end
+
+ def accept_available?
+ !build_running? && has_checksum? && chunks_migration_enabled?
+ end
+
+ def accept_request?
+ accept_available? && live_chunks_pending?
+ end
+
+ def build_state
+ params.dig(:state).to_s
+ end
+
+ def has_trace?
+ params.dig(:trace).present?
+ end
+
+ def has_checksum?
+ params.dig(:checksum).present?
+ end
+
+ def has_chunks?
+ build.trace_chunks.any?
+ end
+
+ def live_chunks_pending?
+ build.trace_chunks.live.any?
+ end
+
+ def build_running?
+ build_state == 'running'
+ end
+
+ def ensure_pending_state
+ Ci::BuildPendingState.create_or_find_by!(
+ build_id: build.id,
+ state: params.fetch(:state),
+ trace_checksum: params.fetch(:checksum),
+ failure_reason: params.dig(:failure_reason)
+ )
+ rescue ActiveRecord::RecordNotFound
+ metrics.increment_trace_operation(operation: :conflict)
+
+ build.pending_state
+ end
+
+ def chunks_migration_enabled?
+ ::Gitlab::Ci::Features.accept_trace?(build.project)
+ end
+ end
+end
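
The 202 path is what lets a runner retry its final state update: while live trace chunks are still pending (and a checksum was sent), the service schedules the chunks for persistence and asks the runner to come back, falling through to a normal state transition once ACCEPT_TIMEOUT has passed. A rough sketch of that contract; the checksum value and the direct invocation are illustrative only:

    result = Ci::UpdateBuildStateService
      .new(build, { state: 'success', checksum: 'crc32:3aa6cfb2' })  # checksum format is illustrative
      .execute

    result.status  # => 202 while live chunks are pending (runner should retry),
                   #    200 once the build has actually been transitioned
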
diff --git a/app/services/ci/update_ci_ref_status_service.rb b/app/services/ci/update_ci_ref_status_service.rb
deleted file mode 100644
index 22cc43232cc..00000000000
--- a/app/services/ci/update_ci_ref_status_service.rb
+++ /dev/null
@@ -1,66 +0,0 @@
-# frozen_string_literal: true
-
-# NOTE: This class is unused and to be removed in 13.1~
-module Ci
- class UpdateCiRefStatusService
- include Gitlab::OptimisticLocking
-
- attr_reader :pipeline
-
- def initialize(pipeline)
- @pipeline = pipeline
- end
-
- def call
- save.tap { |success| after_save if success }
- end
-
- private
-
- def save
- might_insert = ref.new_record?
-
- begin
- retry_optimistic_lock(ref) do
- next false if ref.persisted? &&
- (ref.last_updated_by_pipeline_id || 0) > pipeline.id
-
- ref.update(status: next_status(ref.status, pipeline.status),
- last_updated_by_pipeline: pipeline)
- end
- rescue ActiveRecord::RecordNotUnique
- if might_insert
- @ref = pipeline.reset.ref_status
- might_insert = false
- retry
- else
- raise
- end
- end
- end
-
- def next_status(ref_status, pipeline_status)
- if ref_status == 'failed' && pipeline_status == 'success'
- 'fixed'
- else
- pipeline_status
- end
- end
-
- def after_save
- enqueue_pipeline_notification
- end
-
- def enqueue_pipeline_notification
- PipelineNotificationWorker.perform_async(pipeline.id, ref_status: ref.status)
- end
-
- def ref
- @ref ||= pipeline.ref_status || build_ref
- end
-
- def build_ref
- Ci::Ref.new(ref: pipeline.ref, project: pipeline.project, tag: pipeline.tag)
- end
- end
-end
diff --git a/app/services/clusters/aws/provision_service.rb b/app/services/clusters/aws/provision_service.rb
index 109e4c04a9c..b454a7a5f59 100644
--- a/app/services/clusters/aws/provision_service.rb
+++ b/app/services/clusters/aws/provision_service.rb
@@ -63,6 +63,7 @@ module Clusters
[
parameter('ClusterName', provider.cluster.name),
parameter('ClusterRole', provider.role_arn),
+ parameter('KubernetesVersion', provider.kubernetes_version),
parameter('ClusterControlPlaneSecurityGroup', provider.security_group_id),
parameter('VpcId', provider.vpc_id),
parameter('Subnets', provider.subnet_ids.join(',')),
diff --git a/app/services/concerns/admin/propagate_service.rb b/app/services/concerns/admin/propagate_service.rb
new file mode 100644
index 00000000000..974408f678c
--- /dev/null
+++ b/app/services/concerns/admin/propagate_service.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module Admin
+ module PropagateService
+ extend ActiveSupport::Concern
+
+ BATCH_SIZE = 100
+
+ delegate :data_fields_present?, to: :integration
+
+ class_methods do
+ def propagate(integration)
+ new(integration).propagate
+ end
+ end
+
+ def initialize(integration)
+ @integration = integration
+ end
+
+ private
+
+ attr_reader :integration
+
+ def create_integration_for_projects_without_integration
+ loop do
+ batch_ids = Project.uncached { Project.ids_without_integration(integration, BATCH_SIZE) }
+
+ bulk_create_from_integration(batch_ids, 'project') unless batch_ids.empty?
+
+ break if batch_ids.size < BATCH_SIZE
+ end
+ end
+
+ def bulk_create_from_integration(batch_ids, association)
+ service_list = ServiceList.new(batch_ids, service_hash, association).to_array
+
+ Service.transaction do
+ results = bulk_insert(*service_list)
+
+ if data_fields_present?
+ data_list = DataList.new(results, data_fields_hash, integration.data_fields.class).to_array
+
+ bulk_insert(*data_list)
+ end
+
+ run_callbacks(batch_ids) if association == 'project'
+ end
+ end
+
+ def bulk_insert(klass, columns, values_array)
+ items_to_insert = values_array.map { |array| Hash[columns.zip(array)] }
+
+ klass.insert_all(items_to_insert, returning: [:id])
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def run_callbacks(batch_ids)
+ if integration.issue_tracker?
+ Project.where(id: batch_ids).update_all(has_external_issue_tracker: true)
+ end
+
+ if integration.type == 'ExternalWikiService'
+ Project.where(id: batch_ids).update_all(has_external_wiki: true)
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def data_fields_hash
+ @data_fields_hash ||= integration.to_data_fields_hash
+ end
+ end
+end
diff --git a/app/services/concerns/incident_management/settings.rb b/app/services/concerns/incident_management/settings.rb
index 13a047ec106..93dfd6a306d 100644
--- a/app/services/concerns/incident_management/settings.rb
+++ b/app/services/concerns/incident_management/settings.rb
@@ -16,5 +16,9 @@ module IncidentManagement
def process_issues?
incident_management_setting.create_issue?
end
+
+ def auto_close_incident?
+ incident_management_setting.auto_close_incident?
+ end
end
end
diff --git a/app/services/concerns/incident_management/usage_data.rb b/app/services/concerns/incident_management/usage_data.rb
new file mode 100644
index 00000000000..b91aa59099d
--- /dev/null
+++ b/app/services/concerns/incident_management/usage_data.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module IncidentManagement
+ module UsageData
+ include Gitlab::Utils::UsageData
+
+ def track_incident_action(current_user, target, action)
+ return unless target.incident?
+
+ track_usage_event(:"incident_management_#{action}", current_user.id)
+ end
+
+ # No-op as optionally overridden in implementing classes.
+ # For use to provide checks before calling #track_incident_action.
+ def track_event
+ end
+ end
+end
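
The concern gives issue-related services one guarded way to bump incident usage counters. A minimal sketch of an including class, where the class name and the :incident_closed action are placeholders:

    class ExampleIncidentCloseService          # hypothetical including class
      include IncidentManagement::UsageData

      def initialize(current_user)
        @current_user = current_user
      end

      def execute(issue)
        # ... close the issue ...

        # No-op unless issue.incident?; otherwise bumps the
        # incident_management_incident_closed usage event for this user.
        track_incident_action(@current_user, issue, :incident_closed)
      end
    end
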
diff --git a/app/services/concerns/measurable.rb b/app/services/concerns/measurable.rb
index b099a58a9ae..fcb3022a1dc 100644
--- a/app/services/concerns/measurable.rb
+++ b/app/services/concerns/measurable.rb
@@ -45,7 +45,7 @@ module Measurable
private
def measuring?
- Feature.enabled?("gitlab_service_measuring_#{service_class}")
+ Feature.enabled?("gitlab_service_measuring_#{service_class}", type: :ops)
end
# These attributes are always present in log.
diff --git a/app/services/concerns/merge_requests/removes_refs.rb b/app/services/concerns/merge_requests/removes_refs.rb
new file mode 100644
index 00000000000..87c15746548
--- /dev/null
+++ b/app/services/concerns/merge_requests/removes_refs.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ module RemovesRefs
+ def cleanup_refs(merge_request)
+ CleanupRefsService.schedule(merge_request)
+ end
+ end
+end
diff --git a/app/services/design_management/move_designs_service.rb b/app/services/design_management/move_designs_service.rb
index de763caba2f..ca715b10351 100644
--- a/app/services/design_management/move_designs_service.rb
+++ b/app/services/design_management/move_designs_service.rb
@@ -13,7 +13,6 @@ module DesignManagement
def execute
return error(:no_focus) unless current_design.present?
- return error(:cannot_move) unless ::Feature.enabled?(:reorder_designs, project, default_enabled: true)
return error(:cannot_move) unless current_user.can?(:move_design, current_design)
return error(:no_neighbors) unless neighbors.present?
return error(:not_distinct) unless all_distinct?
diff --git a/app/services/event_create_service.rb b/app/services/event_create_service.rb
index 3921dbefd06..85658598afc 100644
--- a/app/services/event_create_service.rb
+++ b/app/services/event_create_service.rb
@@ -23,11 +23,15 @@ class EventCreateService
end
def open_mr(merge_request, current_user)
- create_record_event(merge_request, current_user, :created)
+ create_record_event(merge_request, current_user, :created).tap do
+ track_event(event_action: :created, event_target: MergeRequest, author_id: current_user.id)
+ end
end
def close_mr(merge_request, current_user)
- create_record_event(merge_request, current_user, :closed)
+ create_record_event(merge_request, current_user, :closed).tap do
+ track_event(event_action: :closed, event_target: MergeRequest, author_id: current_user.id)
+ end
end
def reopen_mr(merge_request, current_user)
@@ -35,7 +39,9 @@ class EventCreateService
end
def merge_mr(merge_request, current_user)
- create_record_event(merge_request, current_user, :merged)
+ create_record_event(merge_request, current_user, :merged).tap do
+ track_event(event_action: :merged, event_target: MergeRequest, author_id: current_user.id)
+ end
end
def open_milestone(milestone, current_user)
@@ -55,7 +61,11 @@ class EventCreateService
end
def leave_note(note, current_user)
- create_record_event(note, current_user, :commented)
+ create_record_event(note, current_user, :commented).tap do
+ if note.is_a?(DiffNote) && note.for_merge_request?
+ track_event(event_action: :commented, event_target: MergeRequest, author_id: current_user.id)
+ end
+ end
end
def join_project(project, current_user)
@@ -109,7 +119,7 @@ class EventCreateService
def wiki_event(wiki_page_meta, author, action, fingerprint)
raise IllegalActionError, action unless Event::WIKI_ACTIONS.include?(action)
- Gitlab::UsageDataCounters::TrackUniqueActions.track_event(event_action: action, event_target: wiki_page_meta.class, author_id: author.id)
+ track_event(event_action: action, event_target: wiki_page_meta.class, author_id: author.id)
duplicate = Event.for_wiki_meta(wiki_page_meta).for_fingerprint(fingerprint).first
return duplicate if duplicate.present?
@@ -154,7 +164,7 @@ class EventCreateService
result = Event.insert_all(attribute_sets, returning: %w[id])
tuples.each do |record, status, _|
- Gitlab::UsageDataCounters::TrackUniqueActions.track_event(event_action: status, event_target: record.class, author_id: current_user.id)
+ track_event(event_action: status, event_target: record.class, author_id: current_user.id)
end
result
@@ -172,7 +182,7 @@ class EventCreateService
new_event
end
- Gitlab::UsageDataCounters::TrackUniqueActions.track_event(event_action: :pushed, event_target: Project, author_id: current_user.id)
+ track_event(event_action: :pushed, event_target: Project, author_id: current_user.id)
Users::LastPushEventService.new(current_user)
.cache_last_push_event(event)
@@ -206,6 +216,10 @@ class EventCreateService
{ resource_parent_attr => resource_parent.id }
end
+
+ def track_event(**params)
+ Gitlab::UsageDataCounters::TrackUniqueEvents.track_event(**params)
+ end
end
EventCreateService.prepend_if_ee('EE::EventCreateService')
diff --git a/app/services/git/branch_hooks_service.rb b/app/services/git/branch_hooks_service.rb
index 92e7702727c..dcb32b4c84b 100644
--- a/app/services/git/branch_hooks_service.rb
+++ b/app/services/git/branch_hooks_service.rb
@@ -75,6 +75,7 @@ module Git
def branch_change_hooks
enqueue_process_commit_messages
+ enqueue_jira_connect_sync_messages
end
def branch_remove_hooks
@@ -103,6 +104,17 @@ module Git
end
end
+ def enqueue_jira_connect_sync_messages
+ return unless project.jira_subscription_exists?
+
+ branch_to_sync = branch_name if Atlassian::JiraIssueKeyExtractor.has_keys?(branch_name)
+ commits_to_sync = limited_commits.select { |commit| Atlassian::JiraIssueKeyExtractor.has_keys?(commit.safe_message) }.map(&:sha)
+
+ if branch_to_sync || commits_to_sync.any?
+ JiraConnect::SyncBranchWorker.perform_async(project.id, branch_to_sync, commits_to_sync)
+ end
+ end
+
def unsigned_x509_shas(commits)
X509CommitSignature.unsigned_commit_shas(commits.map(&:sha))
end
diff --git a/app/services/git/wiki_push_service.rb b/app/services/git/wiki_push_service.rb
index f9de72f2d5f..fa3019ee9d6 100644
--- a/app/services/git/wiki_push_service.rb
+++ b/app/services/git/wiki_push_service.rb
@@ -5,7 +5,16 @@ module Git
# Maximum number of change events we will process on any single push
MAX_CHANGES = 100
+ attr_reader :wiki
+
+ def initialize(wiki, current_user, params)
+ @wiki, @current_user, @params = wiki, current_user, params.dup
+ end
+
def execute
+ # Execute model-specific callbacks
+ wiki.after_post_receive
+
process_changes
end
@@ -23,7 +32,11 @@ module Git
end
def can_process_wiki_events?
- Feature.enabled?(:wiki_events_on_git_push, project)
+ # TODO: Support activity events for group wikis
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/209306
+ return false unless wiki.is_a?(ProjectWiki)
+
+ Feature.enabled?(:wiki_events_on_git_push, wiki.container)
end
def push_changes
@@ -36,10 +49,6 @@ module Git
wiki.repository.raw.raw_changes_between(change[:oldrev], change[:newrev])
end
- def wiki
- project.wiki
- end
-
def create_event_for(change)
event_service.execute(
change.last_known_slug,
@@ -54,7 +63,7 @@ module Git
end
def on_default_branch?(change)
- project.wiki.default_branch == ::Gitlab::Git.branch_name(change[:ref])
+ wiki.default_branch == ::Gitlab::Git.branch_name(change[:ref])
end
# See: [Gitlab::GitPostReceive#changes]
diff --git a/app/services/git/wiki_push_service/change.rb b/app/services/git/wiki_push_service/change.rb
index 562c43487e9..3d1d0fe8c4e 100644
--- a/app/services/git/wiki_push_service/change.rb
+++ b/app/services/git/wiki_push_service/change.rb
@@ -5,11 +5,11 @@ module Git
class Change
include Gitlab::Utils::StrongMemoize
- # @param [ProjectWiki] wiki
+ # @param [Wiki] wiki
# @param [Hash] change - must have keys `:oldrev` and `:newrev`
# @param [Gitlab::Git::RawDiffChange] raw_change
- def initialize(project_wiki, change, raw_change)
- @wiki, @raw_change, @change = project_wiki, raw_change, change
+ def initialize(wiki, change, raw_change)
+ @wiki, @raw_change, @change = wiki, raw_change, change
end
def page
diff --git a/app/services/ci/web_ide_config_service.rb b/app/services/ide/base_config_service.rb
index ade9132f419..1f8d5c17584 100644
--- a/app/services/ci/web_ide_config_service.rb
+++ b/app/services/ide/base_config_service.rb
@@ -1,9 +1,7 @@
# frozen_string_literal: true
-module Ci
- class WebIdeConfigService < ::BaseService
- include ::Gitlab::Utils::StrongMemoize
-
+module Ide
+ class BaseConfigService < ::BaseService
ValidationError = Class.new(StandardError)
WEBIDE_CONFIG_FILE = '.gitlab/.gitlab-webide.yml'.freeze
@@ -11,15 +9,21 @@ module Ci
attr_reader :config, :config_content
def execute
- check_access!
- load_config_content!
- load_config!
+ check_access_and_load_config!
- success(terminal: config.terminal_value)
+ success
rescue ValidationError => e
error(e.message)
end
+ protected
+
+ def check_access_and_load_config!
+ check_access!
+ load_config_content!
+ load_config!
+ end
+
private
def check_access!
diff --git a/app/services/ide/schemas_config_service.rb b/app/services/ide/schemas_config_service.rb
new file mode 100644
index 00000000000..8d2ce97103d
--- /dev/null
+++ b/app/services/ide/schemas_config_service.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Ide
+ class SchemasConfigService < ::Ide::BaseConfigService
+ PREDEFINED_SCHEMAS = [{
+ uri: 'https://json.schemastore.org/gitlab-ci',
+ match: ['*.gitlab-ci.yml']
+ }].freeze
+
+ def execute
+ schema = predefined_schema_for(params[:filename]) || {}
+ success(schema: schema)
+ rescue => e
+ error(e.message)
+ end
+
+ private
+
+ def find_schema(filename, schemas)
+ match_flags = ::File::FNM_DOTMATCH | ::File::FNM_PATHNAME
+
+ schemas.each do |schema|
+ match = schema[:match].any? { |pattern| ::File.fnmatch?(pattern, filename, match_flags) }
+
+ return Gitlab::Json.parse(get_cached(schema[:uri])) if match
+ end
+
+ nil
+ end
+
+ def predefined_schema_for(filename)
+ find_schema(filename, predefined_schemas)
+ end
+
+ def predefined_schemas
+ return PREDEFINED_SCHEMAS if Feature.enabled?(:schema_linting)
+
+ []
+ end
+
+ def get_cached(url)
+ Rails.cache.fetch("services:ide:schema:#{url}", expires_in: 1.day) do
+ Gitlab::HTTP.get(url).body
+ end
+ end
+ end
+end
+
+Ide::SchemasConfigService.prepend_if_ee('::EE::Ide::SchemasConfigService')
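
Schema lookup leans on File.fnmatch? with FNM_DOTMATCH (so dotfiles such as .gitlab-ci.yml can be matched) and FNM_PATHNAME (so a wildcard never spans a directory separator). Roughly how the predefined '*.gitlab-ci.yml' pattern behaves under those flags:

    flags = File::FNM_DOTMATCH | File::FNM_PATHNAME

    File.fnmatch?('*.gitlab-ci.yml', '.gitlab-ci.yml', flags)           # => true
    File.fnmatch?('*.gitlab-ci.yml', 'templates.gitlab-ci.yml', flags)  # => true
    File.fnmatch?('*.gitlab-ci.yml', 'ci/.gitlab-ci.yml', flags)        # => false, '*' will not match '/'
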
diff --git a/app/services/ide/terminal_config_service.rb b/app/services/ide/terminal_config_service.rb
new file mode 100644
index 00000000000..318df3436c4
--- /dev/null
+++ b/app/services/ide/terminal_config_service.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Ide
+ class TerminalConfigService < ::Ide::BaseConfigService
+ private
+
+ def success(pass_back = {})
+ result = super(pass_back)
+ result[:terminal] = config.terminal_value
+ result
+ end
+ end
+end
diff --git a/app/services/incident_management/incidents/create_service.rb b/app/services/incident_management/incidents/create_service.rb
index 7206eaf51b2..5b925e0f440 100644
--- a/app/services/incident_management/incidents/create_service.rb
+++ b/app/services/incident_management/incidents/create_service.rb
@@ -5,11 +5,12 @@ module IncidentManagement
class CreateService < BaseService
ISSUE_TYPE = 'incident'
- def initialize(project, current_user, title:, description:)
+ def initialize(project, current_user, title:, description:, severity: IssuableSeverity::DEFAULT)
super(project, current_user)
@title = title
@description = description
+ @severity = severity
end
def execute
@@ -18,25 +19,19 @@ module IncidentManagement
current_user,
title: title,
description: description,
- label_ids: [find_or_create_incident_label.id],
issue_type: ISSUE_TYPE
).execute
return error(issue.errors.full_messages.to_sentence, issue) unless issue.valid?
+ issue.update_severity(severity)
+
success(issue)
end
private
- attr_reader :title, :description
-
- def find_or_create_incident_label
- IncidentManagement::CreateIncidentLabelService
- .new(project, current_user)
- .execute
- .payload[:label]
- end
+ attr_reader :title, :description, :severity
def success(issue)
ServiceResponse.success(payload: { issue: issue })
diff --git a/app/services/issuable/common_system_notes_service.rb b/app/services/issuable/common_system_notes_service.rb
index 84024cca68c..fbc72dc867a 100644
--- a/app/services/issuable/common_system_notes_service.rb
+++ b/app/services/issuable/common_system_notes_service.rb
@@ -22,7 +22,7 @@ module Issuable
end
create_due_date_note if issuable.previous_changes.include?('due_date')
- create_milestone_note(old_milestone) if issuable.previous_changes.include?('milestone_id')
+ create_milestone_change_event(old_milestone) if issuable.previous_changes.include?('milestone_id')
create_labels_note(old_labels) if old_labels && issuable.labels != old_labels
end
end
@@ -94,23 +94,11 @@ module Issuable
SystemNoteService.change_time_spent(issuable, issuable.project, issuable.time_spent_user)
end
- def create_milestone_note(old_milestone)
- if milestone_changes_tracking_enabled?
- create_milestone_change_event(old_milestone)
- else
- SystemNoteService.change_milestone(issuable, issuable.project, current_user, issuable.milestone)
- end
- end
-
def create_milestone_change_event(old_milestone)
ResourceEvents::ChangeMilestoneService.new(issuable, current_user, old_milestone: old_milestone)
.execute
end
- def milestone_changes_tracking_enabled?
- ::Feature.enabled?(:track_resource_milestone_change_events, issuable.project, default_enabled: true)
- end
-
def create_due_date_note
SystemNoteService.change_due_date(issuable, issuable.project, current_user, issuable.due_date)
end
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 65a73dadc2e..56bcef0c562 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -46,7 +46,7 @@ class IssuableBaseService < BaseService
params[:assignee_ids] = params[:assignee_ids].first(1)
end
- assignee_ids = params[:assignee_ids].select { |assignee_id| assignee_can_read?(issuable, assignee_id) }
+ assignee_ids = params[:assignee_ids].select { |assignee_id| user_can_read?(issuable, assignee_id) }
if params[:assignee_ids].map(&:to_s) == [IssuableFinder::Params::NONE]
params[:assignee_ids] = []
@@ -57,15 +57,15 @@ class IssuableBaseService < BaseService
end
end
- def assignee_can_read?(issuable, assignee_id)
- new_assignee = User.find_by_id(assignee_id)
+ def user_can_read?(issuable, user_id)
+ user = User.find_by_id(user_id)
- return false unless new_assignee
+ return false unless user
ability_name = :"read_#{issuable.to_ability_name}"
resource = issuable.persisted? ? issuable : project
- can?(new_assignee, ability_name, resource)
+ can?(user, ability_name, resource)
end
def filter_milestone
@@ -136,7 +136,7 @@ class IssuableBaseService < BaseService
{}
end
- def create(issuable)
+ def create(issuable, skip_system_notes: false)
handle_quick_actions(issuable)
filter_params(issuable)
@@ -153,7 +153,7 @@ class IssuableBaseService < BaseService
end
if issuable_saved
- Issuable::CommonSystemNotesService.new(project, current_user).execute(issuable, is_update: false)
+ create_system_notes(issuable, is_update: false) unless skip_system_notes
after_create(issuable)
execute_hooks(issuable)
@@ -184,10 +184,7 @@ class IssuableBaseService < BaseService
handle_quick_actions(issuable)
filter_params(issuable)
- change_state(issuable)
- change_subscription(issuable)
- change_todo(issuable)
- toggle_award(issuable)
+ change_additional_attributes(issuable)
old_associations = associations_before_update(issuable)
label_ids = process_label_ids(params, existing_label_ids: issuable.label_ids)
@@ -220,8 +217,9 @@ class IssuableBaseService < BaseService
end
if issuable_saved
- Issuable::CommonSystemNotesService.new(project, current_user).execute(
- issuable, old_labels: old_associations[:labels], old_milestone: old_associations[:milestone])
+ create_system_notes(
+ issuable, old_labels: old_associations[:labels], old_milestone: old_associations[:milestone]
+ )
handle_changes(issuable, old_associations: old_associations)
@@ -255,7 +253,7 @@ class IssuableBaseService < BaseService
before_update(issuable, skip_spam_check: true)
if issuable.with_transaction_returning_status { issuable.save }
- Issuable::CommonSystemNotesService.new(project, current_user).execute(issuable, old_labels: nil)
+ create_system_notes(issuable, old_labels: nil)
handle_task_changes(issuable)
invalidate_cache_counts(issuable, users: issuable.assignees.to_a)
@@ -304,6 +302,14 @@ class IssuableBaseService < BaseService
issuable.title_changed? || issuable.description_changed?
end
+ def change_additional_attributes(issuable)
+ change_state(issuable)
+ change_severity(issuable)
+ change_subscription(issuable)
+ change_todo(issuable)
+ toggle_award(issuable)
+ end
+
def change_state(issuable)
case params.delete(:state_event)
when 'reopen'
@@ -313,6 +319,12 @@ class IssuableBaseService < BaseService
end
end
+ def change_severity(issuable)
+ if severity = params.delete(:severity)
+ issuable.update_severity(severity)
+ end
+ end
+
def change_subscription(issuable)
case params.delete(:subscription_event)
when 'subscribe'
@@ -339,6 +351,10 @@ class IssuableBaseService < BaseService
AwardEmojis::ToggleService.new(issuable, award, current_user).execute if award
end
+ def create_system_notes(issuable, **options)
+ Issuable::CommonSystemNotesService.new(project, current_user).execute(issuable, **options)
+ end
+
def associations_before_update(issuable)
associations =
{
@@ -353,8 +369,8 @@ class IssuableBaseService < BaseService
associations
end
- def has_changes?(issuable, old_labels: [], old_assignees: [])
- valid_attrs = [:title, :description, :assignee_ids, :milestone_id, :target_branch]
+ def has_changes?(issuable, old_labels: [], old_assignees: [], old_reviewers: [])
+ valid_attrs = [:title, :description, :assignee_ids, :reviewer_ids, :milestone_id, :target_branch]
attrs_changed = valid_attrs.any? do |attr|
issuable.previous_changes.include?(attr.to_s)
@@ -364,7 +380,9 @@ class IssuableBaseService < BaseService
assignees_changed = issuable.assignees != old_assignees
- attrs_changed || labels_changed || assignees_changed
+ reviewers_changed = issuable.reviewers != old_reviewers if issuable.allows_reviewers?
+
+ attrs_changed || labels_changed || assignees_changed || reviewers_changed
end
def invalidate_cache_counts(issuable, users: [])
diff --git a/app/services/issuable_links/create_service.rb b/app/services/issuable_links/create_service.rb
new file mode 100644
index 00000000000..f148c503dcf
--- /dev/null
+++ b/app/services/issuable_links/create_service.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+module IssuableLinks
+ class CreateService < BaseService
+ include IncidentManagement::UsageData
+
+ attr_reader :issuable, :current_user, :params
+
+ def initialize(issuable, user, params)
+ @issuable, @current_user, @params = issuable, user, params.dup
+ end
+
+ def execute
+    # If ALL referenced issues are already assigned to the given epic it renders a conflict status,
+    # otherwise it creates issue links for the issues which are still
+    # not assigned and returns a success message.
+ if render_conflict_error?
+ return error(issuables_assigned_message, 409)
+ end
+
+ if render_not_found_error?
+ return error(issuables_not_found_message, 404)
+ end
+
+ @errors = []
+ create_links
+
+ if @errors.present?
+ return error(@errors.join('. '), 422)
+ end
+
+ track_event
+
+ success
+ end
+
+ private
+
+ def render_conflict_error?
+ referenced_issuables.present? && (referenced_issuables - previous_related_issuables).empty?
+ end
+
+ def render_not_found_error?
+ linkable_issuables(referenced_issuables).empty?
+ end
+
+ def create_links
+ objects = linkable_issuables(referenced_issuables)
+ link_issuables(objects)
+ end
+
+ def link_issuables(target_issuables)
+ target_issuables.each do |referenced_object|
+ link = relate_issuables(referenced_object)
+
+ unless link.valid?
+ @errors << _("%{ref} cannot be added: %{error}") % {
+ ref: referenced_object.to_reference,
+ error: link.errors.messages.values.flatten.to_sentence
+ }
+ end
+ end
+ end
+
+ def referenced_issuables
+ @referenced_issuables ||= begin
+ target_issuable = params[:target_issuable]
+
+ if params[:issuable_references].present?
+ extract_references
+ elsif target_issuable
+ [target_issuable]
+ else
+ []
+ end
+ end
+ end
+
+ def extract_references
+ issuable_references = params[:issuable_references]
+ text = issuable_references.join(' ')
+
+ extractor = Gitlab::ReferenceExtractor.new(issuable.project, current_user)
+ extractor.analyze(text, extractor_context)
+
+ references(extractor)
+ end
+
+ def references(extractor)
+ extractor.issues
+ end
+
+ def extractor_context
+ {}
+ end
+
+ def linkable_issuables(objects)
+ raise NotImplementedError
+ end
+
+ def previous_related_issuables
+ raise NotImplementedError
+ end
+
+ def relate_issuables(referenced_object)
+ raise NotImplementedError
+ end
+
+ def issuables_assigned_message
+ 'Issue(s) already assigned'
+ end
+
+ def issuables_not_found_message
+ 'No Issue found for given params'
+ end
+ end
+end
+
+IssuableLinks::CreateService.prepend_if_ee('EE::IssuableLinks::CreateService')
diff --git a/app/services/issuable_links/destroy_service.rb b/app/services/issuable_links/destroy_service.rb
new file mode 100644
index 00000000000..57e1314e0da
--- /dev/null
+++ b/app/services/issuable_links/destroy_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module IssuableLinks
+ class DestroyService < BaseService
+ include IncidentManagement::UsageData
+
+ attr_reader :link, :current_user
+
+ def initialize(link, user)
+ @link = link
+ @current_user = user
+ end
+
+ def execute
+ return error(not_found_message, 404) unless permission_to_remove_relation?
+
+ remove_relation
+ create_notes
+ track_event
+
+ success(message: 'Relation was removed')
+ end
+
+ private
+
+ def remove_relation
+ link.destroy!
+ end
+
+ def not_found_message
+ 'No Issue Link found'
+ end
+ end
+end
diff --git a/app/services/issuable_links/list_service.rb b/app/services/issuable_links/list_service.rb
new file mode 100644
index 00000000000..10a2da7eb03
--- /dev/null
+++ b/app/services/issuable_links/list_service.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module IssuableLinks
+ class ListService
+ include Gitlab::Routing
+
+ attr_reader :issuable, :current_user
+
+ def initialize(issuable, user)
+ @issuable, @current_user = issuable, user
+ end
+
+ def execute
+ serializer.new(current_user: current_user, issuable: issuable).represent(child_issuables)
+ end
+
+ private
+
+ def serializer
+ raise NotImplementedError
+ end
+
+ def preload_for_collection
+ [{ project: :namespace }, :assignees]
+ end
+ end
+end
diff --git a/app/services/issue_links/create_service.rb b/app/services/issue_links/create_service.rb
new file mode 100644
index 00000000000..63762b1af79
--- /dev/null
+++ b/app/services/issue_links/create_service.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module IssueLinks
+ class CreateService < IssuableLinks::CreateService
+ # rubocop: disable CodeReuse/ActiveRecord
+ def relate_issuables(referenced_issue)
+ link = IssueLink.find_or_initialize_by(source: issuable, target: referenced_issue)
+
+ set_link_type(link)
+
+ if link.changed? && link.save
+ create_notes(referenced_issue)
+ end
+
+ link
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def linkable_issuables(issues)
+ @linkable_issuables ||= begin
+ issues.select { |issue| can?(current_user, :admin_issue_link, issue) }
+ end
+ end
+
+ def create_notes(referenced_issue)
+ SystemNoteService.relate_issue(issuable, referenced_issue, current_user)
+ SystemNoteService.relate_issue(referenced_issue, issuable, current_user)
+ end
+
+ def previous_related_issuables
+ @related_issues ||= issuable.related_issues(current_user).to_a
+ end
+
+ private
+
+ def set_link_type(_link)
+ # EE only
+ end
+
+ def track_event
+ track_incident_action(current_user, issuable, :incident_relate)
+ end
+ end
+end
+
+IssueLinks::CreateService.prepend_if_ee('EE::IssueLinks::CreateService')
diff --git a/app/services/issue_links/destroy_service.rb b/app/services/issue_links/destroy_service.rb
new file mode 100644
index 00000000000..25a45fc697b
--- /dev/null
+++ b/app/services/issue_links/destroy_service.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module IssueLinks
+ class DestroyService < IssuableLinks::DestroyService
+ private
+
+ def source
+ @source ||= link.source
+ end
+
+ def target
+ @target ||= link.target
+ end
+
+ def permission_to_remove_relation?
+ can?(current_user, :admin_issue_link, source) && can?(current_user, :admin_issue_link, target)
+ end
+
+ def create_notes
+ SystemNoteService.unrelate_issue(source, target, current_user)
+ SystemNoteService.unrelate_issue(target, source, current_user)
+ end
+
+ def track_event
+ track_incident_action(current_user, target, :incident_unrelate)
+ end
+ end
+end
diff --git a/app/services/issue_links/list_service.rb b/app/services/issue_links/list_service.rb
new file mode 100644
index 00000000000..5ac731d3fbc
--- /dev/null
+++ b/app/services/issue_links/list_service.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module IssueLinks
+ class ListService < IssuableLinks::ListService
+ extend ::Gitlab::Utils::Override
+
+ private
+
+ def child_issuables
+ issuable.related_issues(current_user, preload: preload_for_collection)
+ end
+
+ override :serializer
+ def serializer
+ LinkedProjectIssueSerializer
+ end
+ end
+end
diff --git a/app/services/issue_rebalancing_service.rb b/app/services/issue_rebalancing_service.rb
new file mode 100644
index 00000000000..4138c6441c8
--- /dev/null
+++ b/app/services/issue_rebalancing_service.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+class IssueRebalancingService
+ MAX_ISSUE_COUNT = 10_000
+ TooManyIssues = Class.new(StandardError)
+
+ def initialize(issue)
+ @issue = issue
+ @base = Issue.relative_positioning_query_base(issue)
+ end
+
+ def execute
+ gates = [issue.project, issue.project.group].compact
+ return unless gates.any? { |gate| Feature.enabled?(:rebalance_issues, gate) }
+
+ raise TooManyIssues, "#{issue_count} issues" if issue_count > MAX_ISSUE_COUNT
+
+ start = RelativePositioning::START_POSITION - (gaps / 2) * gap_size
+
+ Issue.transaction do
+ indexed_ids.each_slice(100) { |pairs| assign_positions(start, pairs) }
+ end
+ end
+
+ private
+
+ attr_reader :issue, :base
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def indexed_ids
+ base.reorder(:relative_position, :id).pluck(:id).each_with_index
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def assign_positions(start, positions)
+ values = positions.map do |id, index|
+ "(#{id}, #{start + (index * gap_size)})"
+ end.join(', ')
+
+ Issue.connection.exec_query(<<~SQL, "rebalance issue positions")
+ WITH cte(cte_id, new_pos) AS (
+ SELECT *
+ FROM (VALUES #{values}) as t (id, pos)
+ )
+ UPDATE #{Issue.table_name}
+ SET relative_position = cte.new_pos
+ FROM cte
+ WHERE cte_id = id
+ SQL
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def issue_count
+ @issue_count ||= base.count
+ end
+
+ def gaps
+ issue_count - 1
+ end
+
+ def gap_size
+ # We could try to split the available range over the number of gaps we need,
+ # but IDEAL_DISTANCE * MAX_ISSUE_COUNT is only 0.1% of the available range,
+ # so we are guaranteed not to exhaust it by using this static value.
+ #
+ # If we raise MAX_ISSUE_COUNT or IDEAL_DISTANCE significantly, this may
+ # change!
+ RelativePositioning::IDEAL_DISTANCE
+ end
+end
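
For context, the spacing logic above can be checked with a toy calculation; a minimal plain-Ruby sketch follows (the constants are illustrative stand-ins, not the real RelativePositioning values):

    START_POSITION = 0        # assumed stand-in for RelativePositioning::START_POSITION
    IDEAL_DISTANCE = 500      # assumed stand-in for RelativePositioning::IDEAL_DISTANCE

    issue_ids = [11, 12, 13, 14, 15]
    gaps      = issue_ids.size - 1
    start     = START_POSITION - (gaps / 2) * IDEAL_DISTANCE

    # Evenly spaced positions centred on START_POSITION, mirroring assign_positions above.
    issue_ids.each_with_index.map { |id, index| [id, start + index * IDEAL_DISTANCE] }
    # => [[11, -1000], [12, -500], [13, 0], [14, 500], [15, 1000]]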
diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb
index 9e72f6dad8d..0ed2b08b7b1 100644
--- a/app/services/issues/base_service.rb
+++ b/app/services/issues/base_service.rb
@@ -2,6 +2,8 @@
module Issues
class BaseService < ::IssuableBaseService
+ include IncidentManagement::UsageData
+
def hook_data(issue, action, old_associations: {})
hook_data = issue.to_hook_data(current_user, old_associations: old_associations)
hook_data[:object_attributes][:action] = action
@@ -17,6 +19,19 @@ module Issues
Issues::CloseService
end
+ NO_REBALANCING_NEEDED = ((RelativePositioning::MIN_POSITION * 0.9999)..(RelativePositioning::MAX_POSITION * 0.9999)).freeze
+
+ def rebalance_if_needed(issue)
+ return unless issue
+ return if issue.relative_position.nil?
+ return if NO_REBALANCING_NEEDED.cover?(issue.relative_position)
+
+ gates = [issue.project, issue.project.group].compact
+ return unless gates.any? { |gate| Feature.enabled?(:rebalance_issues, gate) }
+
+ IssueRebalancingWorker.perform_async(nil, issue.project_id)
+ end
+
private
def create_assignee_note(issue, old_assignees)
@@ -46,6 +61,22 @@ module Issues
Milestones::IssuesCountService.new(milestone).delete_cache
end
+
+ # Applies label "incident" (creates it if missing) to incident issues.
+    # Please use in "after" hooks only to ensure we are not applying
+ # labels prematurely.
+ def add_incident_label(issue)
+ return unless issue.incident?
+
+ label = ::IncidentManagement::CreateIncidentLabelService
+ .new(project, current_user)
+ .execute
+ .payload[:label]
+
+ return if issue.label_ids.include?(label.id)
+
+ issue.labels << label
+ end
end
end
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index e431c766df8..c3677de015f 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -37,6 +37,7 @@ module Issues
execute_hooks(issue, 'close')
invalidate_cache_counts(issue, users: issue.assignees)
issue.update_project_counter_caches
+ track_incident_action(current_user, issue, :incident_closed)
store_first_mentioned_in_commit_at(issue, closed_via) if closed_via.is_a?(MergeRequest)
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index c0194f5b847..fb7683f940d 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -5,31 +5,33 @@ module Issues
include SpamCheckMethods
include ResolveDiscussions
- def execute
+ def execute(skip_system_notes: false)
@issue = BuildService.new(project, current_user, params).execute
filter_spam_check_params
filter_resolve_discussion_params
- create(@issue)
+ create(@issue, skip_system_notes: skip_system_notes)
end
def before_create(issue)
spam_check(issue, current_user, action: :create)
- issue.move_to_end
# current_user (defined in BaseService) is not available within run_after_commit block
user = current_user
issue.run_after_commit do
NewIssueWorker.perform_async(issue.id, user.id)
+ IssuePlacementWorker.perform_async(nil, issue.project_id)
end
end
- def after_create(issuable)
- todo_service.new_issue(issuable, current_user)
+ def after_create(issue)
+ add_incident_label(issue)
+ todo_service.new_issue(issue, current_user)
user_agent_detail_service.create
- resolve_discussions_with_issue(issuable)
- delete_milestone_total_issue_counter_cache(issuable.milestone)
+ resolve_discussions_with_issue(issue)
+ delete_milestone_total_issue_counter_cache(issue.milestone)
+ track_incident_action(current_user, issue, :incident_created)
super
end
diff --git a/app/services/issues/duplicate_service.rb b/app/services/issues/duplicate_service.rb
index c936d75e277..feb496542c8 100644
--- a/app/services/issues/duplicate_service.rb
+++ b/app/services/issues/duplicate_service.rb
@@ -12,6 +12,8 @@ module Issues
close_service.new(project, current_user, {}).execute(duplicate_issue)
duplicate_issue.update(duplicated_to: canonical_issue)
+
+ relate_two_issues(duplicate_issue, canonical_issue)
end
private
@@ -23,7 +25,10 @@ module Issues
def create_issue_canonical_note(canonical_issue, duplicate_issue)
SystemNoteService.mark_canonical_issue_of_duplicate(canonical_issue, canonical_issue.project, current_user, duplicate_issue)
end
+
+ def relate_two_issues(duplicate_issue, canonical_issue)
+ params = { target_issuable: canonical_issue }
+ IssueLinks::CreateService.new(duplicate_issue, current_user, params).execute
+ end
end
end
-
-Issues::DuplicateService.prepend_if_ee('EE::Issues::DuplicateService')
diff --git a/app/services/issues/move_service.rb b/app/services/issues/move_service.rb
index ce1466307e1..60e0d1eec3d 100644
--- a/app/services/issues/move_service.rb
+++ b/app/services/issues/move_service.rb
@@ -38,6 +38,7 @@ module Issues
def update_old_entity
super
+ rewrite_related_issues
mark_as_moved
end
@@ -51,13 +52,24 @@ module Issues
}
new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params)
- CreateService.new(@target_project, @current_user, new_params).execute
+
+ # Skip creation of system notes for existing attributes of the issue. The system notes of the old
+ # issue are copied over so we don't want to end up with duplicate notes.
+ CreateService.new(@target_project, @current_user, new_params).execute(skip_system_notes: true)
end
def mark_as_moved
original_entity.update(moved_to: new_entity)
end
+ def rewrite_related_issues
+ source_issue_links = IssueLink.for_source_issue(original_entity)
+ source_issue_links.update_all(source_id: new_entity.id)
+
+ target_issue_links = IssueLink.for_target_issue(original_entity)
+ target_issue_links.update_all(target_id: new_entity.id)
+ end
+
def notify_participants
notification_service.async.issue_moved(original_entity, new_entity, @current_user)
end
diff --git a/app/services/issues/related_branches_service.rb b/app/services/issues/related_branches_service.rb
index 46076218857..98d8412102f 100644
--- a/app/services/issues/related_branches_service.rb
+++ b/app/services/issues/related_branches_service.rb
@@ -24,7 +24,7 @@ module Issues
return unless target
- pipeline = project.pipeline_for(branch_name, target.sha)
+ pipeline = project.latest_pipeline(branch_name, target.sha)
pipeline.detailed_status(current_user) if can?(current_user, :read_pipeline, pipeline)
end
diff --git a/app/services/issues/reopen_service.rb b/app/services/issues/reopen_service.rb
index 0ffe33dd317..e2b1b5400c7 100644
--- a/app/services/issues/reopen_service.rb
+++ b/app/services/issues/reopen_service.rb
@@ -13,6 +13,7 @@ module Issues
invalidate_cache_counts(issue, users: issue.assignees)
issue.update_project_counter_caches
delete_milestone_closed_issue_counter_cache(issue.milestone)
+ track_incident_action(current_user, issue, :incident_reopened)
end
issue
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index ac7baba3b7c..ce21b2e0275 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -22,6 +22,7 @@ module Issues
end
def after_update(issue)
+ add_incident_label(issue)
IssuesChannel.broadcast_to(issue, event: 'updated') if Gitlab::ActionCable::Config.in_app? || Feature.enabled?(:broadcast_issue_updates, issue.project)
end
@@ -44,12 +45,14 @@ module Issues
create_assignee_note(issue, old_assignees)
notification_service.async.reassigned_issue(issue, current_user, old_assignees)
todo_service.reassigned_assignable(issue, current_user, old_assignees)
+ track_incident_action(current_user, issue, :incident_assigned)
end
if issue.previous_changes.include?('confidential')
# don't enqueue immediately to prevent todos removal in case of a mistake
TodosDestroyer::ConfidentialIssueWorker.perform_in(Todo::WAIT_FOR_DELETE, issue.id) if issue.confidential?
create_confidentiality_note(issue)
+ track_usage_event(:incident_management_incident_change_confidential, current_user.id)
end
added_labels = issue.labels - old_labels
@@ -83,6 +86,7 @@ module Issues
raise ActiveRecord::RecordNotFound unless issue_before || issue_after
issue.move_between(issue_before, issue_after)
+ rebalance_if_needed(issue)
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/services/issues/zoom_link_service.rb b/app/services/issues/zoom_link_service.rb
index 9572cf50564..1384e2f83b2 100644
--- a/app/services/issues/zoom_link_service.rb
+++ b/app/services/issues/zoom_link_service.rb
@@ -60,6 +60,7 @@ module Issues
if @issue.persisted?
# Save the meeting directly since we only want to update one meeting, not all
zoom_meeting.save
+ track_incident_action(current_user, issue, :incident_zoom_meeting)
success(message: _('Zoom meeting added'))
else
success(message: _('Zoom meeting added'), payload: { zoom_meetings: [zoom_meeting] })
diff --git a/app/services/jira_connect/sync_service.rb b/app/services/jira_connect/sync_service.rb
new file mode 100644
index 00000000000..07a648bb8c9
--- /dev/null
+++ b/app/services/jira_connect/sync_service.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module JiraConnect
+ class SyncService
+ def initialize(project)
+ self.project = project
+ end
+
+ def execute(commits: nil, branches: nil, merge_requests: nil)
+ JiraConnectInstallation.for_project(project).each do |installation|
+ client = Atlassian::JiraConnect::Client.new(installation.base_url, installation.shared_secret)
+
+ response = client.store_dev_info(project: project, commits: commits, branches: branches, merge_requests: merge_requests)
+
+ log_response(response)
+ end
+ end
+
+ private
+
+ attr_accessor :project
+
+ def log_response(response)
+ message = {
+ message: 'response from jira dev_info api',
+ integration: 'JiraConnect',
+ project_id: project.id,
+ project_path: project.full_path,
+ jira_response: response&.to_json
+ }
+
+ if response && response['errorMessages']
+ logger.error(message)
+ else
+ logger.info(message)
+ end
+ end
+
+ def logger
+ Gitlab::ProjectServiceLogger
+ end
+ end
+end
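
A hypothetical invocation of the new sync service, assuming it runs from a worker with a loaded project; all keyword arguments default to nil, so any subset of records can be synced:

    # Sketch only: project and merge_request are assumed to be loaded elsewhere.
    JiraConnect::SyncService
      .new(project)
      .execute(merge_requests: [merge_request])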
diff --git a/app/services/jira_connect_subscriptions/base_service.rb b/app/services/jira_connect_subscriptions/base_service.rb
new file mode 100644
index 00000000000..0e5bb91660e
--- /dev/null
+++ b/app/services/jira_connect_subscriptions/base_service.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module JiraConnectSubscriptions
+ class BaseService < ::BaseService
+ attr_accessor :jira_connect_installation, :current_user, :params
+
+ def initialize(jira_connect_installation, user = nil, params = {})
+ @jira_connect_installation, @current_user, @params = jira_connect_installation, user, params.dup
+ end
+ end
+end
diff --git a/app/services/jira_connect_subscriptions/create_service.rb b/app/services/jira_connect_subscriptions/create_service.rb
new file mode 100644
index 00000000000..8e794d3acf7
--- /dev/null
+++ b/app/services/jira_connect_subscriptions/create_service.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module JiraConnectSubscriptions
+ class CreateService < ::JiraConnectSubscriptions::BaseService
+ include Gitlab::Utils::StrongMemoize
+
+ def execute
+ unless namespace && can?(current_user, :create_jira_connect_subscription, namespace)
+ return error('Invalid namespace. Please make sure you have sufficient permissions', 401)
+ end
+
+ create_subscription
+ end
+
+ private
+
+ def create_subscription
+ subscription = JiraConnectSubscription.new(installation: jira_connect_installation, namespace: namespace)
+
+ if subscription.save
+ success
+ else
+ error(subscription.errors.full_messages.join(', '), 422)
+ end
+ end
+
+ def namespace
+ strong_memoize(:namespace) do
+ Namespace.find_by_full_path(params[:namespace_path])
+ end
+ end
+ end
+end
diff --git a/app/services/lfs/push_service.rb b/app/services/lfs/push_service.rb
new file mode 100644
index 00000000000..6e1a11ebff8
--- /dev/null
+++ b/app/services/lfs/push_service.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+module Lfs
+ # Lfs::PushService pushes the LFS objects associated with a project to a
+ # remote URL
+ class PushService < BaseService
+ include Gitlab::Utils::StrongMemoize
+
+ # Match the canonical LFS client's batch size:
+ # https://github.com/git-lfs/git-lfs/blob/master/tq/transfer_queue.go#L19
+ BATCH_SIZE = 100
+
+ def execute
+ lfs_objects_relation.each_batch(of: BATCH_SIZE) do |objects|
+ push_objects(objects)
+ end
+
+ success
+ rescue => err
+ error(err.message)
+ end
+
+ private
+
+ # Currently we only set repository_type for design repository objects, so
+ # push mirroring must send objects with a `nil` repository type - but if the
+ # wiki repository uses LFS, its objects will also be sent. This will be
+ # addressed by https://gitlab.com/gitlab-org/gitlab/-/issues/250346
+ def lfs_objects_relation
+ project.lfs_objects_for_repository_types(nil, :project)
+ end
+
+ def push_objects(objects)
+ rsp = lfs_client.batch('upload', objects)
+ objects = objects.index_by(&:oid)
+
+ rsp.fetch('objects', []).each do |spec|
+ actions = spec['actions']
+ object = objects[spec['oid']]
+
+ upload_object!(object, spec) if actions&.key?('upload')
+ verify_object!(object, spec) if actions&.key?('verify')
+ end
+ end
+
+ def upload_object!(object, spec)
+ authenticated = spec['authenticated']
+ upload = spec.dig('actions', 'upload')
+
+ # The server wants us to upload the object but something is wrong
+ unless object && object.size == spec['size'].to_i
+ log_error("Couldn't match object #{spec['oid']}/#{spec['size']}")
+ return
+ end
+
+ lfs_client.upload(object, upload, authenticated: authenticated)
+ end
+
+ def verify_object!(object, spec)
+ # TODO: the remote has requested that we make another call to verify that
+ # the object has been sent correctly.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/250654
+ log_error("LFS upload verification requested, but not supported for #{object.oid}")
+ end
+
+ def url
+ params.fetch(:url)
+ end
+
+ def credentials
+ params.fetch(:credentials)
+ end
+
+ def lfs_client
+ strong_memoize(:lfs_client) do
+ Gitlab::Lfs::Client.new(url, credentials: credentials)
+ end
+ end
+ end
+end
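
A usage sketch for the push service, assuming the usual BaseService.new(project, user, params) signature and that url/credentials match what a push mirror would supply; the values below are placeholders:

    params = {
      url: 'https://mirror.example.com/group/project.git',            # placeholder URL
      credentials: { user: 'git', password: 'personal-access-token' } # assumed credential shape
    }

    result = Lfs::PushService.new(project, current_user, params).execute
    result[:status] # => :success, or :error with a message on failure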
diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb
index 7e301f311e9..abc3f99797d 100644
--- a/app/services/merge_requests/base_service.rb
+++ b/app/services/merge_requests/base_service.rb
@@ -23,6 +23,8 @@ module MergeRequests
merge_data = hook_data(merge_request, action, old_rev: old_rev, old_associations: old_associations)
merge_request.project.execute_hooks(merge_data, :merge_request_hooks)
merge_request.project.execute_services(merge_data, :merge_request_hooks)
+
+ enqueue_jira_connect_messages_for(merge_request)
end
def cleanup_environments(merge_request)
@@ -52,6 +54,14 @@ module MergeRequests
private
+ def enqueue_jira_connect_messages_for(merge_request)
+ return unless project.jira_subscription_exists?
+
+ if Atlassian::JiraIssueKeyExtractor.has_keys?(merge_request.title, merge_request.description)
+ JiraConnect::SyncMergeRequestWorker.perform_async(merge_request.id)
+ end
+ end
+
def create(merge_request)
self.params = assign_allowed_merge_params(merge_request, params)
@@ -87,6 +97,28 @@ module MergeRequests
unless merge_request.can_allow_collaboration?(current_user)
params.delete(:allow_collaboration)
end
+
+ filter_reviewer(merge_request)
+ end
+
+ def filter_reviewer(merge_request)
+ return if params[:reviewer_ids].blank?
+
+ unless can_admin_issuable?(merge_request) && merge_request.allows_reviewers?
+ params.delete(:reviewer_ids)
+
+ return
+ end
+
+ reviewer_ids = params[:reviewer_ids].select { |reviewer_id| user_can_read?(merge_request, reviewer_id) }
+
+ if params[:reviewer_ids].map(&:to_s) == [IssuableFinder::Params::NONE]
+ params[:reviewer_ids] = []
+ elsif reviewer_ids.any?
+ params[:reviewer_ids] = reviewer_ids
+ else
+ params.delete(:reviewer_ids)
+ end
end
def merge_request_metrics_service(merge_request)
diff --git a/app/services/merge_requests/cleanup_refs_service.rb b/app/services/merge_requests/cleanup_refs_service.rb
new file mode 100644
index 00000000000..0f03f5f09b4
--- /dev/null
+++ b/app/services/merge_requests/cleanup_refs_service.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ class CleanupRefsService
+ include BaseServiceUtility
+
+ TIME_THRESHOLD = 14.days
+
+ attr_reader :merge_request
+
+ def self.schedule(merge_request)
+ MergeRequestCleanupRefsWorker.perform_in(TIME_THRESHOLD, merge_request.id)
+ end
+
+ def initialize(merge_request)
+ @merge_request = merge_request
+ @repository = merge_request.project.repository
+ @ref_path = merge_request.ref_path
+ @merge_ref_path = merge_request.merge_ref_path
+ @ref_head_sha = @repository.commit(merge_request.ref_path).id
+ @merge_ref_sha = merge_request.merge_ref_head&.id
+ end
+
+ def execute
+ return error("Merge request has not been closed nor merged for #{TIME_THRESHOLD.inspect}.") unless eligible?
+
+ # Ensure that commit shas of refs are kept around so we won't lose them when GC runs.
+ keep_around
+
+ return error('Failed to create keep around refs.') unless kept_around?
+ return error('Failed to cache merge ref sha.') unless cache_merge_ref_sha
+
+ delete_refs
+ success
+ end
+
+ private
+
+ attr_reader :repository, :ref_path, :merge_ref_path, :ref_head_sha, :merge_ref_sha
+
+ def eligible?
+ return met_time_threshold?(merge_request.metrics&.latest_closed_at) if merge_request.closed?
+
+ merge_request.merged? && met_time_threshold?(merge_request.metrics&.merged_at)
+ end
+
+ def met_time_threshold?(attr)
+ attr.nil? || attr.to_i <= TIME_THRESHOLD.ago.to_i
+ end
+
+ def kept_around?
+ service = Gitlab::Git::KeepAround.new(repository)
+
+ [ref_head_sha, merge_ref_sha].compact.all? do |sha|
+ service.kept_around?(sha)
+ end
+ end
+
+ def keep_around
+ repository.keep_around(ref_head_sha, merge_ref_sha)
+ end
+
+ def cache_merge_ref_sha
+ return true if merge_ref_sha.nil?
+
+ # Caching the merge ref sha is needed before we delete the merge ref so
+ # we can still show the merge ref diff (via `MergeRequest#merge_ref_head`)
+ merge_request.update_column(:merge_ref_sha, merge_ref_sha)
+ end
+
+ def delete_refs
+ repository.delete_refs(ref_path, merge_ref_path)
+ end
+ end
+end
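
Both entry points added above can be exercised directly; a short sketch, assuming merge_request was merged or closed more than TIME_THRESHOLD (14 days) ago:

    # Run the cleanup immediately (for example from a Rails console):
    MergeRequests::CleanupRefsService.new(merge_request).execute

    # Or schedule it to run after the 14-day threshold via the worker:
    MergeRequests::CleanupRefsService.schedule(merge_request)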
diff --git a/app/services/merge_requests/close_service.rb b/app/services/merge_requests/close_service.rb
index c2174d2a130..b0a7face594 100644
--- a/app/services/merge_requests/close_service.rb
+++ b/app/services/merge_requests/close_service.rb
@@ -2,6 +2,8 @@
module MergeRequests
class CloseService < MergeRequests::BaseService
+ include RemovesRefs
+
def execute(merge_request, commit = nil)
return merge_request unless can?(current_user, :update_merge_request, merge_request)
@@ -19,6 +21,7 @@ module MergeRequests
merge_request.update_project_counter_caches
cleanup_environments(merge_request)
abort_auto_merge(merge_request, 'merge request was closed')
+ cleanup_refs(merge_request)
end
merge_request
diff --git a/app/services/merge_requests/create_pipeline_service.rb b/app/services/merge_requests/create_pipeline_service.rb
index f9352f10fea..46c4c102091 100644
--- a/app/services/merge_requests/create_pipeline_service.rb
+++ b/app/services/merge_requests/create_pipeline_service.rb
@@ -48,12 +48,18 @@ module MergeRequests
end
def can_create_pipeline_in_target_project?(merge_request)
- if Gitlab::Ci::Features.allow_to_create_merge_request_pipelines_in_target_project?(merge_request.target_project)
- can?(current_user, :create_pipeline, merge_request.target_project)
- else
+ if Gitlab::Ci::Features.disallow_to_create_merge_request_pipelines_in_target_project?(merge_request.target_project)
merge_request.for_same_project?
+ else
+ can?(current_user, :create_pipeline, merge_request.target_project) &&
+ can_update_source_branch_in_target_project?(merge_request)
end
end
+
+ def can_update_source_branch_in_target_project?(merge_request)
+ ::Gitlab::UserAccess.new(current_user, container: merge_request.target_project)
+ .can_update_branch?(merge_request.source_branch_ref)
+ end
end
end
diff --git a/app/services/merge_requests/merge_service.rb b/app/services/merge_requests/merge_service.rb
index 961a7cb1ef6..437e87dadf7 100644
--- a/app/services/merge_requests/merge_service.rb
+++ b/app/services/merge_requests/merge_service.rb
@@ -10,13 +10,14 @@ module MergeRequests
class MergeService < MergeRequests::MergeBaseService
delegate :merge_jid, :state, to: :@merge_request
- def execute(merge_request)
+ def execute(merge_request, options = {})
if project.merge_requests_ff_only_enabled && !self.is_a?(FfMergeService)
FfMergeService.new(project, current_user, params).execute(merge_request)
return
end
@merge_request = merge_request
+ @options = options
validate!
@@ -55,7 +56,7 @@ module MergeRequests
error =
if @merge_request.should_be_rebased?
'Only fast-forward merge is allowed for your project. Please update your source branch'
- elsif !@merge_request.mergeable?
+ elsif !@merge_request.mergeable?(skip_discussions_check: @options[:skip_discussions_check])
'Merge request is not mergeable'
elsif !@merge_request.squash && project.squash_always?
'This project requires squashing commits when merge requests are accepted.'
diff --git a/app/services/merge_requests/post_merge_service.rb b/app/services/merge_requests/post_merge_service.rb
index fdf8f442297..1c78fca3c26 100644
--- a/app/services/merge_requests/post_merge_service.rb
+++ b/app/services/merge_requests/post_merge_service.rb
@@ -7,6 +7,8 @@ module MergeRequests
# and execute all hooks and notifications
#
class PostMergeService < MergeRequests::BaseService
+ include RemovesRefs
+
def execute(merge_request)
merge_request.mark_as_merged
close_issues(merge_request)
@@ -20,6 +22,7 @@ module MergeRequests
delete_non_latest_diffs(merge_request)
cancel_review_app_jobs!(merge_request)
cleanup_environments(merge_request)
+ cleanup_refs(merge_request)
end
private
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index 56a91fa0305..405b8fe9c9e 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -2,6 +2,7 @@
module MergeRequests
class RefreshService < MergeRequests::BaseService
+ include Gitlab::Utils::StrongMemoize
attr_reader :push
def execute(oldrev, newrev, ref)
@@ -23,25 +24,37 @@ module MergeRequests
post_merge_manually_merged
link_forks_lfs_objects
reload_merge_requests
- outdate_suggestions
- refresh_pipelines_on_merge_requests
- abort_auto_merges
+
+ merge_requests_for_source_branch.each do |mr|
+ outdate_suggestions(mr)
+ refresh_pipelines_on_merge_requests(mr)
+ abort_auto_merges(mr)
+ mark_pending_todos_done(mr)
+ end
+
abort_ff_merge_requests_with_when_pipeline_succeeds
- mark_pending_todos_done
cache_merge_requests_closing_issues
- # Leave a system note if a branch was deleted/added
- if @push.branch_added? || @push.branch_removed?
- comment_mr_branch_presence_changed
- end
+ merge_requests_for_source_branch.each do |mr|
+ # Leave a system note if a branch was deleted/added
+ if branch_added_or_removed?
+ comment_mr_branch_presence_changed(mr)
+ end
- notify_about_push
- mark_mr_as_wip_from_commits
- execute_mr_web_hooks
+ notify_about_push(mr)
+ mark_mr_as_wip_from_commits(mr)
+ execute_mr_web_hooks(mr)
+ end
true
end
+ def branch_added_or_removed?
+ strong_memoize(:branch_added_or_removed) do
+ @push.branch_added? || @push.branch_removed?
+ end
+ end
+
def close_upon_missing_source_branch_ref
# MergeRequest#reload_diff ignores not opened MRs. This means it won't
# create an `empty` diff for `closed` MRs without a source branch, keeping
@@ -140,25 +153,22 @@ module MergeRequests
merge_request.source_branch == @push.branch_name
end
- def outdate_suggestions
- outdate_service = Suggestions::OutdateService.new
+ def outdate_suggestions(merge_request)
+ outdate_service.execute(merge_request)
+ end
- merge_requests_for_source_branch.each do |merge_request|
- outdate_service.execute(merge_request)
- end
+ def outdate_service
+ @outdate_service ||= Suggestions::OutdateService.new
end
- def refresh_pipelines_on_merge_requests
- merge_requests_for_source_branch.each do |merge_request|
- create_pipeline_for(merge_request, current_user)
- UpdateHeadPipelineForMergeRequestWorker.perform_async(merge_request.id)
- end
+ def refresh_pipelines_on_merge_requests(merge_request)
+ create_pipeline_for(merge_request, current_user)
+
+ UpdateHeadPipelineForMergeRequestWorker.perform_async(merge_request.id)
end
- def abort_auto_merges
- merge_requests_for_source_branch.each do |merge_request|
- abort_auto_merge(merge_request, 'source branch was updated')
- end
+ def abort_auto_merges(merge_request)
+ abort_auto_merge(merge_request, 'source branch was updated')
end
def abort_ff_merge_requests_with_when_pipeline_succeeds
@@ -187,10 +197,8 @@ module MergeRequests
.with_auto_merge_enabled
end
- def mark_pending_todos_done
- merge_requests_for_source_branch.each do |merge_request|
- todo_service.merge_request_push(merge_request, @current_user)
- end
+ def mark_pending_todos_done(merge_request)
+ todo_service.merge_request_push(merge_request, @current_user)
end
def find_new_commits
@@ -218,62 +226,54 @@ module MergeRequests
end
# Add comment about branches being deleted or added to merge requests
- def comment_mr_branch_presence_changed
+ def comment_mr_branch_presence_changed(merge_request)
presence = @push.branch_added? ? :add : :delete
- merge_requests_for_source_branch.each do |merge_request|
- SystemNoteService.change_branch_presence(
- merge_request, merge_request.project, @current_user,
- :source, @push.branch_name, presence)
- end
+ SystemNoteService.change_branch_presence(
+ merge_request, merge_request.project, @current_user,
+ :source, @push.branch_name, presence)
end
    # Add comment about pushing new commits to merge requests and send notification emails
- def notify_about_push
+ def notify_about_push(merge_request)
return unless @commits.present?
- merge_requests_for_source_branch.each do |merge_request|
- mr_commit_ids = Set.new(merge_request.commit_shas)
+ mr_commit_ids = Set.new(merge_request.commit_shas)
- new_commits, existing_commits = @commits.partition do |commit|
- mr_commit_ids.include?(commit.id)
- end
+ new_commits, existing_commits = @commits.partition do |commit|
+ mr_commit_ids.include?(commit.id)
+ end
- SystemNoteService.add_commits(merge_request, merge_request.project,
- @current_user, new_commits,
- existing_commits, @push.oldrev)
+ SystemNoteService.add_commits(merge_request, merge_request.project,
+ @current_user, new_commits,
+ existing_commits, @push.oldrev)
- notification_service.push_to_merge_request(merge_request, @current_user, new_commits: new_commits, existing_commits: existing_commits)
- end
+ notification_service.push_to_merge_request(merge_request, @current_user, new_commits: new_commits, existing_commits: existing_commits)
end
- def mark_mr_as_wip_from_commits
+ def mark_mr_as_wip_from_commits(merge_request)
return unless @commits.present?
- merge_requests_for_source_branch.each do |merge_request|
- commit_shas = merge_request.commit_shas
+ commit_shas = merge_request.commit_shas
- wip_commit = @commits.detect do |commit|
- commit.work_in_progress? && commit_shas.include?(commit.sha)
- end
+ wip_commit = @commits.detect do |commit|
+ commit.work_in_progress? && commit_shas.include?(commit.sha)
+ end
- if wip_commit && !merge_request.work_in_progress?
- merge_request.update(title: merge_request.wip_title)
- SystemNoteService.add_merge_request_wip_from_commit(
- merge_request,
- merge_request.project,
- @current_user,
- wip_commit
- )
- end
+ if wip_commit && !merge_request.work_in_progress?
+ merge_request.update(title: merge_request.wip_title)
+ SystemNoteService.add_merge_request_wip_from_commit(
+ merge_request,
+ merge_request.project,
+ @current_user,
+ wip_commit
+ )
end
end
    # Call merge request webhook with updated branches
- def execute_mr_web_hooks
- merge_requests_for_source_branch.each do |merge_request|
- execute_hooks(merge_request, 'update', old_rev: @push.oldrev)
- end
+ def execute_mr_web_hooks(merge_request)
+ execute_hooks(merge_request, 'update', old_rev: @push.oldrev)
end
# If the merge requests closes any issues, save this information in the
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index cf02158b629..1468bfd6bb6 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -24,8 +24,9 @@ module MergeRequests
old_labels = old_associations.fetch(:labels, [])
old_mentioned_users = old_associations.fetch(:mentioned_users, [])
old_assignees = old_associations.fetch(:assignees, [])
+ old_reviewers = old_associations.fetch(:reviewers, [])
- if has_changes?(merge_request, old_labels: old_labels, old_assignees: old_assignees)
+ if has_changes?(merge_request, old_labels: old_labels, old_assignees: old_assignees, old_reviewers: old_reviewers)
todo_service.resolve_todos_for_target(merge_request, current_user)
end
@@ -44,6 +45,8 @@ module MergeRequests
handle_assignees_change(merge_request, old_assignees) if merge_request.assignees != old_assignees
+ handle_reviewers_change(merge_request, old_reviewers) if merge_request.reviewers != old_reviewers
+
if merge_request.previous_changes.include?('target_branch') ||
merge_request.previous_changes.include?('source_branch')
merge_request.mark_as_unchecked
@@ -108,6 +111,10 @@ module MergeRequests
todo_service.reassigned_assignable(merge_request, current_user, old_assignees)
end
+ def handle_reviewers_change(merge_request, old_reviewers)
+ todo_service.reassigned_reviewable(merge_request, current_user, old_reviewers)
+ end
+
def create_branch_change_note(issuable, branch_type, old_branch, new_branch)
SystemNoteService.change_branch(
issuable, issuable.project, current_user, branch_type,
diff --git a/app/services/metrics/dashboard/custom_metric_embed_service.rb b/app/services/metrics/dashboard/custom_metric_embed_service.rb
index 229bd17f5cf..eff1db21aff 100644
--- a/app/services/metrics/dashboard/custom_metric_embed_service.rb
+++ b/app/services/metrics/dashboard/custom_metric_embed_service.rb
@@ -31,7 +31,7 @@ module Metrics
# A group title is valid if it is one of the limited
# options the user can select in the UI.
def valid_group_title?(group)
- PrometheusMetricEnums
+ Enums::PrometheusMetric
.custom_group_details
.map { |_, details| details[:group_title] }
.include?(group)
@@ -99,12 +99,12 @@ module Metrics
# Returns a symbol representing the group that
# the dashboard's group title belongs to.
# It will be one of the keys found under
- # PrometheusMetricEnums.custom_groups.
+ # Enums::PrometheusMetric.custom_groups.
#
# @return [String]
def group_key
strong_memoize(:group_key) do
- PrometheusMetricEnums
+ Enums::PrometheusMetric
.group_details
.find { |_, details| details[:group_title] == group }
.first
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index 4f2329a42f2..e26f662a697 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -2,6 +2,8 @@
module Notes
class CreateService < ::Notes::BaseService
+ include IncidentManagement::UsageData
+
def execute
note = Notes::BuildService.new(project, current_user, params.except(:merge_request_diff_head_sha)).execute
@@ -62,6 +64,7 @@ module Notes
clear_noteable_diffs_cache(note)
Suggestions::CreateService.new(note).execute
increment_usage_counter(note)
+ track_event(note, current_user)
if Feature.enabled?(:notes_create_service_tracking, project)
Gitlab::Tracking.event('Notes::CreateService', 'execute', tracking_data_for(note))
@@ -104,5 +107,11 @@ module Notes
value: note.id
}
end
+
+ def track_event(note, user)
+ return unless note.noteable.is_a?(Issue) && note.noteable.incident?
+
+ track_usage_event(:incident_management_incident_comment, user.id)
+ end
end
end
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index 909a0033d12..731d72c41d4 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -35,6 +35,12 @@ class NotificationService
@async ||= Async.new(self)
end
+ def disabled_two_factor(user)
+ return unless user.can?(:receive_notifications)
+
+ mailer.disabled_two_factor_email(user).deliver_later
+ end
+
# Always notify user about ssh key added
# only if ssh key is not deploy key
#
diff --git a/app/services/packages/composer/create_package_service.rb b/app/services/packages/composer/create_package_service.rb
index ad5d267698b..7e16fc78599 100644
--- a/app/services/packages/composer/create_package_service.rb
+++ b/app/services/packages/composer/create_package_service.rb
@@ -2,7 +2,7 @@
module Packages
module Composer
- class CreatePackageService < BaseService
+ class CreatePackageService < ::Packages::CreatePackageService
include ::Gitlab::Utils::StrongMemoize
def execute
@@ -21,10 +21,7 @@ module Packages
private
def created_package
- project
- .packages
- .composer
- .safe_find_or_create_by!(name: package_name, version: package_version)
+ find_or_create_package!(:composer, name: package_name, version: package_version)
end
def composer_json
diff --git a/app/services/packages/conan/create_package_service.rb b/app/services/packages/conan/create_package_service.rb
index 22a0436c5fb..35046d8776e 100644
--- a/app/services/packages/conan/create_package_service.rb
+++ b/app/services/packages/conan/create_package_service.rb
@@ -2,12 +2,11 @@
module Packages
module Conan
- class CreatePackageService < BaseService
+ class CreatePackageService < ::Packages::CreatePackageService
def execute
- project.packages.create!(
+ create_package!(:conan,
name: params[:package_name],
version: params[:package_version],
- package_type: :conan,
conan_metadatum_attributes: {
package_username: params[:package_username],
package_channel: params[:package_channel]
diff --git a/app/services/packages/create_package_service.rb b/app/services/packages/create_package_service.rb
new file mode 100644
index 00000000000..397a5f74e0a
--- /dev/null
+++ b/app/services/packages/create_package_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Packages
+ class CreatePackageService < BaseService
+ protected
+
+ def find_or_create_package!(package_type, name: params[:name], version: params[:version])
+ project
+ .packages
+ .with_package_type(package_type)
+ .safe_find_or_create_by!(name: name, version: version) do |pkg|
+ pkg.creator = package_creator
+ end
+ end
+
+ def create_package!(package_type, attrs = {})
+ project
+ .packages
+ .with_package_type(package_type)
+ .create!(package_attrs(attrs))
+ end
+
+ private
+
+ def package_attrs(attrs)
+ {
+ creator: package_creator,
+ name: params[:name],
+ version: params[:version]
+ }.merge(attrs)
+ end
+
+ def package_creator
+ current_user if current_user.is_a?(User)
+ end
+ end
+end
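
The package-type services below (Composer, Conan, Maven, npm, NuGet, PyPI) all delegate to the two protected helpers above; a hedged sketch of what a hypothetical additional package type could look like:

    # Hypothetical subclass, not part of this diff.
    module Packages
      module Example
        class CreatePackageService < ::Packages::CreatePackageService
          def execute
            # Idempotent lookup on (name, version); the creator is set by the helper.
            find_or_create_package!(:generic, name: params[:name], version: params[:version])
          end
        end
      end
    end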
diff --git a/app/services/packages/maven/create_package_service.rb b/app/services/packages/maven/create_package_service.rb
index aca5d28ca98..3df17021499 100644
--- a/app/services/packages/maven/create_package_service.rb
+++ b/app/services/packages/maven/create_package_service.rb
@@ -1,15 +1,12 @@
# frozen_string_literal: true
module Packages
module Maven
- class CreatePackageService < BaseService
+ class CreatePackageService < ::Packages::CreatePackageService
def execute
app_group, _, app_name = params[:name].rpartition('/')
app_group.tr!('/', '.')
- package = project.packages.create!(
- name: params[:name],
- version: params[:version],
- package_type: :maven,
+ package = create_package!(:maven,
maven_metadatum_attributes: {
path: params[:path],
app_group: app_group,
diff --git a/app/services/packages/npm/create_package_service.rb b/app/services/packages/npm/create_package_service.rb
index cf927683ce9..7f868b71734 100644
--- a/app/services/packages/npm/create_package_service.rb
+++ b/app/services/packages/npm/create_package_service.rb
@@ -1,24 +1,21 @@
# frozen_string_literal: true
module Packages
module Npm
- class CreatePackageService < BaseService
+ class CreatePackageService < ::Packages::CreatePackageService
include Gitlab::Utils::StrongMemoize
def execute
return error('Version is empty.', 400) if version.blank?
return error('Package already exists.', 403) if current_package_exists?
+ return error('File is too large.', 400) if file_size_exceeded?
- ActiveRecord::Base.transaction { create_package! }
+ ActiveRecord::Base.transaction { create_npm_package! }
end
private
- def create_package!
- package = project.packages.create!(
- name: name,
- version: version,
- package_type: 'npm'
- )
+ def create_npm_package!
+ package = create_package!(:npm, name: name, version: version)
if build.present?
package.create_build_info!(pipeline: build.pipeline)
@@ -86,6 +83,10 @@ module Packages
_version, versions_data = params[:versions].first
versions_data
end
+
+ def file_size_exceeded?
+ project.actual_limits.exceeded?(:npm_max_file_size, attachment['length'].to_i)
+ end
end
end
end
diff --git a/app/services/packages/nuget/create_dependency_service.rb b/app/services/packages/nuget/create_dependency_service.rb
index 2be5db732f6..19143fe3778 100644
--- a/app/services/packages/nuget/create_dependency_service.rb
+++ b/app/services/packages/nuget/create_dependency_service.rb
@@ -41,7 +41,7 @@ module Packages
}
end
- ::Gitlab::Database.bulk_insert(::Packages::Nuget::DependencyLinkMetadatum.table_name, rows.compact)
+ ::Gitlab::Database.bulk_insert(::Packages::Nuget::DependencyLinkMetadatum.table_name, rows.compact) # rubocop:disable Gitlab/BulkInsert
end
def raw_dependency_for(dependency)
diff --git a/app/services/packages/nuget/create_package_service.rb b/app/services/packages/nuget/create_package_service.rb
index 68ad7f028e4..3999ccd3347 100644
--- a/app/services/packages/nuget/create_package_service.rb
+++ b/app/services/packages/nuget/create_package_service.rb
@@ -2,12 +2,12 @@
module Packages
module Nuget
- class CreatePackageService < BaseService
+ class CreatePackageService < ::Packages::CreatePackageService
TEMPORARY_PACKAGE_NAME = 'NuGet.Temporary.Package'
PACKAGE_VERSION = '0.0.0'
def execute
- project.packages.nuget.create!(
+ create_package!(:nuget,
name: TEMPORARY_PACKAGE_NAME,
version: "#{PACKAGE_VERSION}-#{uuid}"
)
diff --git a/app/services/packages/pypi/create_package_service.rb b/app/services/packages/pypi/create_package_service.rb
index 1313fc80e33..c49efca0fc5 100644
--- a/app/services/packages/pypi/create_package_service.rb
+++ b/app/services/packages/pypi/create_package_service.rb
@@ -2,16 +2,22 @@
module Packages
module Pypi
- class CreatePackageService < BaseService
+ class CreatePackageService < ::Packages::CreatePackageService
include ::Gitlab::Utils::StrongMemoize
def execute
::Packages::Package.transaction do
- Packages::Pypi::Metadatum.upsert(
- package_id: created_package.id,
+ meta = Packages::Pypi::Metadatum.new(
+ package: created_package,
required_python: params[:requires_python]
)
+ unless meta.valid?
+ raise ActiveRecord::RecordInvalid.new(meta)
+ end
+
+ Packages::Pypi::Metadatum.upsert(meta.attributes)
+
::Packages::CreatePackageFileService.new(created_package, file_params).execute
end
end
@@ -20,10 +26,7 @@ module Packages
def created_package
strong_memoize(:created_package) do
- project
- .packages
- .pypi
- .safe_find_or_create_by!(name: params[:name], version: params[:version])
+ find_or_create_package!(:pypi)
end
end
diff --git a/app/services/packages/update_tags_service.rb b/app/services/packages/update_tags_service.rb
index da50cd3479e..adfad52910e 100644
--- a/app/services/packages/update_tags_service.rb
+++ b/app/services/packages/update_tags_service.rb
@@ -15,7 +15,7 @@ module Packages
tags_to_create = @tags - existing_tags
@package.tags.with_name(tags_to_destroy).delete_all if tags_to_destroy.any?
- ::Gitlab::Database.bulk_insert(Packages::Tag.table_name, rows(tags_to_create)) if tags_to_create.any?
+ ::Gitlab::Database.bulk_insert(Packages::Tag.table_name, rows(tags_to_create)) if tags_to_create.any? # rubocop:disable Gitlab/BulkInsert
end
private
diff --git a/app/services/pages/delete_service.rb b/app/services/pages/delete_service.rb
index 7408943e78c..fc5d01a93a1 100644
--- a/app/services/pages/delete_service.rb
+++ b/app/services/pages/delete_service.rb
@@ -3,8 +3,7 @@
module Pages
class DeleteService < BaseService
def execute
- project.remove_pages
- project.pages_domains.destroy_all # rubocop: disable Cop/DestroyAll
+ PagesRemoveWorker.perform_async(project.id)
end
end
end
diff --git a/app/services/product_analytics/build_activity_graph_service.rb b/app/services/product_analytics/build_activity_graph_service.rb
new file mode 100644
index 00000000000..63108d76afd
--- /dev/null
+++ b/app/services/product_analytics/build_activity_graph_service.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module ProductAnalytics
+ class BuildActivityGraphService < BuildGraphService
+ def execute
+ timerange = @params[:timerange].days
+
+ results = product_analytics_events.count_collector_tstamp_by_day(timerange)
+
+ format_results('collector_tstamp', results.transform_keys(&:to_date))
+ end
+ end
+end
diff --git a/app/services/product_analytics/build_graph_service.rb b/app/services/product_analytics/build_graph_service.rb
index 31f9f093bb9..da54ad4de0e 100644
--- a/app/services/product_analytics/build_graph_service.rb
+++ b/app/services/product_analytics/build_graph_service.rb
@@ -13,15 +13,19 @@ module ProductAnalytics
results = product_analytics_events.count_by_graph(graph, timerange)
+ format_results(graph, results)
+ end
+
+ private
+
+ def format_results(name, results)
{
- id: graph,
+ id: name,
keys: results.keys,
values: results.values
}
end
- private
-
def product_analytics_events
@project.product_analytics_events
end
diff --git a/app/services/projects/after_rename_service.rb b/app/services/projects/after_rename_service.rb
index 2a35a07d555..a2cdb87e631 100644
--- a/app/services/projects/after_rename_service.rb
+++ b/app/services/projects/after_rename_service.rb
@@ -96,9 +96,19 @@ module Projects
.rename_project(path_before, project_path, namespace_full_path)
end
- Gitlab::PagesTransfer
- .new
- .rename_project(path_before, project_path, namespace_full_path)
+ if project.pages_deployed?
+      # The block will be evaluated in the context of the project, so we need
+ # to bind to a local variable to capture it, as the instance
+ # variable and method aren't available on Project
+ path_before_local = @path_before
+
+ project.run_after_commit_or_now do
+ Gitlab::PagesTransfer
+ .new
+ .async
+ .rename_project(path_before_local, path, namespace.full_path)
+ end
+ end
end
def log_completion
@@ -110,8 +120,7 @@ module Projects
def migrate_to_hashed_storage?
Gitlab::CurrentSettings.hashed_storage_enabled? &&
- project.storage_upgradable? &&
- Feature.disabled?(:skip_hashed_storage_upgrade)
+ project.storage_upgradable?
end
def send_move_instructions?
diff --git a/app/services/projects/alerting/notify_service.rb b/app/services/projects/alerting/notify_service.rb
index f883c8c7bd8..bfce5f1ad63 100644
--- a/app/services/projects/alerting/notify_service.rb
+++ b/app/services/projects/alerting/notify_service.rb
@@ -42,12 +42,39 @@ module Projects
end
def process_existing_alert(alert)
- alert.register_new_event!
+ if am_alert_params[:ended_at].present?
+ process_resolved_alert(alert)
+ else
+ alert.register_new_event!
+ end
+
+ alert
+ end
+
+ def process_resolved_alert(alert)
+ return unless auto_close_incident?
+
+ if alert.resolve(am_alert_params[:ended_at])
+ close_issue(alert.issue)
+ end
+
+ alert
+ end
+
+ def close_issue(issue)
+ return if issue.blank? || issue.closed?
+
+ ::Issues::CloseService
+ .new(project, User.alert_bot)
+ .execute(issue, system_note: false)
+
+ SystemNoteService.auto_resolve_prometheus_alert(issue, project, User.alert_bot) if issue.reset.closed?
end
def create_alert
- alert = AlertManagement::Alert.create(am_alert_params)
+ alert = AlertManagement::Alert.create(am_alert_params.except(:ended_at))
alert.execute_services if alert.persisted?
+ SystemNoteService.create_new_alert(alert, 'Generic Alert Endpoint')
alert
end
diff --git a/app/services/projects/container_repository/gitlab/delete_tags_service.rb b/app/services/projects/container_repository/gitlab/delete_tags_service.rb
index 18049648e26..cee94b994a3 100644
--- a/app/services/projects/container_repository/gitlab/delete_tags_service.rb
+++ b/app/services/projects/container_repository/gitlab/delete_tags_service.rb
@@ -5,6 +5,11 @@ module Projects
module Gitlab
class DeleteTagsService
include BaseServiceUtility
+ include ::Gitlab::Utils::StrongMemoize
+
+ DISABLED_TIMEOUTS = [nil, 0].freeze
+
+ TimeoutError = Class.new(StandardError)
def initialize(container_repository, tag_names)
@container_repository = container_repository
@@ -17,12 +22,42 @@ module Projects
def execute
return success(deleted: []) if @tag_names.empty?
+ delete_tags
+ rescue TimeoutError => e
+ ::Gitlab::ErrorTracking.track_exception(e, tags_count: @tag_names&.size, container_repository_id: @container_repository&.id)
+ error('timeout while deleting tags')
+ end
+
+ private
+
+ def delete_tags
+ start_time = Time.zone.now
+
deleted_tags = @tag_names.select do |name|
+ raise TimeoutError if timeout?(start_time)
+
@container_repository.delete_tag_by_name(name)
end
deleted_tags.any? ? success(deleted: deleted_tags) : error('could not delete tags')
end
+
+ def timeout?(start_time)
+ return false unless throttling_enabled?
+ return false if service_timeout.in?(DISABLED_TIMEOUTS)
+
+ (Time.zone.now - start_time) > service_timeout
+ end
+
+ def throttling_enabled?
+ strong_memoize(:feature_flag) do
+ Feature.enabled?(:container_registry_expiration_policies_throttling)
+ end
+ end
+
+ def service_timeout
+ ::Gitlab::CurrentSettings.current_application_settings.container_registry_delete_tags_service_timeout
+ end
end
end
end
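
The new timeout is a plain elapsed-time guard inside the selection loop; a nil or zero setting disables it and the whole loop aborts with TimeoutError once the budget is spent. The same pattern in isolation (method and variable names hypothetical):

    TimeoutError = Class.new(StandardError)

    def delete_each(names, timeout_seconds)
      start_time = Time.now
      names.select do |name|
        raise TimeoutError if timeout_seconds.to_i > 0 && (Time.now - start_time) > timeout_seconds

        yield name # truthy return keeps the tag in the "deleted" list
      end
    end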
diff --git a/app/services/projects/container_repository/third_party/delete_tags_service.rb b/app/services/projects/container_repository/third_party/delete_tags_service.rb
index 6504172109e..404642acf72 100644
--- a/app/services/projects/container_repository/third_party/delete_tags_service.rb
+++ b/app/services/projects/container_repository/third_party/delete_tags_service.rb
@@ -15,7 +15,7 @@ module Projects
# This is a hack as the registry doesn't support deleting individual
# tags. This code effectively pushes a dummy image and assigns the tag to it.
# This way when the tag is deleted only the dummy image is affected.
- # This is used to preverse compatibility with third-party registries that
+ # This is used to preserve compatibility with third-party registries that
# don't support fast delete.
# See https://gitlab.com/gitlab-org/gitlab/issues/15737 for a discussion
def execute
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 33ed1151407..68b40fdd8f1 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -114,8 +114,13 @@ module Projects
# completes), and any other affected users in the background
def setup_authorizations
if @project.group
- current_user.project_authorizations.create!(project: @project,
- access_level: @project.group.max_member_access_for_user(current_user))
+ group_access_level = @project.group.max_member_access_for_user(current_user,
+ only_concrete_membership: true)
+
+ if group_access_level > GroupMember::NO_ACCESS
+ current_user.project_authorizations.create!(project: @project,
+ access_level: group_access_level)
+ end
if Feature.enabled?(:specialized_project_authorization_workers)
AuthorizedProjectUpdate::ProjectCreateWorker.perform_async(@project.id)
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index 37487261f2c..bec75657530 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -28,7 +28,7 @@ module Projects
Projects::UnlinkForkService.new(project, current_user).execute
- attempt_destroy_transaction(project)
+ attempt_destroy(project)
system_hook_service.execute_hooks_for(project, :destroy)
log_info("Project \"#{project.full_path}\" was deleted")
@@ -98,29 +98,35 @@ module Projects
log_error("Deletion failed on #{project.full_path} with the following message: #{message}")
end
- def attempt_destroy_transaction(project)
+ def attempt_destroy(project)
unless remove_registry_tags
raise_error(s_('DeleteProject|Failed to remove some tags in project container registry. Please try again or contact administrator.'))
end
project.leave_pool_repository
- Project.transaction do
- log_destroy_event
- trash_relation_repositories!
- trash_project_repositories!
-
- # Rails attempts to load all related records into memory before
- # destroying: https://github.com/rails/rails/issues/22510
- # This ensures we delete records in batches.
- #
- # Exclude container repositories because its before_destroy would be
- # called multiple times, and it doesn't destroy any database records.
- project.destroy_dependent_associations_in_batches(exclude: [:container_repositories, :snippets])
- project.destroy!
+ if Gitlab::Ci::Features.project_transactionless_destroy?(project)
+ destroy_project_related_records(project)
+ else
+ Project.transaction { destroy_project_related_records(project) }
end
end
+ def destroy_project_related_records(project)
+ log_destroy_event
+ trash_relation_repositories!
+ trash_project_repositories!
+
+ # Rails attempts to load all related records into memory before
+ # destroying: https://github.com/rails/rails/issues/22510
+ # This ensures we delete records in batches.
+ #
+ # Exclude container repositories because its before_destroy would be
+ # called multiple times, and it doesn't destroy any database records.
+ project.destroy_dependent_associations_in_batches(exclude: [:container_repositories, :snippets])
+ project.destroy!
+ end
+
def log_destroy_event
log_info("Attempting to destroy #{project.full_path} (#{project.id})")
end
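
The batched deletion the comment refers to exists because a plain destroy loads every dependent record into memory first (rails/rails#22510). A rough sketch of deleting a relation in ID batches without instantiating records, under that assumption (method name hypothetical, not the codebase's implementation):

    def delete_in_batches(relation, batch_size: 100)
      loop do
        batch_ids = relation.limit(batch_size).pluck(:id)
        break if batch_ids.empty?

        # one SQL DELETE per batch; skips callbacks and record instantiation
        relation.where(id: batch_ids).delete_all
      end
    end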
diff --git a/app/services/projects/download_service.rb b/app/services/projects/download_service.rb
index aba175eb79b..9810db84605 100644
--- a/app/services/projects/download_service.rb
+++ b/app/services/projects/download_service.rb
@@ -27,7 +27,7 @@ module Projects
end
def http?(url)
- url =~ /\A#{URI.regexp(%w(http https))}\z/
+ url =~ /\A#{URI::DEFAULT_PARSER.make_regexp(%w(http https))}\z/
end
def valid_domain?(url)
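
URI.regexp is deprecated; URI::DEFAULT_PARSER.make_regexp is the drop-in replacement for building a scheme-restricted URI pattern, anchored here with \A and \z. A quick check in irb:

    require 'uri'

    pattern = /\A#{URI::DEFAULT_PARSER.make_regexp(%w(http https))}\z/
    'https://example.com/archive.zip' =~ pattern # => 0 (match)
    'ftp://example.com/archive.zip'   =~ pattern # => nil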
diff --git a/app/services/projects/fork_service.rb b/app/services/projects/fork_service.rb
index bb660d47887..0b4963e356a 100644
--- a/app/services/projects/fork_service.rb
+++ b/app/services/projects/fork_service.rb
@@ -10,8 +10,8 @@ module Projects
forked_project
end
- def valid_fork_targets
- @valid_fork_targets ||= ForkTargetsFinder.new(@project, current_user).execute
+ def valid_fork_targets(options = {})
+ @valid_fork_targets ||= ForkTargetsFinder.new(@project, current_user).execute(options)
end
def valid_fork_target?(namespace = target_namespace)
diff --git a/app/services/projects/lfs_pointers/lfs_download_service.rb b/app/services/projects/lfs_pointers/lfs_download_service.rb
index 52c73bcff03..d6e5b825e13 100644
--- a/app/services/projects/lfs_pointers/lfs_download_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_download_service.rb
@@ -6,6 +6,9 @@ module Projects
class LfsDownloadService < BaseService
SizeError = Class.new(StandardError)
OidError = Class.new(StandardError)
+ ResponseError = Class.new(StandardError)
+
+ LARGE_FILE_SIZE = 1.megabytes
attr_reader :lfs_download_object
delegate :oid, :size, :credentials, :sanitized_url, to: :lfs_download_object, prefix: :lfs
@@ -19,6 +22,7 @@ module Projects
def execute
return unless project&.lfs_enabled? && lfs_download_object
return error("LFS file with oid #{lfs_oid} has invalid attributes") unless lfs_download_object.valid?
+ return link_existing_lfs_object! if Feature.enabled?(:lfs_link_existing_object, project, default_enabled: true) && lfs_size > LARGE_FILE_SIZE && lfs_object
wrap_download_errors do
download_lfs_file!
@@ -29,7 +33,7 @@ module Projects
def wrap_download_errors(&block)
yield
- rescue SizeError, OidError, StandardError => e
+ rescue SizeError, OidError, ResponseError, StandardError => e
error("LFS file with oid #{lfs_oid} could't be downloaded from #{lfs_sanitized_url}: #{e.message}")
end
@@ -56,15 +60,13 @@ module Projects
def download_and_save_file!(file)
digester = Digest::SHA256.new
- response = Gitlab::HTTP.get(lfs_sanitized_url, download_headers) do |fragment|
+ fetch_file do |fragment|
digester << fragment
file.write(fragment)
raise_size_error! if file.size > lfs_size
end
- raise StandardError, "Received error code #{response.code}" unless response.success?
-
raise_size_error! if file.size != lfs_size
raise_oid_error! if digester.hexdigest != lfs_oid
end
@@ -78,6 +80,12 @@ module Projects
end
end
+ def fetch_file(&block)
+ response = Gitlab::HTTP.get(lfs_sanitized_url, download_headers, &block)
+
+ raise ResponseError, "Received error code #{response.code}" unless response.success?
+ end
+
def with_tmp_file
create_tmp_storage_dir
@@ -123,6 +131,29 @@ module Projects
super
end
+
+ def lfs_object
+ @lfs_object ||= LfsObject.find_by_oid(lfs_oid)
+ end
+
+ def link_existing_lfs_object!
+ existing_file = lfs_object.file.open
+ buffer_size = 0
+ result = fetch_file do |fragment|
+ unless fragment == existing_file.read(fragment.size)
+ break error("LFS file with oid #{lfs_oid} cannot be linked with an existing LFS object")
+ end
+
+ buffer_size += fragment.size
+ break success if buffer_size > LARGE_FILE_SIZE
+ end
+
+ project.lfs_objects << lfs_object
+
+ result
+ ensure
+ existing_file&.close
+ end
end
end
end
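
download_and_save_file! verifies the size and OID while streaming by feeding every fragment into a running SHA256 digest. The same idea in isolation, with a hypothetical fragment enumerator standing in for the HTTP client:

    require 'digest'

    def stream_and_verify(fragments, expected_size, expected_oid, io)
      digester = Digest::SHA256.new
      fragments.each do |fragment|
        digester << fragment
        io.write(fragment)
        raise 'size exceeded' if io.size > expected_size
      end

      raise 'size mismatch' unless io.size == expected_size
      raise 'oid mismatch'  unless digester.hexdigest == expected_oid
    end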
diff --git a/app/services/projects/open_issues_count_service.rb b/app/services/projects/open_issues_count_service.rb
index 82632d63e5b..dc450311db2 100644
--- a/app/services/projects/open_issues_count_service.rb
+++ b/app/services/projects/open_issues_count_service.rb
@@ -68,10 +68,12 @@ module Projects
# Check https://gitlab.com/gitlab-org/gitlab-foss/issues/38418 description.
# rubocop: disable CodeReuse/ActiveRecord
def self.query(projects, public_only: true)
+ issues_filtered_by_type = Issue.opened.with_issue_type(Issue::TYPES_FOR_LIST)
+
if public_only
- Issue.opened.public_only.where(project: projects)
+ issues_filtered_by_type.public_only.where(project: projects)
else
- Issue.opened.where(project: projects)
+ issues_filtered_by_type.where(project: projects)
end
end
# rubocop: enable CodeReuse/ActiveRecord
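
The change keeps the single-query shape and only prepends a type filter, so both branches share one scope. Roughly, using the model and scope names as they appear in the diff:

    scope = Issue.opened.with_issue_type(Issue::TYPES_FOR_LIST)
    scope = scope.public_only if public_only
    scope.where(project: projects)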
diff --git a/app/services/projects/propagate_service_template.rb b/app/services/projects/propagate_service_template.rb
deleted file mode 100644
index 54d09b354a1..00000000000
--- a/app/services/projects/propagate_service_template.rb
+++ /dev/null
@@ -1,83 +0,0 @@
-# frozen_string_literal: true
-
-module Projects
- class PropagateServiceTemplate
- BATCH_SIZE = 100
-
- delegate :data_fields_present?, to: :template
-
- def self.propagate(template)
- new(template).propagate
- end
-
- def initialize(template)
- @template = template
- end
-
- def propagate
- return unless template.active?
-
- propagate_projects_with_template
- end
-
- private
-
- attr_reader :template
-
- def propagate_projects_with_template
- loop do
- batch = Project.uncached { Project.ids_without_integration(template, BATCH_SIZE) }
-
- bulk_create_from_template(batch) unless batch.empty?
-
- break if batch.size < BATCH_SIZE
- end
- end
-
- def bulk_create_from_template(batch)
- service_list = ServiceList.new(batch, service_hash).to_array
-
- Project.transaction do
- results = bulk_insert(*service_list)
-
- if data_fields_present?
- data_list = DataList.new(results, data_fields_hash, template.data_fields.class).to_array
-
- bulk_insert(*data_list)
- end
-
- run_callbacks(batch)
- end
- end
-
- def bulk_insert(klass, columns, values_array)
- items_to_insert = values_array.map { |array| Hash[columns.zip(array)] }
-
- klass.insert_all(items_to_insert, returning: [:id])
- end
-
- def service_hash
- @service_hash ||= template.to_service_hash
- end
-
- def data_fields_hash
- @data_fields_hash ||= template.to_data_fields_hash
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def run_callbacks(batch)
- if template.issue_tracker?
- Project.where(id: batch).update_all(has_external_issue_tracker: true)
- end
-
- if active_external_wiki?
- Project.where(id: batch).update_all(has_external_wiki: true)
- end
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def active_external_wiki?
- template.type == 'ExternalWikiService'
- end
- end
-end
diff --git a/app/services/projects/transfer_service.rb b/app/services/projects/transfer_service.rb
index 0fb70feec86..dba5177718d 100644
--- a/app/services/projects/transfer_service.rb
+++ b/app/services/projects/transfer_service.rb
@@ -88,15 +88,14 @@ module Projects
# Move uploads
move_project_uploads(project)
- # Move pages
- Gitlab::PagesTransfer.new.move_project(project.path, @old_namespace.full_path, @new_namespace.full_path)
-
project.old_path_with_namespace = @old_path
update_repository_configuration(@new_path)
execute_system_hooks
end
+
+ move_pages(project)
rescue Exception # rubocop:disable Lint/RescueException
rollback_side_effects
raise
@@ -181,6 +180,13 @@ module Projects
)
end
+ def move_pages(project)
+ return unless project.pages_deployed?
+
+ transfer = Gitlab::PagesTransfer.new.async
+ transfer.move_project(project.path, @old_namespace.full_path, @new_namespace.full_path)
+ end
+
def old_wiki_repo_path
"#{old_path}#{::Gitlab::GlRepository::WIKI.path_suffix}"
end
diff --git a/app/services/projects/unlink_fork_service.rb b/app/services/projects/unlink_fork_service.rb
index b3cf27373cd..52aea8c51a5 100644
--- a/app/services/projects/unlink_fork_service.rb
+++ b/app/services/projects/unlink_fork_service.rb
@@ -2,21 +2,13 @@
module Projects
class UnlinkForkService < BaseService
- # If a fork is given, it:
- #
- # - Saves LFS objects to the root project
- # - Close existing MRs coming from it
- # - Is removed from the fork network
- #
- # If a root of fork(s) is given, it does the same,
- # but not updating LFS objects (there'll be no related root to cache it).
+ # Close existing MRs coming from the project and remove it from the fork network
def execute
fork_network = @project.fork_network
+ forked_from = @project.forked_from_project
return unless fork_network
- save_lfs_objects
-
merge_requests = fork_network
.merge_requests
.opened
@@ -41,7 +33,7 @@ module Projects
# When the project getting out of the network is a node with parent
# and children, both the parent and the node needs a cache refresh.
- [@project.forked_from_project, @project].compact.each do |project|
+ [forked_from, @project].compact.each do |project|
refresh_forks_count(project)
end
end
@@ -51,22 +43,5 @@ module Projects
def refresh_forks_count(project)
Projects::ForksCountService.new(project).refresh_cache
end
-
- # TODO: Remove this method once all LfsObjectsProject records are backfilled
- # for forks.
- #
- # See https://gitlab.com/gitlab-org/gitlab/issues/122002 for more info.
- def save_lfs_objects
- return unless @project.forked?
-
- lfs_storage_project = @project.lfs_storage_project
-
- return unless lfs_storage_project
- return if lfs_storage_project == @project # that project is being unlinked
-
- lfs_storage_project.lfs_objects.find_each do |lfs_object|
- lfs_object.projects << @project unless lfs_object.projects.include?(@project)
- end
- end
end
end
diff --git a/app/services/projects/update_pages_configuration_service.rb b/app/services/projects/update_pages_configuration_service.rb
index 88c17d502df..67d388dc8a3 100644
--- a/app/services/projects/update_pages_configuration_service.rb
+++ b/app/services/projects/update_pages_configuration_service.rb
@@ -22,9 +22,6 @@ module Projects
end
success
- rescue => e
- Gitlab::ErrorTracking.track_exception(e)
- error(e.message, pass_back: { exception: e })
end
private
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index 334f5993d15..ea37f2e4ec0 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -52,7 +52,7 @@ module Projects
def success
@status.success
- @project.mark_pages_as_deployed
+ @project.mark_pages_as_deployed(artifacts_archive: build.job_artifacts_archive)
super
end
diff --git a/app/services/projects/update_remote_mirror_service.rb b/app/services/projects/update_remote_mirror_service.rb
index 7961f689259..5c41f00aac2 100644
--- a/app/services/projects/update_remote_mirror_service.rb
+++ b/app/services/projects/update_remote_mirror_service.rb
@@ -31,6 +31,9 @@ module Projects
remote_mirror.update_start!
remote_mirror.ensure_remote!
+ # LFS objects must be sent first, or the push has dangling pointers
+ send_lfs_objects!(remote_mirror)
+
response = remote_mirror.update_repository
if response.divergent_refs.any?
@@ -43,6 +46,23 @@ module Projects
end
end
+ def send_lfs_objects!(remote_mirror)
+ return unless Feature.enabled?(:push_mirror_syncs_lfs, project)
+ return unless project.lfs_enabled?
+
+ # TODO: Support LFS sync over SSH
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/249587
+ return unless remote_mirror.url =~ /\Ahttps?:\/\//i
+ return unless remote_mirror.password_auth?
+
+ Lfs::PushService.new(
+ project,
+ current_user,
+ url: remote_mirror.bare_url,
+ credentials: remote_mirror.credentials
+ ).execute
+ end
+
def retry_or_fail(mirror, message, tries)
if tries < MAX_TRIES
mirror.mark_for_retry!(message)
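
The URL guard in send_lfs_objects! is a case-insensitive scheme check, so SSH mirrors are skipped until LFS-over-SSH is supported. In isolation:

    'https://mirror.example.com/repo.git'   =~ /\Ahttps?:\/\//i # => 0, HTTP(S) mirror, LFS is pushed
    'ssh://git@mirror.example.com/repo.git' =~ /\Ahttps?:\/\//i # => nil, mirror is skipped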
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index c9ba7cde199..bb430811497 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -144,11 +144,7 @@ module Projects
def update_pages_config
return unless project.pages_deployed?
- if Feature.enabled?(:async_update_pages_config, project)
- PagesUpdateConfigurationWorker.perform_async(project.id)
- else
- Projects::UpdatePagesConfigurationService.new(project).execute
- end
+ PagesUpdateConfigurationWorker.perform_async(project.id)
end
def changing_pages_https_only?
diff --git a/app/services/prometheus/proxy_service.rb b/app/services/prometheus/proxy_service.rb
index 33635796771..c1bafd03b48 100644
--- a/app/services/prometheus/proxy_service.rb
+++ b/app/services/prometheus/proxy_service.rb
@@ -44,8 +44,8 @@ module Prometheus
def self.from_cache(proxyable_class_name, proxyable_id, method, path, params)
proxyable_class = begin
proxyable_class_name.constantize
- rescue NameError
- nil
+ rescue NameError
+ nil
end
return unless proxyable_class
diff --git a/app/services/quick_actions/interpret_service.rb b/app/services/quick_actions/interpret_service.rb
index a781eacc40e..de1cd7cd981 100644
--- a/app/services/quick_actions/interpret_service.rb
+++ b/app/services/quick_actions/interpret_service.rb
@@ -10,6 +10,7 @@ module QuickActions
include Gitlab::QuickActions::MergeRequestActions
include Gitlab::QuickActions::CommitActions
include Gitlab::QuickActions::CommonActions
+ include Gitlab::QuickActions::RelateActions
attr_reader :quick_action_target
diff --git a/app/services/resource_events/change_state_service.rb b/app/services/resource_events/change_state_service.rb
index 202972c1efd..cd6d82df46f 100644
--- a/app/services/resource_events/change_state_service.rb
+++ b/app/services/resource_events/change_state_service.rb
@@ -13,14 +13,14 @@ module ResourceEvents
ResourceStateEvent.create(
user: user,
- issue: issue,
- merge_request: merge_request,
+ resource.class.underscore => resource,
source_commit: commit_id_of(mentionable_source),
source_merge_request_id: merge_request_id_of(mentionable_source),
state: ResourceStateEvent.states[state],
close_after_error_tracking_resolve: close_after_error_tracking_resolve,
close_auto_resolve_prometheus_alert: close_auto_resolve_prometheus_alert,
- created_at: Time.zone.now)
+ created_at: Time.zone.now
+ )
resource.expire_note_etag_cache
end
@@ -56,17 +56,5 @@ module ResourceEvents
mentionable_source.id
end
-
- def issue
- return unless resource.is_a?(Issue)
-
- resource
- end
-
- def merge_request
- return unless resource.is_a?(MergeRequest)
-
- resource
- end
end
end
diff --git a/app/services/search/global_service.rb b/app/services/search/global_service.rb
index c841cbfaa00..fab02697cf0 100644
--- a/app/services/search/global_service.rb
+++ b/app/services/search/global_service.rb
@@ -5,16 +5,16 @@ module Search
include Gitlab::Utils::StrongMemoize
attr_accessor :current_user, :params
- attr_reader :default_project_filter
def initialize(user, params)
@current_user, @params = user, params.dup
- @default_project_filter = true
end
def execute
- Gitlab::SearchResults.new(current_user, projects, params[:search],
- default_project_filter: default_project_filter)
+ Gitlab::SearchResults.new(current_user,
+ params[:search],
+ projects,
+ filters: { state: params[:state] })
end
def projects
diff --git a/app/services/search/group_service.rb b/app/services/search/group_service.rb
index 4dbd9eb14bb..68778aa2768 100644
--- a/app/services/search/group_service.rb
+++ b/app/services/search/group_service.rb
@@ -7,13 +7,16 @@ module Search
def initialize(user, group, params)
super(user, params)
- @default_project_filter = false
@group = group
end
def execute
Gitlab::GroupSearchResults.new(
- current_user, projects, group, params[:search], default_project_filter: default_project_filter
+ current_user,
+ params[:search],
+ projects,
+ group: group,
+ filters: { state: params[:state] }
)
end
diff --git a/app/services/search/project_service.rb b/app/services/search/project_service.rb
index 17a322c2665..5eba909c23b 100644
--- a/app/services/search/project_service.rb
+++ b/app/services/search/project_service.rb
@@ -10,9 +10,10 @@ module Search
def execute
Gitlab::ProjectSearchResults.new(current_user,
- project,
params[:search],
- params[:repository_ref])
+ project: project,
+ repository_ref: params[:repository_ref],
+ filters: { state: params[:state] })
end
def scope
diff --git a/app/services/snippets/base_service.rb b/app/services/snippets/base_service.rb
index d9e8326f159..53a04e5a398 100644
--- a/app/services/snippets/base_service.rb
+++ b/app/services/snippets/base_service.rb
@@ -46,7 +46,7 @@ module Snippets
snippet.errors.add(:snippet_actions, 'have invalid data')
end
- snippet_error_response(snippet, 403)
+ snippet_error_response(snippet, 422)
end
def snippet_error_response(snippet, http_status)
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index dab47de8a36..5c9b2eb1aea 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -82,7 +82,7 @@ module Snippets
def create_commit
commit_attrs = {
- branch_name: 'master',
+ branch_name: @snippet.default_branch,
message: 'Initial commit'
}
diff --git a/app/services/snippets/destroy_service.rb b/app/services/snippets/destroy_service.rb
index 977626fcf17..f1f80dbaf86 100644
--- a/app/services/snippets/destroy_service.rb
+++ b/app/services/snippets/destroy_service.rb
@@ -58,3 +58,5 @@ module Snippets
end
end
end
+
+Snippets::DestroyService.prepend_if_ee('EE::Snippets::DestroyService')
diff --git a/app/services/snippets/repository_validation_service.rb b/app/services/snippets/repository_validation_service.rb
index c8197795383..5bf5e692ef4 100644
--- a/app/services/snippets/repository_validation_service.rb
+++ b/app/services/snippets/repository_validation_service.rb
@@ -39,7 +39,7 @@ module Snippets
def check_branch_name_default!
branches = repository.branch_names
- return if branches.first == Gitlab::Checks::SnippetCheck::DEFAULT_BRANCH
+ return if branches.first == snippet.default_branch
raise RepositoryValidationError, _('Repository has an invalid default branch name.')
end
@@ -51,7 +51,7 @@ module Snippets
end
def check_file_count!
- file_count = repository.ls_files(nil).size
+ file_count = repository.ls_files(snippet.default_branch).size
limit = Snippet.max_file_limit(current_user)
if file_count > limit
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
index 00146389e22..a0e9ab6ffda 100644
--- a/app/services/snippets/update_service.rb
+++ b/app/services/snippets/update_service.rb
@@ -93,7 +93,7 @@ module Snippets
raise UpdateError unless snippet.snippet_repository
commit_attrs = {
- branch_name: 'master',
+ branch_name: snippet.default_branch,
message: 'Update snippet'
}
diff --git a/app/services/static_site_editor/config_service.rb b/app/services/static_site_editor/config_service.rb
new file mode 100644
index 00000000000..987ee071976
--- /dev/null
+++ b/app/services/static_site_editor/config_service.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module StaticSiteEditor
+ class ConfigService < ::BaseContainerService
+ ValidationError = Class.new(StandardError)
+
+ def execute
+ @project = container
+ check_access!
+
+ ServiceResponse.success(payload: data)
+ rescue ValidationError => e
+ ServiceResponse.error(message: e.message)
+ end
+
+ private
+
+ attr_reader :project
+
+ def check_access!
+ unless can?(current_user, :download_code, project)
+ raise ValidationError, 'Insufficient permissions to read configuration'
+ end
+ end
+
+ def data
+ check_for_duplicate_keys!
+ generated_data.merge(file_data)
+ end
+
+ def generated_data
+ @generated_data ||= Gitlab::StaticSiteEditor::Config::GeneratedConfig.new(
+ project.repository,
+ params.fetch(:ref),
+ params.fetch(:path),
+ params[:return_url]
+ ).data
+ end
+
+ def file_data
+ @file_data ||= Gitlab::StaticSiteEditor::Config::FileConfig.new.data
+ end
+
+ def check_for_duplicate_keys!
+ duplicate_keys = generated_data.keys & file_data.keys
+ raise ValidationError.new("Duplicate key(s) '#{duplicate_keys}' found.") if duplicate_keys.present?
+ end
+ end
+end
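
check_for_duplicate_keys! is a plain key intersection between the two hashes. For example, with made-up keys:

    generated_data = { branch: 'main', path: 'README.md' }
    file_data      = { path: 'index.md', image_upload_path: 'source/images' }

    generated_data.keys & file_data.keys # => [:path] -> ValidationError is raised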
diff --git a/app/services/submit_usage_ping_service.rb b/app/services/submit_usage_ping_service.rb
index 9191943caa7..2fbeaf4405c 100644
--- a/app/services/submit_usage_ping_service.rb
+++ b/app/services/submit_usage_ping_service.rb
@@ -51,11 +51,11 @@ class SubmitUsagePingService
end
def store_metrics(response)
- metrics = response['conv_index'] || response['dev_ops_score']
+ metrics = response['conv_index'] || response['dev_ops_score'] # leaving dev_ops_score here, as the response data comes from the gitlab-version-com
return unless metrics.present?
- DevOpsScore::Metric.create!(
+ DevOpsReport::Metric.create!(
metrics.slice(*METRICS)
)
end
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 6702596f17c..df042fdc393 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -41,8 +41,12 @@ module SystemNoteService
::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).change_issuable_assignees(old_assignees)
end
- def change_milestone(noteable, project, author, milestone)
- ::SystemNotes::IssuablesService.new(noteable: noteable, project: project, author: author).change_milestone(milestone)
+ def relate_issue(noteable, noteable_ref, user)
+ ::SystemNotes::IssuablesService.new(noteable: noteable, project: noteable.project, author: user).relate_issue(noteable_ref)
+ end
+
+ def unrelate_issue(noteable, noteable_ref, user)
+ ::SystemNotes::IssuablesService.new(noteable: noteable, project: noteable.project, author: user).unrelate_issue(noteable_ref)
end
# Called when the due_date of a Noteable is changed
@@ -300,6 +304,10 @@ module SystemNoteService
::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project, author: author).new_alert_issue(issue)
end
+ def create_new_alert(alert, monitoring_tool)
+ ::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project).create_new_alert(monitoring_tool)
+ end
+
private
def merge_requests_service(noteable, project, author)
diff --git a/app/services/system_notes/alert_management_service.rb b/app/services/system_notes/alert_management_service.rb
index f835376727a..376f2c1cfbf 100644
--- a/app/services/system_notes/alert_management_service.rb
+++ b/app/services/system_notes/alert_management_service.rb
@@ -2,6 +2,21 @@
module SystemNotes
class AlertManagementService < ::SystemNotes::BaseService
+ # Called when a new AlertManagement::Alert has been created
+ #
+ # monitoring_tool - name of the monitoring tool that reported the alert (String)
+ #
+ # Example Note text:
+ #
+ # "GitLab Alert Bot logged an alert from Prometheus"
+ #
+ # Returns the created Note object
+ def create_new_alert(monitoring_tool)
+ body = "logged an alert from **#{monitoring_tool}**"
+
+ create_note(NoteSummary.new(noteable, project, User.alert_bot, body, action: 'new_alert_added'))
+ end
+
# Called when the status of an AlertManagement::Alert has changed
#
# alert - AlertManagement::Alert object.
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index 7535db54130..2252503d97e 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -2,6 +2,34 @@
module SystemNotes
class IssuablesService < ::SystemNotes::BaseService
+ #
+ # noteable_ref - Referenced noteable object
+ #
+ # Example Note text:
+ #
+ # "marked this issue as related to gitlab-foss#9001"
+ #
+ # Returns the created Note object
+ def relate_issue(noteable_ref)
+ body = "marked this issue as related to #{noteable_ref.to_reference(noteable.project)}"
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'relate'))
+ end
+
+ #
+ # noteable_ref - Referenced noteable object
+ #
+ # Example Note text:
+ #
+ # "removed the relation with gitlab-foss#9001"
+ #
+ # Returns the created Note object
+ def unrelate_issue(noteable_ref)
+ body = "removed the relation with #{noteable_ref.to_reference(noteable.project)}"
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'unrelate'))
+ end
+
# Called when the assignee of a Noteable is changed or removed
#
# assignee - User being assigned, or nil
@@ -16,6 +44,8 @@ module SystemNotes
def change_assignee(assignee)
body = assignee.nil? ? 'removed assignee' : "assigned to #{assignee.to_reference}"
+ issue_activity_counter.track_issue_assignee_changed_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'assignee'))
end
@@ -46,25 +76,9 @@ module SystemNotes
body = text_parts.join(' and ')
- create_note(NoteSummary.new(noteable, project, author, body, action: 'assignee'))
- end
-
- # Called when the milestone of a Noteable is changed
- #
- # milestone - Milestone being assigned, or nil
- #
- # Example Note text:
- #
- # "removed milestone"
- #
- # "changed milestone to 7.11"
- #
- # Returns the created Note object
- def change_milestone(milestone)
- format = milestone&.group_milestone? ? :name : :iid
- body = milestone.nil? ? 'removed milestone' : "changed milestone to #{milestone.to_reference(project, format: format)}"
+ issue_activity_counter.track_issue_assignee_changed_action(author: author) if noteable.is_a?(Issue)
- create_note(NoteSummary.new(noteable, project, author, body, action: 'milestone'))
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'assignee'))
end
# Called when the title of a Noteable is changed
@@ -86,6 +100,8 @@ module SystemNotes
body = "changed title from **#{marked_old_title}** to **#{marked_new_title}**"
+ issue_activity_counter.track_issue_title_changed_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
end
@@ -103,6 +119,8 @@ module SystemNotes
def change_description
body = 'changed the description'
+ issue_activity_counter.track_issue_description_changed_action(author: author) if noteable.is_a?(Issue)
+
create_note(NoteSummary.new(noteable, project, author, body, action: 'description'))
end
@@ -199,9 +217,13 @@ module SystemNotes
if noteable.confidential
body = 'made the issue confidential'
action = 'confidential'
+
+ issue_activity_counter.track_issue_made_confidential_action(author: author) if noteable.is_a?(Issue)
else
body = 'made the issue visible to everyone'
action = 'visible'
+
+ issue_activity_counter.track_issue_made_visible_action(author: author) if noteable.is_a?(Issue)
end
create_note(NoteSummary.new(noteable, project, author, body, action: action))
@@ -343,6 +365,10 @@ module SystemNotes
noteable.respond_to?(:resource_state_events) &&
::Feature.enabled?(:track_resource_state_change_events, noteable.project, default_enabled: true)
end
+
+ def issue_activity_counter
+ Gitlab::UsageDataCounters::IssueActivityUniqueCounter
+ end
end
end
diff --git a/app/services/todo_service.rb b/app/services/todo_service.rb
index a3db2ae7947..0eb099753cb 100644
--- a/app/services/todo_service.rb
+++ b/app/services/todo_service.rb
@@ -8,6 +8,7 @@
# TodoService.new.new_issue(issue, current_user)
#
class TodoService
+ include Gitlab::Utils::UsageData
# When create an issue we should:
#
# * create a todo for assignee if issue is assigned
@@ -57,6 +58,14 @@ class TodoService
create_assignment_todo(issuable, current_user, old_assignees)
end
+ # When we reassign a reviewable object (merge request) we should:
+ #
+ # * create a pending todo for new reviewer if object is assigned
+ #
+ def reassigned_reviewable(issuable, current_user, old_reviewers = [])
+ create_reviewer_todo(issuable, current_user, old_reviewers)
+ end
+
# When create a merge request we should:
#
# * creates a pending todo for assignee if merge request is assigned
@@ -209,6 +218,9 @@ class TodoService
Array(users).map do |user|
next if pending_todos(user, attributes).exists?
+ issue_type = attributes.delete(:issue_type)
+ track_todo_creation(user, issue_type)
+
todo = Todo.create(attributes.merge(user_id: user.id))
user.update_todos_count_cache
todo
@@ -217,6 +229,7 @@ class TodoService
def new_issuable(issuable, author)
create_assignment_todo(issuable, author)
+ create_reviewer_todo(issuable, author) if issuable.allows_reviewers?
create_mention_todos(issuable.project, issuable, author)
end
@@ -250,6 +263,14 @@ class TodoService
end
end
+ def create_reviewer_todo(target, author, old_reviewers = [])
+ if target.reviewers.any?
+ reviewers = target.reviewers - old_reviewers
+ attributes = attributes_for_todo(target.project, target, author, Todo::REVIEW_REQUESTED)
+ create_todos(reviewers, attributes)
+ end
+ end
+
def create_mention_todos(parent, target, author, note = nil, skip_users = [])
# Create Todos for directly addressed users
directly_addressed_users = filter_directly_addressed_users(parent, note || target, author, skip_users)
@@ -282,6 +303,8 @@ class TodoService
if target.is_a?(Commit)
attributes.merge!(target_id: nil, commit_id: target.id)
+ elsif target.is_a?(Issue)
+ attributes[:issue_type] = target.issue_type
end
attributes
@@ -329,6 +352,12 @@ class TodoService
def pending_todos(user, criteria = {})
PendingTodosFinder.new(user, criteria).execute
end
+
+ def track_todo_creation(user, issue_type)
+ return unless issue_type == 'incident'
+
+ track_usage_event(:incident_management_incident_todo, user.id)
+ end
end
TodoService.prepend_if_ee('EE::TodoService')
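
create_reviewer_todo relies on plain array difference, so only newly added reviewers receive a todo, and track_todo_creation only fires for incident-type issues. A quick illustration with placeholder reviewer values:

    old_reviewers    = [:alice, :bob]
    target_reviewers = [:bob, :carol]

    target_reviewers - old_reviewers # => [:carol] -> only the new reviewer gets a REVIEW_REQUESTED todo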
diff --git a/app/services/two_factor/base_service.rb b/app/services/two_factor/base_service.rb
new file mode 100644
index 00000000000..7d3f63f3442
--- /dev/null
+++ b/app/services/two_factor/base_service.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module TwoFactor
+ class BaseService
+ include BaseServiceUtility
+
+ attr_reader :current_user, :params, :user
+
+ def initialize(current_user, params = {})
+ @current_user, @params = current_user, params
+ @user = params.delete(:user)
+ end
+ end
+end
diff --git a/app/services/two_factor/destroy_service.rb b/app/services/two_factor/destroy_service.rb
new file mode 100644
index 00000000000..b8bbe215d6e
--- /dev/null
+++ b/app/services/two_factor/destroy_service.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module TwoFactor
+ class DestroyService < ::TwoFactor::BaseService
+ def execute
+ return error(_('You are not authorized to perform this action')) unless can?(current_user, :disable_two_factor, user)
+ return error(_('Two-factor authentication is not enabled for this user')) unless user.two_factor_enabled?
+
+ result = disable_two_factor
+
+ notification_service.disabled_two_factor(user) if result[:status] == :success
+
+ result
+ end
+
+ private
+
+ def disable_two_factor
+ ::Users::UpdateService.new(current_user, user: user).execute do |user|
+ user.disable_two_factor!
+ end
+ end
+ end
+end
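
A hedged usage sketch of the new service; admin and target_user are hypothetical, and the return hash shape mirrors the success/error helpers from BaseServiceUtility:

    result = TwoFactor::DestroyService.new(admin, user: target_user).execute
    result[:status]  # => :success, or :error with result[:message] explaining why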
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index f06f00a5c3f..2fc46f033dd 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -91,7 +91,6 @@ module Users
def signup_params
[
:email,
- :email_confirmation,
:password_automatically_set,
:name,
:first_name,
diff --git a/app/services/users/signup_service.rb b/app/services/users/signup_service.rb
index 1031cec44cb..1087ae76216 100644
--- a/app/services/users/signup_service.rb
+++ b/app/services/users/signup_service.rb
@@ -27,7 +27,7 @@ module Users
def inject_validators
class << @user
validates :role, presence: true
- validates :setup_for_company, inclusion: { in: [true, false], message: :blank }
+ validates :setup_for_company, inclusion: { in: [true, false], message: :blank } if Gitlab.com?
end
end
end
diff --git a/app/services/webauthn/authenticate_service.rb b/app/services/webauthn/authenticate_service.rb
new file mode 100644
index 00000000000..a4513c62c2d
--- /dev/null
+++ b/app/services/webauthn/authenticate_service.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module Webauthn
+ class AuthenticateService < BaseService
+ def initialize(user, device_response, challenge)
+ @user = user
+ @device_response = device_response
+ @challenge = challenge
+ end
+
+ def execute
+ parsed_device_response = Gitlab::Json.parse(@device_response)
+
+ # appid is set for legacy U2F devices, will be used in a future iteration
+ # rp_id = @app_id
+ # unless parsed_device_response['clientExtensionResults'] && parsed_device_response['clientExtensionResults']['appid']
+ # rp_id = URI(@app_id).host
+ # end
+
+ webauthn_credential = WebAuthn::Credential.from_get(parsed_device_response)
+ encoded_raw_id = Base64.strict_encode64(webauthn_credential.raw_id)
+ stored_webauthn_credential = @user.webauthn_registrations.find_by_credential_xid(encoded_raw_id)
+
+ encoder = WebAuthn.configuration.encoder
+
+ if stored_webauthn_credential &&
+ validate_webauthn_credential(webauthn_credential) &&
+ verify_webauthn_credential(webauthn_credential, stored_webauthn_credential, @challenge, encoder)
+
+ stored_webauthn_credential.update!(counter: webauthn_credential.sign_count)
+ return true
+ end
+
+ false
+ rescue JSON::ParserError, WebAuthn::SignCountVerificationError, WebAuthn::Error
+ false
+ end
+
+ ##
+ # Validates that webauthn_credential is syntactically valid
+ #
+ # duplicated from WebAuthn::PublicKeyCredential#verify
+ # which can't be used here as we need to call WebAuthn::AuthenticatorAssertionResponse#verify instead
+ # (which is done in #verify_webauthn_credential)
+ def validate_webauthn_credential(webauthn_credential)
+ webauthn_credential.type == WebAuthn::TYPE_PUBLIC_KEY &&
+ webauthn_credential.raw_id && webauthn_credential.id &&
+ webauthn_credential.raw_id == WebAuthn.standard_encoder.decode(webauthn_credential.id)
+ end
+
+ ##
+ # Verifies that webauthn_credential matches stored_credential with the given challenge
+ #
+ def verify_webauthn_credential(webauthn_credential, stored_credential, challenge, encoder)
+ webauthn_credential.response.verify(
+ encoder.decode(challenge),
+ public_key: encoder.decode(stored_credential.public_key),
+ sign_count: stored_credential.counter)
+ end
+ end
+end
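
A usage sketch for the authenticate service; device_response_json is the JSON string posted back by the browser's navigator.credentials.get flow and challenge is the value previously stored in the session (both variable names hypothetical):

    verified = Webauthn::AuthenticateService.new(user, device_response_json, challenge).execute
    # => true when the assertion verifies against a stored registration, false otherwise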
diff --git a/app/services/webauthn/register_service.rb b/app/services/webauthn/register_service.rb
new file mode 100644
index 00000000000..21be22027a8
--- /dev/null
+++ b/app/services/webauthn/register_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Webauthn
+ class RegisterService < BaseService
+ def initialize(user, params, challenge)
+ @user = user
+ @params = params
+ @challenge = challenge
+ end
+
+ def execute
+ registration = WebauthnRegistration.new
+
+ begin
+ webauthn_credential = WebAuthn::Credential.from_create(Gitlab::Json.parse(@params[:device_response]))
+ webauthn_credential.verify(@challenge)
+
+ registration.update(
+ credential_xid: Base64.strict_encode64(webauthn_credential.raw_id),
+ public_key: webauthn_credential.public_key,
+ counter: webauthn_credential.sign_count,
+ name: @params[:name],
+ user: @user
+ )
+ rescue JSON::ParserError
+ registration.errors.add(:base, _('Your WebAuthn device did not send a valid JSON response.'))
+ rescue WebAuthn::Error => e
+ registration.errors.add(:base, e.message)
+ end
+
+ registration
+ end
+ end
+end
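
And the matching registration flow; the params come from the browser's navigator.credentials.create response (variable names hypothetical):

    registration = Webauthn::RegisterService.new(
      user,
      { device_response: device_response_json, name: 'Primary key' },
      challenge
    ).execute

    registration.persisted?           # => true on success
    registration.errors.full_messages # populated when parsing or verification failed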
diff --git a/app/services/wiki_pages/destroy_service.rb b/app/services/wiki_pages/destroy_service.rb
index ab5abe1c82b..1d566f98760 100644
--- a/app/services/wiki_pages/destroy_service.rb
+++ b/app/services/wiki_pages/destroy_service.rb
@@ -3,11 +3,14 @@
module WikiPages
class DestroyService < WikiPages::BaseService
def execute(page)
- if page&.delete
+ if page.delete
execute_hooks(page)
+ ServiceResponse.success(payload: { page: page })
+ else
+ ServiceResponse.error(
+ message: _('Could not delete wiki page'), payload: { page: page }
+ )
end
-
- page
end
def usage_counter_action
diff --git a/app/services/wiki_pages/update_service.rb b/app/services/wiki_pages/update_service.rb
index 5ac6902e0b0..f2fc6b37c34 100644
--- a/app/services/wiki_pages/update_service.rb
+++ b/app/services/wiki_pages/update_service.rb
@@ -8,9 +8,13 @@ module WikiPages
if page.update(@params)
execute_hooks(page)
+ ServiceResponse.success(payload: { page: page })
+ else
+ ServiceResponse.error(
+ message: _('Could not update wiki page'),
+ payload: { page: page }
+ )
end
-
- page
end
def usage_counter_action
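
Both wiki services now return a ServiceResponse instead of the bare page, so callers branch on success? rather than checking for nil. A hedged sketch of the calling side (service construction elided):

    response = update_service.execute(page)

    if response.success?
      response.payload[:page]
    else
      response.message # => "Could not update wiki page"
    end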