author    GitLab Bot <gitlab-bot@gitlab.com>  2021-09-20 13:18:24 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-09-20 13:18:24 +0000
commit    0653e08efd039a5905f3fa4f6e9cef9f5d2f799c (patch)
tree      4dcc884cf6d81db44adae4aa99f8ec1233a41f55 /app/services
parent    744144d28e3e7fddc117924fef88de5d9674fe4c (diff)
download  gitlab-ce-0653e08efd039a5905f3fa4f6e9cef9f5d2f799c.tar.gz
Add latest changes from gitlab-org/gitlab@14-3-stable-ee (v14.3.0-rc42)
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/auth/container_registry_authentication_service.rb | 18
-rw-r--r--  app/services/base_container_service.rb | 10
-rw-r--r--  app/services/base_group_service.rb | 12
-rw-r--r--  app/services/base_service.rb | 1
-rw-r--r--  app/services/boards/issues/list_service.rb | 8
-rw-r--r--  app/services/ci/after_requeue_job_service.rb | 13
-rw-r--r--  app/services/ci/archive_trace_service.rb | 9
-rw-r--r--  app/services/ci/drop_pipeline_service.rb | 8
-rw-r--r--  app/services/ci/external_pull_requests/create_pipeline_service.rb | 10
-rw-r--r--  app/services/ci/pipeline_schedules/calculate_next_run_service.rb | 1
-rw-r--r--  app/services/ci/pipelines/add_job_service.rb | 4
-rw-r--r--  app/services/ci/queue/build_queue_service.rb | 36
-rw-r--r--  app/services/ci/queue/builds_table_strategy.rb | 8
-rw-r--r--  app/services/ci/queue/pending_builds_strategy.rb | 22
-rw-r--r--  app/services/ci/register_job_service.rb | 63
-rw-r--r--  app/services/ci/stuck_builds/drop_helpers.rb | 58
-rw-r--r--  app/services/ci/stuck_builds/drop_service.rb | 62
-rw-r--r--  app/services/ci/update_build_queue_service.rb | 14
-rw-r--r--  app/services/ci/update_pending_build_service.rb | 45
-rw-r--r--  app/services/clusters/agents/refresh_authorization_service.rb | 106
-rw-r--r--  app/services/concerns/members/bulk_create_users.rb | 76
-rw-r--r--  app/services/customer_relations/organizations/base_service.rb | 17
-rw-r--r--  app/services/customer_relations/organizations/create_service.rb | 30
-rw-r--r--  app/services/customer_relations/organizations/update_service.rb | 24
-rw-r--r--  app/services/dependency_proxy/image_ttl_group_policies/update_service.rb | 41
-rw-r--r--  app/services/design_management/delete_designs_service.rb | 1
-rw-r--r--  app/services/draft_notes/publish_service.rb | 20
-rw-r--r--  app/services/emails/destroy_service.rb | 6
-rw-r--r--  app/services/environments/auto_stop_service.rb | 12
-rw-r--r--  app/services/environments/stop_service.rb | 16
-rw-r--r--  app/services/error_tracking/collect_error_service.rb | 32
-rw-r--r--  app/services/error_tracking/list_issues_service.rb | 4
-rw-r--r--  app/services/feature_flags/base_service.rb | 9
-rw-r--r--  app/services/feature_flags/create_service.rb | 4
-rw-r--r--  app/services/feature_flags/update_service.rb | 38
-rw-r--r--  app/services/git/base_hooks_service.rb | 10
-rw-r--r--  app/services/git/branch_hooks_service.rb | 37
-rw-r--r--  app/services/git/tag_hooks_service.rb | 6
-rw-r--r--  app/services/groups/open_issues_count_service.rb | 27
-rw-r--r--  app/services/groups/update_shared_runners_service.rb | 9
-rw-r--r--  app/services/issuable/bulk_update_service.rb | 19
-rw-r--r--  app/services/issuable_base_service.rb | 5
-rw-r--r--  app/services/issue_rebalancing_service.rb | 136
-rw-r--r--  app/services/issues/base_service.rb | 10
-rw-r--r--  app/services/issues/build_service.rb | 13
-rw-r--r--  app/services/issues/relative_position_rebalancing_service.rb | 193
-rw-r--r--  app/services/issues/update_service.rb | 70
-rw-r--r--  app/services/members/create_service.rb | 17
-rw-r--r--  app/services/members/creator_service.rb | 64
-rw-r--r--  app/services/members/groups/bulk_creator_service.rb | 9
-rw-r--r--  app/services/members/invite_service.rb | 2
-rw-r--r--  app/services/members/mailgun.rb | 8
-rw-r--r--  app/services/members/mailgun/process_webhook_service.rb | 39
-rw-r--r--  app/services/members/projects/bulk_creator_service.rb | 9
-rw-r--r--  app/services/merge_requests/merge_to_ref_service.rb | 8
-rw-r--r--  app/services/merge_requests/mergeability_check_service.rb | 4
-rw-r--r--  app/services/merge_requests/squash_service.rb | 17
-rw-r--r--  app/services/packages/composer/version_parser_service.rb | 11
-rw-r--r--  app/services/packages/create_package_service.rb | 6
-rw-r--r--  app/services/packages/generic/create_package_file_service.rb | 3
-rw-r--r--  app/services/packages/maven/find_or_create_package_service.rb | 2
-rw-r--r--  app/services/packages/nuget/update_package_from_metadata_service.rb | 33
-rw-r--r--  app/services/pages/delete_service.rb | 3
-rw-r--r--  app/services/pages/legacy_storage_lease.rb | 19
-rw-r--r--  app/services/pages/migrate_legacy_storage_to_deployment_service.rb | 15
-rw-r--r--  app/services/projects/batch_count_service.rb | 16
-rw-r--r--  app/services/projects/batch_forks_count_service.rb | 23
-rw-r--r--  app/services/projects/count_service.rb | 2
-rw-r--r--  app/services/projects/create_service.rb | 19
-rw-r--r--  app/services/projects/fork_service.rb | 3
-rw-r--r--  app/services/projects/forks_count_service.rb | 2
-rw-r--r--  app/services/projects/group_links/destroy_service.rb | 20
-rw-r--r--  app/services/projects/move_deploy_keys_projects_service.rb | 2
-rw-r--r--  app/services/projects/move_forks_service.rb | 2
-rw-r--r--  app/services/projects/move_lfs_objects_projects_service.rb | 2
-rw-r--r--  app/services/projects/move_notification_settings_service.rb | 2
-rw-r--r--  app/services/projects/move_project_authorizations_service.rb | 2
-rw-r--r--  app/services/projects/move_project_group_links_service.rb | 2
-rw-r--r--  app/services/projects/move_project_members_service.rb | 2
-rw-r--r--  app/services/projects/move_users_star_projects_service.rb | 2
-rw-r--r--  app/services/projects/open_issues_count_service.rb | 98
-rw-r--r--  app/services/projects/operations/update_service.rb | 1
-rw-r--r--  app/services/projects/transfer_service.rb | 23
-rw-r--r--  app/services/projects/update_pages_service.rb | 104
-rw-r--r--  app/services/projects/update_service.rb | 11
-rw-r--r--  app/services/protected_branches/api_service.rb | 2
-rw-r--r--  app/services/protected_branches/base_service.rb | 17
-rw-r--r--  app/services/protected_branches/create_service.rb | 2
-rw-r--r--  app/services/protected_branches/destroy_service.rb | 2
-rw-r--r--  app/services/protected_branches/update_service.rb | 10
-rw-r--r--  app/services/repositories/changelog_service.rb | 2
-rw-r--r--  app/services/search/global_service.rb | 2
-rw-r--r--  app/services/service_ping/submit_service.rb | 17
-rw-r--r--  app/services/suggestions/apply_service.rb | 2
-rw-r--r--  app/services/suggestions/create_service.rb | 2
-rw-r--r--  app/services/system_note_service.rb | 4
-rw-r--r--  app/services/system_notes/issuables_service.rb | 6
-rw-r--r--  app/services/todos/destroy/design_service.rb | 28
-rw-r--r--  app/services/users/ban_service.rb | 4
-rw-r--r--  app/services/users/banned_user_base_service.rb | 5
-rw-r--r--  app/services/users/dismiss_group_callout_service.rb | 11
-rw-r--r--  app/services/users/dismiss_user_callout_service.rb | 10
-rw-r--r--  app/services/users/migrate_to_ghost_user_service.rb | 29
-rw-r--r--  app/services/users/reject_service.rb | 6
-rw-r--r--  app/services/users/unban_service.rb | 6
-rw-r--r--  app/services/wiki_pages/event_create_service.rb | 6
106 files changed, 1434 insertions, 719 deletions
diff --git a/app/services/auth/container_registry_authentication_service.rb b/app/services/auth/container_registry_authentication_service.rb
index a2683647c72..bc734465750 100644
--- a/app/services/auth/container_registry_authentication_service.rb
+++ b/app/services/auth/container_registry_authentication_service.rb
@@ -45,7 +45,12 @@ module Auth
token.expire_time = token_expire_at
token[:access] = names.map do |name|
- { type: 'repository', name: name, actions: actions }
+ {
+ type: 'repository',
+ name: name,
+ actions: actions,
+ migration_eligible: migration_eligible(repository_path: name)
+ }.compact
end
token.encoded
@@ -119,13 +124,20 @@ module Auth
type: type,
name: path.to_s,
actions: authorized_actions,
- migration_eligible: migration_eligible(requested_project, authorized_actions)
+ migration_eligible: self.class.migration_eligible(project: requested_project)
}.compact
end
- def migration_eligible(project, actions)
+ def self.migration_eligible(project: nil, repository_path: nil)
return unless Feature.enabled?(:container_registry_migration_phase1)
+ # project has precedence over repository_path. If only the latter is provided, we find the corresponding Project.
+ unless project
+ return unless repository_path
+
+ project = ContainerRegistry::Path.new(repository_path).repository_project
+ end
+
# The migration process will start by allowing only specific test and gitlab-org projects using the
# `container_registry_migration_phase1_allow` FF. We'll then move on to a percentage rollout using this same FF.
# To remove the risk of impacting enterprise customers that rely heavily on the registry during the percentage
diff --git a/app/services/base_container_service.rb b/app/services/base_container_service.rb
index 8492b3ce92c..190d159e7f1 100644
--- a/app/services/base_container_service.rb
+++ b/app/services/base_container_service.rb
@@ -2,12 +2,12 @@
# Base class, scoped by container (project or group).
#
-# New or existing services which only require project as a container
-# should subclass BaseProjectService.
+# New or existing services which only require a project or group container
+# should subclass BaseProjectService or BaseGroupService.
#
-# If you require a different but specific, non-polymorphic container (such
-# as group), consider creating a new subclass such as BaseGroupService,
-# and update the related comment at the top of the original BaseService.
+# If you require a different but specific, non-polymorphic container
+# consider creating a new subclass, and update the related comment at
+# the top of the original BaseService.
class BaseContainerService
include BaseServiceUtility
diff --git a/app/services/base_group_service.rb b/app/services/base_group_service.rb
new file mode 100644
index 00000000000..c95b6f5af60
--- /dev/null
+++ b/app/services/base_group_service.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+# Base class, scoped by group
+class BaseGroupService < ::BaseContainerService # rubocop:disable Gitlab/NamespacedClass
+ attr_accessor :group
+
+ def initialize(group:, current_user: nil, params: {})
+ super(container: group, current_user: current_user, params: params)
+
+ @group = group
+ end
+end
diff --git a/app/services/base_service.rb b/app/services/base_service.rb
index 3030287e035..275ebcc7bcd 100644
--- a/app/services/base_service.rb
+++ b/app/services/base_service.rb
@@ -10,6 +10,7 @@
#
# - BaseContainerService for services scoped by container (project or group)
# - BaseProjectService for services scoped to projects
+# - BaseGroupService for services scoped to groups
#
# or, create a new base class and update this comment.
class BaseService
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index 8806e6788ff..9a3e3bc3bdb 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -9,6 +9,14 @@ module Boards
IssuesFinder.valid_params
end
+ # It is a class method because we cannot apply it
+ # prior to knowing how many items should be fetched for a list.
+ def self.initialize_relative_positions(board, current_user, issues)
+ if Gitlab::Database.read_write? && !board.disabled_for?(current_user)
+ Issue.move_nulls_to_end(issues)
+ end
+ end
+
private
def order(items)
diff --git a/app/services/ci/after_requeue_job_service.rb b/app/services/ci/after_requeue_job_service.rb
index f717dd0862c..9101ae91967 100644
--- a/app/services/ci/after_requeue_job_service.rb
+++ b/app/services/ci/after_requeue_job_service.rb
@@ -10,16 +10,9 @@ module Ci
private
def process_subsequent_jobs(processable)
- if Feature.enabled?(:ci_same_stage_job_needs, processable.project, default_enabled: :yaml)
- (stage_dependent_jobs(processable) | needs_dependent_jobs(processable))
- .each do |processable|
- process(processable)
- end
- else
- skipped_jobs(processable).after_stage(processable.stage_idx)
- .find_each do |job|
- process(job)
- end
+ (stage_dependent_jobs(processable) | needs_dependent_jobs(processable))
+ .each do |processable|
+ process(processable)
end
end
diff --git a/app/services/ci/archive_trace_service.rb b/app/services/ci/archive_trace_service.rb
index bc3219fbd79..995b58c6882 100644
--- a/app/services/ci/archive_trace_service.rb
+++ b/app/services/ci/archive_trace_service.rb
@@ -3,6 +3,13 @@
module Ci
class ArchiveTraceService
def execute(job, worker_name:)
+ unless job.trace.can_attempt_archival_now?
+ Sidekiq.logger.warn(class: worker_name,
+ message: job.trace.archival_attempts_message,
+ job_id: job.id)
+ return
+ end
+
# TODO: Remove this logging once we confirmed new live trace architecture is functional.
# See https://gitlab.com/gitlab-com/gl-infra/infrastructure/issues/4667.
unless job.has_live_trace?
@@ -25,6 +32,8 @@ module Ci
rescue ::Gitlab::Ci::Trace::AlreadyArchivedError
# It's already archived, thus we can safely ignore this exception.
rescue StandardError => e
+ job.trace.increment_archival_attempts!
+
# Tracks this error with application logs, Sentry, and Prometheus.
# If `archive!` keeps failing for over a week, that could incur data loss.
# (See more https://docs.gitlab.com/ee/administration/job_logs.html#new-incremental-logging-architecture)
diff --git a/app/services/ci/drop_pipeline_service.rb b/app/services/ci/drop_pipeline_service.rb
index 16d3abcbfa0..5772ab8f29c 100644
--- a/app/services/ci/drop_pipeline_service.rb
+++ b/app/services/ci/drop_pipeline_service.rb
@@ -2,8 +2,7 @@
module Ci
class DropPipelineService
- PRELOADED_COMMIT_STATUS_RELATIONS = [:project, :pipeline].freeze
- PRELOADED_CI_BUILD_RELATIONS = [:metadata, :deployment, :taggings].freeze
+ PRELOADED_RELATIONS = [:project, :pipeline, :metadata, :deployment, :taggings].freeze
# execute service asynchronously for each cancelable pipeline
def execute_async_for_all(pipelines, failure_reason, context_user)
@@ -30,11 +29,8 @@ module Ci
private
- # rubocop: disable CodeReuse/ActiveRecord
def preload_associations_for_drop(commit_status_batch)
- ActiveRecord::Associations::Preloader.new.preload(commit_status_batch, PRELOADED_COMMIT_STATUS_RELATIONS)
- ActiveRecord::Associations::Preloader.new.preload(commit_status_batch.select { |job| job.is_a?(Ci::Build) }, PRELOADED_CI_BUILD_RELATIONS)
+ Preloaders::CommitStatusPreloader.new(commit_status_batch).execute(PRELOADED_RELATIONS)
end
- # rubocop: enable CodeReuse/ActiveRecord
end
end
diff --git a/app/services/ci/external_pull_requests/create_pipeline_service.rb b/app/services/ci/external_pull_requests/create_pipeline_service.rb
index 83499524a8e..dd93ca4708e 100644
--- a/app/services/ci/external_pull_requests/create_pipeline_service.rb
+++ b/app/services/ci/external_pull_requests/create_pipeline_service.rb
@@ -16,8 +16,14 @@ module Ci
private
def create_pipeline_for(pull_request)
- Ci::CreatePipelineService.new(project, current_user, create_params(pull_request))
- .execute(:external_pull_request_event, external_pull_request: pull_request)
+ if ::Feature.enabled?(:ci_create_external_pr_pipeline_async, project, default_enabled: :yaml)
+ Ci::ExternalPullRequests::CreatePipelineWorker.perform_async(
+ project.id, current_user.id, pull_request.id
+ )
+ else
+ Ci::CreatePipelineService.new(project, current_user, create_params(pull_request))
+ .execute(:external_pull_request_event, external_pull_request: pull_request)
+ end
end
def create_params(pull_request)
diff --git a/app/services/ci/pipeline_schedules/calculate_next_run_service.rb b/app/services/ci/pipeline_schedules/calculate_next_run_service.rb
index 9c8f6b47288..6c8ccb017e9 100644
--- a/app/services/ci/pipeline_schedules/calculate_next_run_service.rb
+++ b/app/services/ci/pipeline_schedules/calculate_next_run_service.rb
@@ -8,7 +8,6 @@ module Ci
def execute(schedule, fallback_method:)
@schedule = schedule
- return fallback_method.call unless ::Feature.enabled?(:ci_daily_limit_for_pipeline_schedules, project, default_enabled: :yaml)
return fallback_method.call unless plan_cron&.cron_valid?
now = Time.zone.now
diff --git a/app/services/ci/pipelines/add_job_service.rb b/app/services/ci/pipelines/add_job_service.rb
index 41f9903e48c..53536b6fdf9 100644
--- a/app/services/ci/pipelines/add_job_service.rb
+++ b/app/services/ci/pipelines/add_job_service.rb
@@ -21,14 +21,14 @@ module Ci
Ci::Pipeline.transaction do
yield(job)
- job.update_older_statuses_retried! if Feature.enabled?(:ci_fix_commit_status_retried, pipeline.project, default_enabled: :yaml)
+ job.update_older_statuses_retried!
end
end
else
Ci::Pipeline.transaction do
yield(job)
- job.update_older_statuses_retried! if Feature.enabled?(:ci_fix_commit_status_retried, pipeline.project, default_enabled: :yaml)
+ job.update_older_statuses_retried!
end
end
diff --git a/app/services/ci/queue/build_queue_service.rb b/app/services/ci/queue/build_queue_service.rb
index 99408d529b2..3276c427923 100644
--- a/app/services/ci/queue/build_queue_service.rb
+++ b/app/services/ci/queue/build_queue_service.rb
@@ -24,26 +24,30 @@ module Ci
# rubocop:disable CodeReuse/ActiveRecord
def builds_for_group_runner
- # Workaround for weird Rails bug, that makes `runner.groups.to_sql` to return `runner_id = NULL`
- groups = ::Group.joins(:runner_namespaces).merge(runner.runner_namespaces)
+ if strategy.use_denormalized_namespace_traversal_ids?
+ strategy.builds_for_group_runner
+ else
+ # Workaround for weird Rails bug, that makes `runner.groups.to_sql` to return `runner_id = NULL`
+ groups = ::Group.joins(:runner_namespaces).merge(runner.runner_namespaces)
- hierarchy_groups = Gitlab::ObjectHierarchy
- .new(groups)
- .base_and_descendants
+ hierarchy_groups = Gitlab::ObjectHierarchy
+ .new(groups)
+ .base_and_descendants
- projects = Project.where(namespace_id: hierarchy_groups)
- .with_group_runners_enabled
- .with_builds_enabled
- .without_deleted
+ projects = Project.where(namespace_id: hierarchy_groups)
+ .with_group_runners_enabled
+ .with_builds_enabled
+ .without_deleted
- relation = new_builds.where(project: projects)
+ relation = new_builds.where(project: projects)
- order(relation)
+ order(relation)
+ end
end
def builds_for_project_runner
relation = new_builds
- .where(project: runner.projects.without_deleted.with_builds_enabled)
+ .where(project: runner_projects_relation)
order(relation)
end
@@ -83,6 +87,14 @@ module Ci
end
end
end
+
+ def runner_projects_relation
+ if ::Feature.enabled?(:ci_pending_builds_project_runners_decoupling, runner, default_enabled: :yaml)
+ runner.runner_projects.select(:project_id)
+ else
+ runner.projects.without_deleted.with_builds_enabled
+ end
+ end
end
end
end
diff --git a/app/services/ci/queue/builds_table_strategy.rb b/app/services/ci/queue/builds_table_strategy.rb
index d0a343cb9d4..ac449a5289e 100644
--- a/app/services/ci/queue/builds_table_strategy.rb
+++ b/app/services/ci/queue/builds_table_strategy.rb
@@ -31,6 +31,10 @@ module Ci
end
end
+ def builds_for_group_runner
+ raise NotImplementedError
+ end
+
def builds_matching_tag_ids(relation, ids)
# pick builds that does not have other tags than runner's one
relation.matches_tag_ids(ids)
@@ -61,6 +65,10 @@ module Ci
false
end
+ def use_denormalized_namespace_traversal_ids?
+ false
+ end
+
private
def running_builds_for_shared_runners
diff --git a/app/services/ci/queue/pending_builds_strategy.rb b/app/services/ci/queue/pending_builds_strategy.rb
index efe3a981d3a..7a913e47df4 100644
--- a/app/services/ci/queue/pending_builds_strategy.rb
+++ b/app/services/ci/queue/pending_builds_strategy.rb
@@ -16,12 +16,26 @@ module Ci
builds_ordered_for_shared_runners(shared_builds)
end
+ def builds_for_group_runner
+ return new_builds.none if runner.namespace_ids.empty?
+
+ new_builds.where('ci_pending_builds.namespace_traversal_ids && ARRAY[?]::int[]', runner.namespace_ids)
+ end
+
def builds_matching_tag_ids(relation, ids)
- relation.merge(CommitStatus.matches_tag_ids(ids, table: 'ci_pending_builds', column: 'build_id'))
+ if ::Feature.enabled?(:ci_queueing_denormalize_tags_information, runner, default_enabled: :yaml)
+ relation.for_tags(runner.tags_ids)
+ else
+ relation.merge(CommitStatus.matches_tag_ids(ids, table: 'ci_pending_builds', column: 'build_id'))
+ end
end
def builds_with_any_tags(relation)
- relation.merge(CommitStatus.with_any_tags(table: 'ci_pending_builds', column: 'build_id'))
+ if ::Feature.enabled?(:ci_queueing_denormalize_tags_information, runner, default_enabled: :yaml)
+ relation.where('cardinality(tag_ids) > 0')
+ else
+ relation.merge(CommitStatus.with_any_tags(table: 'ci_pending_builds', column: 'build_id'))
+ end
end
def order(relation)
@@ -44,6 +58,10 @@ module Ci
::Feature.enabled?(:ci_queueing_denormalize_ci_minutes_information, runner, type: :development, default_enabled: :yaml)
end
+ def use_denormalized_namespace_traversal_ids?
+ ::Feature.enabled?(:ci_queueing_denormalize_namespace_traversal_ids, runner, type: :development, default_enabled: :yaml)
+ end
+
private
def builds_available_for_shared_runners
diff --git a/app/services/ci/register_job_service.rb b/app/services/ci/register_job_service.rb
index dc046e1d164..c46ddd22558 100644
--- a/app/services/ci/register_job_service.rb
+++ b/app/services/ci/register_job_service.rb
@@ -103,40 +103,42 @@ module Ci
# rubocop: disable CodeReuse/ActiveRecord
def each_build(params, &blk)
- queue = ::Ci::Queue::BuildQueueService.new(runner)
-
- builds = begin
- if runner.instance_type?
- queue.builds_for_shared_runner
- elsif runner.group_type?
- queue.builds_for_group_runner
- else
- queue.builds_for_project_runner
+ ::Gitlab::Database.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/339429') do
+ queue = ::Ci::Queue::BuildQueueService.new(runner)
+
+ builds = begin
+ if runner.instance_type?
+ queue.builds_for_shared_runner
+ elsif runner.group_type?
+ queue.builds_for_group_runner
+ else
+ queue.builds_for_project_runner
+ end
end
- end
- if runner.ref_protected?
- builds = queue.builds_for_protected_runner(builds)
- end
+ if runner.ref_protected?
+ builds = queue.builds_for_protected_runner(builds)
+ end
- # pick builds that does not have other tags than runner's one
- builds = queue.builds_matching_tag_ids(builds, runner.tags.ids)
+ # pick builds that does not have other tags than runner's one
+ builds = queue.builds_matching_tag_ids(builds, runner.tags.ids)
- # pick builds that have at least one tag
- unless runner.run_untagged?
- builds = queue.builds_with_any_tags(builds)
- end
+ # pick builds that have at least one tag
+ unless runner.run_untagged?
+ builds = queue.builds_with_any_tags(builds)
+ end
- # pick builds that older than specified age
- if params.key?(:job_age)
- builds = queue.builds_queued_before(builds, params[:job_age].seconds.ago)
- end
+ # pick builds that older than specified age
+ if params.key?(:job_age)
+ builds = queue.builds_queued_before(builds, params[:job_age].seconds.ago)
+ end
- build_ids = retrieve_queue(-> { queue.execute(builds) })
+ build_ids = retrieve_queue(-> { queue.execute(builds) })
- @metrics.observe_queue_size(-> { build_ids.size }, @runner.runner_type)
+ @metrics.observe_queue_size(-> { build_ids.size }, @runner.runner_type)
- build_ids.each { |build_id| yield Ci::Build.find(build_id) }
+ build_ids.each { |build_id| yield Ci::Build.find(build_id) }
+ end
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -269,6 +271,15 @@ module Ci
missing_dependency_failure: -> (build, _) { !build.has_valid_build_dependencies? },
runner_unsupported: -> (build, params) { !build.supported_runner?(params.dig(:info, :features)) },
archived_failure: -> (build, _) { build.archived? }
+ }.merge(builds_enabled_checks)
+ end
+
+ def builds_enabled_checks
+ return {} unless ::Feature.enabled?(:ci_queueing_builds_enabled_checks, runner, default_enabled: :yaml)
+
+ {
+ project_deleted: -> (build, _) { build.project.pending_delete? },
+ builds_disabled: -> (build, _) { !build.project.builds_enabled? }
}
end
end
diff --git a/app/services/ci/stuck_builds/drop_helpers.rb b/app/services/ci/stuck_builds/drop_helpers.rb
new file mode 100644
index 00000000000..f79b805c23d
--- /dev/null
+++ b/app/services/ci/stuck_builds/drop_helpers.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Ci
+ module StuckBuilds
+ module DropHelpers
+ def drop(builds, failure_reason:)
+ fetch(builds) do |build|
+ drop_build :outdated, build, failure_reason
+ end
+ end
+
+ def drop_stuck(builds, failure_reason:)
+ fetch(builds) do |build|
+ break unless build.stuck?
+
+ drop_build :stuck, build, failure_reason
+ end
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def fetch(builds)
+ loop do
+ jobs = builds.includes(:tags, :runner, project: [:namespace, :route])
+ .limit(100)
+ .to_a
+
+ break if jobs.empty?
+
+ jobs.each do |job|
+ Gitlab::ApplicationContext.with_context(project: job.project) { yield(job) }
+ end
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def drop_build(type, build, reason)
+ Gitlab::AppLogger.info "#{self.class}: Dropping #{type} build #{build.id} for runner #{build.runner_id} (status: #{build.status}, failure_reason: #{reason})"
+ Gitlab::OptimisticLocking.retry_lock(build, 3, name: 'stuck_ci_jobs_worker_drop_build') do |b|
+ b.drop(reason)
+ end
+ rescue StandardError => ex
+ build.doom!
+
+ track_exception_for_build(ex, build)
+ end
+
+ def track_exception_for_build(ex, build)
+ Gitlab::ErrorTracking.track_exception(ex,
+ build_id: build.id,
+ build_name: build.name,
+ build_stage: build.stage,
+ pipeline_id: build.pipeline_id,
+ project_id: build.project_id
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/ci/stuck_builds/drop_service.rb b/app/services/ci/stuck_builds/drop_service.rb
new file mode 100644
index 00000000000..3fee9a94381
--- /dev/null
+++ b/app/services/ci/stuck_builds/drop_service.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Ci
+ module StuckBuilds
+ class DropService
+ include DropHelpers
+
+ BUILD_RUNNING_OUTDATED_TIMEOUT = 1.hour
+ BUILD_PENDING_OUTDATED_TIMEOUT = 1.day
+ BUILD_SCHEDULED_OUTDATED_TIMEOUT = 1.hour
+ BUILD_PENDING_STUCK_TIMEOUT = 1.hour
+ BUILD_LOOKBACK = 5.days
+
+ def execute
+ Gitlab::AppLogger.info "#{self.class}: Cleaning stuck builds"
+
+ drop(running_timed_out_builds, failure_reason: :stuck_or_timeout_failure)
+
+ drop(
+ pending_builds(BUILD_PENDING_OUTDATED_TIMEOUT.ago),
+ failure_reason: :stuck_or_timeout_failure
+ )
+
+ drop(scheduled_timed_out_builds, failure_reason: :stale_schedule)
+
+ drop_stuck(
+ pending_builds(BUILD_PENDING_STUCK_TIMEOUT.ago),
+ failure_reason: :stuck_or_timeout_failure
+ )
+ end
+
+ private
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ # We're adding the ordering clause by `created_at` and `project_id`
+ # because we want to force the query planner to use the
+ # `ci_builds_gitlab_monitor_metrics` index all the time.
+ def pending_builds(timeout)
+ if Feature.enabled?(:ci_new_query_for_pending_stuck_jobs)
+ Ci::Build.pending.created_at_before(timeout).updated_at_before(timeout).order(created_at: :asc, project_id: :asc)
+ else
+ Ci::Build.pending.updated_before(lookback: BUILD_LOOKBACK.ago, timeout: timeout)
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def scheduled_timed_out_builds
+ Ci::Build.where(status: :scheduled).where( # rubocop: disable CodeReuse/ActiveRecord
+ 'ci_builds.scheduled_at IS NOT NULL AND ci_builds.scheduled_at < ?',
+ BUILD_SCHEDULED_OUTDATED_TIMEOUT.ago
+ )
+ end
+
+ def running_timed_out_builds
+ Ci::Build.running.where( # rubocop: disable CodeReuse/ActiveRecord
+ 'ci_builds.updated_at < ?',
+ BUILD_RUNNING_OUTDATED_TIMEOUT.ago
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/ci/update_build_queue_service.rb b/app/services/ci/update_build_queue_service.rb
index eea09e9ac67..c1cbf031ca1 100644
--- a/app/services/ci/update_build_queue_service.rb
+++ b/app/services/ci/update_build_queue_service.rb
@@ -99,15 +99,17 @@ module Ci
private
def tick_for(build, runners)
- runners = runners.with_recent_runner_queue
- runners = runners.with_tags if Feature.enabled?(:ci_preload_runner_tags, default_enabled: :yaml)
+ ::Gitlab::Database.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/339937') do
+ runners = runners.with_recent_runner_queue
+ runners = runners.with_tags if Feature.enabled?(:ci_preload_runner_tags, default_enabled: :yaml)
- metrics.observe_active_runners(-> { runners.to_a.size })
+ metrics.observe_active_runners(-> { runners.to_a.size })
- runners.each do |runner|
- metrics.increment_runner_tick(runner)
+ runners.each do |runner|
+ metrics.increment_runner_tick(runner)
- runner.pick_build!(build)
+ runner.pick_build!(build)
+ end
end
end
diff --git a/app/services/ci/update_pending_build_service.rb b/app/services/ci/update_pending_build_service.rb
new file mode 100644
index 00000000000..dcba06e60bf
--- /dev/null
+++ b/app/services/ci/update_pending_build_service.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module Ci
+ class UpdatePendingBuildService
+ VALID_PARAMS = %i[instance_runners_enabled].freeze
+
+ InvalidParamsError = Class.new(StandardError)
+ InvalidModelError = Class.new(StandardError)
+
+ def initialize(model, update_params)
+ @model = model
+ @update_params = update_params
+
+ validations!
+ end
+
+ def execute
+ return unless ::Feature.enabled?(:ci_pending_builds_maintain_shared_runners_data, @model, default_enabled: :yaml)
+
+ @model.pending_builds.each_batch do |relation|
+ relation.update_all(@update_params)
+ end
+ end
+
+ private
+
+ def validations!
+ validate_model! && validate_params!
+ end
+
+ def validate_model!
+ raise InvalidModelError unless @model.is_a?(::Project) || @model.is_a?(::Group)
+
+ true
+ end
+
+ def validate_params!
+ extra_params = @update_params.keys - VALID_PARAMS
+
+ raise InvalidParamsError, "Unvalid params: #{extra_params.join(', ')}" unless extra_params.empty?
+
+ true
+ end
+ end
+end
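
A rough usage sketch for the new Ci::UpdatePendingBuildService (the group variable is assumed here; the concrete caller added later in this diff is Groups::UpdateSharedRunnersService):

# Propagate a group's shared-runner toggle to its queued builds in batches.
# Keys outside VALID_PARAMS raise InvalidParamsError; anything other than a
# Project or Group raises InvalidModelError.
Ci::UpdatePendingBuildService.new(group, { instance_runners_enabled: false }).execute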
diff --git a/app/services/clusters/agents/refresh_authorization_service.rb b/app/services/clusters/agents/refresh_authorization_service.rb
new file mode 100644
index 00000000000..a9e3340dbf5
--- /dev/null
+++ b/app/services/clusters/agents/refresh_authorization_service.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Agents
+ class RefreshAuthorizationService
+ include Gitlab::Utils::StrongMemoize
+
+ AUTHORIZED_ENTITY_LIMIT = 100
+
+ delegate :project, to: :agent, private: true
+ delegate :root_ancestor, to: :project, private: true
+
+ def initialize(agent, config:)
+ @agent = agent
+ @config = config
+ end
+
+ def execute
+ refresh_projects!
+ refresh_groups!
+
+ true
+ end
+
+ private
+
+ attr_reader :agent, :config
+
+ def refresh_projects!
+ if allowed_project_configurations.present?
+ project_ids = allowed_project_configurations.map { |config| config.fetch(:project_id) }
+
+ agent.with_lock do
+ agent.project_authorizations.upsert_all(allowed_project_configurations, unique_by: [:agent_id, :project_id])
+ agent.project_authorizations.where.not(project_id: project_ids).delete_all # rubocop: disable CodeReuse/ActiveRecord
+ end
+ else
+ agent.project_authorizations.delete_all(:delete_all)
+ end
+ end
+
+ def refresh_groups!
+ if allowed_group_configurations.present?
+ group_ids = allowed_group_configurations.map { |config| config.fetch(:group_id) }
+
+ agent.with_lock do
+ agent.group_authorizations.upsert_all(allowed_group_configurations, unique_by: [:agent_id, :group_id])
+ agent.group_authorizations.where.not(group_id: group_ids).delete_all # rubocop: disable CodeReuse/ActiveRecord
+ end
+ else
+ agent.group_authorizations.delete_all(:delete_all)
+ end
+ end
+
+ def allowed_project_configurations
+ strong_memoize(:allowed_project_configurations) do
+ project_entries = extract_config_entries(entity: 'projects')
+
+ if project_entries
+ allowed_projects.where_full_path_in(project_entries.keys).map do |project|
+ { project_id: project.id, config: project_entries[project.full_path] }
+ end
+ end
+ end
+ end
+
+ def allowed_group_configurations
+ strong_memoize(:allowed_group_configurations) do
+ group_entries = extract_config_entries(entity: 'groups')
+
+ if group_entries
+ allowed_groups.where_full_path_in(group_entries.keys).map do |group|
+ { group_id: group.id, config: group_entries[group.full_path] }
+ end
+ end
+ end
+ end
+
+ def extract_config_entries(entity:)
+ config.dig('ci_access', entity)
+ &.first(AUTHORIZED_ENTITY_LIMIT)
+ &.index_by { |config| config.delete('id') }
+ end
+
+ def allowed_projects
+ if group_root_ancestor?
+ root_ancestor.all_projects
+ else
+ ::Project.none
+ end
+ end
+
+ def allowed_groups
+ if group_root_ancestor?
+ root_ancestor.self_and_descendants
+ else
+ ::Group.none
+ end
+ end
+
+ def group_root_ancestor?
+ root_ancestor.group?
+ end
+ end
+ end
+end
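
A sketch of how this service consumes an agent configuration; the hash literal is illustrative, inferred from extract_config_entries, and agent is assumed to be a Clusters::Agent whose project sits under a group:

# ci_access entries are keyed by 'id' (a full path); extract_config_entries
# indexes them by that path before matching against allowed projects/groups.
config = {
  'ci_access' => {
    'projects' => [{ 'id' => 'group-path/project-path' }],
    'groups' => [{ 'id' => 'group-path/subgroup-path' }]
  }
}

Clusters::Agents::RefreshAuthorizationService.new(agent, config: config).execute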
diff --git a/app/services/concerns/members/bulk_create_users.rb b/app/services/concerns/members/bulk_create_users.rb
new file mode 100644
index 00000000000..4498f40c396
--- /dev/null
+++ b/app/services/concerns/members/bulk_create_users.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+module Members
+ module BulkCreateUsers
+ extend ActiveSupport::Concern
+
+ included do
+ class << self
+ def add_users(source, users, access_level, current_user: nil, expires_at: nil)
+ return [] unless users.present?
+
+ emails, users, existing_members = parse_users_list(source, users)
+
+ Member.transaction do
+ (emails + users).map! do |user|
+ new(source,
+ user,
+ access_level,
+ existing_members: existing_members,
+ current_user: current_user,
+ expires_at: expires_at)
+ .execute
+ end
+ end
+ end
+
+ private
+
+ def parse_users_list(source, list)
+ emails = []
+ user_ids = []
+ users = []
+ existing_members = {}
+
+ list.each do |item|
+ case item
+ when User
+ users << item
+ when Integer
+ user_ids << item
+ when /\A\d+\Z/
+ user_ids << item.to_i
+ when Devise.email_regexp
+ emails << item
+ end
+ end
+
+ if user_ids.present?
+ # the below will automatically discard invalid user_ids
+ users.concat(User.id_in(user_ids))
+ # helps not have to perform another query per user id to see if the member exists later on when fetching
+ existing_members = source.members_and_requesters.where(user_id: user_ids).index_by(&:user_id) # rubocop:disable CodeReuse/ActiveRecord
+ end
+
+ users.uniq! # de-duplicate just in case as there is no controlling if user records and ids are sent multiple times
+
+ [emails, users, existing_members]
+ end
+ end
+ end
+
+ def initialize(source, user, access_level, **args)
+ super
+
+ @existing_members = args[:existing_members] || (raise ArgumentError, "existing_members must be included in the args hash")
+ end
+
+ private
+
+ attr_reader :existing_members
+
+ def find_or_initialize_member_by_user
+ existing_members[user.id] || source.members.build(user_id: user.id)
+ end
+ end
+end
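
A hedged sketch of calling through the class method this concern adds; SomeCreatorService stands in for whichever creator service includes Members::BulkCreateUsers, and project, user and admin are assumed records:

# One call may mix User records, numeric ids, id strings and e-mail addresses;
# ids that match no user are silently discarded by User.id_in.
SomeCreatorService.add_users(
  project,
  [user, 42, '43', 'new.person@example.com'],
  Gitlab::Access::DEVELOPER,
  current_user: admin
)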
diff --git a/app/services/customer_relations/organizations/base_service.rb b/app/services/customer_relations/organizations/base_service.rb
new file mode 100644
index 00000000000..63261534b37
--- /dev/null
+++ b/app/services/customer_relations/organizations/base_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module CustomerRelations
+ module Organizations
+ class BaseService < ::BaseGroupService
+ private
+
+ def allowed?
+ current_user&.can?(:admin_organization, group)
+ end
+
+ def error(message)
+ ServiceResponse.error(message: message)
+ end
+ end
+ end
+end
diff --git a/app/services/customer_relations/organizations/create_service.rb b/app/services/customer_relations/organizations/create_service.rb
new file mode 100644
index 00000000000..9c223796eaf
--- /dev/null
+++ b/app/services/customer_relations/organizations/create_service.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module CustomerRelations
+ module Organizations
+ class CreateService < BaseService
+ # returns the created organization
+ def execute
+ return error_no_permissions unless allowed?
+
+ params[:group_id] = group.id
+
+ organization = Organization.create(params)
+
+ return error_creating(organization) unless organization.persisted?
+
+ ServiceResponse.success(payload: organization)
+ end
+
+ private
+
+ def error_no_permissions
+ error('You have insufficient permissions to create an organization for this group')
+ end
+
+ def error_creating(organization)
+ error(organization&.errors&.full_messages || 'Failed to create organization')
+ end
+ end
+ end
+end
diff --git a/app/services/customer_relations/organizations/update_service.rb b/app/services/customer_relations/organizations/update_service.rb
new file mode 100644
index 00000000000..9d8f908db14
--- /dev/null
+++ b/app/services/customer_relations/organizations/update_service.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module CustomerRelations
+ module Organizations
+ class UpdateService < BaseService
+ def execute(organization)
+ return error_no_permissions unless allowed?
+ return error_updating(organization) unless organization.update(params)
+
+ ServiceResponse.success(payload: organization)
+ end
+
+ private
+
+ def error_no_permissions
+ error('You have insufficient permissions to update an organization for this group')
+ end
+
+ def error_updating(organization)
+ error(organization&.errors&.full_messages || 'Failed to update organization')
+ end
+ end
+ end
+end
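
A usage sketch for the two new organization services; the params hashes are assumptions, since the attribute set is not shown in this diff:

# Both services authorize via :admin_organization on the group and return a
# ServiceResponse whose payload is the organization on success.
create = CustomerRelations::Organizations::CreateService
  .new(group: group, current_user: user, params: { name: 'Example Org' })
  .execute

CustomerRelations::Organizations::UpdateService
  .new(group: group, current_user: user, params: { name: 'Renamed Org' })
  .execute(create.payload) if create.success?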
diff --git a/app/services/dependency_proxy/image_ttl_group_policies/update_service.rb b/app/services/dependency_proxy/image_ttl_group_policies/update_service.rb
new file mode 100644
index 00000000000..3c0b6412dc5
--- /dev/null
+++ b/app/services/dependency_proxy/image_ttl_group_policies/update_service.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module DependencyProxy
+ module ImageTtlGroupPolicies
+ class UpdateService < BaseContainerService
+ include Gitlab::Utils::StrongMemoize
+
+ ALLOWED_ATTRIBUTES = %i[enabled ttl].freeze
+
+ def execute
+ return ServiceResponse.error(message: 'Access Denied', http_status: 403) unless allowed?
+ return ServiceResponse.error(message: 'Dependency proxy image TTL Policy not found', http_status: 404) unless dependency_proxy_image_ttl_policy
+
+ if dependency_proxy_image_ttl_policy.update(dependency_proxy_image_ttl_policy_params)
+ ServiceResponse.success(payload: { dependency_proxy_image_ttl_policy: dependency_proxy_image_ttl_policy })
+ else
+ ServiceResponse.error(
+ message: dependency_proxy_image_ttl_policy.errors.full_messages.to_sentence || 'Bad request',
+ http_status: 400
+ )
+ end
+ end
+
+ private
+
+ def dependency_proxy_image_ttl_policy
+ strong_memoize(:dependency_proxy_image_ttl_policy) do
+ container.dependency_proxy_image_ttl_policy
+ end
+ end
+
+ def allowed?
+ Ability.allowed?(current_user, :admin_dependency_proxy, container)
+ end
+
+ def dependency_proxy_image_ttl_policy_params
+ params.slice(*ALLOWED_ATTRIBUTES)
+ end
+ end
+ end
+end
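
A minimal sketch of invoking the TTL policy service; group and user are assumed, and only the enabled/ttl keys survive the ALLOWED_ATTRIBUTES slice:

# Returns a ServiceResponse: 403 without :admin_dependency_proxy permission,
# 404 when the group has no policy record, 400 on validation failure.
result = DependencyProxy::ImageTtlGroupPolicies::UpdateService
  .new(container: group, current_user: user, params: { enabled: true, ttl: 90 })
  .execute

result.payload[:dependency_proxy_image_ttl_policy] if result.success?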
diff --git a/app/services/design_management/delete_designs_service.rb b/app/services/design_management/delete_designs_service.rb
index 7f76bcc5626..9ed03a994c4 100644
--- a/app/services/design_management/delete_designs_service.rb
+++ b/app/services/design_management/delete_designs_service.rb
@@ -17,6 +17,7 @@ module DesignManagement
version = delete_designs!
EventCreateService.new.destroy_designs(designs, current_user)
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_removed_action(author: current_user)
+ TodosDestroyer::DestroyedDesignsWorker.perform_async(designs.map(&:id))
success(version: version)
end
diff --git a/app/services/draft_notes/publish_service.rb b/app/services/draft_notes/publish_service.rb
index 3a1db16aaf4..a82a6e22a5a 100644
--- a/app/services/draft_notes/publish_service.rb
+++ b/app/services/draft_notes/publish_service.rb
@@ -34,22 +34,24 @@ module DraftNotes
created_notes = draft_notes.map do |draft_note|
draft_note.review = review
- create_note_from_draft(draft_note, skip_capture_diff_note_position: true)
+ create_note_from_draft(draft_note, skip_capture_diff_note_position: true, skip_keep_around_commits: true)
end
capture_diff_note_positions(created_notes)
+ keep_around_commits(created_notes)
draft_notes.delete_all
set_reviewed
notification_service.async.new_review(review)
MergeRequests::ResolvedDiscussionNotificationService.new(project: project, current_user: current_user).execute(merge_request)
end
- def create_note_from_draft(draft, skip_capture_diff_note_position: false)
+ def create_note_from_draft(draft, skip_capture_diff_note_position: false, skip_keep_around_commits: false)
# Make sure the diff file is unfolded in order to find the correct line
# codes.
draft.diff_file&.unfold_diff_lines(draft.original_position)
- note = Notes::CreateService.new(draft.project, draft.author, draft.publish_params).execute(
+ note_params = draft.publish_params.merge(skip_keep_around_commits: skip_keep_around_commits)
+ note = Notes::CreateService.new(draft.project, draft.author, note_params).execute(
skip_capture_diff_note_position: skip_capture_diff_note_position
)
@@ -86,5 +88,17 @@ module DraftNotes
capture_service.execute(note.discussion) if note.diff_note? && note.start_of_discussion?
end
end
+
+ def keep_around_commits(notes)
+ shas = notes.flat_map do |note|
+ note.shas if note.diff_note?
+ end.uniq
+
+ # We are allowing this since gitaly call will be created for each sha and
+ # even though they're unique, there will still be multiple Gitaly calls.
+ Gitlab::GitalyClient.allow_n_plus_1_calls do
+ project.repository.keep_around(*shas)
+ end
+ end
end
end
diff --git a/app/services/emails/destroy_service.rb b/app/services/emails/destroy_service.rb
index 288bee84ef7..d10833e66cb 100644
--- a/app/services/emails/destroy_service.rb
+++ b/app/services/emails/destroy_service.rb
@@ -3,14 +3,14 @@
module Emails
class DestroyService < ::Emails::BaseService
def execute(email)
- email.destroy && update_secondary_emails!
+ email.destroy && update_secondary_emails!(email.email)
end
private
- def update_secondary_emails!
+ def update_secondary_emails!(deleted_email)
result = ::Users::UpdateService.new(@current_user, user: @user).execute do |user|
- user.update_secondary_emails!
+ user.unset_secondary_emails_matching_deleted_email!(deleted_email)
end
result[:status] == :success
diff --git a/app/services/environments/auto_stop_service.rb b/app/services/environments/auto_stop_service.rb
index 4e3aec64283..686ba050326 100644
--- a/app/services/environments/auto_stop_service.rb
+++ b/app/services/environments/auto_stop_service.rb
@@ -28,11 +28,17 @@ module Environments
private
def stop_in_batch
- environments = Environment.auto_stoppable(BATCH_SIZE)
+ environments = Environment.preload_project.select(:id, :project_id).auto_stoppable(BATCH_SIZE)
- return false unless environments.exists?
+ return false if environments.empty?
- Environments::StopService.execute_in_batch(environments)
+ Environments::AutoStopWorker.bulk_perform_async_with_contexts(
+ environments,
+ arguments_proc: -> (environment) { environment.id },
+ context_proc: -> (environment) { { project: environment.project } }
+ )
+
+ true
end
end
end
diff --git a/app/services/environments/stop_service.rb b/app/services/environments/stop_service.rb
index 089aea11296..d9c66bd13fe 100644
--- a/app/services/environments/stop_service.rb
+++ b/app/services/environments/stop_service.rb
@@ -22,22 +22,6 @@ module Environments
merge_request.environments.each { |environment| execute(environment) }
end
- ##
- # This method is for stopping multiple environments in a batch style.
- # The maximum acceptable count of environments is roughly 5000. Please
- # apply acceptable `LIMIT` clause to the `environments` relation.
- def self.execute_in_batch(environments)
- stop_actions = environments.stop_actions.load
-
- environments.update_all(auto_stop_at: nil, state: 'stopped')
-
- stop_actions.each do |stop_action|
- stop_action.play(stop_action.user)
- rescue StandardError => e
- Gitlab::ErrorTracking.track_exception(e, deployable_id: stop_action.id)
- end
- end
-
private
def environments
diff --git a/app/services/error_tracking/collect_error_service.rb b/app/services/error_tracking/collect_error_service.rb
index bc1f238d81f..477453a693e 100644
--- a/app/services/error_tracking/collect_error_service.rb
+++ b/app/services/error_tracking/collect_error_service.rb
@@ -9,18 +9,18 @@ module ErrorTracking
error = project.error_tracking_errors.report_error(
name: exception['type'], # Example: ActionView::MissingTemplate
description: exception['value'], # Example: Missing template posts/show in...
- actor: event['transaction'], # Example: PostsController#show
+ actor: actor, # Example: PostsController#show
platform: event['platform'], # Example: ruby
- timestamp: event['timestamp']
+ timestamp: timestamp
)
# The payload field contains all the data on error including stacktrace in jsonb.
# Together with occured_at these are 2 main attributes that we need to save here.
error.events.create!(
environment: event['environment'],
- description: exception['type'],
+ description: exception['value'],
level: event['level'],
- occurred_at: event['timestamp'],
+ occurred_at: timestamp,
payload: event
)
end
@@ -34,5 +34,29 @@ module ErrorTracking
def exception
event['exception']['values'].first
end
+
+ def actor
+ return event['transaction'] if event['transaction']
+
+ # Some SDK do not have transaction attribute.
+ # So we build it by combining function name and module name from
+ # the last item in stacktrace.
+ last_line = exception.dig('stacktrace', 'frames').last
+
+ "#{last_line['function']}(#{last_line['module']})"
+ end
+
+ def timestamp
+ return @timestamp if @timestamp
+
+ @timestamp = (event['timestamp'] || Time.zone.now)
+
+ # Some SDK send timestamp in numeric format like '1630945472.13'.
+ if @timestamp.to_s =~ /\A\d+(\.\d+)?\z/
+ @timestamp = Time.zone.at(@timestamp.to_f)
+ end
+
+ @timestamp
+ end
end
end
diff --git a/app/services/error_tracking/list_issues_service.rb b/app/services/error_tracking/list_issues_service.rb
index 5ddba748fd4..86c7791e759 100644
--- a/app/services/error_tracking/list_issues_service.rb
+++ b/app/services/error_tracking/list_issues_service.rb
@@ -74,7 +74,9 @@ module ErrorTracking
# We are going to support more options in the future.
# For now we implement the bare minimum for rendering the list in UI.
filter_opts = {
- status: opts[:issue_status]
+ status: opts[:issue_status],
+ sort: opts[:sort],
+ limit: opts[:limit]
}
errors = ErrorTracking::ErrorsFinder.new(current_user, project, filter_opts).execute
diff --git a/app/services/feature_flags/base_service.rb b/app/services/feature_flags/base_service.rb
index d041703803b..9ae9ab4de63 100644
--- a/app/services/feature_flags/base_service.rb
+++ b/app/services/feature_flags/base_service.rb
@@ -48,10 +48,11 @@ module FeatureFlags
end
end
- def created_scope_message(scope)
- "Created rule #{scope.environment_scope} "\
- "and set it as #{scope.active ? "active" : "inactive"} "\
- "with strategies #{scope.strategies}."
+ def created_strategy_message(strategy)
+ scopes = strategy.scopes
+ .map { |scope| %Q("#{scope.environment_scope}") }
+ .join(', ')
+ %Q(Created strategy "#{strategy.name}" with scopes #{scopes}.)
end
def feature_flag_by_name
diff --git a/app/services/feature_flags/create_service.rb b/app/services/feature_flags/create_service.rb
index 5111f914447..65f8f8e33f6 100644
--- a/app/services/feature_flags/create_service.rb
+++ b/app/services/feature_flags/create_service.rb
@@ -24,8 +24,8 @@ module FeatureFlags
def audit_message(feature_flag)
message_parts = ["Created feature flag #{feature_flag.name} with description \"#{feature_flag.description}\"."]
- message_parts += feature_flag.scopes.map do |scope|
- created_scope_message(scope)
+ message_parts += feature_flag.strategies.map do |strategy|
+ created_strategy_message(strategy)
end
message_parts.join(" ")
diff --git a/app/services/feature_flags/update_service.rb b/app/services/feature_flags/update_service.rb
index 01e4f661d75..ccfd1b57d44 100644
--- a/app/services/feature_flags/update_service.rb
+++ b/app/services/feature_flags/update_service.rb
@@ -2,10 +2,9 @@
module FeatureFlags
class UpdateService < FeatureFlags::BaseService
- AUDITABLE_SCOPE_ATTRIBUTES_HUMAN_NAMES = {
- 'active' => 'active state',
- 'environment_scope' => 'environment scope',
- 'strategies' => 'strategies'
+ AUDITABLE_STRATEGY_ATTRIBUTES_HUMAN_NAMES = {
+ 'scopes' => 'environment scopes',
+ 'parameters' => 'parameters'
}.freeze
def execute(feature_flag)
@@ -41,7 +40,7 @@ module FeatureFlags
def audit_message(feature_flag)
changes = changed_attributes_messages(feature_flag)
- changes += changed_scopes_messages(feature_flag)
+ changes += changed_strategies_messages(feature_flag)
return if changes.empty?
@@ -56,29 +55,30 @@ module FeatureFlags
end
end
- def changed_scopes_messages(feature_flag)
- feature_flag.scopes.map do |scope|
- if scope.new_record?
- created_scope_message(scope)
- elsif scope.marked_for_destruction?
- deleted_scope_message(scope)
+ def changed_strategies_messages(feature_flag)
+ feature_flag.strategies.map do |strategy|
+ if strategy.new_record?
+ created_strategy_message(strategy)
+ elsif strategy.marked_for_destruction?
+ deleted_strategy_message(strategy)
else
- updated_scope_message(scope)
+ updated_strategy_message(strategy)
end
- end.compact # updated_scope_message can return nil if nothing has been changed
+ end.compact # updated_strategy_message can return nil if nothing has been changed
end
- def deleted_scope_message(scope)
- "Deleted rule #{scope.environment_scope}."
+ def deleted_strategy_message(strategy)
+ scopes = strategy.scopes.map { |scope| scope.environment_scope }.join(', ')
+ "Deleted strategy #{strategy.name} with environment scopes #{scopes}."
end
- def updated_scope_message(scope)
- changes = scope.changes.slice(*AUDITABLE_SCOPE_ATTRIBUTES_HUMAN_NAMES.keys)
+ def updated_strategy_message(strategy)
+ changes = strategy.changes.slice(*AUDITABLE_STRATEGY_ATTRIBUTES_HUMAN_NAMES.keys)
return if changes.empty?
- message = "Updated rule #{scope.environment_scope} "
+ message = "Updated strategy #{strategy.name} "
message += changes.map do |attribute_name, change|
- name = AUDITABLE_SCOPE_ATTRIBUTES_HUMAN_NAMES[attribute_name]
+ name = AUDITABLE_STRATEGY_ATTRIBUTES_HUMAN_NAMES[attribute_name]
"#{name} from #{change.first} to #{change.second}"
end.join(' ')
diff --git a/app/services/git/base_hooks_service.rb b/app/services/git/base_hooks_service.rb
index aee2f685e97..63f3f73905a 100644
--- a/app/services/git/base_hooks_service.rb
+++ b/app/services/git/base_hooks_service.rb
@@ -25,17 +25,13 @@ module Git
raise NotImplementedError, "Please implement #{self.class}##{__method__}"
end
- # The changeset, ordered with the newest commit last
- def commits
- raise NotImplementedError, "Please implement #{self.class}##{__method__}"
- end
-
+ # This should return PROCESS_COMMIT_LIMIT commits, ordered with newest last
def limited_commits
- @limited_commits ||= commits.last(PROCESS_COMMIT_LIMIT)
+ raise NotImplementedError, "Please implement #{self.class}##{__method__}"
end
def commits_count
- commits.count
+ raise NotImplementedError, "Please implement #{self.class}##{__method__}"
end
def event_message
diff --git a/app/services/git/branch_hooks_service.rb b/app/services/git/branch_hooks_service.rb
index 7a22d7ffcdf..9b113be5465 100644
--- a/app/services/git/branch_hooks_service.rb
+++ b/app/services/git/branch_hooks_service.rb
@@ -18,20 +18,25 @@ module Git
:push_hooks
end
- def commits
- strong_memoize(:commits) do
+ def limited_commits
+ strong_memoize(:limited_commits) { threshold_commits.last(PROCESS_COMMIT_LIMIT) }
+ end
+
+ # Taking limit+1 commits allows us to detect when the limit is in effect
+ def threshold_commits
+ strong_memoize(:threshold_commits) do
if creating_default_branch?
# The most recent PROCESS_COMMIT_LIMIT commits in the default branch.
# They are returned newest-to-oldest, but we need to present them oldest-to-newest
- project.repository.commits(newrev, limit: PROCESS_COMMIT_LIMIT).reverse
+ project.repository.commits(newrev, limit: PROCESS_COMMIT_LIMIT + 1).reverse!
elsif creating_branch?
# Use the pushed commits that aren't reachable by the default branch
# as a heuristic. This may include more commits than are actually
# pushed, but that shouldn't matter because we check for existing
# cross-references later.
- project.repository.commits_between(project.default_branch, newrev)
+ project.repository.commits_between(project.default_branch, newrev, limit: PROCESS_COMMIT_LIMIT + 1)
elsif updating_branch?
- project.repository.commits_between(oldrev, newrev)
+ project.repository.commits_between(oldrev, newrev, limit: PROCESS_COMMIT_LIMIT + 1)
else # removing branch
[]
end
@@ -39,9 +44,21 @@ module Git
end
def commits_count
- return count_commits_in_branch if creating_default_branch?
+ strong_memoize(:commits_count) do
+ next threshold_commits.count if
+ strong_memoized?(:threshold_commits) &&
+ threshold_commits.count <= PROCESS_COMMIT_LIMIT
- super
+ if creating_default_branch?
+ project.repository.commit_count_for_ref(ref)
+ elsif creating_branch?
+ project.repository.count_commits_between(project.default_branch, newrev)
+ elsif updating_branch?
+ project.repository.count_commits_between(oldrev, newrev)
+ else # removing branch
+ 0
+ end
+ end
end
override :invalidated_file_types
@@ -179,12 +196,6 @@ module Git
creating_branch? && default_branch?
end
- def count_commits_in_branch
- strong_memoize(:count_commits_in_branch) do
- project.repository.commit_count_for_ref(ref)
- end
- end
-
def default_branch?
strong_memoize(:default_branch) do
[nil, branch_name].include?(project.default_branch)
diff --git a/app/services/git/tag_hooks_service.rb b/app/services/git/tag_hooks_service.rb
index d83924fec28..01174d8a942 100644
--- a/app/services/git/tag_hooks_service.rb
+++ b/app/services/git/tag_hooks_service.rb
@@ -8,10 +8,14 @@ module Git
:tag_push_hooks
end
- def commits
+ def limited_commits
[tag_commit].compact
end
+ def commits_count
+ limited_commits.count
+ end
+
def event_message
tag&.message
end
diff --git a/app/services/groups/open_issues_count_service.rb b/app/services/groups/open_issues_count_service.rb
index 17cf3d38987..c18d239998b 100644
--- a/app/services/groups/open_issues_count_service.rb
+++ b/app/services/groups/open_issues_count_service.rb
@@ -3,11 +3,15 @@
module Groups
# Service class for counting and caching the number of open issues of a group.
class OpenIssuesCountService < Groups::CountService
- PUBLIC_COUNT_KEY = 'group_public_open_issues_count'
- TOTAL_COUNT_KEY = 'group_total_open_issues_count'
+ # TOTAL_COUNT_KEY includes confidential and hidden issues (admin)
+ # TOTAL_COUNT_WITHOUT_HIDDEN_KEY includes confidential issues but not hidden issues (reporter and above)
+ # PUBLIC_COUNT_WITHOUT_HIDDEN_KEY does not include confidential or hidden issues (guest)
+ TOTAL_COUNT_KEY = 'group_open_issues_including_hidden_count'
+ TOTAL_COUNT_WITHOUT_HIDDEN_KEY = 'group_open_issues_without_hidden_count'
+ PUBLIC_COUNT_WITHOUT_HIDDEN_KEY = 'group_open_public_issues_without_hidden_count'
def clear_all_cache_keys
- [cache_key(PUBLIC_COUNT_KEY), cache_key(TOTAL_COUNT_KEY)].each do |key|
+ [cache_key(TOTAL_COUNT_KEY), cache_key(TOTAL_COUNT_WITHOUT_HIDDEN_KEY), cache_key(PUBLIC_COUNT_WITHOUT_HIDDEN_KEY)].each do |key|
Rails.cache.delete(key)
end
end
@@ -15,7 +19,19 @@ module Groups
private
def cache_key_name
- public_only? ? PUBLIC_COUNT_KEY : TOTAL_COUNT_KEY
+ if include_hidden?
+ TOTAL_COUNT_KEY
+ elsif public_only?
+ PUBLIC_COUNT_WITHOUT_HIDDEN_KEY
+ else
+ TOTAL_COUNT_WITHOUT_HIDDEN_KEY
+ end
+ end
+
+ def include_hidden?
+ strong_memoize(:user_is_admin) do
+ user&.can_admin_all_resources?
+ end
end
def public_only?
@@ -35,7 +51,8 @@ module Groups
state: 'opened',
non_archived: true,
include_subgroups: true,
- public_only: public_only?
+ public_only: public_only?,
+ include_hidden: include_hidden?
).execute
end
diff --git a/app/services/groups/update_shared_runners_service.rb b/app/services/groups/update_shared_runners_service.rb
index 639c5bf6ae0..eb6b46a5613 100644
--- a/app/services/groups/update_shared_runners_service.rb
+++ b/app/services/groups/update_shared_runners_service.rb
@@ -8,6 +8,7 @@ module Groups
validate_params
update_shared_runners
+ update_pending_builds!
success
@@ -26,5 +27,13 @@ module Groups
def update_shared_runners
group.update_shared_runners_setting!(params[:shared_runners_setting])
end
+
+ def update_pending_builds!
+ return unless group.previous_changes.include?('shared_runners_enabled')
+
+ update_params = { instance_runners_enabled: group.shared_runners_enabled }
+
+ ::Ci::UpdatePendingBuildService.new(group, update_params).execute
+ end
end
end
diff --git a/app/services/issuable/bulk_update_service.rb b/app/services/issuable/bulk_update_service.rb
index cd32cd78728..238f5ebddae 100644
--- a/app/services/issuable/bulk_update_service.rb
+++ b/app/services/issuable/bulk_update_service.rb
@@ -57,7 +57,11 @@ module Issuable
items.each do |issuable|
next unless can?(current_user, :"update_#{type}", issuable)
- update_class.new(**update_class.constructor_container_arg(issuable.issuing_parent), current_user: current_user, params: params).execute(issuable)
+ update_class.new(
+ **update_class.constructor_container_arg(issuable.issuing_parent),
+ current_user: current_user,
+ params: dup_params
+ ).execute(issuable)
end
items
@@ -78,6 +82,19 @@ module Issuable
.includes_for_bulk_update
end
+ # Duplicates params and its top-level values
+ # We cannot use deep_dup because duplicated ActiveRecord objects would
+ # behave like new records with no id assigned
+ def dup_params
+ dup = HashWithIndifferentAccess.new
+
+ params.each do |key, value|
+ dup[key] = value.is_a?(ActiveRecord::Base) ? value : value.dup
+ end
+
+ dup
+ end
+
def response_success(message: nil, payload: nil)
ServiceResponse.success(message: message, payload: payload)
end
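
dup_params above copies the params hash and each of its top-level values so that mutations made while updating one issuable cannot leak into the next, while ActiveRecord objects are passed through untouched, since duplicating a persisted record would behave like a new record with no id. A plain-Ruby sketch of that copying rule, with PersistedRecord as a hypothetical stand-in for ActiveRecord::Base and a plain Hash instead of HashWithIndifferentAccess:

    PersistedRecord = Struct.new(:id)

    def dup_top_level(params)
      params.each_with_object({}) do |(key, value), copy|
        # keep record objects as-is, shallow-copy everything else
        copy[key] = value.is_a?(PersistedRecord) ? value : value.dup
      end
    end

    original = { add_label_ids: [1, 2], milestone: PersistedRecord.new(42) }
    copy = dup_top_level(original)

    copy[:add_label_ids] << 3
    p original[:add_label_ids]                      # [1, 2] -- unchanged
    p copy[:milestone].equal?(original[:milestone]) # true   -- same record object
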
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 0984238517e..59e521853de 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -51,9 +51,12 @@ class IssuableBaseService < ::BaseProjectService
params.delete(:canonical_issue_id)
params.delete(:project)
params.delete(:discussion_locked)
- params.delete(:confidential)
end
+ # The confidential attribute is a special type of metadata and must remain settable
+ # by non-members on issues in public projects so that security issues can be reported as confidential.
+ params.delete(:confidential) unless can?(current_user, :set_confidentiality, issuable)
+
filter_assignees(issuable)
filter_milestone
filter_labels
diff --git a/app/services/issue_rebalancing_service.rb b/app/services/issue_rebalancing_service.rb
deleted file mode 100644
index 142d287370f..00000000000
--- a/app/services/issue_rebalancing_service.rb
+++ /dev/null
@@ -1,136 +0,0 @@
-# frozen_string_literal: true
-
-class IssueRebalancingService
- MAX_ISSUE_COUNT = 10_000
- BATCH_SIZE = 100
- SMALLEST_BATCH_SIZE = 5
- RETRIES_LIMIT = 3
- TooManyIssues = Class.new(StandardError)
-
- TIMING_CONFIGURATION = [
- [0.1.seconds, 0.05.seconds], # short timings, lock_timeout: 100ms, sleep after LockWaitTimeout: 50ms
- [0.5.seconds, 0.05.seconds],
- [1.second, 0.5.seconds],
- [1.second, 0.5.seconds],
- [5.seconds, 1.second]
- ].freeze
-
- def initialize(projects_collection)
- @root_namespace = projects_collection.take.root_namespace # rubocop:disable CodeReuse/ActiveRecord
- @base = Issue.in_projects(projects_collection)
- end
-
- def execute
- return unless Feature.enabled?(:rebalance_issues, root_namespace)
-
- raise TooManyIssues, "#{issue_count} issues" if issue_count > MAX_ISSUE_COUNT
-
- start = RelativePositioning::START_POSITION - (gaps / 2) * gap_size
-
- if Feature.enabled?(:issue_rebalancing_optimization)
- Issue.transaction do
- assign_positions(start, indexed_ids)
- .sort_by(&:first)
- .each_slice(BATCH_SIZE) do |pairs_with_position|
- if Feature.enabled?(:issue_rebalancing_with_retry)
- update_positions_with_retry(pairs_with_position, 'rebalance issue positions in batches ordered by id')
- else
- update_positions(pairs_with_position, 'rebalance issue positions in batches ordered by id')
- end
- end
- end
- else
- Issue.transaction do
- indexed_ids.each_slice(BATCH_SIZE) do |pairs|
- pairs_with_position = assign_positions(start, pairs)
-
- if Feature.enabled?(:issue_rebalancing_with_retry)
- update_positions_with_retry(pairs_with_position, 'rebalance issue positions')
- else
- update_positions(pairs_with_position, 'rebalance issue positions')
- end
- end
- end
- end
- end
-
- private
-
- attr_reader :root_namespace, :base
-
- # rubocop: disable CodeReuse/ActiveRecord
- def indexed_ids
- base.reorder(:relative_position, :id).pluck(:id).each_with_index
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def assign_positions(start, pairs)
- pairs.map do |id, index|
- [id, start + (index * gap_size)]
- end
- end
-
- def update_positions_with_retry(pairs_with_position, query_name)
- retries = 0
- batch_size = pairs_with_position.size
-
- until pairs_with_position.empty?
- begin
- update_positions(pairs_with_position.first(batch_size), query_name)
- pairs_with_position = pairs_with_position.drop(batch_size)
- retries = 0
- rescue ActiveRecord::StatementTimeout, ActiveRecord::QueryCanceled => ex
- raise ex if batch_size < SMALLEST_BATCH_SIZE
-
- if (retries += 1) == RETRIES_LIMIT
- # shrink the batch size in half when RETRIES limit is reached and update still fails perhaps because batch size is still too big
- batch_size = (batch_size / 2).to_i
- retries = 0
- end
-
- retry
- end
- end
- end
-
- def update_positions(pairs_with_position, query_name)
- values = pairs_with_position.map do |id, index|
- "(#{id}, #{index})"
- end.join(', ')
-
- Gitlab::Database::WithLockRetries.new(timing_configuration: TIMING_CONFIGURATION, klass: self.class).run do
- run_update_query(values, query_name)
- end
- end
-
- def run_update_query(values, query_name)
- Issue.connection.exec_query(<<~SQL, query_name)
- WITH cte(cte_id, new_pos) AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
- SELECT *
- FROM (VALUES #{values}) as t (id, pos)
- )
- UPDATE #{Issue.table_name}
- SET relative_position = cte.new_pos
- FROM cte
- WHERE cte_id = id
- SQL
- end
-
- def issue_count
- @issue_count ||= base.count
- end
-
- def gaps
- issue_count - 1
- end
-
- def gap_size
- # We could try to split the available range over the number of gaps we need,
- # but IDEAL_DISTANCE * MAX_ISSUE_COUNT is only 0.1% of the available range,
- # so we are guaranteed not to exhaust it by using this static value.
- #
- # If we raise MAX_ISSUE_COUNT or IDEAL_DISTANCE significantly, this may
- # change!
- RelativePositioning::IDEAL_DISTANCE
- end
-end
diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb
index 5e0a86fdeee..6dce9fd6e73 100644
--- a/app/services/issues/base_service.rb
+++ b/app/services/issues/base_service.rb
@@ -34,6 +34,13 @@ module Issues
private
+ def find_work_item_type_id(issue_type)
+ work_item_type = WorkItem::Type.default_by_type(issue_type)
+ work_item_type ||= WorkItem::Type.default_issue_type
+
+ work_item_type.id
+ end
+
def filter_params(issue)
super
@@ -84,7 +91,8 @@ module Issues
# @param object [Issue, Project]
def issue_type_allowed?(object)
- can?(current_user, :"create_#{params[:issue_type]}", object)
+ WorkItem::Type.base_types.key?(params[:issue_type]) &&
+ can?(current_user, :"create_#{params[:issue_type]}", object)
end
# @param issue [Issue]
diff --git a/app/services/issues/build_service.rb b/app/services/issues/build_service.rb
index 5cb138946d7..7fdc8daf15c 100644
--- a/app/services/issues/build_service.rb
+++ b/app/services/issues/build_service.rb
@@ -6,6 +6,7 @@ module Issues
def execute
filter_resolve_discussion_params
+
@issue = project.issues.new(issue_params).tap do |issue|
ensure_milestone_available(issue)
end
@@ -60,6 +61,13 @@ module Issues
def issue_params
@issue_params ||= build_issue_params
+
+ # If :issue_type is nil then params[:issue_type] was either nil
+ # or not permitted. Either way, the :issue_type will default
+ # to the column default of `issue`. And that means we need to
+ # ensure the work_item_type_id is set
+ @issue_params[:work_item_type_id] = get_work_item_type_id(@issue_params[:issue_type])
+ @issue_params
end
private
@@ -81,6 +89,11 @@ module Issues
{ author: current_user }
.merge(issue_params_with_info_from_discussions)
.merge(allowed_issue_params)
+ .with_indifferent_access
+ end
+
+ def get_work_item_type_id(issue_type = :issue)
+ find_work_item_type_id(issue_type)
end
end
end
diff --git a/app/services/issues/relative_position_rebalancing_service.rb b/app/services/issues/relative_position_rebalancing_service.rb
new file mode 100644
index 00000000000..7d199f99a24
--- /dev/null
+++ b/app/services/issues/relative_position_rebalancing_service.rb
@@ -0,0 +1,193 @@
+# frozen_string_literal: true
+
+module Issues
+ class RelativePositionRebalancingService
+ UPDATE_BATCH_SIZE = 100
+ PREFETCH_ISSUES_BATCH_SIZE = 10_000
+ SMALLEST_BATCH_SIZE = 5
+ RETRIES_LIMIT = 3
+
+ TooManyConcurrentRebalances = Class.new(StandardError)
+
+ def initialize(projects)
+ @projects_collection = (projects.is_a?(Array) ? Project.id_in(projects) : projects).projects_order_id_asc
+ @root_namespace = @projects_collection.take.root_namespace # rubocop:disable CodeReuse/ActiveRecord
+ @caching = ::Gitlab::Issues::Rebalancing::State.new(@root_namespace, @projects_collection)
+ end
+
+ def execute
+ return unless Feature.enabled?(:rebalance_issues, root_namespace)
+
+ # Because can_start_rebalance? and track_new_running_rebalance are not atomic,
+ # we can end up with more than Rebalancing::State::MAX_NUMBER_OF_CONCURRENT_REBALANCES rebalances running.
+ # Since the allowed Rebalancing::State::MAX_NUMBER_OF_CONCURRENT_REBALANCES is small we should be OK,
+ # but this is something to revisit if we want to scale this up.
+ error_message = "#{caching.concurrent_running_rebalances_count} concurrent re-balances currently running"
+ raise TooManyConcurrentRebalances, error_message unless caching.can_start_rebalance?
+
+ block_issue_repositioning! unless root_namespace.issue_repositioning_disabled?
+ caching.track_new_running_rebalance
+ index = caching.get_current_index
+
+ loop do
+ issue_ids = get_issue_ids(index, PREFETCH_ISSUES_BATCH_SIZE)
+ pairs_with_index = assign_indexes(issue_ids, index)
+
+ pairs_with_index.each_slice(UPDATE_BATCH_SIZE) do |pairs_batch|
+ update_positions_with_retry(pairs_batch, 're-balance issue positions in batches ordered by position')
+ end
+
+ index = caching.get_current_index
+
+ break if index >= caching.issue_count - 1
+ end
+
+ caching.cleanup_cache
+ unblock_issue_repositioning!
+ end
+
+ private
+
+ attr_reader :root_namespace, :projects_collection, :caching
+
+ def block_issue_repositioning!
+ Feature.enable(:block_issue_repositioning, root_namespace)
+ end
+
+ def unblock_issue_repositioning!
+ Feature.disable(:block_issue_repositioning, root_namespace)
+ end
+
+ def get_issue_ids(index, limit)
+ issue_ids = caching.get_cached_issue_ids(index, limit)
+
+ # if we have a list of cached issues and no current project id cached,
+ # then we successfully cached issues for all projects
+ return issue_ids if issue_ids.any? && caching.get_current_project_id.blank?
+
+ # if we got no issue ids at the start of re-balancing then we did not cache any issue ids yet
+ preload_issue_ids
+
+ caching.get_cached_issue_ids(index, limit)
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def preload_issue_ids
+ index = 0
+ cached_project_id = caching.get_current_project_id
+
+ collection = projects_collection
+ collection = projects_collection.where(Project.arel_table[:id].gteq(cached_project_id.to_i)) if cached_project_id.present?
+
+ collection.each do |project|
+ caching.cache_current_project_id(project.id)
+ index += 1
+ scope = Issue.in_projects(project).reorder(custom_reorder).select(:id, :relative_position)
+
+ with_retry(PREFETCH_ISSUES_BATCH_SIZE, 100) do |batch_size|
+ Gitlab::Pagination::Keyset::Iterator.new(scope: scope).each_batch(of: batch_size) do |batch|
+ caching.cache_issue_ids(batch)
+ end
+ end
+ end
+
+ caching.remove_current_project_id_cache
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def assign_indexes(ids, start_index)
+ ids.each_with_index.map do |id, idx|
+ [id, start_index + idx]
+ end
+ end
+
+ # The method runs in a loop where we try, up to RETRIES_LIMIT=3 times, to run the update statement on
+ # a number of records (the batch size). The method receives an array of (id, value) pairs that is used
+ # to build the update query, matching by id and setting relative_position = value. If we get a statement
+ # timeout, we halve the batch size and try again (3 more times) to update a smaller number of records.
+ # On success, because we know the batch size and we always pick from the beginning of the array param,
+ # we can drop the first batch_size items from the array and keep the successful batch_size for the next batches.
+ # On failures we keep halving the batch size down to the SMALLEST_BATCH_SIZE limit, which is currently 5.
+ #
+ # e.g.
+ #    items | previous batch size | new batch size | comment
+ # 1. 100   | 100                 | 100            | 3 failures -> split the batch size in half
+ # 2. 100   | 100                 | 50             | 3 failures -> split the batch size in half again
+ # 3. 100   | 50                  | 25             | 3 succeed -> so we drop 25 items 3 times, 4th fails -> split the batch size in half again
+ # 4. 25    | 25                  | 12             | 3 failures -> split the batch size in half
+ # 5. 25    | 12                  | 6              | 3 failures -> we exit because the smallest batch size is 5 and we'd be at 3 if we split again
+
+ def update_positions_with_retry(pairs_with_index, query_name)
+ retry_batch_size = pairs_with_index.size
+
+ until pairs_with_index.empty?
+ with_retry(retry_batch_size, SMALLEST_BATCH_SIZE) do |batch_size|
+ retry_batch_size = batch_size
+ update_positions(pairs_with_index.first(batch_size), query_name)
+ # pairs_with_index[batch_size - 1] can be nil for the last batch
+ # if the last batch is smaller than batch_size, so we fall back to the last pair.
+ last_pair_in_batch = pairs_with_index[batch_size - 1] || pairs_with_index.last
+ caching.cache_current_index(last_pair_in_batch.last + 1)
+ pairs_with_index = pairs_with_index.drop(batch_size)
+ end
+ end
+ end
+
+ def update_positions(pairs_with_position, query_name)
+ values = pairs_with_position.map do |id, index|
+ "(#{id}, #{start_position + (index * gap_size)})"
+ end.join(', ')
+
+ run_update_query(values, query_name)
+ end
+
+ def run_update_query(values, query_name)
+ Issue.connection.exec_query(<<~SQL, query_name)
+ WITH cte(cte_id, new_pos) AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (
+ SELECT *
+ FROM (VALUES #{values}) as t (id, pos)
+ )
+ UPDATE #{Issue.table_name}
+ SET relative_position = cte.new_pos
+ FROM cte
+ WHERE cte_id = id
+ SQL
+ end
+
+ def gaps
+ caching.issue_count - 1
+ end
+
+ def gap_size
+ RelativePositioning::MAX_GAP
+ end
+
+ def start_position
+ @start_position ||= (RelativePositioning::START_POSITION - (gaps / 2) * gap_size).to_i
+ end
+
+ def custom_reorder
+ ::Gitlab::Pagination::Keyset::Order.build([Issue.column_order_relative_position, Issue.column_order_id_asc])
+ end
+
+ def with_retry(initial_batch_size, exit_batch_size)
+ retries = 0
+ batch_size = initial_batch_size
+
+ begin
+ yield batch_size
+ retries = 0
+ rescue ActiveRecord::StatementTimeout, ActiveRecord::QueryCanceled => ex
+ raise ex if batch_size < exit_batch_size
+
+ if (retries += 1) == RETRIES_LIMIT
+ # halve the batch size when the RETRIES_LIMIT is reached and the update still fails, perhaps because the batch size is still too big
+ batch_size = (batch_size / 2).to_i
+ retries = 0
+ end
+
+ retry
+ end
+ end
+ end
+end
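
The with_retry / update_positions_with_retry pair above implements the batch-halving strategy described in the long comment: attempt a batch, halve the batch size after RETRIES_LIMIT consecutive timeouts, give up once it would fall below SMALLEST_BATCH_SIZE, and on success drop the processed pairs from the front of the array and reuse the batch size that worked. A stand-alone sketch of the same control flow, where FakeTimeout and update_batch! are stand-ins for the statement timeout and the real UPDATE:

    RETRIES_LIMIT = 3
    SMALLEST_BATCH_SIZE = 5
    FakeTimeout = Class.new(StandardError)

    # Pretend the statement times out whenever the batch is larger than 25 rows;
    # in the real service this is ActiveRecord::StatementTimeout / QueryCanceled.
    def update_batch!(pairs)
      raise FakeTimeout, "batch of #{pairs.size} is too big" if pairs.size > 25
      # the real service runs a single UPDATE ... FROM (VALUES ...) query here
    end

    def with_retry(initial_batch_size, exit_batch_size)
      retries = 0
      batch_size = initial_batch_size

      begin
        yield batch_size
      rescue FakeTimeout => ex
        raise ex if batch_size < exit_batch_size

        if (retries += 1) == RETRIES_LIMIT
          batch_size = batch_size / 2 # halve the batch once the retry budget is spent
          retries = 0
        end

        retry
      end
    end

    pairs = (1..60).map { |id| [id, id * 10] } # fake (id, new_position) pairs
    retry_batch_size = pairs.size

    until pairs.empty?
      with_retry(retry_batch_size, SMALLEST_BATCH_SIZE) do |batch_size|
        retry_batch_size = batch_size
        update_batch!(pairs.first(batch_size))
        pairs = pairs.drop(batch_size)
      end
    end

    puts "updated everything; settled batch size: #{retry_batch_size}" # 15
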
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index 9ede5ef728b..d120b007af2 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -26,6 +26,8 @@ module Issues
end
def before_update(issue, skip_spam_check: false)
+ change_work_item_type(issue)
+
return if skip_spam_check
Spam::SpamActionService.new(
@@ -36,6 +38,14 @@ module Issues
).execute
end
+ def change_work_item_type(issue)
+ return unless issue.changed_attributes['issue_type']
+
+ type_id = find_work_item_type_id(issue.issue_type)
+
+ issue.work_item_type_id = type_id
+ end
+
def handle_changes(issue, options)
super
old_associations = options.fetch(:old_associations, {})
@@ -54,29 +64,12 @@ module Issues
end
handle_assignee_changes(issue, old_assignees)
-
- if issue.previous_changes.include?('confidential')
- # don't enqueue immediately to prevent todos removal in case of a mistake
- TodosDestroyer::ConfidentialIssueWorker.perform_in(Todo::WAIT_FOR_DELETE, issue.id) if issue.confidential?
- create_confidentiality_note(issue)
- track_usage_event(:incident_management_incident_change_confidential, current_user.id)
- end
-
- added_labels = issue.labels - old_labels
-
- if added_labels.present?
- notification_service.async.relabeled_issue(issue, added_labels, current_user)
- end
-
+ handle_confidential_change(issue)
+ handle_added_labels(issue, old_labels)
handle_milestone_change(issue)
-
- added_mentions = issue.mentioned_users(current_user) - old_mentioned_users
-
- if added_mentions.present?
- notification_service.async.new_mentions_in_issue(issue, added_mentions, current_user)
- end
-
+ handle_added_mentions(issue, old_mentioned_users)
handle_severity_change(issue, old_severity)
+ handle_issue_type_change(issue)
end
def handle_assignee_changes(issue, old_assignees)
@@ -156,6 +149,23 @@ module Issues
MergeRequests::CreateFromIssueService.new(project: project, current_user: current_user, mr_params: create_merge_request_params).execute
end
+ def handle_confidential_change(issue)
+ if issue.previous_changes.include?('confidential')
+ # don't enqueue immediately to prevent todos removal in case of a mistake
+ TodosDestroyer::ConfidentialIssueWorker.perform_in(Todo::WAIT_FOR_DELETE, issue.id) if issue.confidential?
+ create_confidentiality_note(issue)
+ track_usage_event(:incident_management_incident_change_confidential, current_user.id)
+ end
+ end
+
+ def handle_added_labels(issue, old_labels)
+ added_labels = issue.labels - old_labels
+
+ if added_labels.present?
+ notification_service.async.relabeled_issue(issue, added_labels, current_user)
+ end
+ end
+
def handle_milestone_change(issue)
return unless issue.previous_changes.include?('milestone_id')
@@ -184,6 +194,14 @@ module Issues
end
end
+ def handle_added_mentions(issue, old_mentioned_users)
+ added_mentions = issue.mentioned_users(current_user) - old_mentioned_users
+
+ if added_mentions.present?
+ notification_service.async.new_mentions_in_issue(issue, added_mentions, current_user)
+ end
+ end
+
def handle_severity_change(issue, old_severity)
return unless old_severity && issue.severity != old_severity
@@ -218,6 +236,16 @@ module Issues
def remove_incident_label?(issue)
issue.issue_type != params[:issue_type] && issue.incident?
end
+
+ def handle_issue_type_change(issue)
+ return unless issue.previous_changes.include?('issue_type')
+
+ do_handle_issue_type_change(issue)
+ end
+
+ def do_handle_issue_type_change(issue)
+ SystemNoteService.change_issue_type(issue, current_user)
+ end
end
end
diff --git a/app/services/members/create_service.rb b/app/services/members/create_service.rb
index 3e809b11024..0cc62e661a3 100644
--- a/app/services/members/create_service.rb
+++ b/app/services/members/create_service.rb
@@ -4,9 +4,12 @@ module Members
class CreateService < Members::BaseService
BlankInvitesError = Class.new(StandardError)
TooManyInvitesError = Class.new(StandardError)
+ MembershipLockedError = Class.new(StandardError)
DEFAULT_INVITE_LIMIT = 100
+ attr_reader :membership_locked
+
def initialize(*args)
super
@@ -17,18 +20,22 @@ module Members
def execute
validate_invite_source!
- validate_invites!
+ validate_invitable!
add_members
enqueue_onboarding_progress_action
result
- rescue BlankInvitesError, TooManyInvitesError => e
+ rescue BlankInvitesError, TooManyInvitesError, MembershipLockedError => e
error(e.message)
end
+ def single_member
+ members.last
+ end
+
private
- attr_reader :source, :errors, :invites, :member_created_namespace_id
+ attr_reader :source, :errors, :invites, :member_created_namespace_id, :members
def invites_from_params
params[:user_ids]
@@ -38,7 +45,7 @@ module Members
raise ArgumentError, s_('AddMember|No invite source provided.') unless invite_source.present?
end
- def validate_invites!
+ def validate_invitable!
raise BlankInvitesError, blank_invites_message if invites.blank?
return unless user_limit && invites.size > user_limit
@@ -52,7 +59,7 @@ module Members
end
def add_members
- members = source.add_users(
+ @members = source.add_users(
invites,
params[:access_level],
expires_at: params[:expires_at],
diff --git a/app/services/members/creator_service.rb b/app/services/members/creator_service.rb
index f6972f81162..7b0bebff760 100644
--- a/app/services/members/creator_service.rb
+++ b/app/services/members/creator_service.rb
@@ -12,54 +12,6 @@ module Members
def access_levels
raise NotImplementedError
end
-
- def add_users(source, users, access_level, current_user: nil, expires_at: nil)
- return [] unless users.present?
-
- emails, users, existing_members = parse_users_list(source, users)
-
- Member.transaction do
- (emails + users).map! do |user|
- new(source,
- user,
- access_level,
- existing_members: existing_members,
- current_user: current_user,
- expires_at: expires_at)
- .execute
- end
- end
- end
-
- private
-
- def parse_users_list(source, list)
- emails = []
- user_ids = []
- users = []
- existing_members = {}
-
- list.each do |item|
- case item
- when User
- users << item
- when Integer
- user_ids << item
- when /\A\d+\Z/
- user_ids << item.to_i
- when Devise.email_regexp
- emails << item
- end
- end
-
- if user_ids.present?
- users.concat(User.id_in(user_ids))
- # the below will automatically discard invalid user_ids
- existing_members = source.members_and_requesters.where(user_id: user_ids).index_by(&:user_id) # rubocop:todo CodeReuse/ActiveRecord
- end
-
- [emails, users, existing_members]
- end
end
def initialize(source, user, access_level, **args)
@@ -149,18 +101,10 @@ module Members
end
def find_or_initialize_member_by_user
- if existing_members
- # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/334062
- # i'm not so sure this is needed as the parse_users_list looks at members_and_requesters...
- # so it is like we could just do a find or initialize by here and be fine
- existing_members[user.id] || source.members.build(user_id: user.id)
- else
- source.members_and_requesters.find_or_initialize_by(user_id: user.id) # rubocop:todo CodeReuse/ActiveRecord
- end
- end
-
- def existing_members
- args[:existing_members]
+ # We have to use members_and_requesters here because the project/group members association is limited to
+ # records where requested_at is nil, so an existing access request would not be found in `source.members`.
+ # This of course does not treat active invites the same way, since we are not searching by email.
+ source.members_and_requesters.find_or_initialize_by(user_id: user.id) # rubocop:disable CodeReuse/ActiveRecord
end
def ldap
diff --git a/app/services/members/groups/bulk_creator_service.rb b/app/services/members/groups/bulk_creator_service.rb
new file mode 100644
index 00000000000..57cec241584
--- /dev/null
+++ b/app/services/members/groups/bulk_creator_service.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module Members
+ module Groups
+ class BulkCreatorService < Members::Groups::CreatorService
+ include Members::BulkCreateUsers
+ end
+ end
+end
diff --git a/app/services/members/invite_service.rb b/app/services/members/invite_service.rb
index 6298943977b..257a986b8dd 100644
--- a/app/services/members/invite_service.rb
+++ b/app/services/members/invite_service.rb
@@ -18,7 +18,7 @@ module Members
params[:email]
end
- def validate_invites!
+ def validate_invitable!
super
# we need the below due to add_users hitting Members::CreatorService.parse_users_list and ignoring invalid emails
diff --git a/app/services/members/mailgun.rb b/app/services/members/mailgun.rb
new file mode 100644
index 00000000000..43fb5a14ef1
--- /dev/null
+++ b/app/services/members/mailgun.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+module Members
+ module Mailgun
+ INVITE_EMAIL_TAG = 'invite_email'
+ INVITE_EMAIL_TOKEN_KEY = :invite_token
+ end
+end
diff --git a/app/services/members/mailgun/process_webhook_service.rb b/app/services/members/mailgun/process_webhook_service.rb
new file mode 100644
index 00000000000..e359a83ad42
--- /dev/null
+++ b/app/services/members/mailgun/process_webhook_service.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module Members
+ module Mailgun
+ class ProcessWebhookService
+ ProcessWebhookServiceError = Class.new(StandardError)
+
+ def initialize(payload)
+ @payload = payload
+ end
+
+ def execute
+ @member = Member.find_by_invite_token(invite_token)
+ update_member_and_log if member
+ rescue ProcessWebhookServiceError => e
+ Gitlab::ErrorTracking.track_exception(e)
+ end
+
+ private
+
+ attr_reader :payload, :member
+
+ def update_member_and_log
+ log_update_event if member.update(invite_email_success: false)
+ end
+
+ def log_update_event
+ Gitlab::AppLogger.info "UPDATED MEMBER INVITE_EMAIL_SUCCESS: member_id: #{member.id}"
+ end
+
+ def invite_token
+ # may want to validate schema in some way using ::JSONSchemer.schema(SCHEMA_PATH).valid?(message) if this
+ # gets more complex
+ payload.dig('user-variables', ::Members::Mailgun::INVITE_EMAIL_TOKEN_KEY) ||
+ raise(ProcessWebhookServiceError, "Failed to receive #{::Members::Mailgun::INVITE_EMAIL_TOKEN_KEY} in user-variables: #{payload}")
+ end
+ end
+ end
+end
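
invite_token above either digs the token out of the webhook payload's user-variables or raises, and execute turns that raise into an error-tracking event instead of letting it bubble up. A tiny sketch of the dig-or-raise pattern with a plain Hash payload; the string keys and the PayloadError class are simplifications for the example:

    PayloadError = Class.new(StandardError)

    def invite_token_from(payload)
      payload.dig('user-variables', 'invite_token') ||
        raise(PayloadError, "Failed to receive invite_token in user-variables: #{payload}")
    end

    puts invite_token_from('user-variables' => { 'invite_token' => 'abc123' }) # abc123

    begin
      invite_token_from('user-variables' => {})
    rescue PayloadError => e
      warn e.message # the real service reports this via Gitlab::ErrorTracking instead
    end
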
diff --git a/app/services/members/projects/bulk_creator_service.rb b/app/services/members/projects/bulk_creator_service.rb
new file mode 100644
index 00000000000..68e71e35d12
--- /dev/null
+++ b/app/services/members/projects/bulk_creator_service.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module Members
+ module Projects
+ class BulkCreatorService < Members::Projects::CreatorService
+ include Members::BulkCreateUsers
+ end
+ end
+end
diff --git a/app/services/merge_requests/merge_to_ref_service.rb b/app/services/merge_requests/merge_to_ref_service.rb
index eda652c4b9a..8519cbac3cb 100644
--- a/app/services/merge_requests/merge_to_ref_service.rb
+++ b/app/services/merge_requests/merge_to_ref_service.rb
@@ -13,12 +13,12 @@ module MergeRequests
class MergeToRefService < MergeRequests::MergeBaseService
extend ::Gitlab::Utils::Override
- def execute(merge_request)
+ def execute(merge_request, cache_merge_to_ref_calls = false)
@merge_request = merge_request
error_check!
- commit_id = commit
+ commit_id = commit(cache_merge_to_ref_calls)
raise_error('Conflicts detected during merge') unless commit_id
@@ -65,8 +65,8 @@ module MergeRequests
params[:allow_conflicts] || false
end
- def commit
- if Feature.enabled?(:cache_merge_to_ref_calls, project, default_enabled: false)
+ def commit(cache_merge_to_ref_calls = false)
+ if cache_merge_to_ref_calls
Rails.cache.fetch(cache_key, expires_in: 1.day) do
extracted_merge_to_ref
end
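
The change above replaces the cache_merge_to_ref_calls feature flag with an explicit argument, so only callers that opt in (the mergeability check below) get the one-day cached merge-to-ref commit. A small sketch of the conditional cache-fetch pattern, using an in-memory ActiveSupport cache store as a stand-in for Rails.cache and a hypothetical compute_merge step:

    require 'active_support'
    require 'active_support/cache'

    CACHE = ActiveSupport::Cache::MemoryStore.new

    def compute_merge(key)
      puts "computing merge for #{key}" # stands in for the expensive merge-to-ref call
      "merge-commit-sha-for-#{key}"
    end

    def merge_commit(key, cache_merge_to_ref_calls: false)
      if cache_merge_to_ref_calls
        CACHE.fetch(key, expires_in: 86_400) { compute_merge(key) } # 1 day, in seconds
      else
        compute_merge(key)
      end
    end

    merge_commit('mr-1', cache_merge_to_ref_calls: true) # computes and caches
    merge_commit('mr-1', cache_merge_to_ref_calls: true) # served from the cache
    merge_commit('mr-1')                                 # always recomputes
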
diff --git a/app/services/merge_requests/mergeability_check_service.rb b/app/services/merge_requests/mergeability_check_service.rb
index 3e294aeaa07..c3498c5ce97 100644
--- a/app/services/merge_requests/mergeability_check_service.rb
+++ b/app/services/merge_requests/mergeability_check_service.rb
@@ -157,7 +157,9 @@ module MergeRequests
def merge_to_ref
params = { allow_conflicts: Feature.enabled?(:display_merge_conflicts_in_diff, project) }
- result = MergeRequests::MergeToRefService.new(project: project, current_user: merge_request.author, params: params).execute(merge_request)
+ result = MergeRequests::MergeToRefService
+ .new(project: project, current_user: merge_request.author, params: params)
+ .execute(merge_request, true)
result[:status] == :success
end
diff --git a/app/services/merge_requests/squash_service.rb b/app/services/merge_requests/squash_service.rb
index 31c49b3ae70..102f78c6a9b 100644
--- a/app/services/merge_requests/squash_service.rb
+++ b/app/services/merge_requests/squash_service.rb
@@ -2,8 +2,6 @@
module MergeRequests
class SquashService < MergeRequests::BaseService
- SquashInProgressError = Class.new(RuntimeError)
-
def execute
# If performing a squash would result in no change, then
# immediately return a success message without performing a squash
@@ -13,14 +11,7 @@ module MergeRequests
return error(s_('MergeRequests|This project does not allow squashing commits when merge requests are accepted.')) if squash_forbidden?
- if squash_in_progress?
- return error(s_('MergeRequests|Squash task canceled: another squash is already in progress.'))
- end
-
squash! || error(s_('MergeRequests|Failed to squash. Should be done manually.'))
-
- rescue SquashInProgressError
- error(s_('MergeRequests|An error occurred while checking whether another squash is in progress.'))
end
private
@@ -35,14 +26,6 @@ module MergeRequests
false
end
- def squash_in_progress?
- merge_request.squash_in_progress?
- rescue StandardError => e
- log_error(exception: e, message: 'Failed to check squash in progress')
-
- raise SquashInProgressError, e.message
- end
-
def squash_forbidden?
target_project.squash_never?
end
diff --git a/app/services/packages/composer/version_parser_service.rb b/app/services/packages/composer/version_parser_service.rb
index 36275d1b680..516c306d2a5 100644
--- a/app/services/packages/composer/version_parser_service.rb
+++ b/app/services/packages/composer/version_parser_service.rb
@@ -12,18 +12,19 @@ module Packages
if @tag_name.present?
@tag_name.delete_prefix('v')
elsif @branch_name.present?
- branch_sufix_or_prefix(@branch_name.match(Gitlab::Regex.composer_package_version_regex))
+ branch_suffix_or_prefix(@branch_name.match(Gitlab::Regex.composer_package_version_regex))
end
end
private
- def branch_sufix_or_prefix(match)
+ def branch_suffix_or_prefix(match)
if match
- if match.captures[1] == '.x'
- match.captures[0] + '-dev'
+ captures = match.captures.reject(&:blank?)
+ if captures[-1] == '.x'
+ captures[0] + '-dev'
else
- match.captures[0] + '.x-dev'
+ captures[0] + '.x-dev'
end
else
"dev-#{@branch_name}"
diff --git a/app/services/packages/create_package_service.rb b/app/services/packages/create_package_service.rb
index 3dc06497d9f..7e1b6ecbe51 100644
--- a/app/services/packages/create_package_service.rb
+++ b/app/services/packages/create_package_service.rb
@@ -5,15 +5,19 @@ module Packages
protected
def find_or_create_package!(package_type, name: params[:name], version: params[:version])
+ # safe_find_or_create_by! was originally called here.
+ # We merely switched to `find_or_create_by!`
+ # rubocop: disable CodeReuse/ActiveRecord
project
.packages
.with_package_type(package_type)
- .safe_find_or_create_by!(name: name, version: version) do |package|
+ .find_or_create_by!(name: name, version: version) do |package|
package.status = params[:status] if params[:status]
package.creator = package_creator
add_build_info(package)
end
+ # rubocop: enable CodeReuse/ActiveRecord
end
def create_package!(package_type, attrs = {})
diff --git a/app/services/packages/generic/create_package_file_service.rb b/app/services/packages/generic/create_package_file_service.rb
index 42a191fb415..78c97000654 100644
--- a/app/services/packages/generic/create_package_file_service.rb
+++ b/app/services/packages/generic/create_package_file_service.rb
@@ -29,7 +29,8 @@ module Packages
package.update_column(:status, params[:status]) if params[:status] && params[:status] != package.status
- package.build_infos.safe_find_or_create_by!(pipeline: params[:build].pipeline) if params[:build].present?
+ package.create_build_infos!(params[:build])
+
package
end
diff --git a/app/services/packages/maven/find_or_create_package_service.rb b/app/services/packages/maven/find_or_create_package_service.rb
index c7ffd468864..b29adf4e11a 100644
--- a/app/services/packages/maven/find_or_create_package_service.rb
+++ b/app/services/packages/maven/find_or_create_package_service.rb
@@ -51,7 +51,7 @@ module Packages
.execute
end
- package.build_infos.safe_find_or_create_by!(pipeline: params[:build].pipeline) if params[:build].present?
+ package.create_build_infos!(params[:build])
ServiceResponse.success(payload: { package: package })
end
diff --git a/app/services/packages/nuget/update_package_from_metadata_service.rb b/app/services/packages/nuget/update_package_from_metadata_service.rb
index 6ffe4f097f4..d1e47ad00a1 100644
--- a/app/services/packages/nuget/update_package_from_metadata_service.rb
+++ b/app/services/packages/nuget/update_package_from_metadata_service.rb
@@ -21,11 +21,7 @@ module Packages
try_obtain_lease do
@package_file.transaction do
- if use_new_package_file_updater?
- new_execute
- else
- legacy_execute
- end
+ process_package_update
end
end
rescue ActiveRecord::RecordInvalid => e
@@ -34,7 +30,7 @@ module Packages
private
- def new_execute
+ def process_package_update
package_to_destroy = nil
target_package = @package_file.package
@@ -50,36 +46,11 @@ module Packages
end
update_package(target_package)
-
::Packages::UpdatePackageFileService.new(@package_file, package_id: target_package.id, file_name: package_filename)
.execute
-
package_to_destroy&.destroy!
end
- def legacy_execute
- if existing_package
- package = link_to_existing_package
- elsif symbol_package?
- raise InvalidMetadataError, 'symbol package is invalid, matching package does not exist'
- else
- package = update_linked_package
- end
-
- update_package(package)
-
- # Updating file_name updates the path where the file is stored.
- # We must pass the file again so that CarrierWave can handle the update
- @package_file.update!(
- file_name: package_filename,
- file: @package_file.file
- )
- end
-
- def use_new_package_file_updater?
- ::Feature.enabled?(:packages_nuget_new_package_file_updater, @package_file.project, default_enabled: :yaml)
- end
-
def update_package(package)
return if symbol_package?
diff --git a/app/services/pages/delete_service.rb b/app/services/pages/delete_service.rb
index f7d3d70aad6..8d33e6c1000 100644
--- a/app/services/pages/delete_service.rb
+++ b/app/services/pages/delete_service.rb
@@ -12,9 +12,6 @@ module Pages
PagesDomain.for_project(project).delete_all
DestroyPagesDeploymentsWorker.perform_async(project.id)
-
- # TODO: remove this call https://gitlab.com/gitlab-org/gitlab/-/issues/320775
- PagesRemoveWorker.perform_async(project.id) if ::Settings.pages.local_store.enabled
end
end
end
diff --git a/app/services/pages/legacy_storage_lease.rb b/app/services/pages/legacy_storage_lease.rb
deleted file mode 100644
index 1849def0183..00000000000
--- a/app/services/pages/legacy_storage_lease.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-module Pages
- module LegacyStorageLease
- extend ActiveSupport::Concern
-
- include ::ExclusiveLeaseGuard
-
- LEASE_TIMEOUT = 1.hour
-
- def lease_key
- "pages_legacy_storage:#{project.id}"
- end
-
- def lease_timeout
- LEASE_TIMEOUT
- end
- end
-end
diff --git a/app/services/pages/migrate_legacy_storage_to_deployment_service.rb b/app/services/pages/migrate_legacy_storage_to_deployment_service.rb
index 95c7107eb62..9c1671fbc15 100644
--- a/app/services/pages/migrate_legacy_storage_to_deployment_service.rb
+++ b/app/services/pages/migrate_legacy_storage_to_deployment_service.rb
@@ -2,10 +2,7 @@
module Pages
class MigrateLegacyStorageToDeploymentService
- ExclusiveLeaseTakenError = Class.new(StandardError)
-
include BaseServiceUtility
- include ::Pages::LegacyStorageLease
attr_reader :project
@@ -16,18 +13,6 @@ module Pages
end
def execute
- result = try_obtain_lease do
- execute_unsafe
- end
-
- raise ExclusiveLeaseTakenError, "Can't migrate pages for project #{project.id}: exclusive lease taken" if result.nil?
-
- result
- end
-
- private
-
- def execute_unsafe
zip_result = ::Pages::ZipDirectoryService.new(project.pages_path, ignore_invalid_entries: @ignore_invalid_entries).execute
if zip_result[:status] == :error
diff --git a/app/services/projects/batch_count_service.rb b/app/services/projects/batch_count_service.rb
index aec3b32da89..455c7211ab2 100644
--- a/app/services/projects/batch_count_service.rb
+++ b/app/services/projects/batch_count_service.rb
@@ -9,13 +9,19 @@ module Projects
@projects = projects
end
- def refresh_cache
- @projects.each do |project|
- service = count_service.new(project)
- unless service.count_stored?
- service.refresh_cache { global_count[project.id].to_i }
+ def refresh_cache_and_retrieve_data
+ count_services = @projects.map { |project| count_service.new(project) }
+ services_by_cache_key = count_services.index_by(&:cache_key)
+
+ results = Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ Rails.cache.fetch_multi(*services_by_cache_key.keys) do |key|
+ service = services_by_cache_key[key]
+
+ global_count[service.project.id].to_i
end
end
+
+ results.transform_keys! { |cache_key| services_by_cache_key[cache_key].project }
end
def project_ids
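
refresh_cache_and_retrieve_data above fetches all per-project counts with one fetch_multi call keyed by each count service's cache key, computes only the missing entries from global_count, and then maps the result keys back to projects. A stand-alone sketch of that fetch_multi / index_by / transform_keys! pattern, where CountService and global_counts are hypothetical stand-ins:

    require 'active_support'
    require 'active_support/cache'
    require 'active_support/core_ext/enumerable' # provides index_by

    CACHE = ActiveSupport::Cache::MemoryStore.new

    # Hypothetical stand-in for a per-project count service: the pattern only
    # needs a cache_key and a reference back to its project.
    CountService = Struct.new(:project, keyword_init: true) do
      def cache_key
        "forks_count/#{project}"
      end
    end

    global_counts = { 'proj-a' => 3, 'proj-b' => 7 } # stand-in for the batched SQL count

    services_by_cache_key = global_counts.keys
      .map { |project| CountService.new(project: project) }
      .index_by(&:cache_key)

    results = CACHE.fetch_multi(*services_by_cache_key.keys) do |key|
      # the block runs only for keys that are not cached yet
      global_counts[services_by_cache_key[key].project].to_i
    end

    results.transform_keys! { |key| services_by_cache_key[key].project }
    p results # {"proj-a"=>3, "proj-b"=>7}
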
diff --git a/app/services/projects/batch_forks_count_service.rb b/app/services/projects/batch_forks_count_service.rb
index d12772b40ff..6467744a435 100644
--- a/app/services/projects/batch_forks_count_service.rb
+++ b/app/services/projects/batch_forks_count_service.rb
@@ -5,21 +5,6 @@
# because the service use maps to retrieve the project ids
module Projects
class BatchForksCountService < Projects::BatchCountService
- def refresh_cache_and_retrieve_data
- count_services = @projects.map { |project| count_service.new(project) }
-
- values = Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- Rails.cache.fetch_multi(*(count_services.map { |ser| ser.cache_key } )) { |key| nil }
- end
-
- results_per_service = Hash[count_services.zip(values.values)]
- projects_to_refresh = results_per_service.select { |_k, value| value.nil? }
- projects_to_refresh = recreate_cache(projects_to_refresh)
-
- results_per_service.update(projects_to_refresh)
- results_per_service.transform_keys { |k| k.project }
- end
-
# rubocop: disable CodeReuse/ActiveRecord
def global_count
@global_count ||= begin
@@ -33,13 +18,5 @@ module Projects
def count_service
::Projects::ForksCountService
end
-
- def recreate_cache(projects_to_refresh)
- projects_to_refresh.each_with_object({}) do |(service, _v), hash|
- count = global_count[service.project.id].to_i
- service.refresh_cache { count }
- hash[service] = count
- end
- end
end
end
diff --git a/app/services/projects/count_service.rb b/app/services/projects/count_service.rb
index 3cee80c7bbc..5daea3a2600 100644
--- a/app/services/projects/count_service.rb
+++ b/app/services/projects/count_service.rb
@@ -9,6 +9,8 @@ module Projects
# all caches.
VERSION = 1
+ attr_reader :project
+
def initialize(project)
@project = project
end
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index 302c047a65f..e717491b19d 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -12,6 +12,7 @@ module Projects
@import_data = @params.delete(:import_data)
@relations_block = @params.delete(:relations_block)
@default_branch = @params.delete(:default_branch)
+ @readme_template = @params.delete(:readme_template)
build_topics
end
@@ -89,10 +90,14 @@ module Projects
def after_create_actions
log_info("#{@project.owner.name} created a new project \"#{@project.full_name}\"")
- # Skip writing the config for project imports/forks because it
- # will always fail since the Git directory doesn't exist until
- # a background job creates it (see Project#add_import_job).
- @project.set_full_path unless @project.import?
+ if @project.import?
+ experiment(:combined_registration, user: current_user).track(:import_project)
+ else
+ # Skip writing the config for project imports/forks because it
+ # will always fail since the Git directory doesn't exist until
+ # a background job creates it (see Project#add_import_job).
+ @project.set_full_path
+ end
unless @project.gitlab_project_import?
@project.create_wiki unless skip_wiki?
@@ -149,12 +154,16 @@ module Projects
branch_name: @default_branch.presence || @project.default_branch_or_main,
commit_message: 'Initial commit',
file_path: 'README.md',
- file_content: experiment(:new_project_readme_content, namespace: @project.namespace).run_with(@project)
+ file_content: readme_content
}
Files::CreateService.new(@project, current_user, commit_attrs).execute
end
+ def readme_content
+ @readme_template.presence || experiment(:new_project_readme_content, namespace: @project.namespace).run_with(@project)
+ end
+
def skip_wiki?
!@project.feature_available?(:wiki, current_user) || @skip_wiki
end
diff --git a/app/services/projects/fork_service.rb b/app/services/projects/fork_service.rb
index 3cee1b5975a..74b7d18f401 100644
--- a/app/services/projects/fork_service.rb
+++ b/app/services/projects/fork_service.rb
@@ -61,7 +61,8 @@ module Projects
# initializing the project, as that would cause a foreign key constraint
# exception.
relations_block: -> (project) { build_fork_network_member(project) },
- skip_disk_validation: skip_disk_validation
+ skip_disk_validation: skip_disk_validation,
+ external_authorization_classification_label: @project.external_authorization_classification_label
}
if @project.avatar.present? && @project.avatar.image?
diff --git a/app/services/projects/forks_count_service.rb b/app/services/projects/forks_count_service.rb
index 848d8d54104..ca85e2dc281 100644
--- a/app/services/projects/forks_count_service.rb
+++ b/app/services/projects/forks_count_service.rb
@@ -3,8 +3,6 @@
module Projects
# Service class for getting and caching the number of forks of a project.
class ForksCountService < Projects::CountService
- attr_reader :project
-
def cache_key_name
'forks_count'
end
diff --git a/app/services/projects/group_links/destroy_service.rb b/app/services/projects/group_links/destroy_service.rb
index 01a5b617b46..19df0dc2c73 100644
--- a/app/services/projects/group_links/destroy_service.rb
+++ b/app/services/projects/group_links/destroy_service.rb
@@ -13,19 +13,15 @@ module Projects
end
group_link.destroy.tap do |link|
- if Feature.enabled?(:use_specialized_worker_for_project_auth_recalculation)
- refresh_project_authorizations_asynchronously(link.project)
+ refresh_project_authorizations_asynchronously(link.project)
- # Until we compare the inconsistency rates of the new specialized worker and
- # the old approach, we still run AuthorizedProjectsWorker
- # but with some delay and lower urgency as a safety net.
- link.group.refresh_members_authorized_projects(
- blocking: false,
- priority: UserProjectAccessChangedService::LOW_PRIORITY
- )
- else
- link.group.refresh_members_authorized_projects
- end
+ # Until we compare the inconsistency rates of the new specialized worker and
+ # the old approach, we still run AuthorizedProjectsWorker
+ # but with some delay and lower urgency as a safety net.
+ link.group.refresh_members_authorized_projects(
+ blocking: false,
+ priority: UserProjectAccessChangedService::LOW_PRIORITY
+ )
end
end
diff --git a/app/services/projects/move_deploy_keys_projects_service.rb b/app/services/projects/move_deploy_keys_projects_service.rb
index 51d84af249e..98ba5eb3f13 100644
--- a/app/services/projects/move_deploy_keys_projects_service.rb
+++ b/app/services/projects/move_deploy_keys_projects_service.rb
@@ -5,7 +5,7 @@ module Projects
def execute(source_project, remove_remaining_elements: true)
return unless super
- Project.transaction(requires_new: true) do
+ Project.transaction do
move_deploy_keys_projects
remove_remaining_deploy_keys_projects if remove_remaining_elements
diff --git a/app/services/projects/move_forks_service.rb b/app/services/projects/move_forks_service.rb
index 33f0bab12c9..a96cf4dd3ea 100644
--- a/app/services/projects/move_forks_service.rb
+++ b/app/services/projects/move_forks_service.rb
@@ -5,7 +5,7 @@ module Projects
def execute(source_project, remove_remaining_elements: true)
return unless super && source_project.fork_network
- Project.transaction(requires_new: true) do
+ Project.transaction do
move_fork_network_members
update_root_project
refresh_forks_count
diff --git a/app/services/projects/move_lfs_objects_projects_service.rb b/app/services/projects/move_lfs_objects_projects_service.rb
index 57a8d3d69c6..7107ecc6c95 100644
--- a/app/services/projects/move_lfs_objects_projects_service.rb
+++ b/app/services/projects/move_lfs_objects_projects_service.rb
@@ -5,7 +5,7 @@ module Projects
def execute(source_project, remove_remaining_elements: true)
return unless super
- Project.transaction(requires_new: true) do
+ Project.transaction do
move_lfs_objects_projects
remove_remaining_lfs_objects_project if remove_remaining_elements
diff --git a/app/services/projects/move_notification_settings_service.rb b/app/services/projects/move_notification_settings_service.rb
index efe06f158cc..fb84f10207d 100644
--- a/app/services/projects/move_notification_settings_service.rb
+++ b/app/services/projects/move_notification_settings_service.rb
@@ -5,7 +5,7 @@ module Projects
def execute(source_project, remove_remaining_elements: true)
return unless super
- Project.transaction(requires_new: true) do
+ Project.transaction do
move_notification_settings
remove_remaining_notification_settings if remove_remaining_elements
diff --git a/app/services/projects/move_project_authorizations_service.rb b/app/services/projects/move_project_authorizations_service.rb
index c95ad60ab5e..6ac173a20fc 100644
--- a/app/services/projects/move_project_authorizations_service.rb
+++ b/app/services/projects/move_project_authorizations_service.rb
@@ -9,7 +9,7 @@ module Projects
def execute(source_project, remove_remaining_elements: true)
return unless super
- Project.transaction(requires_new: true) do
+ Project.transaction do
move_project_authorizations
remove_remaining_authorizations if remove_remaining_elements
diff --git a/app/services/projects/move_project_group_links_service.rb b/app/services/projects/move_project_group_links_service.rb
index 349953ff973..5f6a7dd09e1 100644
--- a/app/services/projects/move_project_group_links_service.rb
+++ b/app/services/projects/move_project_group_links_service.rb
@@ -9,7 +9,7 @@ module Projects
def execute(source_project, remove_remaining_elements: true)
return unless super
- Project.transaction(requires_new: true) do
+ Project.transaction do
move_group_links
remove_remaining_project_group_links if remove_remaining_elements
diff --git a/app/services/projects/move_project_members_service.rb b/app/services/projects/move_project_members_service.rb
index 9a1b7c6d1b6..011bd17c8cc 100644
--- a/app/services/projects/move_project_members_service.rb
+++ b/app/services/projects/move_project_members_service.rb
@@ -9,7 +9,7 @@ module Projects
def execute(source_project, remove_remaining_elements: true)
return unless super
- Project.transaction(requires_new: true) do
+ Project.transaction do
move_project_members
remove_remaining_members if remove_remaining_elements
diff --git a/app/services/projects/move_users_star_projects_service.rb b/app/services/projects/move_users_star_projects_service.rb
index 20121d429e2..5490448553f 100644
--- a/app/services/projects/move_users_star_projects_service.rb
+++ b/app/services/projects/move_users_star_projects_service.rb
@@ -9,7 +9,7 @@ module Projects
return unless user_stars.any?
- Project.transaction(requires_new: true) do
+ Project.transaction do
user_stars.update_all(project_id: @project.id)
Project.reset_counters @project.id, :users_star_projects
diff --git a/app/services/projects/open_issues_count_service.rb b/app/services/projects/open_issues_count_service.rb
index dc450311db2..8b7a418edf5 100644
--- a/app/services/projects/open_issues_count_service.rb
+++ b/app/services/projects/open_issues_count_service.rb
@@ -7,8 +7,12 @@ module Projects
include Gitlab::Utils::StrongMemoize
# Cache keys used to store issues count
- PUBLIC_COUNT_KEY = 'public_open_issues_count'
- TOTAL_COUNT_KEY = 'total_open_issues_count'
+ # TOTAL_COUNT_KEY includes confidential and hidden issues (admin)
+ # TOTAL_COUNT_WITHOUT_HIDDEN_KEY includes confidential issues but not hidden issues (reporter and above)
+ # PUBLIC_COUNT_WITHOUT_HIDDEN_KEY does not include confidential or hidden issues (guest)
+ TOTAL_COUNT_KEY = 'project_open_issues_including_hidden_count'
+ TOTAL_COUNT_WITHOUT_HIDDEN_KEY = 'project_open_issues_without_hidden_count'
+ PUBLIC_COUNT_WITHOUT_HIDDEN_KEY = 'project_open_public_issues_without_hidden_count'
def initialize(project, user = nil)
@user = user
@@ -16,16 +20,53 @@ module Projects
super(project)
end
+ # rubocop: disable CodeReuse/ActiveRecord
+ def refresh_cache(&block)
+ if block_given?
+ super(&block)
+ else
+ update_cache_for_key(total_count_cache_key) do
+ issues_with_hidden
+ end
+
+ update_cache_for_key(public_count_without_hidden_cache_key) do
+ issues_without_hidden_without_confidential
+ end
+
+ update_cache_for_key(total_count_without_hidden_cache_key) do
+ issues_without_hidden_with_confidential
+ end
+ end
+ end
+
+ private
+
+ def relation_for_count
+ self.class.query(@project, public_only: public_only?, include_hidden: include_hidden?)
+ end
+
def cache_key_name
- public_only? ? PUBLIC_COUNT_KEY : TOTAL_COUNT_KEY
+ if include_hidden?
+ TOTAL_COUNT_KEY
+ elsif public_only?
+ PUBLIC_COUNT_WITHOUT_HIDDEN_KEY
+ else
+ TOTAL_COUNT_WITHOUT_HIDDEN_KEY
+ end
+ end
+
+ def include_hidden?
+ user_is_admin?
end
def public_only?
!user_is_at_least_reporter?
end
- def relation_for_count
- self.class.query(@project, public_only: public_only?)
+ def user_is_admin?
+ strong_memoize(:user_is_admin) do
+ @user&.can_admin_all_resources?
+ end
end
def user_is_at_least_reporter?
@@ -34,46 +75,43 @@ module Projects
end
end
- def public_count_cache_key
- cache_key(PUBLIC_COUNT_KEY)
+ def total_count_without_hidden_cache_key
+ cache_key(TOTAL_COUNT_WITHOUT_HIDDEN_KEY)
+ end
+
+ def public_count_without_hidden_cache_key
+ cache_key(PUBLIC_COUNT_WITHOUT_HIDDEN_KEY)
end
def total_count_cache_key
cache_key(TOTAL_COUNT_KEY)
end
- # rubocop: disable CodeReuse/ActiveRecord
- def refresh_cache(&block)
- if block_given?
- super(&block)
- else
- count_grouped_by_confidential = self.class.query(@project, public_only: false).group(:confidential).count
- public_count = count_grouped_by_confidential[false] || 0
- total_count = public_count + (count_grouped_by_confidential[true] || 0)
+ def issues_with_hidden
+ self.class.query(@project, public_only: false, include_hidden: true).count
+ end
- update_cache_for_key(public_count_cache_key) do
- public_count
- end
+ def issues_without_hidden_without_confidential
+ self.class.query(@project, public_only: true, include_hidden: false).count
+ end
- update_cache_for_key(total_count_cache_key) do
- total_count
- end
- end
+ def issues_without_hidden_with_confidential
+ self.class.query(@project, public_only: false, include_hidden: false).count
end
- # rubocop: enable CodeReuse/ActiveRecord
- # We only show total issues count for reporters
- # which are allowed to view confidential issues
+ # We only show total issues count for admins, who are allowed to view hidden issues.
+ # We also only show issues count including confidential for reporters, who are allowed to view confidential issues.
# This will still show a discrepancy on issues number but should be less than before.
# Check https://gitlab.com/gitlab-org/gitlab-foss/issues/38418 description.
# rubocop: disable CodeReuse/ActiveRecord
- def self.query(projects, public_only: true)
- issues_filtered_by_type = Issue.opened.with_issue_type(Issue::TYPES_FOR_LIST)
- if public_only
- issues_filtered_by_type.public_only.where(project: projects)
+ def self.query(projects, public_only: true, include_hidden: false)
+ if include_hidden
+ Issue.opened.with_issue_type(Issue::TYPES_FOR_LIST).where(project: projects)
+ elsif public_only
+ Issue.public_only.opened.with_issue_type(Issue::TYPES_FOR_LIST).where(project: projects)
else
- issues_filtered_by_type.where(project: projects)
+ Issue.without_hidden.opened.with_issue_type(Issue::TYPES_FOR_LIST).where(project: projects)
end
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/services/projects/operations/update_service.rb b/app/services/projects/operations/update_service.rb
index 51b8e3c6c54..ef74f3e6e7a 100644
--- a/app/services/projects/operations/update_service.rb
+++ b/app/services/projects/operations/update_service.rb
@@ -94,6 +94,7 @@ module Projects
}
}
params[:error_tracking_setting_attributes][:token] = settings[:token] unless /\A\*+\z/.match?(settings[:token]) # Don't update token if we receive masked value
+ params[:error_tracking_setting_attributes][:integrated] = settings[:integrated] unless settings[:integrated].nil?
params
end
diff --git a/app/services/projects/transfer_service.rb b/app/services/projects/transfer_service.rb
index 074550e104d..27376173f07 100644
--- a/app/services/projects/transfer_service.rb
+++ b/app/services/projects/transfer_service.rb
@@ -20,8 +20,16 @@ module Projects
raise TransferError, s_('TransferProject|Please select a new namespace for your project.')
end
- unless allowed_transfer?(current_user, project)
- raise TransferError, s_('TransferProject|Transfer failed, please contact an admin.')
+ if @new_namespace.id == project.namespace_id
+ raise TransferError, s_('TransferProject|Project is already in this namespace.')
+ end
+
+ unless allowed_transfer_project?(current_user, project)
+ raise TransferError, s_("TransferProject|You don't have permission to transfer this project.")
+ end
+
+ unless allowed_to_transfer_to_namespace?(current_user, @new_namespace)
+ raise TransferError, s_("TransferProject|You don't have permission to transfer projects into that namespace.")
end
transfer(project)
@@ -121,11 +129,12 @@ module Projects
Milestones::TransferService.new(current_user, group, project).execute
end
- def allowed_transfer?(current_user, project)
- @new_namespace &&
- can?(current_user, :change_namespace, project) &&
- @new_namespace.id != project.namespace_id &&
- current_user.can?(:transfer_projects, @new_namespace)
+ def allowed_transfer_project?(current_user, project)
+ current_user.can?(:change_namespace, project)
+ end
+
+ def allowed_to_transfer_to_namespace?(current_user, namespace)
+ current_user.can?(:transfer_projects, namespace)
end
def update_namespace_and_visibility(to_namespace)
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index f96a6ee1255..dc75fe1014a 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -3,18 +3,9 @@
module Projects
class UpdatePagesService < BaseService
InvalidStateError = Class.new(StandardError)
- FailedToExtractError = Class.new(StandardError)
- ExclusiveLeaseTaken = Class.new(StandardError)
-
- include ::Pages::LegacyStorageLease
-
BLOCK_SIZE = 32.kilobytes
PUBLIC_DIR = 'public'
- # this has to be invalid group name,
- # as it shares the namespace with groups
- TMP_EXTRACT_PATH = '@pages.tmp'
-
# old deployment can be cached by pages daemon
# so we need to give pages daemon some time update cache
# 10 minutes is enough, but 30 feels safer
@@ -42,7 +33,6 @@ module Projects
validate_max_entries!
build.artifacts_file.use_file do |artifacts_path|
- deploy_to_legacy_storage(artifacts_path)
create_pages_deployment(artifacts_path, build)
success
end
@@ -78,70 +68,6 @@ module Projects
)
end
- def deploy_to_legacy_storage(artifacts_path)
- # path today used by one project can later be used by another
- # so we can't really scope this feature flag by project or group
- return unless ::Settings.pages.local_store.enabled
-
- return if Feature.enabled?(:skip_pages_deploy_to_legacy_storage, project, default_enabled: :yaml)
-
- # Create temporary directory in which we will extract the artifacts
- make_secure_tmp_dir(tmp_path) do |tmp_path|
- extract_archive!(artifacts_path, tmp_path)
-
- # Check if we did extract public directory
- archive_public_path = File.join(tmp_path, PUBLIC_DIR)
-
- raise InvalidStateError, 'pages miss the public folder' unless Dir.exist?(archive_public_path)
-
- validate_outdated_sha!
-
- deploy_page!(archive_public_path)
- end
- end
-
- def extract_archive!(artifacts_path, temp_path)
- if artifacts.ends_with?('.zip')
- extract_zip_archive!(artifacts_path, temp_path)
- else
- raise InvalidStateError, 'unsupported artifacts format'
- end
- end
-
- def extract_zip_archive!(artifacts_path, temp_path)
- SafeZip::Extract.new(artifacts_path)
- .extract(directories: [PUBLIC_DIR], to: temp_path)
- rescue SafeZip::Extract::Error => e
- raise FailedToExtractError, e.message
- end
-
- def deploy_page!(archive_public_path)
- deployed = try_obtain_lease do
- deploy_page_unsafe!(archive_public_path)
- true
- end
-
- unless deployed
- raise ExclusiveLeaseTaken, "Failed to deploy pages - other deployment is in progress"
- end
- end
-
- def deploy_page_unsafe!(archive_public_path)
- # Do atomic move of pages
- # Move and removal may not be atomic, but they are significantly faster then extracting and removal
- # 1. We move deployed public to previous public path (file removal is slow)
- # 2. We move temporary public to be deployed public
- # 3. We remove previous public path
- FileUtils.mkdir_p(pages_path)
- begin
- FileUtils.move(public_path, previous_public_path)
- rescue StandardError
- end
- FileUtils.move(archive_public_path, public_path)
- ensure
- FileUtils.rm_r(previous_public_path, force: true)
- end
-
def create_pages_deployment(artifacts_path, build)
sha256 = build.job_artifacts_archive.file_sha256
@@ -165,22 +91,6 @@ module Projects
)
end
- def tmp_path
- @tmp_path ||= File.join(::Settings.pages.path, TMP_EXTRACT_PATH)
- end
-
- def pages_path
- @pages_path ||= project.pages_path
- end
-
- def public_path
- @public_path ||= File.join(pages_path, PUBLIC_DIR)
- end
-
- def previous_public_path
- @previous_public_path ||= File.join(pages_path, "#{PUBLIC_DIR}.#{SecureRandom.hex}")
- end
-
def ref
build.ref
end
@@ -216,20 +126,6 @@ module Projects
@pages_deployments_failed_total_counter ||= Gitlab::Metrics.counter(:pages_deployments_failed_total, "Counter of GitLab Pages deployments which failed")
end
- def make_secure_tmp_dir(tmp_path)
- FileUtils.mkdir_p(tmp_path)
- path = Dir.mktmpdir(tmp_dir_prefix, tmp_path)
- begin
- yield(path)
- ensure
- FileUtils.remove_entry_secure(path)
- end
- end
-
- def tmp_dir_prefix
- "project-#{project.id}-build-#{build.id}-"
- end
-
def validate_state!
raise InvalidStateError, 'missing pages artifacts' unless build.artifacts?
raise InvalidStateError, 'missing artifacts metadata' unless build.artifacts_metadata?
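The removed deploy_page! path serialised legacy disk deployments behind an exclusive lease (via the dropped ::Pages::LegacyStorageLease include) so two pipelines could not shuffle the same public/ directory at once. A minimal sketch of that lease-guard pattern, assuming the stock Gitlab::ExclusiveLease API; the key name and timeout are illustrative, not the module's real values:

    # Sketch only: roughly what the removed lease helper provided.
    def try_obtain_lease
      lease = ::Gitlab::ExclusiveLease.new("pages_deploy:#{project.id}", timeout: 1.hour)
      return unless lease.try_obtain      # falsey => another deployment holds the lease

      begin
        yield
      ensure
        lease.cancel                      # release early rather than waiting for the TTL
      end
    end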
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index d6e7f165d72..b87564fcaef 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -105,6 +105,7 @@ module Projects
end
update_pages_config if changing_pages_related_config?
+ update_pending_builds if shared_runners_toggled?
end
def after_rename_service(project)
@@ -178,6 +179,16 @@ module Projects
params[:topic_list] ||= topic_list if topic_list
end
+
+ def update_pending_builds
+ update_params = { instance_runners_enabled: project.shared_runners_enabled }
+
+ ::Ci::UpdatePendingBuildService.new(project, update_params).execute
+ end
+
+ def shared_runners_toggled?
+ project.previous_changes.include?('shared_runners_enabled')
+ end
end
end
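The shared_runners_toggled? check relies on ActiveRecord's previous_changes, which is only populated after a successful save and maps each changed column to its [old, new] pair, so the pending-build sync runs exactly when the flag actually flipped. Illustrative behaviour, assuming the save succeeded:

    project.shared_runners_enabled                               # => true
    project.update(shared_runners_enabled: false)
    project.previous_changes
    # => { "shared_runners_enabled" => [true, false], "updated_at" => [...] }
    project.previous_changes.include?('shared_runners_enabled')  # => true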
diff --git a/app/services/protected_branches/api_service.rb b/app/services/protected_branches/api_service.rb
index 3e5122a1523..d0d0737fd66 100644
--- a/app/services/protected_branches/api_service.rb
+++ b/app/services/protected_branches/api_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module ProtectedBranches
- class ApiService < BaseService
+ class ApiService < ProtectedBranches::BaseService
def create
::ProtectedBranches::CreateService.new(@project, @current_user, protected_branch_params).execute
end
diff --git a/app/services/protected_branches/base_service.rb b/app/services/protected_branches/base_service.rb
new file mode 100644
index 00000000000..f48e02ab4b5
--- /dev/null
+++ b/app/services/protected_branches/base_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ProtectedBranches
+ class BaseService < ::BaseService
+ # current_user - The user that performs the action
+ # params - A hash of parameters
+ def initialize(project, current_user = nil, params = {})
+ @project = project
+ @current_user = current_user
+ @params = params
+ end
+
+ def after_execute(*)
+ # overridden in EE::ProtectedBranches module
+ end
+ end
+end
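The new base class mainly gives the ProtectedBranches::* services a shared constructor plus an overridable after_execute hook for EE. A small sketch of how a subclass plugs into it; the EE override itself lives in the EE::ProtectedBranches module mentioned in the comment above and is not shown here:

    # Sketch: CE subclasses call the hook, which is a no-op unless EE prepends behaviour.
    module ProtectedBranches
      class ExampleService < ProtectedBranches::BaseService
        def execute(protected_branch)
          # ... perform the CE work ...
          after_execute(protected_branch: protected_branch)
          protected_branch
        end
      end
    end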
diff --git a/app/services/protected_branches/create_service.rb b/app/services/protected_branches/create_service.rb
index 37083a4a9e4..dada449989a 100644
--- a/app/services/protected_branches/create_service.rb
+++ b/app/services/protected_branches/create_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module ProtectedBranches
- class CreateService < BaseService
+ class CreateService < ProtectedBranches::BaseService
def execute(skip_authorization: false)
raise Gitlab::Access::AccessDeniedError unless skip_authorization || authorized?
diff --git a/app/services/protected_branches/destroy_service.rb b/app/services/protected_branches/destroy_service.rb
index dc177f0ac09..47332ace417 100644
--- a/app/services/protected_branches/destroy_service.rb
+++ b/app/services/protected_branches/destroy_service.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module ProtectedBranches
- class DestroyService < BaseService
+ class DestroyService < ProtectedBranches::BaseService
def execute(protected_branch)
raise Gitlab::Access::AccessDeniedError unless can?(current_user, :destroy_protected_branch, protected_branch)
diff --git a/app/services/protected_branches/update_service.rb b/app/services/protected_branches/update_service.rb
index 1815d92421e..1e70f2d9793 100644
--- a/app/services/protected_branches/update_service.rb
+++ b/app/services/protected_branches/update_service.rb
@@ -1,11 +1,17 @@
# frozen_string_literal: true
module ProtectedBranches
- class UpdateService < BaseService
+ class UpdateService < ProtectedBranches::BaseService
def execute(protected_branch)
raise Gitlab::Access::AccessDeniedError unless can?(current_user, :update_protected_branch, protected_branch)
- protected_branch.update(params)
+ old_merge_access_levels = protected_branch.merge_access_levels.map(&:clone)
+ old_push_access_levels = protected_branch.push_access_levels.map(&:clone)
+
+ if protected_branch.update(params)
+ after_execute(protected_branch: protected_branch, old_merge_access_levels: old_merge_access_levels, old_push_access_levels: old_push_access_levels)
+ end
+
protected_branch
end
end
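The access levels are cloned before the update because protected_branch.update(params) can modify or replace the associated records in place; the clones preserve the pre-update state so the EE after_execute hook can diff old against new. A hedged sketch of a consumer of those keyword arguments (the method body is an assumption, only the signature comes from the call above):

    def after_execute(protected_branch:, old_merge_access_levels:, old_push_access_levels:)
      removed = old_merge_access_levels.map(&:access_level) -
                protected_branch.merge_access_levels.map(&:access_level)
      # e.g. audit or notify about access levels that disappeared in this update
    end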
diff --git a/app/services/repositories/changelog_service.rb b/app/services/repositories/changelog_service.rb
index bac3fdf36da..96db00fbc1b 100644
--- a/app/services/repositories/changelog_service.rb
+++ b/app/services/repositories/changelog_service.rb
@@ -61,7 +61,7 @@ module Repositories
# rubocop: enable Metrics/ParameterLists
def execute
- config = Gitlab::Changelog::Config.from_git(@project)
+ config = Gitlab::Changelog::Config.from_git(@project, @user)
from = start_of_commit_range(config)
# For every entry we want to only include the merge request that
diff --git a/app/services/search/global_service.rb b/app/services/search/global_service.rb
index fe11820fb54..33faf2d6698 100644
--- a/app/services/search/global_service.rb
+++ b/app/services/search/global_service.rb
@@ -24,7 +24,7 @@ module Search
# rubocop: disable CodeReuse/ActiveRecord
def projects
- @projects ||= ProjectsFinder.new(params: { non_archived: true }, current_user: current_user).execute.includes(:topics, :taggings)
+ @projects ||= ProjectsFinder.new(params: { non_archived: true }, current_user: current_user).execute.preload(:topics, :taggings)
end
def allowed_scopes
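Switching from includes to preload pins the eager-loading strategy: preload always issues separate queries and never joins, while includes may switch to a single LEFT OUTER JOIN (eager_load) if the relation ends up referencing those tables. Simplified illustration, assuming the associations shown above:

    # preload: one extra query per association, no join
    Project.preload(:topics, :taggings).limit(20)
    #   SELECT projects.* FROM projects LIMIT 20
    #   SELECT ... FROM topics   WHERE ...   -- for those 20 projects
    #   SELECT ... FROM taggings WHERE ...
    #
    # includes: degrades to one LEFT OUTER JOIN when the association is referenced
    Project.includes(:topics).where(topics: { name: 'ci' }).references(:topics)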
diff --git a/app/services/service_ping/submit_service.rb b/app/services/service_ping/submit_service.rb
index 09d1670fd1f..3417ce4f583 100644
--- a/app/services/service_ping/submit_service.rb
+++ b/app/services/service_ping/submit_service.rb
@@ -2,8 +2,9 @@
module ServicePing
class SubmitService
- PRODUCTION_URL = 'https://version.gitlab.com/usage_data'
- STAGING_URL = 'https://gitlab-services-version-gitlab-com-staging.gs-staging.gitlab.org/usage_data'
+ PRODUCTION_BASE_URL = 'https://version.gitlab.com'
+ STAGING_BASE_URL = 'https://gitlab-services-version-gitlab-com-staging.gs-staging.gitlab.org'
+ USAGE_DATA_PATH = 'usage_data'
METRICS = %w[leader_issues instance_issues percentage_issues leader_notes instance_notes
percentage_notes leader_milestones instance_milestones percentage_milestones
@@ -41,6 +42,10 @@ module ServicePing
store_metrics(response)
end
+ def url
+ URI.join(base_url, USAGE_DATA_PATH)
+ end
+
private
def submit_payload(usage_data)
@@ -81,12 +86,8 @@ module ServicePing
end
# See https://gitlab.com/gitlab-org/gitlab/-/issues/233615 for details
- def url
- if Rails.env.production?
- PRODUCTION_URL
- else
- STAGING_URL
- end
+ def base_url
+ Rails.env.production? ? PRODUCTION_BASE_URL : STAGING_BASE_URL
end
end
end
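URI.join follows RFC 3986 reference resolution, so the result depends on the base URL's path: with the bases above (host only, no path) the join yields <base>/usage_data, but a base ending in a path segment without a trailing slash would have that segment replaced. Quick illustration:

    require 'uri'

    URI.join('https://version.gitlab.com', 'usage_data').to_s
    # => "https://version.gitlab.com/usage_data"
    URI.join('https://example.com/api', 'usage_data').to_s
    # => "https://example.com/usage_data"      (last segment replaced)
    URI.join('https://example.com/api/', 'usage_data').to_s
    # => "https://example.com/api/usage_data"  (trailing slash preserved it)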
diff --git a/app/services/suggestions/apply_service.rb b/app/services/suggestions/apply_service.rb
index 6836700a67d..a0d26e08341 100644
--- a/app/services/suggestions/apply_service.rb
+++ b/app/services/suggestions/apply_service.rb
@@ -33,7 +33,7 @@ module Suggestions
.update_all(commit_id: result[:result], applied: true)
Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter
- .track_apply_suggestion_action(user: current_user)
+ .track_apply_suggestion_action(user: current_user, suggestions: suggestion_set.suggestions)
end
def author
diff --git a/app/services/suggestions/create_service.rb b/app/services/suggestions/create_service.rb
index c5f9fa1eee0..eb98ed57d55 100644
--- a/app/services/suggestions/create_service.rb
+++ b/app/services/suggestions/create_service.rb
@@ -28,7 +28,7 @@ module Suggestions
Gitlab::Database.main.bulk_insert('suggestions', rows) # rubocop:disable Gitlab/BulkInsert
end
- Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter.track_add_suggestion_action(user: @note.author)
+ Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter.track_add_suggestion_action(note: @note)
end
end
end
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 56a6244eebf..e5080718b69 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -331,6 +331,10 @@ module SystemNoteService
::SystemNotes::AlertManagementService.new(noteable: alert, project: alert.project).log_resolving_alert(monitoring_tool)
end
+ def change_issue_type(issue, author)
+ ::SystemNotes::IssuablesService.new(noteable: issue, project: issue.project, author: author).change_issue_type
+ end
+
private
def merge_requests_service(noteable, project, author)
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index ae4f65e785c..62aead352aa 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -380,6 +380,12 @@ module SystemNotes
create_resource_state_event(status: 'closed', close_auto_resolve_prometheus_alert: true)
end
+ def change_issue_type
+ body = "changed issue type to #{noteable.issue_type.humanize(capitalize: false)}"
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'issue_type'))
+ end
+
private
def cross_reference_note_content(gfm_reference)
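humanize(capitalize: false) turns the issue_type enum value into the lower-case label embedded in the note body. Illustrative values (the specific types are examples, not an exhaustive list):

    'incident'.humanize(capitalize: false)   # => "incident"
    'test_case'.humanize(capitalize: false)  # => "test case"
    # resulting system note body: "changed issue type to test case"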
diff --git a/app/services/todos/destroy/design_service.rb b/app/services/todos/destroy/design_service.rb
new file mode 100644
index 00000000000..a375d659159
--- /dev/null
+++ b/app/services/todos/destroy/design_service.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Todos
+ module Destroy
+ # Service class for deleting todos that belongs to a deleted/archived design.
+ class DesignService
+ attr_reader :design_ids
+
+ def initialize(design_ids)
+ @design_ids = design_ids
+ end
+
+ def execute
+ todos.delete_all
+ end
+
+ private
+
+ def todos
+ Todo.for_target(deleted_designs.select(:design_id)).for_type(DesignManagement::Design)
+ end
+
+ def deleted_designs
+ DesignManagement::Action.by_design(design_ids).by_event(:deletion)
+ end
+ end
+ end
+end
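Unlike most services in this directory, the new class takes no project or user; it is a plain object keyed on design IDs, so the caller only needs the IDs of the designs that were removed. A usage sketch (the surrounding worker and the design_ids variable are assumptions, only the constructor and execute come from the file above):

    # e.g. from a background job reacting to design deletions:
    Todos::Destroy::DesignService.new(design_ids).execute
    # deletes todos targeting DesignManagement::Design records that have a deletion action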
diff --git a/app/services/users/ban_service.rb b/app/services/users/ban_service.rb
index 88e92ebff9b..959d4be3795 100644
--- a/app/services/users/ban_service.rb
+++ b/app/services/users/ban_service.rb
@@ -8,6 +8,10 @@ module Users
user.ban
end
+ def valid_state?(user)
+ user.active?
+ end
+
def action
:ban
end
diff --git a/app/services/users/banned_user_base_service.rb b/app/services/users/banned_user_base_service.rb
index 16041075941..a582816283a 100644
--- a/app/services/users/banned_user_base_service.rb
+++ b/app/services/users/banned_user_base_service.rb
@@ -8,6 +8,7 @@ module Users
def execute(user)
return permission_error unless allowed?
+ return state_error(user) unless valid_state?(user)
if update_user(user)
log_event(user)
@@ -22,6 +23,10 @@ module Users
attr_reader :current_user
+ def state_error(user)
+ error(_("You cannot %{action} %{state} users." % { action: action.to_s, state: user.state }), :forbidden)
+ end
+
def allowed?
can?(current_user, :admin_all_resources)
end
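Combined with the valid_state? methods added to BanService and UnbanService in the neighbouring hunks, execute now reads as a small template method: the base class performs the permission and state checks, and each subclass supplies update_user, valid_state? and action. Condensed flow (sketch, error branch omitted):

    def execute(user)
      return permission_error unless allowed?              # requires :admin_all_resources
      return state_error(user) unless valid_state?(user)   # ban: user.active?   unban: user.banned?

      if update_user(user)                                 # ban: user.ban       unban: user.unban
        log_event(user)
        # success path continues as before (not shown in this hunk)
      end
    end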
diff --git a/app/services/users/dismiss_group_callout_service.rb b/app/services/users/dismiss_group_callout_service.rb
new file mode 100644
index 00000000000..8afee6a8187
--- /dev/null
+++ b/app/services/users/dismiss_group_callout_service.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Users
+ class DismissGroupCalloutService < DismissUserCalloutService
+ private
+
+ def callout
+ current_user.find_or_initialize_group_callout(params[:feature_name], params[:group_id])
+ end
+ end
+end
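A hedged usage sketch: the keyword arguments follow the BaseContainerService initializer inherited through DismissUserCalloutService, and everything except feature_name and group_id (which the file above reads from params) is an assumption, including the feature name value:

    Users::DismissGroupCalloutService.new(
      container: group,                  # assumed: container/current_user come from BaseContainerService
      current_user: user,
      params: { feature_name: 'invite_members_banner', group_id: group.id }
    ).execute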
diff --git a/app/services/users/dismiss_user_callout_service.rb b/app/services/users/dismiss_user_callout_service.rb
index f05c44186bb..96f3f3acb57 100644
--- a/app/services/users/dismiss_user_callout_service.rb
+++ b/app/services/users/dismiss_user_callout_service.rb
@@ -3,9 +3,15 @@
module Users
class DismissUserCalloutService < BaseContainerService
def execute
- current_user.find_or_initialize_callout(params[:feature_name]).tap do |callout|
- callout.update(dismissed_at: Time.current) if callout.valid?
+ callout.tap do |record|
+ record.update(dismissed_at: Time.current) if record.valid?
end
end
+
+ private
+
+ def callout
+ current_user.find_or_initialize_callout(params[:feature_name])
+ end
end
end
diff --git a/app/services/users/migrate_to_ghost_user_service.rb b/app/services/users/migrate_to_ghost_user_service.rb
index a471f55e644..515d7821416 100644
--- a/app/services/users/migrate_to_ghost_user_service.rb
+++ b/app/services/users/migrate_to_ghost_user_service.rb
@@ -14,23 +14,30 @@ module Users
def initialize(user)
@user = user
+ @ghost_user = User.ghost
end
def execute
transition = user.block_transition
- user.transaction do
- # Block the user before moving records to prevent a data race.
- # For example, if the user creates an issue after `migrate_issues`
- # runs and before the user is destroyed, the destroy will fail with
- # an exception.
- user.block
+ # Block the user before moving records to prevent a data race.
+ # For example, if the user creates an issue after `migrate_issues`
+ # runs and before the user is destroyed, the destroy will fail with
+ # an exception.
+ user.block
+ begin
+ user.transaction do
+ migrate_records
+ end
+ rescue Exception # rubocop:disable Lint/RescueException
# Reverse the user block if record migration fails
- if !migrate_records_in_transaction && transition
+ if transition
transition.rollback
user.save!
end
+
+ raise
end
user.reset
@@ -38,14 +45,6 @@ module Users
private
- def migrate_records_in_transaction
- user.transaction(requires_new: true) do
- @ghost_user = User.ghost
-
- migrate_records
- end
- end
-
def migrate_records
migrate_issues
migrate_merge_requests
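Previously the block and the record migration shared one outer transaction, with the records migrated in a nested requires_new transaction whose failure triggered the explicit rollback; now the block is applied outside any transaction and is undone by hand when migrate_records raises. The deliberate rescue Exception also catches non-StandardError failures (for example Interrupt) so the user is never left blocked by accident. Compressed control flow of the new version (sketch):

    user.block                        # state change applied outside any transaction
    begin
      user.transaction { migrate_records }
    rescue Exception                  # rubocop:disable Lint/RescueException
      if transition                   # undo the block if the migration failed
        transition.rollback
        user.save!
      end
      raise                           # never swallow the original error
    end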
diff --git a/app/services/users/reject_service.rb b/app/services/users/reject_service.rb
index 833c30d9427..459dd81b74d 100644
--- a/app/services/users/reject_service.rb
+++ b/app/services/users/reject_service.rb
@@ -7,8 +7,8 @@ module Users
end
def execute(user)
- return error(_('You are not allowed to reject a user')) unless allowed?
- return error(_('This user does not have a pending request')) unless user.blocked_pending_approval?
+ return error(_('You are not allowed to reject a user'), :forbidden) unless allowed?
+ return error(_('User does not have a pending request'), :conflict) unless user.blocked_pending_approval?
user.delete_async(deleted_by: current_user, params: { hard_delete: true })
@@ -18,7 +18,7 @@ module Users
log_event(user)
- success
+ success(message: 'Success', http_status: :ok)
end
private
diff --git a/app/services/users/unban_service.rb b/app/services/users/unban_service.rb
index 363783cf240..753a02fa752 100644
--- a/app/services/users/unban_service.rb
+++ b/app/services/users/unban_service.rb
@@ -5,7 +5,11 @@ module Users
private
def update_user(user)
- user.activate
+ user.unban
+ end
+
+ def valid_state?(user)
+ user.banned?
end
def action
diff --git a/app/services/wiki_pages/event_create_service.rb b/app/services/wiki_pages/event_create_service.rb
index ebfc2414f9e..1f613bec00e 100644
--- a/app/services/wiki_pages/event_create_service.rb
+++ b/app/services/wiki_pages/event_create_service.rb
@@ -10,11 +10,9 @@ module WikiPages
end
def execute(slug, page, action, event_fingerprint)
- event = Event.transaction do
- wiki_page_meta = WikiPage::Meta.find_or_create(slug, page)
+ wiki_page_meta = WikiPage::Meta.find_or_create(slug, page)
- ::EventCreateService.new.wiki_event(wiki_page_meta, author, action, event_fingerprint)
- end
+ event = ::EventCreateService.new.wiki_event(wiki_page_meta, author, action, event_fingerprint)
ServiceResponse.success(payload: { event: event })
rescue ::EventCreateService::IllegalActionError, ::ActiveRecord::ActiveRecordError => e