Diffstat (limited to 'app/models')
124 files changed, 1670 insertions, 954 deletions
diff --git a/app/models/ability.rb b/app/models/ability.rb index 6a63a8d46ba..a185448d5ea 100644 --- a/app/models/ability.rb +++ b/app/models/ability.rb @@ -54,6 +54,14 @@ class Ability end end + def feature_flags_readable_by_user(feature_flags, user = nil, filters: {}) + feature_flags = apply_filters_if_needed(feature_flags, user, filters) + + DeclarativePolicy.user_scope do + feature_flags.select { |flag| allowed?(user, :read_feature_flag, flag) } + end + end + def allowed?(user, ability, subject = :global, opts = {}) if subject.is_a?(Hash) opts = subject diff --git a/app/models/alert_management/alert.rb b/app/models/alert_management/alert.rb index 156111ffaf3..679406e68d7 100644 --- a/app/models/alert_management/alert.rb +++ b/app/models/alert_management/alert.rb @@ -210,7 +210,7 @@ module AlertManagement end def self.link_reference_pattern - @link_reference_pattern ||= super("alert_management", /(?<alert>\d+)\/details(\#)?/) + @link_reference_pattern ||= super("alert_management", %r{(?<alert>\d+)/details(\#)?}) end def self.reference_valid?(reference) @@ -225,6 +225,10 @@ module AlertManagement open_statuses.include?(status) end + def open? + self.class.open_status?(status_name) + end + def status_event_for(status) self.class.state_machines[:status].events.transitions_for(self, to: status.to_s.to_sym).first&.event end @@ -248,10 +252,10 @@ module AlertManagement "#{project.to_reference_base(from, full: full)}#{reference}" end - def execute_services - return unless project.has_active_services?(:alert_hooks) + def execute_integrations + return unless project.has_active_integrations?(:alert_hooks) - project.execute_services(hook_data, :alert_hooks) + project.execute_integrations(hook_data, :alert_hooks) end # Representation of the alert's payload. Avoid accessing diff --git a/app/models/alert_management/http_integration.rb b/app/models/alert_management/http_integration.rb index 2caa9a18445..0c3b1679dc3 100644 --- a/app/models/alert_management/http_integration.rb +++ b/app/models/alert_management/http_integration.rb @@ -53,10 +53,6 @@ module AlertManagement endpoint_identifier == LEGACY_IDENTIFIER end - def token_changed? - attribute_changed?(:token) - end - # Blank token assignment triggers token reset def prevent_token_assignment if token.present? && token_changed? diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb index f8047ed9b78..a7140cc0718 100644 --- a/app/models/application_setting.rb +++ b/app/models/application_setting.rb @@ -172,6 +172,11 @@ class ApplicationSetting < ApplicationRecord addressable_url: { enforce_sanitization: true }, if: :gitpod_enabled + validates :mailgun_signing_key, + presence: true, + length: { maximum: 255 }, + if: :mailgun_events_enabled + validates :snowplow_collector_hostname, presence: true, hostname: true, @@ -288,7 +293,7 @@ class ApplicationSetting < ApplicationRecord validates :user_default_internal_regex, js_regex: true, allow_nil: true validates :personal_access_token_prefix, - format: { with: /\A[a-zA-Z0-9_+=\/@:.-]+\z/, + format: { with: %r{\A[a-zA-Z0-9_+=/@:.-]+\z}, message: _("can contain only letters of the Base64 alphabet (RFC4648) with the addition of '@', ':' and '.'") }, length: { maximum: 20, message: _('is too long (maximum is %{count} characters)') }, allow_blank: true @@ -372,6 +377,8 @@ class ApplicationSetting < ApplicationRecord end end + validate :check_valid_runner_registrars + validate :terms_exist, if: :enforce_terms? 
validates :external_authorization_service_default_label, @@ -550,6 +557,7 @@ class ApplicationSetting < ApplicationRecord attr_encrypted :secret_detection_token_revocation_token, encryption_options_base_32_aes_256_gcm attr_encrypted :cloud_license_auth_token, encryption_options_base_32_aes_256_gcm attr_encrypted :external_pipeline_validation_service_token, encryption_options_base_32_aes_256_gcm + attr_encrypted :mailgun_signing_key, encryption_options_base_32_aes_256_gcm.merge(encode: false) validates :disable_feed_token, inclusion: { in: [true, false], message: _('must be a boolean value') } @@ -582,7 +590,7 @@ class ApplicationSetting < ApplicationRecord end def sourcegraph_url_is_com? - !!(sourcegraph_url =~ /\Ahttps:\/\/(www\.)?sourcegraph\.com/) + !!(sourcegraph_url =~ %r{\Ahttps://(www\.)?sourcegraph\.com}) end def instance_review_permitted? diff --git a/app/models/application_setting/term.rb b/app/models/application_setting/term.rb index bab036f5697..acdd7e4155c 100644 --- a/app/models/application_setting/term.rb +++ b/app/models/application_setting/term.rb @@ -3,12 +3,14 @@ class ApplicationSetting class Term < ApplicationRecord include CacheMarkdownField - has_many :term_agreements + include NullifyIfBlank - validates :terms, presence: true + has_many :term_agreements cache_markdown_field :terms + nullify_if_blank :terms + def self.latest order(:id).last end diff --git a/app/models/application_setting_implementation.rb b/app/models/application_setting_implementation.rb index b613e698471..d7a594af84c 100644 --- a/app/models/application_setting_implementation.rb +++ b/app/models/application_setting_implementation.rb @@ -15,6 +15,7 @@ module ApplicationSettingImplementation # forbidden. FORBIDDEN_KEY_VALUE = KeyRestrictionValidator::FORBIDDEN SUPPORTED_KEY_TYPES = %i[rsa dsa ecdsa ed25519].freeze + VALID_RUNNER_REGISTRAR_TYPES = %w(project group).freeze DEFAULT_PROTECTED_PATHS = [ '/users/password', @@ -103,6 +104,8 @@ module ApplicationSettingImplementation issues_create_limit: 300, local_markdown_version: 0, login_recaptcha_protection_enabled: false, + mailgun_signing_key: nil, + mailgun_events_enabled: false, max_artifacts_size: Settings.artifacts['max_size'], max_attachment_size: Settings.gitlab['max_attachment_size'], max_import_size: 0, @@ -186,6 +189,7 @@ module ApplicationSettingImplementation user_default_external: false, user_default_internal_regex: nil, user_show_add_ssh_key_message: true, + valid_runner_registrars: VALID_RUNNER_REGISTRAR_TYPES, wiki_page_max_content_bytes: 50.megabytes, container_registry_delete_tags_service_timeout: 250, container_registry_expiration_policies_worker_capacity: 0, @@ -373,6 +377,10 @@ module ApplicationSettingImplementation Settings.gitlab.usage_ping_enabled end + def usage_ping_features_enabled? + usage_ping_enabled? && usage_ping_features_enabled + end + def usage_ping_enabled usage_ping_can_be_configured? && super end @@ -507,6 +515,17 @@ module ApplicationSettingImplementation end end + def check_valid_runner_registrars + valid = valid_runner_registrar_combinations.include?(valid_runner_registrars) + errors.add(:valid_runner_registrars, _("%{value} is not included in the list") % { value: valid_runner_registrars }) unless valid + end + + def valid_runner_registrar_combinations + 0.upto(VALID_RUNNER_REGISTRAR_TYPES.size).flat_map do |n| + VALID_RUNNER_REGISTRAR_TYPES.permutation(n).to_a + end + end + def terms_exist return unless enforce_terms? 
diff --git a/app/models/audit_event.rb b/app/models/audit_event.rb index 11036b76fc1..f17fff742fe 100644 --- a/app/models/audit_event.rb +++ b/app/models/audit_event.rb @@ -99,7 +99,12 @@ class AuditEvent < ApplicationRecord end def parallel_persist - PARALLEL_PERSISTENCE_COLUMNS.each { |col| self[col] = details[col] } + PARALLEL_PERSISTENCE_COLUMNS.each do |name| + original = self[name] || self.details[name] + next unless original + + self[name] = self.details[name] = original + end end end diff --git a/app/models/award_emoji.rb b/app/models/award_emoji.rb index 5ba6100f169..c8f6b9aaedb 100644 --- a/app/models/award_emoji.rb +++ b/app/models/award_emoji.rb @@ -24,8 +24,8 @@ class AwardEmoji < ApplicationRecord scope :named, -> (names) { where(name: names) } scope :awarded_by, -> (users) { where(user: users) } - after_save :expire_etag_cache - after_destroy :expire_etag_cache + after_save :expire_cache + after_destroy :expire_cache class << self def votes_for_collection(ids, type) @@ -60,7 +60,9 @@ class AwardEmoji < ApplicationRecord self.name == UPVOTE_NAME end - def expire_etag_cache + def expire_cache + awardable.try(:bump_updated_at) awardable.try(:expire_etag_cache) + awardable.try(:update_upvotes_count) if upvote? end end diff --git a/app/models/blob.rb b/app/models/blob.rb index 2185233a1ac..5731d38abe4 100644 --- a/app/models/blob.rb +++ b/app/models/blob.rb @@ -27,6 +27,7 @@ class Blob < SimpleDelegator # type. LFS pointers to `.stl` files are assumed to always be the binary kind, # and use the `BinarySTL` viewer. RICH_VIEWERS = [ + BlobViewer::CSV, BlobViewer::Markup, BlobViewer::Notebook, BlobViewer::SVG, diff --git a/app/models/blob_viewer/csv.rb b/app/models/blob_viewer/csv.rb new file mode 100644 index 00000000000..633e3bd63d8 --- /dev/null +++ b/app/models/blob_viewer/csv.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +module BlobViewer + class CSV < Base + include Rich + include ClientSide + + self.binary = false + self.extensions = %w(csv) + self.partial_name = 'csv' + self.switcher_icon = 'table' + end +end diff --git a/app/models/blob_viewer/go_mod.rb b/app/models/blob_viewer/go_mod.rb index ae57e2c0526..d4d117f899c 100644 --- a/app/models/blob_viewer/go_mod.rb +++ b/app/models/blob_viewer/go_mod.rb @@ -5,13 +5,13 @@ module BlobViewer include ServerSide include Gitlab::Utils::StrongMemoize - MODULE_REGEX = / + MODULE_REGEX = %r{ \A (?# beginning of file) module\s+ (?# module directive) (?<name>.*?) (?# module name) - \s*(?:\/\/.*)? (?# comment) + \s*(?://.*)? (?# comment) (?:\n|\z) (?# newline or end of file) - /x.freeze + }x.freeze self.file_types = %i(go_mod go_sum) diff --git a/app/models/blob_viewer/markup.rb b/app/models/blob_viewer/markup.rb index 37a8e01d0f1..6f002a6b224 100644 --- a/app/models/blob_viewer/markup.rb +++ b/app/models/blob_viewer/markup.rb @@ -14,9 +14,7 @@ module BlobViewer {}.tap do |h| h[:rendered] = blob.rendered_markup if blob.respond_to?(:rendered_markup) - if Feature.enabled?(:cached_markdown_blob, blob.project, default_enabled: true) - h[:cache_key] = ['blob', blob.id, 'commit', blob.commit_id] - end + h[:cache_key] = ['blob', blob.id, 'commit', blob.commit_id] end end end diff --git a/app/models/bulk_import.rb b/app/models/bulk_import.rb index 5d646313423..dee55675304 100644 --- a/app/models/bulk_import.rb +++ b/app/models/bulk_import.rb @@ -4,6 +4,8 @@ # projects to a GitLab instance. It associates the import with the responsible # user. 
class BulkImport < ApplicationRecord + MINIMUM_GITLAB_MAJOR_VERSION = 14 + belongs_to :user, optional: false has_one :configuration, class_name: 'BulkImports::Configuration' @@ -31,4 +33,8 @@ class BulkImport < ApplicationRecord transition any => :failed end end + + def self.all_human_statuses + state_machine.states.map(&:human_name) + end end diff --git a/app/models/bulk_imports/entity.rb b/app/models/bulk_imports/entity.rb index bb543b39a79..24f86b44841 100644 --- a/app/models/bulk_imports/entity.rb +++ b/app/models/bulk_imports/entity.rb @@ -48,6 +48,8 @@ class BulkImports::Entity < ApplicationRecord enum source_type: { group_entity: 0, project_entity: 1 } + scope :by_user_id, ->(user_id) { joins(:bulk_import).where(bulk_imports: { user_id: user_id }) } + state_machine :status, initial: :created do state :created, value: 0 state :started, value: 1 @@ -68,6 +70,10 @@ class BulkImports::Entity < ApplicationRecord end end + def self.all_human_statuses + state_machine.states.map(&:human_name) + end + def encoded_source_full_path ERB::Util.url_encode(source_full_path) end diff --git a/app/models/bulk_imports/export_status.rb b/app/models/bulk_imports/export_status.rb index 98804d18f27..ff165830cf1 100644 --- a/app/models/bulk_imports/export_status.rb +++ b/app/models/bulk_imports/export_status.rb @@ -9,7 +9,7 @@ module BulkImports @relation = relation @entity = @pipeline_tracker.entity @configuration = @entity.bulk_import.configuration - @client = Clients::HTTP.new(uri: @configuration.url, token: @configuration.access_token) + @client = Clients::HTTP.new(url: @configuration.url, token: @configuration.access_token) end def started? diff --git a/app/models/bulk_imports/file_transfer/base_config.rb b/app/models/bulk_imports/file_transfer/base_config.rb index 7396f9d3655..ddea7c3f64c 100644 --- a/app/models/bulk_imports/file_transfer/base_config.rb +++ b/app/models/bulk_imports/file_transfer/base_config.rb @@ -30,7 +30,7 @@ module BulkImports end def portable_relations - import_export_config.dig(:tree, portable_class_sym).keys.map(&:to_s) + import_export_config.dig(:tree, portable_class_sym).keys.map(&:to_s) - skipped_relations end private @@ -66,6 +66,10 @@ module BulkImports def base_export_path raise NotImplementedError end + + def skipped_relations + [] + end end end end diff --git a/app/models/bulk_imports/file_transfer/group_config.rb b/app/models/bulk_imports/file_transfer/group_config.rb index 1f845b387b8..2266cbb484f 100644 --- a/app/models/bulk_imports/file_transfer/group_config.rb +++ b/app/models/bulk_imports/file_transfer/group_config.rb @@ -10,6 +10,10 @@ module BulkImports def import_export_yaml ::Gitlab::ImportExport.group_config_file end + + def skipped_relations + @skipped_relations ||= %w(members) + end end end end diff --git a/app/models/bulk_imports/file_transfer/project_config.rb b/app/models/bulk_imports/file_transfer/project_config.rb index e42b5bfce3d..8a57f51c1c5 100644 --- a/app/models/bulk_imports/file_transfer/project_config.rb +++ b/app/models/bulk_imports/file_transfer/project_config.rb @@ -10,6 +10,10 @@ module BulkImports def import_export_yaml ::Gitlab::ImportExport.config_file end + + def skipped_relations + @skipped_relations ||= %w(project_members group_members) + end end end end diff --git a/app/models/ci/base_model.rb b/app/models/ci/base_model.rb new file mode 100644 index 00000000000..8fb752ead1d --- /dev/null +++ b/app/models/ci/base_model.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +module Ci + # TODO: 
https://gitlab.com/groups/gitlab-org/-/epics/6168 + # + # Do not use this yet outside of `ci_instance_variables`. + # This class is part of a migration to move all CI classes to a new separate database. + # Initially we are only going to be moving the `Ci::InstanceVariable` model and it will be duplicated in the main and CI tables + # Do not extend this class in any other models. + class BaseModel < ::ApplicationRecord + self.abstract_class = true + + if Gitlab::Database.has_config?(:ci) + connects_to database: { writing: :ci, reading: :ci } + end + end +end diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb index fdfffd9b0cd..4328f3f7a4b 100644 --- a/app/models/ci/build.rb +++ b/app/models/ci/build.rb @@ -11,7 +11,6 @@ module Ci include Importable include Ci::HasRef include IgnorableColumns - include TaggableQueries BuildArchivedError = Class.new(StandardError) @@ -136,6 +135,7 @@ module Ci scope :eager_load_job_artifacts, -> { includes(:job_artifacts) } scope :eager_load_job_artifacts_archive, -> { includes(:job_artifacts_archive) } + scope :eager_load_tags, -> { includes(:tags) } scope :eager_load_everything, -> do includes( @@ -178,25 +178,6 @@ module Ci joins(:metadata).where("ci_builds_metadata.config_options -> 'artifacts' -> 'reports' ?| array[:job_types]", job_types: job_types) end - scope :matches_tag_ids, -> (tag_ids) do - matcher = ::ActsAsTaggableOn::Tagging - .where(taggable_type: CommitStatus.name) - .where(context: 'tags') - .where('taggable_id = ci_builds.id') - .where.not(tag_id: tag_ids).select('1') - - where("NOT EXISTS (?)", matcher) - end - - scope :with_any_tags, -> do - matcher = ::ActsAsTaggableOn::Tagging - .where(taggable_type: CommitStatus.name) - .where(context: 'tags') - .where('taggable_id = ci_builds.id').select('1') - - where("EXISTS (?)", matcher) - end - scope :queued_before, ->(time) { where(arel_table[:queued_at].lt(time)) } scope :preload_project_and_pipeline_project, -> do @@ -212,7 +193,7 @@ module Ci acts_as_taggable - add_authentication_token_field :token, encrypted: :optional + add_authentication_token_field :token, encrypted: :required before_save :ensure_token before_destroy { unscoped_project } @@ -344,7 +325,11 @@ module Ci build.run_after_commit do build.run_status_commit_hooks! - BuildFinishedWorker.perform_async(id) + if Feature.enabled?(:ci_build_finished_worker_namespace_changed, build.project, default_enabled: :yaml) + Ci::BuildFinishedWorker.perform_async(id) + else + ::BuildFinishedWorker.perform_async(id) + end end end @@ -758,6 +743,14 @@ module Ci self.token && ActiveSupport::SecurityUtils.secure_compare(token, self.token) end + def tag_list + if tags.loaded? + tags.map(&:name) + else + super + end + end + def has_tags? tag_list.any? end @@ -782,7 +775,7 @@ module Ci return unless project project.execute_hooks(build_data.dup, :job_hooks) if project.has_active_hooks?(:job_hooks) - project.execute_services(build_data.dup, :job_hooks) if project.has_active_services?(:job_hooks) + project.execute_integrations(build_data.dup, :job_hooks) if project.has_active_integrations?(:job_hooks) end def browsable_artifacts? @@ -939,8 +932,7 @@ module Ci end def supports_artifacts_exclude? - options&.dig(:artifacts, :exclude)&.any? && - Gitlab::Ci::Features.artifacts_exclude_enabled? + options&.dig(:artifacts, :exclude)&.any? end def multi_build_steps? 
diff --git a/app/models/ci/build_dependencies.rb b/app/models/ci/build_dependencies.rb index d39e0411a79..c4a04d42a1e 100644 --- a/app/models/ci/build_dependencies.rb +++ b/app/models/ci/build_dependencies.rb @@ -37,12 +37,20 @@ module Ci next [] unless processable.pipeline_id # we don't have any dependency when creating the pipeline deps = model_class.where(pipeline_id: processable.pipeline_id).latest - deps = from_previous_stages(deps) - deps = from_needs(deps) + deps = find_dependencies(processable, deps) + from_dependencies(deps).to_a end end + def find_dependencies(processable, deps) + if processable.scheduling_type_dag? + from_needs(deps) + else + from_previous_stages(deps) + end + end + # Dependencies from the same parent-pipeline hierarchy excluding # the current job's pipeline def cross_pipeline @@ -125,8 +133,6 @@ module Ci end def from_needs(scope) - return scope unless processable.scheduling_type_dag? - needs_names = processable.needs.artifacts.select(:name) scope.where(name: needs_names) end diff --git a/app/models/ci/build_metadata.rb b/app/models/ci/build_metadata.rb index f009e4c6aa1..50775f578f0 100644 --- a/app/models/ci/build_metadata.rb +++ b/app/models/ci/build_metadata.rb @@ -22,8 +22,8 @@ module Ci validates :build, presence: true validates :secrets, json_schema: { filename: 'build_metadata_secrets' } - serialize :config_options, Serializers::Json # rubocop:disable Cop/ActiveRecordSerialize - serialize :config_variables, Serializers::Json # rubocop:disable Cop/ActiveRecordSerialize + serialize :config_options, Serializers::SymbolizedJson # rubocop:disable Cop/ActiveRecordSerialize + serialize :config_variables, Serializers::SymbolizedJson # rubocop:disable Cop/ActiveRecordSerialize chronic_duration_attr_reader :timeout_human_readable, :timeout diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 25f4a06088d..3fa9a484b0c 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -14,13 +14,7 @@ module Ci belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id - default_value_for :data_store do - if Feature.enabled?(:dedicated_redis_trace_chunks, type: :ops) - :redis_trace_chunks - else - :redis - end - end + default_value_for :data_store, :redis_trace_chunks after_create { metrics.increment_trace_operation(operation: :chunked) } @@ -115,7 +109,7 @@ module Ci raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 return if offset == size # Skip the following process as it doesn't affect anything - self.append("", offset) + self.append(+"", offset) end def append(new_data, offset) diff --git a/app/models/ci/build_trace_chunks/fog.rb b/app/models/ci/build_trace_chunks/fog.rb index fab85fae33d..3bfac2b33c0 100644 --- a/app/models/ci/build_trace_chunks/fog.rb +++ b/app/models/ci/build_trace_chunks/fog.rb @@ -25,14 +25,36 @@ module Ci files.create(create_attributes(model, new_data)) end + # This is the sequence that causes append_data to be called: + # + # 1. Runner sends a PUT /api/v4/jobs/:id to indicate the job is canceled or finished. + # 2. UpdateBuildStateService#accept_build_state! persists all live job logs to object storage (or filesystem). + # 3. UpdateBuildStateService#accept_build_state! returns a 202 to the runner. + # 4. The runner continues to send PATCH requests with job logs until all logs have been sent and received. + # 5. 
If the last PATCH request arrives after the job log has been persisted, we + # retrieve the data from object storage to append the remaining lines. def append_data(model, new_data, offset) if offset > 0 truncated_data = data(model).to_s.byteslice(0, offset) - new_data = truncated_data + new_data + new_data = append_strings(truncated_data, new_data) end set_data(model, new_data) new_data.bytesize + rescue Encoding::CompatibilityError => e + Gitlab::ErrorTracking.track_and_raise_exception( + e, + build_id: model.build_id, + chunk_index: model.chunk_index, + chunk_start_offset: model.start_offset, + chunk_end_offset: model.end_offset, + chunk_size: model.size, + chunk_data_store: model.data_store, + offset: offset, + old_data_encoding: truncated_data.encoding.to_s, + new_data: new_data, + new_data_size: new_data.bytesize, + new_data_encoding: new_data.encoding.to_s) end def size(model) @@ -57,6 +79,17 @@ module Ci private + def append_strings(old_data, new_data) + if Feature.enabled?(:ci_job_trace_force_encode, default_enabled: :yaml) + # When object storage is in use, old_data may be retrieved in UTF-8. + old_data = old_data.force_encoding(Encoding::ASCII_8BIT) + # new_data should already be in ASCII-8BIT, but just in case it isn't, do this. + new_data = new_data.force_encoding(Encoding::ASCII_8BIT) + end + + old_data + new_data + end + def key(model) key_raw(model.build_id, model.chunk_index) end diff --git a/app/models/ci/group.rb b/app/models/ci/group.rb index 47b91fcf2ce..e5cb2026503 100644 --- a/app/models/ci/group.rb +++ b/app/models/ci/group.rb @@ -10,6 +10,7 @@ module Ci class Group include StaticModel include Gitlab::Utils::StrongMemoize + include GlobalID::Identification attr_reader :project, :stage, :name, :jobs @@ -22,6 +23,10 @@ module Ci @jobs = jobs end + def id + "#{stage.id}-#{name}" + end + def ==(other) other.present? 
&& other.is_a?(self.class) && project == other.project && diff --git a/app/models/ci/instance_variable.rb b/app/models/ci/instance_variable.rb index e083caa8751..5aee4c924af 100644 --- a/app/models/ci/instance_variable.rb +++ b/app/models/ci/instance_variable.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true module Ci - class InstanceVariable < ApplicationRecord + class InstanceVariable < ::Ci::BaseModel extend Gitlab::Ci::Model extend Gitlab::ProcessMemoryCache::Helper include Ci::NewHasVariable diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb index 6a7a2b3f6bd..46c976d5616 100644 --- a/app/models/ci/job_artifact.rb +++ b/app/models/ci/job_artifact.rb @@ -33,6 +33,7 @@ module Ci secret_detection: 'gl-secret-detection-report.json', dependency_scanning: 'gl-dependency-scanning-report.json', container_scanning: 'gl-container-scanning-report.json', + cluster_image_scanning: 'gl-cluster-image-scanning-report.json', dast: 'gl-dast-report.json', license_scanning: 'gl-license-scanning-report.json', performance: 'performance.json', @@ -71,6 +72,7 @@ module Ci secret_detection: :raw, dependency_scanning: :raw, container_scanning: :raw, + cluster_image_scanning: :raw, dast: :raw, license_scanning: :raw, @@ -108,6 +110,7 @@ module Ci sast secret_detection requirements + cluster_image_scanning ].freeze TYPE_AND_FORMAT_PAIRS = INTERNAL_TYPES.merge(REPORT_TYPES).freeze @@ -181,6 +184,8 @@ module Ci scope :with_destroy_preloads, -> { includes(project: [:route, :statistics]) } scope :scoped_project, -> { where('ci_job_artifacts.project_id = projects.id') } + scope :for_project, ->(project) { where(project_id: project) } + scope :created_in_time_range, ->(from: nil, to: nil) { where(created_at: from..to) } delegate :filename, :exists?, :open, to: :file @@ -210,7 +215,8 @@ module Ci coverage_fuzzing: 23, ## EE-specific browser_performance: 24, ## EE-specific load_performance: 25, ## EE-specific - api_fuzzing: 26 ## EE-specific + api_fuzzing: 26, ## EE-specific + cluster_image_scanning: 27 ## EE-specific } # `file_location` indicates where actual files are stored. diff --git a/app/models/ci/job_token/project_scope_link.rb b/app/models/ci/job_token/project_scope_link.rb index 283ad4a190d..99118f8090b 100644 --- a/app/models/ci/job_token/project_scope_link.rb +++ b/app/models/ci/job_token/project_scope_link.rb @@ -19,6 +19,10 @@ module Ci validates :target_project, presence: true validate :not_self_referential_link + def self.for_source_and_target(source_project, target_project) + self.find_by(source_project: source_project, target_project: target_project) + end + private def not_self_referential_link diff --git a/app/models/ci/pending_build.rb b/app/models/ci/pending_build.rb index b9a8a44bd6b..0663052f51d 100644 --- a/app/models/ci/pending_build.rb +++ b/app/models/ci/pending_build.rb @@ -7,12 +7,52 @@ module Ci belongs_to :project belongs_to :build, class_name: 'Ci::Build' + scope :ref_protected, -> { where(protected: true) } + scope :queued_before, ->(time) { where(arel_table[:created_at].lt(time)) } + def self.upsert_from_build!(build) - entry = self.new(build: build, project: build.project, protected: build.protected?) + entry = self.new(args_from_build(build)) entry.validate! self.upsert(entry.attributes.compact, returning: %w[build_id], unique_by: :build_id) end + + def self.args_from_build(build) + args = { + build: build, + project: build.project, + protected: build.protected? 
+ } + + if Feature.enabled?(:ci_pending_builds_maintain_shared_runners_data, type: :development, default_enabled: :yaml) + args.merge(instance_runners_enabled: shareable?(build)) + else + args + end + end + private_class_method :args_from_build + + def self.shareable?(build) + shared_runner_enabled?(build) && + builds_access_level?(build) && + project_not_removed?(build) + end + private_class_method :shareable? + + def self.shared_runner_enabled?(build) + build.project.shared_runners.exists? + end + private_class_method :shared_runner_enabled? + + def self.project_not_removed?(build) + !build.project.pending_delete? + end + private_class_method :project_not_removed? + + def self.builds_access_level?(build) + build.project.project_feature.builds_access_level.nil? || build.project.project_feature.builds_access_level > 0 + end + private_class_method :builds_access_level? end end diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb index 159d9d10878..5d079f57267 100644 --- a/app/models/ci/pipeline.rb +++ b/app/models/ci/pipeline.rb @@ -29,6 +29,8 @@ module Ci BridgeStatusError = Class.new(StandardError) + paginates_per 15 + sha_attribute :source_sha sha_attribute :target_sha @@ -222,7 +224,7 @@ module Ci end after_transition [:created, :waiting_for_resource, :preparing, :pending, :running] => :success do |pipeline| - # We wait a little bit to ensure that all BuildFinishedWorkers finish first + # We wait a little bit to ensure that all Ci::BuildFinishedWorkers finish first # because this is where some metrics like code coverage is parsed and stored # in CI build records which the daily build metrics worker relies on. pipeline.run_after_commit { Ci::DailyBuildGroupReportResultsWorker.perform_in(10.minutes, pipeline.id) } @@ -577,11 +579,11 @@ module Ci canceled? && auto_canceled_by_id? end - def cancel_running(retries: nil) + def cancel_running(retries: 1) commit_status_relations = [:project, :pipeline] ci_build_relations = [:deployment, :taggings] - retry_optimistic_lock(cancelable_statuses, retries, name: 'ci_pipeline_cancel_running') do |cancelables| + retry_lock(cancelable_statuses, retries, name: 'ci_pipeline_cancel_running') do |cancelables| cancelables.find_in_batches do |batch| ActiveRecord::Associations::Preloader.new.preload(batch, commit_status_relations) ActiveRecord::Associations::Preloader.new.preload(batch.select { |job| job.is_a?(Ci::Build) }, ci_build_relations) @@ -594,7 +596,7 @@ module Ci end end - def auto_cancel_running(pipeline, retries: nil) + def auto_cancel_running(pipeline, retries: 1) update(auto_canceled_by: pipeline) cancel_running(retries: retries) do |job| @@ -610,8 +612,6 @@ module Ci # rubocop: enable CodeReuse/ServiceClass def lazy_ref_commit - return unless ::Gitlab::Ci::Features.pipeline_latest? - BatchLoader.for(ref).batch do |refs, loader| next unless project.repository_exists? @@ -623,11 +623,6 @@ module Ci def latest? return false unless git_ref && commit.present? - - unless ::Gitlab::Ci::Features.pipeline_latest? - return project.commit(git_ref) == commit - end - return false if lazy_ref_commit.nil? 
lazy_ref_commit.id == commit.id @@ -861,7 +856,7 @@ module Ci def execute_hooks project.execute_hooks(pipeline_data, :pipeline_hooks) if project.has_active_hooks?(:pipeline_hooks) - project.execute_services(pipeline_data, :pipeline_hooks) if project.has_active_services?(:pipeline_hooks) + project.execute_integrations(pipeline_data, :pipeline_hooks) if project.has_active_integrations?(:pipeline_hooks) end # All the merge requests for which the current pipeline runs/ran against @@ -911,7 +906,7 @@ module Ci def same_family_pipeline_ids ::Gitlab::Ci::PipelineObjectHierarchy.new( - self.class.default_scoped.where(id: root_ancestor), options: { same_project: true } + self.class.default_scoped.where(id: root_ancestor), options: { project_condition: :same } ).base_and_descendants.select(:id) end @@ -932,29 +927,34 @@ module Ci Environment.where(id: environment_ids) end - # Without using `unscoped`, caller scope is also included into the query. - # Using `unscoped` here will be redundant after Rails 6.1 + # With multi-project and parent-child pipelines + def self_and_upstreams + object_hierarchy.base_and_ancestors + end + + # With multi-project and parent-child pipelines + def self_with_upstreams_and_downstreams + object_hierarchy.all_objects + end + + # With only parent-child pipelines + def self_and_ancestors + object_hierarchy(project_condition: :same).base_and_ancestors + end + + # With only parent-child pipelines def self_and_descendants - ::Gitlab::Ci::PipelineObjectHierarchy - .new(self.class.unscoped.where(id: id), options: { same_project: true }) - .base_and_descendants + object_hierarchy(project_condition: :same).base_and_descendants end def root_ancestor return self unless child? - Gitlab::Ci::PipelineObjectHierarchy - .new(self.class.unscoped.where(id: id), options: { same_project: true }) + object_hierarchy(project_condition: :same) .base_and_ancestors(hierarchy_order: :desc) .first end - def self_with_ancestors_and_descendants(same_project: false) - ::Gitlab::Ci::PipelineObjectHierarchy - .new(self.class.unscoped.where(id: id), options: { same_project: same_project }) - .all_objects - end - def bridge_triggered? source_bridge.present? end @@ -1026,8 +1026,6 @@ module Ci end def can_generate_codequality_reports? - return false unless ::Gitlab::Ci::Features.display_quality_on_mr_diff?(project) - has_reports?(Ci::JobArtifact.codequality_reports) end @@ -1214,14 +1212,6 @@ module Ci self.ci_ref = Ci::Ref.ensure_for(self) end - def base_and_ancestors(same_project: false) - # Without using `unscoped`, caller scope is also included into the query. - # Using `unscoped` here will be redundant after Rails 6.1 - ::Gitlab::Ci::PipelineObjectHierarchy - .new(self.class.unscoped.where(id: id), options: { same_project: same_project }) - .base_and_ancestors - end - # We need `base_and_ancestors` in a specific order to "break" when needed. # If we use `find_each`, then the order is broken. # rubocop:disable Rails/FindEach @@ -1232,7 +1222,7 @@ module Ci source_bridge.pending! Ci::AfterRequeueJobService.new(project, current_user).execute(source_bridge) # rubocop:disable CodeReuse/ServiceClass else - base_and_ancestors.includes(:source_bridge).each do |pipeline| + self_and_upstreams.includes(:source_bridge).each do |pipeline| break unless pipeline.bridge_waiting? pipeline.source_bridge.pending! @@ -1315,6 +1305,13 @@ module Ci project.repository.keep_around(self.sha, self.before_sha) end + + # Without using `unscoped`, caller scope is also included into the query. 
+ # Using `unscoped` here will be redundant after Rails 6.1 + def object_hierarchy(options = {}) + ::Gitlab::Ci::PipelineObjectHierarchy + .new(self.class.unscoped.where(id: id), options: options) + end end end diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb index 71110ef0696..a541dca47de 100644 --- a/app/models/ci/runner.rb +++ b/app/models/ci/runner.rb @@ -13,7 +13,7 @@ module Ci include Gitlab::Utils::StrongMemoize include TaggableQueries - add_authentication_token_field :token, encrypted: -> { Feature.enabled?(:ci_runners_tokens_optional_encryption, default_enabled: true) ? :optional : :required } + add_authentication_token_field :token, encrypted: :optional enum access_level: { not_protected: 0, @@ -214,15 +214,15 @@ module Ci Arel.sql("(#{arel_tag_names_array.to_sql})") ] - # we use distinct to de-duplicate data - distinct.pluck(*unique_params).map do |values| + group(*unique_params).pluck('array_agg(ci_runners.id)', *unique_params).map do |values| Gitlab::Ci::Matching::RunnerMatcher.new({ - runner_type: values[0], - public_projects_minutes_cost_factor: values[1], - private_projects_minutes_cost_factor: values[2], - run_untagged: values[3], - access_level: values[4], - tag_list: values[5] + runner_ids: values[0], + runner_type: values[1], + public_projects_minutes_cost_factor: values[2], + private_projects_minutes_cost_factor: values[3], + run_untagged: values[4], + access_level: values[5], + tag_list: values[6] }) end end @@ -230,6 +230,7 @@ module Ci def runner_matcher strong_memoize(:runner_matcher) do Gitlab::Ci::Matching::RunnerMatcher.new({ + runner_ids: [id], runner_type: runner_type, public_projects_minutes_cost_factor: public_projects_minutes_cost_factor, private_projects_minutes_cost_factor: private_projects_minutes_cost_factor, diff --git a/app/models/clusters/applications/runner.rb b/app/models/clusters/applications/runner.rb index 49840e3a2e7..3785023c9af 100644 --- a/app/models/clusters/applications/runner.rb +++ b/app/models/clusters/applications/runner.rb @@ -3,7 +3,7 @@ module Clusters module Applications class Runner < ApplicationRecord - VERSION = '0.29.0' + VERSION = '0.30.0' self.table_name = 'clusters_applications_runners' diff --git a/app/models/clusters/integrations/prometheus.rb b/app/models/clusters/integrations/prometheus.rb index 0a01ac5d1ce..3f2c47d48e6 100644 --- a/app/models/clusters/integrations/prometheus.rb +++ b/app/models/clusters/integrations/prometheus.rb @@ -23,7 +23,7 @@ module Clusters after_destroy do run_after_commit do - deactivate_project_services + deactivate_project_integrations end end @@ -32,9 +32,9 @@ module Clusters run_after_commit do if enabled - activate_project_services + activate_project_integrations else - deactivate_project_services + deactivate_project_integrations end end end @@ -45,14 +45,14 @@ module Clusters private - def activate_project_services + def activate_project_integrations ::Clusters::Applications::ActivateServiceWorker - .perform_async(cluster_id, ::PrometheusService.to_param) # rubocop:disable CodeReuse/ServiceClass + .perform_async(cluster_id, ::Integrations::Prometheus.to_param) end - def deactivate_project_services + def deactivate_project_integrations ::Clusters::Applications::DeactivateServiceWorker - .perform_async(cluster_id, ::PrometheusService.to_param) # rubocop:disable CodeReuse/ServiceClass + .perform_async(cluster_id, ::Integrations::Prometheus.to_param) end end end diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb index 2db606898b9..cf23cd3be67 100644 --- 
a/app/models/commit_status.rb +++ b/app/models/commit_status.rb @@ -7,6 +7,7 @@ class CommitStatus < ApplicationRecord include Presentable include EnumWithNil include BulkInsertableAssociations + include TaggableQueries self.table_name = 'ci_builds' diff --git a/app/models/compare.rb b/app/models/compare.rb index 9b214171f07..2eaaf98c260 100644 --- a/app/models/compare.rb +++ b/app/models/compare.rb @@ -25,6 +25,10 @@ class Compare @straight = straight end + def cache_key + [@project, :compare, diff_refs.hash] + end + def commits @commits ||= Commit.decorate(@compare.commits, project) end diff --git a/app/models/concerns/analytics/cycle_analytics/stage.rb b/app/models/concerns/analytics/cycle_analytics/stage.rb index 90d48aa81d0..2a0274f5706 100644 --- a/app/models/concerns/analytics/cycle_analytics/stage.rb +++ b/app/models/concerns/analytics/cycle_analytics/stage.rb @@ -50,6 +50,10 @@ module Analytics end end + def events_hash_code + Digest::SHA256.hexdigest("#{start_event.hash_code}-#{end_event.hash_code}") + end + def start_event_label_based? start_event_identifier && start_event.label_based? end diff --git a/app/models/concerns/any_field_validation.rb b/app/models/concerns/any_field_validation.rb new file mode 100644 index 00000000000..987c4e7800e --- /dev/null +++ b/app/models/concerns/any_field_validation.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +# This module enables a record to be valid if any field is present +# +# Overwrite one_of_required_fields to set one of which fields must be present +module AnyFieldValidation + extend ActiveSupport::Concern + + included do + validate :any_field_present + end + + private + + def any_field_present + return unless one_of_required_fields.all? { |field| self[field].blank? } + + errors.add(:base, _("At least one field of %{one_of_required_fields} must be present") % + { one_of_required_fields: one_of_required_fields }) + end + + def one_of_required_fields + raise NotImplementedError + end +end diff --git a/app/models/concerns/approvable_base.rb b/app/models/concerns/approvable_base.rb index c2d94b50f8d..ef7ba7b1089 100644 --- a/app/models/concerns/approvable_base.rb +++ b/app/models/concerns/approvable_base.rb @@ -24,6 +24,19 @@ module ApprovableBase .group(:id) .having("COUNT(users.id) = ?", usernames.size) end + + scope :not_approved_by_users_with_usernames, -> (usernames) do + users = User.where(username: usernames).select(:id) + self_table = self.arel_table + app_table = Approval.arel_table + + where( + Approval.where(approvals: { user_id: users }) + .where(app_table[:merge_request_id].eq(self_table[:id])) + .select('true') + .arel.exists.not + ) + end end class_methods do diff --git a/app/models/concerns/atomic_internal_id.rb b/app/models/concerns/atomic_internal_id.rb index 80cf6260b0b..88f577c3e23 100644 --- a/app/models/concerns/atomic_internal_id.rb +++ b/app/models/concerns/atomic_internal_id.rb @@ -159,9 +159,8 @@ module AtomicInternalId # Defines class methods: # # - with_{scope}_{column}_supply - # This method can be used to allocate a block of IID values during - # bulk operations (importing/copying, etc). This can be more efficient - # than creating instances one-by-one. + # This method can be used to allocate a stream of IID values during + # bulk operations (importing/copying, etc). # # Pass in a block that receives a `Supply` instance. To allocate a new # IID value, call `Supply#next_value`. 
@@ -181,14 +180,8 @@ module AtomicInternalId scope_attrs = ::AtomicInternalId.scope_attrs(scope_value) usage = ::AtomicInternalId.scope_usage(self) - generator = InternalId::InternalIdGenerator.new(subject, scope_attrs, usage, init) - - generator.with_lock do - supply = Supply.new(generator.record.last_value) - block.call(supply) - ensure - generator.track_greatest(supply.current_value) if supply - end + supply = Supply.new(-> { InternalId.generate_next(subject, scope_attrs, usage, init) }) + block.call(supply) end end end @@ -236,14 +229,14 @@ module AtomicInternalId end class Supply - attr_reader :current_value + attr_reader :generator - def initialize(start_value) - @current_value = start_value + def initialize(generator) + @generator = generator end def next_value - @current_value += 1 + @generator.call end end end diff --git a/app/models/concerns/avatarable.rb b/app/models/concerns/avatarable.rb index fdc418029be..84a74386ff7 100644 --- a/app/models/concerns/avatarable.rb +++ b/app/models/concerns/avatarable.rb @@ -9,13 +9,18 @@ module Avatarable ALLOWED_IMAGE_SCALER_WIDTHS = (USER_AVATAR_SIZES | PROJECT_AVATAR_SIZES | GROUP_AVATAR_SIZES).freeze + # This value must not be bigger than then: https://gitlab.com/gitlab-org/gitlab/-/blob/master/workhorse/config.toml.example#L20 + # + # https://docs.gitlab.com/ee/development/image_scaling.html + MAXIMUM_FILE_SIZE = 200.kilobytes.to_i + included do prepend ShadowMethods include ObjectStorage::BackgroundMove include Gitlab::Utils::StrongMemoize validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? } - validates :avatar, file_size: { maximum: 200.kilobytes.to_i }, if: :avatar_changed? + validates :avatar, file_size: { maximum: MAXIMUM_FILE_SIZE }, if: :avatar_changed? mount_uploader :avatar, AvatarUploader diff --git a/app/models/concerns/cache_markdown_field.rb b/app/models/concerns/cache_markdown_field.rb index 101bff32dfe..79b622c8dad 100644 --- a/app/models/concerns/cache_markdown_field.rb +++ b/app/models/concerns/cache_markdown_field.rb @@ -163,9 +163,9 @@ module CacheMarkdownField refs = all_references(self.author) references = {} - references[:mentioned_users_ids] = refs.mentioned_users&.pluck(:id).presence - references[:mentioned_groups_ids] = refs.mentioned_groups&.pluck(:id).presence - references[:mentioned_projects_ids] = refs.mentioned_projects&.pluck(:id).presence + references[:mentioned_users_ids] = refs.mentioned_user_ids.presence + references[:mentioned_groups_ids] = refs.mentioned_group_ids.presence + references[:mentioned_projects_ids] = refs.mentioned_project_ids.presence # One retry is enough as next time `model_user_mention` should return the existing mention record, # that threw the `ActiveRecord::RecordNotUnique` exception in first place. diff --git a/app/models/concerns/cascading_namespace_setting_attribute.rb b/app/models/concerns/cascading_namespace_setting_attribute.rb index 9efd90756b1..5d24e15d518 100644 --- a/app/models/concerns/cascading_namespace_setting_attribute.rb +++ b/app/models/concerns/cascading_namespace_setting_attribute.rb @@ -24,10 +24,6 @@ module CascadingNamespaceSettingAttribute include Gitlab::Utils::StrongMemoize class_methods do - def cascading_settings_feature_enabled? 
- ::Feature.enabled?(:cascading_namespace_settings, default_enabled: true) - end - private # Facilitates the cascading lookup of values and, @@ -82,8 +78,6 @@ module CascadingNamespaceSettingAttribute def define_attr_reader(attribute) define_method(attribute) do strong_memoize(attribute) do - next self[attribute] unless self.class.cascading_settings_feature_enabled? - next self[attribute] if will_save_change_to_attribute?(attribute) next locked_value(attribute) if cascading_attribute_locked?(attribute, include_self: false) next self[attribute] unless self[attribute].nil? @@ -189,7 +183,6 @@ module CascadingNamespaceSettingAttribute end def locked_ancestor(attribute) - return unless self.class.cascading_settings_feature_enabled? return unless namespace.has_parent? strong_memoize(:"#{attribute}_locked_ancestor") do @@ -202,14 +195,10 @@ module CascadingNamespaceSettingAttribute end def locked_by_ancestor?(attribute) - return false unless self.class.cascading_settings_feature_enabled? - locked_ancestor(attribute).present? end def locked_by_application_setting?(attribute) - return false unless self.class.cascading_settings_feature_enabled? - Gitlab::CurrentSettings.public_send("lock_#{attribute}") # rubocop:disable GitlabSecurity/PublicSend end @@ -241,7 +230,7 @@ module CascadingNamespaceSettingAttribute def namespace_ancestor_ids strong_memoize(:namespace_ancestor_ids) do - namespace.self_and_ancestors(hierarchy_order: :asc).pluck(:id).reject { |id| id == namespace_id } + namespace.ancestor_ids(hierarchy_order: :asc) end end diff --git a/app/models/concerns/ci/maskable.rb b/app/models/concerns/ci/maskable.rb index e1ef4531845..62be0150ee0 100644 --- a/app/models/concerns/ci/maskable.rb +++ b/app/models/concerns/ci/maskable.rb @@ -11,7 +11,7 @@ module Ci # * Minimal length of 8 characters # * Characters must be from the Base64 alphabet (RFC4648) with the addition of '@', ':', '.', and '~' # * Absolutely no fun is allowed - REGEX = /\A[a-zA-Z0-9_+=\/@:.~-]{8,}\z/.freeze + REGEX = %r{\A[a-zA-Z0-9_+=/@:.~-]{8,}\z}.freeze included do validates :masked, inclusion: { in: [true, false] } diff --git a/app/models/concerns/ci/metadatable.rb b/app/models/concerns/ci/metadatable.rb index 601637ea32a..114435d5a21 100644 --- a/app/models/concerns/ci/metadatable.rb +++ b/app/models/concerns/ci/metadatable.rb @@ -77,7 +77,7 @@ module Ci def write_metadata_attribute(legacy_key, metadata_key, value) # save to metadata or this model depending on the state of feature flag - if Feature.enabled?(:ci_build_metadata_config) + if Feature.enabled?(:ci_build_metadata_config, project, default_enabled: :yaml) ensure_metadata.write_attribute(metadata_key, value) write_attribute(legacy_key, nil) else diff --git a/app/models/concerns/enums/ci/commit_status.rb b/app/models/concerns/enums/ci/commit_status.rb index 72788d15c0a..16dec5fb081 100644 --- a/app/models/concerns/enums/ci/commit_status.rb +++ b/app/models/concerns/enums/ci/commit_status.rb @@ -25,6 +25,7 @@ module Enums ci_quota_exceeded: 16, pipeline_loop_detected: 17, no_matching_runner: 18, # not used anymore, but cannot be deleted because of old data + trace_size_exceeded: 19, insufficient_bridge_permissions: 1_001, downstream_bridge_project_not_found: 1_002, invalid_bridge_trigger: 1_003, diff --git a/app/models/concerns/has_integrations.rb b/app/models/concerns/has_integrations.rb index b2775f4cbb2..25650ae56ad 100644 --- a/app/models/concerns/has_integrations.rb +++ b/app/models/concerns/has_integrations.rb @@ -19,7 +19,7 @@ module HasIntegrations def 
without_integration(integration) integrations = Integration .select('1') - .where('services.project_id = projects.id') + .where("#{Integration.table_name}.project_id = projects.id") .where(type: integration.type) Project diff --git a/app/models/concerns/integrations/has_web_hook.rb b/app/models/concerns/integrations/has_web_hook.rb new file mode 100644 index 00000000000..dabe7152b18 --- /dev/null +++ b/app/models/concerns/integrations/has_web_hook.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module Integrations + module HasWebHook + extend ActiveSupport::Concern + + included do + after_save :update_web_hook!, if: :activated? + end + + # Return the URL to be used for the webhook. + def hook_url + raise NotImplementedError + end + + # Return whether the webhook should use SSL verification. + def hook_ssl_verification + true + end + + # Create or update the webhook, raising an exception if it cannot be saved. + def update_web_hook! + hook = service_hook || build_service_hook + hook.url = hook_url if hook.url != hook_url # avoid reencryption + hook.enable_ssl_verification = hook_ssl_verification + hook.save! if hook.changed? + hook + end + + # Execute the webhook, creating it if necessary. + def execute_web_hook!(*args) + update_web_hook! + service_hook.execute(*args) + end + end +end diff --git a/app/models/concerns/issuable.rb b/app/models/concerns/issuable.rb index 2d06247a486..d5e2e63402f 100644 --- a/app/models/concerns/issuable.rb +++ b/app/models/concerns/issuable.rb @@ -31,6 +31,7 @@ module Issuable TITLE_HTML_LENGTH_MAX = 800 DESCRIPTION_LENGTH_MAX = 1.megabyte DESCRIPTION_HTML_LENGTH_MAX = 5.megabytes + SEARCHABLE_FIELDS = %w(title description).freeze STATE_ID_MAP = { opened: 1, @@ -264,15 +265,16 @@ module Issuable # matched_columns - Modify the scope of the query. 'title', 'description' or joining them with a comma. # # Returns an ActiveRecord::Relation. - def full_search(query, matched_columns: 'title,description', use_minimum_char_limit: true) - allowed_columns = [:title, :description] - matched_columns = matched_columns.to_s.split(',').map(&:to_sym) - matched_columns &= allowed_columns + def full_search(query, matched_columns: nil, use_minimum_char_limit: true) + if matched_columns + matched_columns = matched_columns.to_s.split(',') + matched_columns &= SEARCHABLE_FIELDS + matched_columns.map!(&:to_sym) + end - # Matching title or description if the matched_columns did not contain any allowed columns. - matched_columns = [:title, :description] if matched_columns.empty? + search_columns = matched_columns.presence || [:title, :description] - fuzzy_search(query, matched_columns, use_minimum_char_limit: use_minimum_char_limit) + fuzzy_search(query, search_columns, use_minimum_char_limit: use_minimum_char_limit) end def simple_sorts @@ -330,12 +332,15 @@ module Issuable # When using CTE make sure to select the same columns that are on the group_by clause. # This prevents errors when ignored columns are present in the database. issuable_columns = with_cte ? 
issue_grouping_columns(use_cte: with_cte) : "#{table_name}.*" + group_columns = issue_grouping_columns(use_cte: with_cte) + ["highest_priorities.label_priority"] - extra_select_columns.unshift("(#{highest_priority}) AS highest_priority") + extra_select_columns.unshift("highest_priorities.label_priority as highest_priority") select(issuable_columns) .select(extra_select_columns) - .group(issue_grouping_columns(use_cte: with_cte)) + .from("#{table_name}") + .joins("JOIN LATERAL(#{highest_priority}) as highest_priorities ON TRUE") + .group(group_columns) .reorder(Gitlab::Database.nulls_last_order('highest_priority', direction)) end @@ -382,7 +387,7 @@ module Issuable if use_cte attribute_names.map { |attr| arel_table[attr.to_sym] } else - arel_table[:id] + [arel_table[:id]] end end @@ -457,6 +462,7 @@ module Issuable if old_associations old_labels = old_associations.fetch(:labels, labels) old_assignees = old_associations.fetch(:assignees, assignees) + old_severity = old_associations.fetch(:severity, severity) if old_labels != labels changes[:labels] = [old_labels.map(&:hook_attrs), labels.map(&:hook_attrs)] @@ -466,6 +472,10 @@ module Issuable changes[:assignees] = [old_assignees.map(&:hook_attrs), assignees.map(&:hook_attrs)] end + if supports_severity? && old_severity != severity + changes[:severity] = [old_severity, severity] + end + if self.respond_to?(:total_time_spent) old_total_time_spent = old_associations.fetch(:total_time_spent, total_time_spent) old_time_change = old_associations.fetch(:time_change, time_change) diff --git a/app/models/concerns/milestoneish.rb b/app/models/concerns/milestoneish.rb index eaf64f2541d..4f2ea58f36d 100644 --- a/app/models/concerns/milestoneish.rb +++ b/app/models/concerns/milestoneish.rb @@ -101,6 +101,10 @@ module Milestoneish due_date && due_date.past? end + def expired + expired? 
|| false + end + def total_time_spent @total_time_spent ||= issues.joins(:timelogs).sum(:time_spent) + merge_requests.joins(:timelogs).sum(:time_spent) end diff --git a/app/models/concerns/partitioned_table.rb b/app/models/concerns/partitioned_table.rb index 9f1cec5d520..eab5d4c35bb 100644 --- a/app/models/concerns/partitioned_table.rb +++ b/app/models/concerns/partitioned_table.rb @@ -10,12 +10,12 @@ module PartitionedTable monthly: Gitlab::Database::Partitioning::MonthlyStrategy }.freeze - def partitioned_by(partitioning_key, strategy:) + def partitioned_by(partitioning_key, strategy:, **kwargs) strategy_class = PARTITIONING_STRATEGIES[strategy.to_sym] || raise(ArgumentError, "Unknown partitioning strategy: #{strategy}") - @partitioning_strategy = strategy_class.new(self, partitioning_key) + @partitioning_strategy = strategy_class.new(self, partitioning_key, **kwargs) - Gitlab::Database::Partitioning::PartitionCreator.register(self) + Gitlab::Database::Partitioning::PartitionManager.register(self) end end end diff --git a/app/models/concerns/sortable.rb b/app/models/concerns/sortable.rb index 9f5e9b2bb57..65fb62a814f 100644 --- a/app/models/concerns/sortable.rb +++ b/app/models/concerns/sortable.rb @@ -46,7 +46,7 @@ module Sortable private def highest_label_priority(target_type_column: nil, target_type: nil, target_column:, project_column:, excluded_labels: []) - query = Label.select(LabelPriority.arel_table[:priority].minimum) + query = Label.select(LabelPriority.arel_table[:priority].minimum.as('label_priority')) .left_join_priorities .joins(:label_links) .where("label_priorities.project_id = #{project_column}") diff --git a/app/models/concerns/taggable_queries.rb b/app/models/concerns/taggable_queries.rb index 2897e5e6420..cba2e93a86d 100644 --- a/app/models/concerns/taggable_queries.rb +++ b/app/models/concerns/taggable_queries.rb @@ -12,5 +12,26 @@ module TaggableQueries .where(taggings: { context: context, taggable_type: polymorphic_name }) .select('COALESCE(array_agg(tags.name ORDER BY name), ARRAY[]::text[])') end + + def matches_tag_ids(tag_ids, table: quoted_table_name, column: 'id') + matcher = ::ActsAsTaggableOn::Tagging + .where(taggable_type: CommitStatus.name) + .where(context: 'tags') + .where("taggable_id = #{connection.quote_table_name(table)}.#{connection.quote_column_name(column)}") # rubocop:disable GitlabSecurity/SqlInjection + .where.not(tag_id: tag_ids) + .select('1') + + where("NOT EXISTS (?)", matcher) + end + + def with_any_tags(table: quoted_table_name, column: 'id') + matcher = ::ActsAsTaggableOn::Tagging + .where(taggable_type: CommitStatus.name) + .where(context: 'tags') + .where("taggable_id = #{connection.quote_table_name(table)}.#{connection.quote_column_name(column)}") # rubocop:disable GitlabSecurity/SqlInjection + .select('1') + + where("EXISTS (?)", matcher) + end end end diff --git a/app/models/container_repository.rb b/app/models/container_repository.rb index 2d28a81f462..8e130998f11 100644 --- a/app/models/container_repository.rb +++ b/app/models/container_repository.rb @@ -24,15 +24,8 @@ class ContainerRepository < ApplicationRecord scope :for_group_and_its_subgroups, ->(group) do project_scope = Project .for_group_and_its_subgroups(group) - - project_scope = - if Feature.enabled?(:read_container_registry_access_level, group, default_enabled: :yaml) - project_scope.with_feature_enabled(:container_registry) - else - project_scope.with_container_registry - end - - project_scope = project_scope.select(:id) + 
.with_feature_enabled(:container_registry) + .select(:id) joins("INNER JOIN (#{project_scope.to_sql}) projects on projects.id=container_repositories.project_id") end @@ -153,7 +146,8 @@ class ContainerRepository < ApplicationRecord end def self.create_from_path!(path) - build_from_path(path).tap(&:save!) + safe_find_or_create_by!(project: path.repository_project, + name: path.repository_name) end def self.build_root_repository(project) diff --git a/app/models/deployment.rb b/app/models/deployment.rb index 7f5849bffc6..313aeb1eda7 100644 --- a/app/models/deployment.rb +++ b/app/models/deployment.rb @@ -189,7 +189,7 @@ class Deployment < ApplicationRecord def execute_hooks(status_changed_at) deployment_data = Gitlab::DataBuilder::Deployment.build(self, status_changed_at) project.execute_hooks(deployment_data, :deployment_hooks) - project.execute_services(deployment_data, :deployment_hooks) + project.execute_integrations(deployment_data, :deployment_hooks) end def last? diff --git a/app/models/diff_discussion.rb b/app/models/diff_discussion.rb index 6806008d676..642e93f7912 100644 --- a/app/models/diff_discussion.rb +++ b/app/models/diff_discussion.rb @@ -42,6 +42,13 @@ class DiffDiscussion < Discussion ) end + def cache_key + [ + super, + Digest::SHA1.hexdigest(position.to_json) + ].join(':') + end + private def get_params diff --git a/app/models/discussion.rb b/app/models/discussion.rb index 70aa02063cc..076d8cc280c 100644 --- a/app/models/discussion.rb +++ b/app/models/discussion.rb @@ -7,6 +7,9 @@ class Discussion include GlobalID::Identification include ResolvableDiscussion + # Bump this if we need to refresh the cached versions of discussions + CACHE_VERSION = 1 + attr_reader :notes, :context_noteable delegate :created_at, @@ -158,4 +161,19 @@ class Discussion def reply_attributes first_note.slice(:type, :noteable_type, :noteable_id, :commit_id, :discussion_id) end + + def cache_key + # Need this so cache will be invalidated when note within a discussion + # has been deleted. 
+ notes_sha = Digest::SHA1.hexdigest(notes.map(&:id).join(':')) + + [ + CACHE_VERSION, + notes.last.latest_cached_markdown_version, + id, + notes_sha, + notes.max_by(&:updated_at).updated_at, + resolved_at + ].join(':') + end end diff --git a/app/models/error_tracking.rb b/app/models/error_tracking.rb new file mode 100644 index 00000000000..20729330088 --- /dev/null +++ b/app/models/error_tracking.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +module ErrorTracking + def self.table_name_prefix + 'error_tracking_' + end +end diff --git a/app/models/error_tracking/error.rb b/app/models/error_tracking/error.rb new file mode 100644 index 00000000000..012dcc4418f --- /dev/null +++ b/app/models/error_tracking/error.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +class ErrorTracking::Error < ApplicationRecord + belongs_to :project + + has_many :events, class_name: 'ErrorTracking::ErrorEvent' + + validates :project, presence: true + validates :name, presence: true + validates :description, presence: true + validates :actor, presence: true + + def self.report_error(name:, description:, actor:, platform:, timestamp:) + safe_find_or_create_by( + name: name, + description: description, + actor: actor, + platform: platform + ) do |error| + error.update!(last_seen_at: timestamp) + end + end +end diff --git a/app/models/error_tracking/error_event.rb b/app/models/error_tracking/error_event.rb new file mode 100644 index 00000000000..ed14a1bce41 --- /dev/null +++ b/app/models/error_tracking/error_event.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +class ErrorTracking::ErrorEvent < ApplicationRecord + belongs_to :error, counter_cache: :events_count + + validates :payload, json_schema: { filename: 'error_tracking_event_payload' } + + validates :error, presence: true + validates :description, presence: true + validates :occurred_at, presence: true +end diff --git a/app/models/error_tracking/project_error_tracking_setting.rb b/app/models/error_tracking/project_error_tracking_setting.rb index 956b5d6470f..c729b002852 100644 --- a/app/models/error_tracking/project_error_tracking_setting.rb +++ b/app/models/error_tracking/project_error_tracking_setting.rb @@ -24,6 +24,8 @@ module ErrorTracking self.reactive_cache_key = ->(setting) { [setting.class.model_name.singular, setting.project_id] } self.reactive_cache_work_type = :external_dependency + self.table_name = 'project_error_tracking_settings' + belongs_to :project validates :api_url, length: { maximum: 255 }, public_url: { enforce_sanitization: true, ascii_only: true }, allow_nil: true diff --git a/app/models/event.rb b/app/models/event.rb index 5b755736f47..14d20b0d6c4 100644 --- a/app/models/event.rb +++ b/app/models/event.rb @@ -24,15 +24,14 @@ class Event < ApplicationRecord left: 9, # User left project destroyed: 10, expired: 11, # User left project due to expiry - approved: 12, - archived: 13 # Recoverable deletion + approved: 12 ).freeze private_constant :ACTIONS WIKI_ACTIONS = [:created, :updated, :destroyed].freeze - DESIGN_ACTIONS = [:created, :updated, :destroyed, :archived].freeze + DESIGN_ACTIONS = [:created, :updated, :destroyed].freeze TARGET_TYPES = HashWithIndifferentAccess.new( issue: Issue, @@ -390,16 +389,15 @@ class Event < ApplicationRecord read_snippet: %i[personal_snippet_note? project_snippet_note?], read_milestone: %i[milestone?], read_wiki: %i[wiki_page?], - read_design: %i[design_note? design?] + read_design: %i[design_note? design?], + read_note: %i[note?] } end private def permission_object - if note? 
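# Rough ingestion-side usage of ErrorTracking::Error.report_error, assuming a
# `project.error_tracking_errors` association and a parsed Sentry-style event
# hash (both assumptions for illustration):
#
#   project.error_tracking_errors.report_error(
#     name: event['exception_type'],   # e.g. "ActiveRecord::RecordNotFound"
#     description: event['message'],
#     actor: event['culprit'],
#     platform: event['platform'],
#     timestamp: event['timestamp']
#   )
#
# safe_find_or_create_by deduplicates on (name, description, actor, platform);
# the block only runs when a new row is created and stamps its last_seen_at.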
- note_target - elsif target_id.present? + if target_id.present? target else project @@ -438,8 +436,7 @@ class Event < ApplicationRecord { created: _('uploaded'), updated: _('revised'), - destroyed: _('deleted'), - archived: _('archived') + destroyed: _('deleted') } end diff --git a/app/models/event_collection.rb b/app/models/event_collection.rb index 4768506b8fa..f799377a15f 100644 --- a/app/models/event_collection.rb +++ b/app/models/event_collection.rb @@ -59,6 +59,7 @@ class EventCollection parents_for_lateral = parents.select(:id).to_sql lateral = filtered_events + # Applying the limit here (before we filter (permissions) means we may get less than limit) .limit(limit_for_join_lateral) .where("events.#{parent_column} = parents_for_lateral.id") # rubocop:disable GitlabSecurity/SqlInjection .to_sql diff --git a/app/models/group.rb b/app/models/group.rb index e4127b2b2d4..eefb8d3d16a 100644 --- a/app/models/group.rb +++ b/app/models/group.rb @@ -167,7 +167,7 @@ class Group < Namespace def without_integration(integration) integrations = Integration .select('1') - .where('services.group_id = namespaces.id') + .where("#{Integration.table_name}.group_id = namespaces.id") .where(type: integration.type) where('NOT EXISTS (?)', integrations) @@ -296,7 +296,7 @@ class Group < Namespace end def add_users(users, access_level, current_user: nil, expires_at: nil) - GroupMember.add_users( + Members::Groups::CreatorService.add_users( # rubocop:todo CodeReuse/ServiceClass self, users, access_level, @@ -306,14 +306,13 @@ class Group < Namespace end def add_user(user, access_level, current_user: nil, expires_at: nil, ldap: false) - GroupMember.add_user( - self, - user, - access_level, - current_user: current_user, - expires_at: expires_at, - ldap: ldap - ) + Members::Groups::CreatorService.new(self, # rubocop:todo CodeReuse/ServiceClass + user, + access_level, + current_user: current_user, + expires_at: expires_at, + ldap: ldap) + .execute end def add_guest(user, current_user = nil) @@ -667,7 +666,7 @@ class Group < Namespace # TODO: group hooks https://gitlab.com/gitlab-org/gitlab/-/issues/216904 end - def execute_services(data, hooks_scope) + def execute_integrations(data, hooks_scope) # NOOP # TODO: group hooks https://gitlab.com/gitlab-org/gitlab/-/issues/216904 end diff --git a/app/models/hooks/web_hook_log.rb b/app/models/hooks/web_hook_log.rb index 0c96d5d4b6d..8c0565e4a38 100644 --- a/app/models/hooks/web_hook_log.rb +++ b/app/models/hooks/web_hook_log.rb @@ -9,7 +9,7 @@ class WebHookLog < ApplicationRecord self.primary_key = :id - partitioned_by :created_at, strategy: :monthly + partitioned_by :created_at, strategy: :monthly, retain_for: 3.months belongs_to :web_hook diff --git a/app/models/integration.rb b/app/models/integration.rb index 2fbcdc7f1cb..ea1e3840f6c 100644 --- a/app/models/integration.rb +++ b/app/models/integration.rb @@ -10,18 +10,15 @@ class Integration < ApplicationRecord include FromUnion include EachBatch - # TODO Rename the table: https://gitlab.com/gitlab-org/gitlab/-/issues/201856 - self.table_name = 'services' - INTEGRATION_NAMES = %w[ - asana assembla bamboo bugzilla buildkite campfire confluence custom_issue_tracker discord + asana assembla bamboo bugzilla buildkite campfire confluence custom_issue_tracker datadog discord drone_ci emails_on_push ewm external_wiki flowdock hangouts_chat irker jira mattermost mattermost_slash_commands microsoft_teams packagist pipelines_email pivotaltracker prometheus pushover redmine slack slack_slash_commands teamcity unify_circuit 
webex_teams youtrack ].freeze PROJECT_SPECIFIC_INTEGRATION_NAMES = %w[ - datadog jenkins + jenkins ].freeze # Fake integrations to help with local development. @@ -38,22 +35,6 @@ class Integration < ApplicationRecord Integrations::BaseSlashCommands ].freeze - # used as part of the renaming effort (https://gitlab.com/groups/gitlab-org/-/epics/2504) - RENAMED_TO_INTEGRATION = %w[ - asana assembla - bamboo bugzilla buildkite - campfire confluence custom_issue_tracker - datadog discord drone_ci - emails_on_push ewm emails_on_push external_wiki - flowdock - hangouts_chat - irker - ].to_set.freeze - - def self.renamed?(name) - RENAMED_TO_INTEGRATION.include?(name) - end - serialize :properties, JSON # rubocop:disable Cop/ActiveRecordSerialize attribute :type, Gitlab::Integrations::StiType.new @@ -99,9 +80,9 @@ class Integration < ApplicationRecord scope :by_active_flag, -> (flag) { where(active: flag) } scope :inherit_from_id, -> (id) { where(inherit_from_id: id) } scope :inherit, -> { where.not(inherit_from_id: nil) } - scope :for_group, -> (group) { where(group_id: group, type: available_services_types(include_project_specific: false)) } - scope :for_template, -> { where(template: true, type: available_services_types(include_project_specific: false)) } - scope :for_instance, -> { where(instance: true, type: available_services_types(include_project_specific: false)) } + scope :for_group, -> (group) { where(group_id: group, type: available_integration_types(include_project_specific: false)) } + scope :for_template, -> { where(template: true, type: available_integration_types(include_project_specific: false)) } + scope :for_instance, -> { where(instance: true, type: available_integration_types(include_project_specific: false)) } scope :push_hooks, -> { where(push_events: true, active: true) } scope :tag_push_hooks, -> { where(tag_push_events: true, active: true) } @@ -156,9 +137,13 @@ class Integration < ApplicationRecord args.each do |arg| class_eval <<~RUBY, __FILE__, __LINE__ + 1 + def #{arg} + Gitlab::Utils.to_boolean(properties['#{arg}']) + end + def #{arg}? # '!!' is used because nil or empty string is converted to nil - !!ActiveRecord::Type::Boolean.new.cast(#{arg}) + !!#{arg} end RUBY end @@ -169,7 +154,7 @@ class Integration < ApplicationRecord end def self.event_names - self.supported_events.map { |event| ServicesHelper.service_event_field_name(event) } + self.supported_events.map { |event| IntegrationsHelper.integration_event_field_name(event) } end def self.supported_event_actions @@ -184,100 +169,92 @@ class Integration < ApplicationRecord 'push' end - def self.event_description(event) - ServicesHelper.service_event_description(event) - end - def self.find_or_create_templates create_nonexistent_templates for_template end def self.create_nonexistent_templates - nonexistent_services = build_nonexistent_services_for(for_template) - return if nonexistent_services.empty? + nonexistent_integrations = build_nonexistent_integrations_for(for_template) + return if nonexistent_integrations.empty? # Create within a transaction to perform the lowest possible SQL queries. 
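# For reference, the class_eval above expands a declaration such as
# `boolean_accessor :notify_only_broken_pipelines` (name chosen for
# illustration) into roughly:
#
#   def notify_only_broken_pipelines
#     Gitlab::Utils.to_boolean(properties['notify_only_broken_pipelines'])
#   end
#
#   def notify_only_broken_pipelines?
#     !!notify_only_broken_pipelines
#   end
#
# so the plain reader returns true/false/nil parsed out of the serialized
# properties hash, and the predicate coerces nil (unset or unparsable) to false.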
transaction do - nonexistent_services.each do |service| - service.template = true - service.save + nonexistent_integrations.each do |integration| + integration.template = true + integration.save end end end private_class_method :create_nonexistent_templates def self.find_or_initialize_non_project_specific_integration(name, instance: false, group_id: nil) - return unless name.in?(available_services_names(include_project_specific: false)) + return unless name.in?(available_integration_names(include_project_specific: false)) integration_name_to_model(name).find_or_initialize_by(instance: instance, group_id: group_id) end def self.find_or_initialize_all_non_project_specific(scope) - scope + build_nonexistent_services_for(scope) + scope + build_nonexistent_integrations_for(scope) end - def self.build_nonexistent_services_for(scope) - nonexistent_services_types_for(scope).map do |service_type| - integration_type_to_model(service_type).new + def self.build_nonexistent_integrations_for(scope) + nonexistent_integration_types_for(scope).map do |type| + integration_type_to_model(type).new end end - private_class_method :build_nonexistent_services_for + private_class_method :build_nonexistent_integrations_for - # Returns a list of service types that do not exist in the given scope. + # Returns a list of integration types that do not exist in the given scope. # Example: ["AsanaService", ...] - def self.nonexistent_services_types_for(scope) + def self.nonexistent_integration_types_for(scope) # Using #map instead of #pluck to save one query count. This is because # ActiveRecord loaded the object here, so we don't need to query again later. - available_services_types(include_project_specific: false) - scope.map(&:type) + available_integration_types(include_project_specific: false) - scope.map(&:type) end - private_class_method :nonexistent_services_types_for + private_class_method :nonexistent_integration_types_for - # Returns a list of available service names. + # Returns a list of available integration names. # Example: ["asana", ...] # @deprecated - def self.available_services_names(include_project_specific: true, include_dev: true) - service_names = services_names - service_names += project_specific_services_names if include_project_specific - service_names += dev_services_names if include_dev + def self.available_integration_names(include_project_specific: true, include_dev: true) + names = integration_names + names += project_specific_integration_names if include_project_specific + names += dev_integration_names if include_dev - service_names.sort_by(&:downcase) + names.sort_by(&:downcase) end def self.integration_names INTEGRATION_NAMES end - def self.services_names - integration_names - end - - def self.dev_services_names + def self.dev_integration_names return [] unless Rails.env.development? DEV_INTEGRATION_NAMES end - def self.project_specific_services_names + def self.project_specific_integration_names PROJECT_SPECIFIC_INTEGRATION_NAMES end - # Returns a list of available service types. + # Returns a list of available integration types. # Example: ["AsanaService", ...] 
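# Worked examples of the naming levels these helpers move between (return
# values follow the "#{name}_service".camelize convention and the
# Integrations:: namespace used throughout this class; exact classes are
# illustrative):
#
#   Integration.integration_name_to_type('asana')   # => "AsanaService"
#   Integration.integration_name_to_model('asana')  # => Integrations::Asana
#   Integration.available_integration_names(include_project_specific: false)
#   # => ["asana", "assembla", "bamboo", ...] sorted case-insensitively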
- def self.available_services_types(include_project_specific: true, include_dev: true) - available_services_names(include_project_specific: include_project_specific, include_dev: include_dev).map do |service_name| - integration_name_to_type(service_name) + def self.available_integration_types(include_project_specific: true, include_dev: true) + available_integration_names(include_project_specific: include_project_specific, include_dev: include_dev).map do + integration_name_to_type(_1) end end - # Returns the model for the given service name. + # Returns the model for the given integration name. # Example: "asana" => Integrations::Asana def self.integration_name_to_model(name) type = integration_name_to_type(name) integration_type_to_model(type) end - # Returns the STI type for the given service name. + # Returns the STI type for the given integration name. # Example: "asana" => "AsanaService" def self.integration_name_to_type(name) "#{name}_service".camelize @@ -319,7 +296,7 @@ class Integration < ApplicationRecord array = group_ids.to_sql.present? ? "array(#{group_ids.to_sql})" : 'ARRAY[]' where(type: type, group_id: group_ids, inherit_from_id: nil) - .order(Arel.sql("array_position(#{array}::bigint[], services.group_id)")) + .order(Arel.sql("array_position(#{array}::bigint[], #{table_name}.group_id)")) .first end private_class_method :closest_group_integration @@ -337,7 +314,7 @@ class Integration < ApplicationRecord with_templates ? active.where(template: true) : none, active.where(instance: true), active.where(group_id: group_ids, inherit_from_id: nil) - ]).order(Arel.sql("type ASC, array_position(#{array}::bigint[], services.group_id), instance DESC")).group_by(&:type).each do |type, records| + ]).order(Arel.sql("type ASC, array_position(#{array}::bigint[], #{table_name}.group_id), instance DESC")).group_by(&:type).each do |type, records| build_from_integration(records.first, association => scope.id).save end end @@ -406,7 +383,7 @@ class Integration < ApplicationRecord %w[active] end - def to_service_hash + def to_integration_hash as_json(methods: :type, except: %w[id template instance project_id group_id]) end @@ -470,8 +447,8 @@ class Integration < ApplicationRecord # Disable test for instance-level and group-level integrations. # https://gitlab.com/gitlab-org/gitlab/-/issues/213138 - def can_test? - !(instance_level? || group_level?) + def testable? + project_level? end def project_level? diff --git a/app/models/integrations/bamboo.rb b/app/models/integrations/bamboo.rb index fef2774c593..590be52151c 100644 --- a/app/models/integrations/bamboo.rb +++ b/app/models/integrations/bamboo.rb @@ -18,14 +18,8 @@ module Integrations attr_accessor :response - after_save :compose_service_hook, if: :activated? before_update :reset_password - def compose_service_hook - hook = service_hook || build_service_hook - hook.save - end - def reset_password if bamboo_url_changed? && !password_touched? self.password = nil diff --git a/app/models/integrations/base_monitoring.rb b/app/models/integrations/base_monitoring.rb new file mode 100644 index 00000000000..280eeda7c6c --- /dev/null +++ b/app/models/integrations/base_monitoring.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +# Base class for monitoring services +# +# These services integrate with a deployment solution like Prometheus +# to provide additional features for environments. 
+module Integrations + class BaseMonitoring < Integration + default_value_for :category, 'monitoring' + + def self.supported_events + %w() + end + + def can_query? + raise NotImplementedError + end + + def query(_, *_) + raise NotImplementedError + end + end +end diff --git a/app/models/integrations/base_slash_commands.rb b/app/models/integrations/base_slash_commands.rb index eacf1184aae..1d271e75a91 100644 --- a/app/models/integrations/base_slash_commands.rb +++ b/app/models/integrations/base_slash_commands.rb @@ -20,7 +20,7 @@ module Integrations %w() end - def can_test? + def testable? false end diff --git a/app/models/integrations/buildkite.rb b/app/models/integrations/buildkite.rb index 906a5d02f9c..94a37f0c4f2 100644 --- a/app/models/integrations/buildkite.rb +++ b/app/models/integrations/buildkite.rb @@ -4,7 +4,9 @@ require "addressable/uri" module Integrations class Buildkite < BaseCi + include HasWebHook include ReactiveService + extend Gitlab::Utils::Override ENDPOINT = "https://buildkite.com" @@ -13,8 +15,6 @@ module Integrations validates :project_url, presence: true, public_url: true, if: :activated? validates :token, presence: true, if: :activated? - after_save :compose_service_hook, if: :activated? - def self.supported_events %w(push merge_request tag_push) end @@ -35,21 +35,15 @@ module Integrations self.properties.delete('enable_ssl_verification') # Remove unused key end - def webhook_url + override :hook_url + def hook_url "#{buildkite_endpoint('webhook')}/deliver/#{webhook_token}" end - def compose_service_hook - hook = service_hook || build_service_hook - hook.url = webhook_url - hook.enable_ssl_verification = true - hook.save - end - def execute(data) return unless supported_events.include?(data[:object_kind]) - service_hook.execute(data) + execute_web_hook!(data) end def commit_status(sha, ref) @@ -76,18 +70,22 @@ module Integrations 'buildkite' end + def help + s_('ProjectService|Run CI/CD pipelines with Buildkite.') + end + def fields [ { type: 'text', name: 'token', - title: 'Integration Token', - help: 'This token will be provided when you create a Buildkite pipeline with a GitLab repository', + title: _('Token'), + help: s_('ProjectService|The token you get after you create a Buildkite pipeline with a GitLab repository.'), required: true }, { type: 'text', name: 'project_url', - title: 'Pipeline URL', - placeholder: "#{ENDPOINT}/acme-inc/test-pipeline", + title: _('Pipeline URL'), + placeholder: "#{ENDPOINT}/example-org/test-pipeline", required: true } ] end diff --git a/app/models/integrations/campfire.rb b/app/models/integrations/campfire.rb index eede3d00307..c78fc6eff51 100644 --- a/app/models/integrations/campfire.rb +++ b/app/models/integrations/campfire.rb @@ -2,6 +2,8 @@ module Integrations class Campfire < Integration + include ActionView::Helpers::UrlHelper + prop_accessor :token, :subdomain, :room validates :token, presence: true, if: :activated? @@ -13,15 +15,39 @@ module Integrations 'Send notifications about push events to Campfire chat rooms.' end + def help + docs_link = link_to _('Learn more.'), Rails.application.routes.url_helpers.help_page_url('api/services', anchor: 'campfire'), target: '_blank', rel: 'noopener noreferrer' + s_('CampfireService|Send notifications about push events to Campfire chat rooms. 
%{docs_link}').html_safe % { docs_link: docs_link.html_safe } + end + def self.to_param 'campfire' end def fields [ - { type: 'text', name: 'token', placeholder: '', required: true }, - { type: 'text', name: 'subdomain', placeholder: '' }, - { type: 'text', name: 'room', placeholder: '' } + { + type: 'text', + name: 'token', + title: _('Campfire token'), + placeholder: '', + help: s_('CampfireService|API authentication token from Campfire.'), + required: true + }, + { + type: 'text', + name: 'subdomain', + title: _('Campfire subdomain (optional)'), + placeholder: '', + help: s_('CampfireService|The %{code_open}.campfirenow.com%{code_close} subdomain.') % { code_open: '<code>'.html_safe, code_close: '</code>'.html_safe } + }, + { + type: 'text', + name: 'room', + title: _('Campfire room ID (optional)'), + placeholder: '123456', + help: s_('CampfireService|From the end of the room URL.') + } ] end diff --git a/app/models/integrations/confluence.rb b/app/models/integrations/confluence.rb index 30f73496993..7f111f482dd 100644 --- a/app/models/integrations/confluence.rb +++ b/app/models/integrations/confluence.rb @@ -58,7 +58,7 @@ module Integrations ] end - def can_test? + def testable? false end diff --git a/app/models/integrations/datadog.rb b/app/models/integrations/datadog.rb index dd4b0664d52..27c2fcf266b 100644 --- a/app/models/integrations/datadog.rb +++ b/app/models/integrations/datadog.rb @@ -2,10 +2,13 @@ module Integrations class Datadog < Integration - DEFAULT_SITE = 'datadoghq.com' - URL_TEMPLATE = 'https://webhooks-http-intake.logs.%{datadog_site}/v1/input/' - URL_TEMPLATE_API_KEYS = 'https://app.%{datadog_site}/account/settings#api' - URL_API_KEYS_DOCS = "https://docs.#{DEFAULT_SITE}/account_management/api-app-keys/" + include HasWebHook + extend Gitlab::Utils::Override + + DEFAULT_DOMAIN = 'datadoghq.com' + URL_TEMPLATE = 'https://webhooks-http-intake.logs.%{datadog_domain}/api/v2/webhook' + URL_TEMPLATE_API_KEYS = 'https://app.%{datadog_domain}/account/settings#api' + URL_API_KEYS_DOCS = "https://docs.#{DEFAULT_DOMAIN}/account_management/api-app-keys/" SUPPORTED_EVENTS = %w[ pipeline job @@ -21,12 +24,10 @@ module Integrations validates :api_url, presence: true, unless: -> (obj) { obj.datadog_site.present? } end - after_save :compose_service_hook, if: :activated? - def initialize_properties super - self.datadog_site ||= DEFAULT_SITE + self.datadog_site ||= DEFAULT_DOMAIN end def self.supported_events @@ -62,7 +63,7 @@ module Integrations { type: 'text', name: 'datadog_site', - placeholder: DEFAULT_SITE, + placeholder: DEFAULT_DOMAIN, help: 'Choose the Datadog site to send data to. Set to "datadoghq.eu" to send data to the EU site', required: false }, @@ -98,35 +99,31 @@ module Integrations ] end - def compose_service_hook - hook = service_hook || build_service_hook - hook.url = hook_url - hook.save - end - + override :hook_url def hook_url - url = api_url.presence || sprintf(URL_TEMPLATE, datadog_site: datadog_site) + url = api_url.presence || sprintf(URL_TEMPLATE, datadog_domain: datadog_domain) url = URI.parse(url) - url.path = File.join(url.path || '/', api_key) - query = { service: datadog_service.presence, env: datadog_env.presence }.compact - url.query = query.to_query unless query.empty? 
+ query = { + "dd-api-key" => api_key, + service: datadog_service.presence, + env: datadog_env.presence + }.compact + url.query = query.to_query url.to_s end def api_keys_url return URL_API_KEYS_DOCS unless datadog_site.presence - sprintf(URL_TEMPLATE_API_KEYS, datadog_site: datadog_site) + sprintf(URL_TEMPLATE_API_KEYS, datadog_domain: datadog_domain) end def execute(data) - return if project.disabled_services.include?(to_param) - object_kind = data[:object_kind] object_kind = 'job' if object_kind == 'build' return unless supported_events.include?(object_kind) - service_hook.execute(data, "#{object_kind} hook") + execute_web_hook!(data, "#{object_kind} hook") end def test(data) @@ -139,5 +136,14 @@ module Integrations { success: true, result: result[:message] } end + + private + + def datadog_domain + # Transparently ignore "app" prefix from datadog_site as the official docs table in + # https://docs.datadoghq.com/getting_started/site/ is confusing for internal URLs. + # US3 needs to keep a prefix but other datacenters cannot have the listed "app" prefix + datadog_site.delete_prefix("app.") + end end end diff --git a/app/models/integrations/discord.rb b/app/models/integrations/discord.rb index ef6d46fd3d3..76160a61bc3 100644 --- a/app/models/integrations/discord.rb +++ b/app/models/integrations/discord.rb @@ -54,6 +54,8 @@ module Integrations builder.add_embed do |embed| embed.author = Discordrb::Webhooks::EmbedAuthor.new(name: message.user_name, icon_url: message.user_avatar) embed.description = (message.pretext + "\n" + Array.wrap(message.attachments).join("\n")).gsub(ATTACHMENT_REGEX, " \\k<entry> - \\k<name>\n") + embed.colour = 16543014 # The hex "fc6d26" as an Integer + embed.timestamp = Time.now.utc end end rescue RestClient::Exception => error diff --git a/app/models/integrations/drone_ci.rb b/app/models/integrations/drone_ci.rb index 0f021356815..c93ae432fe9 100644 --- a/app/models/integrations/drone_ci.rb +++ b/app/models/integrations/drone_ci.rb @@ -2,8 +2,10 @@ module Integrations class DroneCi < BaseCi + include HasWebHook include ReactiveService include ServicePushDataValidations + extend Gitlab::Utils::Override prop_accessor :drone_url, :token boolean_accessor :enable_ssl_verification @@ -11,24 +13,16 @@ module Integrations validates :drone_url, presence: true, public_url: true, if: :activated? validates :token, presence: true, if: :activated? - after_save :compose_service_hook, if: :activated? 
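# Worked example of the Datadog hook_url built above (values are invented):
# with datadog_site = "app.datadoghq.com", api_key = "abc123",
# datadog_service = "gitlab" and datadog_env unset,
#
#   datadog_domain  # => "datadoghq.com"   (the "app." prefix is stripped)
#   hook_url
#   # => "https://webhooks-http-intake.logs.datadoghq.com/api/v2/webhook?dd-api-key=abc123&service=gitlab"
#
# i.e. the API key moves from the URL path into the dd-api-key query parameter
# and the intake path becomes /api/v2/webhook.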
- - def compose_service_hook - hook = service_hook || build_service_hook - # If using a service template, project may not be available - hook.url = [drone_url, "/api/hook", "?owner=#{project.namespace.full_path}", "&name=#{project.path}", "&access_token=#{token}"].join if project - hook.enable_ssl_verification = !!enable_ssl_verification - hook.save - end - def execute(data) + return unless project + case data[:object_kind] when 'push' - service_hook.execute(data) if push_valid?(data) + execute_web_hook!(data) if push_valid?(data) when 'merge_request' - service_hook.execute(data) if merge_request_valid?(data) + execute_web_hook!(data) if merge_request_valid?(data) when 'tag_push' - service_hook.execute(data) if tag_push_valid?(data) + execute_web_hook!(data) if tag_push_valid?(data) end end @@ -105,5 +99,21 @@ module Integrations { type: 'checkbox', name: 'enable_ssl_verification', title: "Enable SSL verification" } ] end + + override :hook_url + def hook_url + [drone_url, "/hook", "?owner=#{project.namespace.full_path}", "&name=#{project.path}", "&access_token=#{token}"].join + end + + override :hook_ssl_verification + def hook_ssl_verification + !!enable_ssl_verification + end + + override :update_web_hook! + def update_web_hook! + # If using a service template, project may not be available + super if project + end end end diff --git a/app/models/integrations/ewm.rb b/app/models/integrations/ewm.rb index 0a4e8d92ed7..24d343b7cb4 100644 --- a/app/models/integrations/ewm.rb +++ b/app/models/integrations/ewm.rb @@ -27,7 +27,7 @@ module Integrations 'ewm' end - def can_test? + def testable? false end diff --git a/app/models/integrations/jenkins.rb b/app/models/integrations/jenkins.rb index 815e86bcaa1..55fc60990f3 100644 --- a/app/models/integrations/jenkins.rb +++ b/app/models/integrations/jenkins.rb @@ -2,7 +2,9 @@ module Integrations class Jenkins < BaseCi + include HasWebHook include ActionView::Helpers::UrlHelper + extend Gitlab::Utils::Override prop_accessor :jenkins_url, :project_name, :username, :password @@ -16,8 +18,6 @@ module Integrations default_value_for :merge_requests_events, false default_value_for :tag_push_events, false - after_save :compose_service_hook, if: :activated? - def reset_password # don't reset the password if a new one is provided if (jenkins_url_changed? || username.blank?) && !password_touched? @@ -25,16 +25,10 @@ module Integrations end end - def compose_service_hook - hook = service_hook || build_service_hook - hook.url = hook_url - hook.save - end - def execute(data) return unless supported_events.include?(data[:object_kind]) - service_hook.execute(data, "#{data[:object_kind]}_hook") + execute_web_hook!(data, "#{data[:object_kind]}_hook") end def test(data) @@ -48,6 +42,7 @@ module Integrations { success: true, result: result[:message] } end + override :hook_url def hook_url url = URI.parse(jenkins_url) url.path = File.join(url.path || '/', "project/#{project_name}") @@ -97,7 +92,6 @@ module Integrations { type: 'text', name: 'username', - required: true, help: s_('The username for the Jenkins server.') }, { diff --git a/app/models/integrations/jira.rb b/app/models/integrations/jira.rb index a5aee35bada..1dc5c0db9e3 100644 --- a/app/models/integrations/jira.rb +++ b/app/models/integrations/jira.rb @@ -272,6 +272,10 @@ module Integrations test(nil)[:success] end + def configured? + active? && valid_connection? + end + def test(_) result = server_info success = result.present? @@ -533,7 +537,7 @@ module Integrations def update_deployment_type? 
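# The Buildkite, Drone CI, Jenkins and Packagist changes above all follow the
# same shape; the HasWebHook concern itself is not part of this hunk, so this
# is an inferred sketch of the contract rather than its implementation:
#
#   include HasWebHook
#   extend Gitlab::Utils::Override
#
#   override :hook_url
#   def hook_url
#     "https://ci.example.test/hook?token=#{token}"   # integration-specific URL
#   end
#
#   def execute(data)
#     execute_web_hook!(data)   # builds/updates the service hook, then fires it
#   end
#
# replacing each integration's hand-rolled compose_service_hook callback.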
(api_url_changed? || url_changed? || username_changed? || password_changed?) && - can_test? + testable? end def update_deployment_type @@ -573,15 +577,6 @@ module Integrations data_fields.deployment_server! end end - - def self.event_description(event) - case event - when "merge_request", "merge_request_events" - s_("JiraService|Jira comments are created when an issue is referenced in a merge request.") - when "commit", "commit_events" - s_("JiraService|Jira comments are created when an issue is referenced in a commit.") - end - end end end diff --git a/app/models/integrations/mattermost_slash_commands.rb b/app/models/integrations/mattermost_slash_commands.rb index 6cd664da9e7..30a8ba973c1 100644 --- a/app/models/integrations/mattermost_slash_commands.rb +++ b/app/models/integrations/mattermost_slash_commands.rb @@ -6,7 +6,7 @@ module Integrations prop_accessor :token - def can_test? + def testable? false end diff --git a/app/models/integrations/mock_ci.rb b/app/models/integrations/mock_ci.rb index a0eae9e4abf..7359be83d4f 100644 --- a/app/models/integrations/mock_ci.rb +++ b/app/models/integrations/mock_ci.rb @@ -83,7 +83,7 @@ module Integrations end end - def can_test? + def testable? false end end diff --git a/app/models/integrations/mock_monitoring.rb b/app/models/integrations/mock_monitoring.rb new file mode 100644 index 00000000000..72bb292edaa --- /dev/null +++ b/app/models/integrations/mock_monitoring.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +module Integrations + class MockMonitoring < BaseMonitoring + def title + 'Mock monitoring' + end + + def description + 'Mock monitoring service' + end + + def self.to_param + 'mock_monitoring' + end + + def metrics(environment) + Gitlab::Json.parse(File.read(Rails.root + 'spec/fixtures/metrics.json')) + end + + def testable? + false + end + end +end diff --git a/app/models/integrations/packagist.rb b/app/models/integrations/packagist.rb index b597bd11175..fb0917db02b 100644 --- a/app/models/integrations/packagist.rb +++ b/app/models/integrations/packagist.rb @@ -2,6 +2,9 @@ module Integrations class Packagist < Integration + include HasWebHook + extend Gitlab::Utils::Override + prop_accessor :username, :token, :server validates :username, presence: true, if: :activated? @@ -10,8 +13,6 @@ module Integrations default_value_for :push_events, true default_value_for :tag_push_events, true - after_save :compose_service_hook, if: :activated? - def title 'Packagist' end @@ -39,7 +40,7 @@ module Integrations def execute(data) return unless supported_events.include?(data[:object_kind]) - service_hook.execute(data) + execute_web_hook!(data) end def test(data) @@ -53,12 +54,7 @@ module Integrations { success: true, result: result[:message] } end - def compose_service_hook - hook = service_hook || build_service_hook - hook.url = hook_url - hook.save - end - + override :hook_url def hook_url base_url = server.presence || 'https://packagist.org' "#{base_url}/api/update-package?username=#{username}&apiToken=#{token}" diff --git a/app/models/integrations/pipelines_email.rb b/app/models/integrations/pipelines_email.rb index 585bc14242a..efba35cc2a8 100644 --- a/app/models/integrations/pipelines_email.rb +++ b/app/models/integrations/pipelines_email.rb @@ -57,7 +57,7 @@ module Integrations PipelineNotificationWorker.new.perform(pipeline_id, recipients: all_recipients) end - def can_test? + def testable? project&.ci_pipelines&.any? 
end diff --git a/app/models/integrations/pivotaltracker.rb b/app/models/integrations/pivotaltracker.rb index 46f97cc3c6b..24cfd51eb55 100644 --- a/app/models/integrations/pivotaltracker.rb +++ b/app/models/integrations/pivotaltracker.rb @@ -2,17 +2,23 @@ module Integrations class Pivotaltracker < Integration + include ActionView::Helpers::UrlHelper API_ENDPOINT = 'https://www.pivotaltracker.com/services/v5/source_commits' prop_accessor :token, :restrict_to_branch validates :token, presence: true, if: :activated? def title - 'PivotalTracker' + 'Pivotal Tracker' end def description - s_('PivotalTrackerService|Add commit messages as comments to PivotalTracker stories.') + s_('PivotalTrackerService|Add commit messages as comments to Pivotal Tracker stories.') + end + + def help + docs_link = link_to _('Learn more.'), Rails.application.routes.url_helpers.help_page_url('user/project/integrations/pivotal_tracker'), target: '_blank', rel: 'noopener noreferrer' + s_('Add commit messages as comments to Pivotal Tracker stories. %{docs_link}').html_safe % { docs_link: docs_link.html_safe } end def self.to_param @@ -24,14 +30,15 @@ module Integrations { type: 'text', name: 'token', - placeholder: s_('PivotalTrackerService|Pivotal Tracker API token.'), + help: s_('PivotalTrackerService|Pivotal Tracker API token. User must have access to the story. All comments are attributed to this user.'), required: true }, { type: 'text', name: 'restrict_to_branch', - placeholder: s_('PivotalTrackerService|Comma-separated list of branches which will be ' \ - 'automatically inspected. Leave blank to include all branches.') + title: 'Restrict to branch (optional)', + help: s_('PivotalTrackerService|Comma-separated list of branches to ' \ + 'automatically inspect. Leave blank to include all branches.') } ] end diff --git a/app/models/integrations/prometheus.rb b/app/models/integrations/prometheus.rb new file mode 100644 index 00000000000..54cb823d606 --- /dev/null +++ b/app/models/integrations/prometheus.rb @@ -0,0 +1,203 @@ +# frozen_string_literal: true + +module Integrations + class Prometheus < BaseMonitoring + include PrometheusAdapter + + # Access to prometheus is directly through the API + prop_accessor :api_url + prop_accessor :google_iap_service_account_json + prop_accessor :google_iap_audience_client_id + boolean_accessor :manual_configuration + + # We need to allow the self-monitoring project to connect to the internal + # Prometheus instance. + # Since the internal Prometheus instance is usually a localhost URL, we need + # to allow localhost URLs when the following conditions are true: + # 1. project is the self-monitoring project. + # 2. api_url is the internal Prometheus URL. + with_options presence: true do + validates :api_url, public_url: true, if: ->(object) { object.manual_configuration? && !object.allow_local_api_url? } + validates :api_url, url: true, if: ->(object) { object.manual_configuration? && object.allow_local_api_url? } + end + + before_save :synchronize_service_state + + after_save :clear_reactive_cache! + + after_commit :track_events + + after_create_commit :create_default_alerts + + scope :preload_project, -> { preload(:project) } + scope :with_clusters_with_cilium, -> { joins(project: [:clusters]).merge(Clusters::Cluster.with_available_cilium) } + + def initialize_properties + if properties.nil? + self.properties = {} + end + end + + def show_active_box? 
+ false + end + + def title + 'Prometheus' + end + + def description + s_('PrometheusService|Monitor application health with Prometheus metrics and dashboards') + end + + def self.to_param + 'prometheus' + end + + def fields + [ + { + type: 'checkbox', + name: 'manual_configuration', + title: s_('PrometheusService|Active'), + help: s_('PrometheusService|Select this checkbox to override the auto configuration settings with your own settings.'), + required: true + }, + { + type: 'text', + name: 'api_url', + title: 'API URL', + placeholder: s_('PrometheusService|https://prometheus.example.com/'), + help: s_('PrometheusService|The Prometheus API base URL.'), + required: true + }, + { + type: 'text', + name: 'google_iap_audience_client_id', + title: 'Google IAP Audience Client ID', + placeholder: s_('PrometheusService|IAP_CLIENT_ID.apps.googleusercontent.com'), + help: s_('PrometheusService|PrometheusService|The ID of the IAP-secured resource.'), + autocomplete: 'off', + required: false + }, + { + type: 'textarea', + name: 'google_iap_service_account_json', + title: 'Google IAP Service Account JSON', + placeholder: s_('PrometheusService|{ "type": "service_account", "project_id": ... }'), + help: s_('PrometheusService|The contents of the credentials.json file of your service account.'), + required: false + } + ] + end + + # Check we can connect to the Prometheus API + def test(*args) + prometheus_client.ping + { success: true, result: 'Checked API endpoint' } + rescue Gitlab::PrometheusClient::Error => err + { success: false, result: err } + end + + def prometheus_client + return unless should_return_client? + + options = prometheus_client_default_options.merge( + allow_local_requests: allow_local_api_url? + ) + + if behind_iap? + # Adds the Authorization header + options[:headers] = iap_client.apply({}) + end + + Gitlab::PrometheusClient.new(api_url, options) + end + + def prometheus_available? + return false if template? + return false unless project + + project.all_clusters.enabled.eager_load(:integration_prometheus).any? do |cluster| + cluster.integration_prometheus_available? + end + end + + def allow_local_api_url? + allow_local_requests_from_web_hooks_and_services? || + (self_monitoring_project? && internal_prometheus_url?) + end + + def configured? + should_return_client? + end + + private + + delegate :allow_local_requests_from_web_hooks_and_services?, to: :current_settings, private: true + + def self_monitoring_project? + project && project.id == current_settings.self_monitoring_project_id + end + + def internal_prometheus_url? + api_url.present? && api_url == ::Gitlab::Prometheus::Internal.uri + end + + def should_return_client? + api_url.present? && manual_configuration? && active? && valid? + end + + def current_settings + Gitlab::CurrentSettings.current_application_settings + end + + def synchronize_service_state + self.active = prometheus_available? || manual_configuration? + + true + end + + def track_events + if enabled_manual_prometheus? + Gitlab::Tracking.event('cluster:services:prometheus', 'enabled_manual_prometheus') + elsif disabled_manual_prometheus? + Gitlab::Tracking.event('cluster:services:prometheus', 'disabled_manual_prometheus') + end + + true + end + + def enabled_manual_prometheus? + manual_configuration_changed? && manual_configuration? + end + + def disabled_manual_prometheus? + manual_configuration_changed? && !manual_configuration? 
+ end + + def create_default_alerts + return unless project_id + + ::Prometheus::CreateDefaultAlertsWorker.perform_async(project_id) + end + + def behind_iap? + manual_configuration? && google_iap_audience_client_id.present? && google_iap_service_account_json.present? + end + + def clean_google_iap_service_account + return unless google_iap_service_account_json + + google_iap_service_account_json + .then { |json| Gitlab::Json.parse(json) } + .except('token_credential_uri') + end + + def iap_client + @iap_client ||= Google::Auth::Credentials + .new(clean_google_iap_service_account, target_audience: google_iap_audience_client_id) + .client + end + end +end diff --git a/app/models/integrations/teamcity.rb b/app/models/integrations/teamcity.rb index 3f14c5d82b3..135c304b57e 100644 --- a/app/models/integrations/teamcity.rb +++ b/app/models/integrations/teamcity.rb @@ -18,7 +18,6 @@ module Integrations attr_accessor :response - after_save :compose_service_hook, if: :activated? before_update :reset_password class << self @@ -29,20 +28,6 @@ module Integrations def supported_events %w(push merge_request) end - - def event_description(event) - case event - when 'push', 'push_events' - 'TeamCity CI will be triggered after every push to the repository except branch delete' - when 'merge_request', 'merge_request_events' - 'TeamCity CI will be triggered after a merge request has been created or updated' - end - end - end - - def compose_service_hook - hook = service_hook || build_service_hook - hook.save end def reset_password diff --git a/app/models/internal_id.rb b/app/models/internal_id.rb index b56bac58705..f114094d69c 100644 --- a/app/models/internal_id.rb +++ b/app/models/internal_id.rb @@ -16,7 +16,7 @@ # * Add `usage` value to enum # * (Optionally) add columns to `internal_ids` if needed for scope. class InternalId < ApplicationRecord - include Gitlab::Utils::StrongMemoize + extend Gitlab::Utils::StrongMemoize belongs_to :project belongs_to :namespace @@ -25,6 +25,10 @@ class InternalId < ApplicationRecord validates :usage, presence: true + scope :filter_by, -> (scope, usage) do + where(**scope, usage: usage) + end + # Increments #last_value and saves the record # # The operation locks the record and gathers a `ROW SHARE` lock (in PostgreSQL). 
@@ -53,18 +57,15 @@ class InternalId < ApplicationRecord class << self def track_greatest(subject, scope, usage, new_value, init) - InternalIdGenerator.new(subject, scope, usage, init) - .track_greatest(new_value) + build_generator(subject, scope, usage, init).track_greatest(new_value) end def generate_next(subject, scope, usage, init) - InternalIdGenerator.new(subject, scope, usage, init) - .generate + build_generator(subject, scope, usage, init).generate end def reset(subject, scope, usage, value) - InternalIdGenerator.new(subject, scope, usage) - .reset(value) + build_generator(subject, scope, usage).reset(value) end # Flushing records is generally safe in a sense that those @@ -77,11 +78,36 @@ class InternalId < ApplicationRecord where(filter).delete_all end + + def internal_id_transactions_increment(operation:, usage:) + self.internal_id_transactions_total.increment( + operation: operation, + usage: usage.to_s, + in_transaction: ActiveRecord::Base.connection.transaction_open?.to_s + ) + end + + def internal_id_transactions_total + strong_memoize(:internal_id_transactions_total) do + name = :gitlab_internal_id_transactions_total + comment = 'Counts all the internal ids happening within transaction' + + Gitlab::Metrics.counter(name, comment) + end + end + + private + + def build_generator(subject, scope, usage, init = nil) + if Feature.enabled?(:generate_iids_without_explicit_locking) + ImplicitlyLockingInternalIdGenerator.new(subject, scope, usage, init) + else + InternalIdGenerator.new(subject, scope, usage, init) + end + end end class InternalIdGenerator - extend Gitlab::Utils::StrongMemoize - # Generate next internal id for a given scope and usage. # # For currently supported usages, see #usage enum. @@ -117,7 +143,7 @@ class InternalId < ApplicationRecord # init: Block that gets called to initialize InternalId record if not present # Make sure to not throw exceptions in the absence of records (if this is expected). 
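# Callers keep using the same class-level API; only the generator behind it is
# switched by the feature flag in build_generator above. Illustrative call
# (scope, usage and the init proc are examples matching the documented contract):
#
#   InternalId.generate_next(
#     issue,
#     { project: issue.project },
#     :issues,
#     ->(issue, scope) { issue.project.issues.maximum(:iid) }
#   )
#
# routes through ImplicitlyLockingInternalIdGenerator when
# :generate_iids_without_explicit_locking is enabled, and through the
# row-locking InternalIdGenerator otherwise.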
def generate - self.class.internal_id_transactions_increment(operation: :generate, usage: usage) + InternalId.internal_id_transactions_increment(operation: :generate, usage: usage) subject.transaction do # Create a record in internal_ids if one does not yet exist @@ -134,7 +160,7 @@ class InternalId < ApplicationRecord def reset(value) return false unless value - self.class.internal_id_transactions_increment(operation: :reset, usage: usage) + InternalId.internal_id_transactions_increment(operation: :reset, usage: usage) updated = InternalId @@ -149,8 +175,9 @@ class InternalId < ApplicationRecord # and set its new_value if it is higher than the current last_value # # Note this will acquire a ROW SHARE lock on the InternalId record + def track_greatest(new_value) - self.class.internal_id_transactions_increment(operation: :track_greatest, usage: usage) + InternalId.internal_id_transactions_increment(operation: :track_greatest, usage: usage) subject.transaction do record.track_greatest_and_save!(new_value) @@ -162,7 +189,7 @@ class InternalId < ApplicationRecord end def with_lock(&block) - self.class.internal_id_transactions_increment(operation: :with_lock, usage: usage) + InternalId.internal_id_transactions_increment(operation: :with_lock, usage: usage) record.with_lock(&block) end @@ -199,22 +226,118 @@ class InternalId < ApplicationRecord rescue ActiveRecord::RecordNotUnique lookup end + end - def self.internal_id_transactions_increment(operation:, usage:) - self.internal_id_transactions_total.increment( - operation: operation, - usage: usage.to_s, - in_transaction: ActiveRecord::Base.connection.transaction_open?.to_s - ) + class ImplicitlyLockingInternalIdGenerator + # Generate next internal id for a given scope and usage. + # + # For currently supported usages, see #usage enum. + # + # The method implements a locking scheme that has the following properties: + # 1) Generated sequence of internal ids is unique per (scope and usage) + # 2) The method is thread-safe and may be used in concurrent threads/processes. + # 3) The generated sequence is gapless. + # 4) In the absence of a record in the internal_ids table, one will be created + # and last_value will be calculated on the fly. + # + # subject: The instance or class we're generating an internal id for. + # scope: Attributes that define the scope for id generation. + # Valid keys are `project/project_id` and `namespace/namespace_id`. + # usage: Symbol to define the usage of the internal id, see InternalId.usages + # init: Proc that accepts the subject and the scope and returns Integer|NilClass + attr_reader :subject, :scope, :scope_attrs, :usage, :init + + def initialize(subject, scope, usage, init = nil) + @subject = subject + @scope = scope + @usage = usage + @init = init + + raise ArgumentError, 'Scope is not well-defined, need at least one column for scope (given: 0)' if scope.empty? + + unless InternalId.usages.has_key?(usage.to_s) + raise ArgumentError, "Usage '#{usage}' is unknown. Supported values are #{InternalId.usages.keys} from InternalId.usages" + end end - def self.internal_id_transactions_total - strong_memoize(:internal_id_transactions_total) do - name = :gitlab_internal_id_transactions_total - comment = 'Counts all the internal ids happening within transaction' + # Generates next internal id and returns it + # init: Block that gets called to initialize InternalId record if not present + # Make sure to not throw exceptions in the absence of records (if this is expected). 
+ def generate + InternalId.internal_id_transactions_increment(operation: :generate, usage: usage) - Gitlab::Metrics.counter(name, comment) + next_iid = update_record!(subject, scope, usage, arel_table[:last_value] + 1) + + return next_iid if next_iid + + create_record!(subject, scope, usage, init) do |iid| + iid.last_value += 1 end + rescue ActiveRecord::RecordNotUnique + retry + end + + # Reset tries to rewind to `value-1`. This will only succeed, + # if `value` stored in database is equal to `last_value`. + # value: The expected last_value to decrement + def reset(value) + return false unless value + + InternalId.internal_id_transactions_increment(operation: :reset, usage: usage) + + iid = update_record!(subject, scope.merge(last_value: value), usage, arel_table[:last_value] - 1) + iid == value - 1 + end + + # Create a record in internal_ids if one does not yet exist + # and set its new_value if it is higher than the current last_value + def track_greatest(new_value) + InternalId.internal_id_transactions_increment(operation: :track_greatest, usage: usage) + + function = Arel::Nodes::NamedFunction.new('GREATEST', [ + arel_table[:last_value], + new_value.to_i + ]) + + next_iid = update_record!(subject, scope, usage, function) + return next_iid if next_iid + + create_record!(subject, scope, usage, init) do |object| + object.last_value = [object.last_value, new_value].max + end + rescue ActiveRecord::RecordNotUnique + retry + end + + private + + def update_record!(subject, scope, usage, new_value) + stmt = Arel::UpdateManager.new + stmt.table(arel_table) + stmt.set(arel_table[:last_value] => new_value) + stmt.wheres = InternalId.filter_by(scope, usage).arel.constraints + + ActiveRecord::Base.connection.insert(stmt, 'Update InternalId', 'last_value') + end + + def create_record!(subject, scope, usage, init) + raise ArgumentError, 'Cannot initialize without init!' unless init + + instance = subject.is_a?(::Class) ? nil : subject + + subject.transaction(requires_new: true) do + last_value = init.call(instance, scope) || 0 + + internal_id = InternalId.create!(**scope, usage: usage, last_value: last_value) do |subject| + yield subject if block_given? 
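# In the common case (the counter row already exists), update_record! above
# issues roughly this single statement (literal values are examples):
#
#   UPDATE internal_ids
#   SET last_value = internal_ids.last_value + 1
#   WHERE internal_ids.project_id = 42 AND internal_ids.usage = 0
#   RETURNING last_value
#
# connection.insert with the 'last_value' column is presumably used so the
# PostgreSQL adapter appends the RETURNING clause, letting the increment and
# the read happen in one statement without an explicit row lock; concurrent
# callers are serialized by the UPDATE's own row-level write lock, and a
# missing row falls through to create_record! with a retry on
# ActiveRecord::RecordNotUnique.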
+ end + + internal_id.last_value + end + end + + def arel_table + InternalId.arel_table end end end diff --git a/app/models/issue.rb b/app/models/issue.rb index 48f388ea48d..00fcba5298a 100644 --- a/app/models/issue.rb +++ b/app/models/issue.rb @@ -80,6 +80,7 @@ class Issue < ApplicationRecord has_and_belongs_to_many :prometheus_alert_events, join_table: :issues_prometheus_alert_events # rubocop: disable Rails/HasAndBelongsToMany has_many :prometheus_alerts, through: :prometheus_alert_events + accepts_nested_attributes_for :issuable_severity, update_only: true accepts_nested_attributes_for :sentry_issue validates :project, presence: true @@ -195,11 +196,23 @@ class Issue < ApplicationRecord end end - # Alias to state machine .with_state_id method - # This needs to be defined after the state machine block to avoid errors class << self + extend ::Gitlab::Utils::Override + + # Alias to state machine .with_state_id method + # This needs to be defined after the state machine block to avoid errors alias_method :with_state, :with_state_id alias_method :with_states, :with_state_ids + + override :order_upvotes_desc + def order_upvotes_desc + reorder(upvotes_count: :desc) + end + + override :order_upvotes_asc + def order_upvotes_asc + reorder(upvotes_count: :asc) + end end def self.relative_positioning_query_base(issue) @@ -267,10 +280,41 @@ class Issue < ApplicationRecord # `with_cte` argument allows sorting when using CTE queries and prevents # errors in postgres when using CTE search optimisation def self.order_by_position_and_priority(with_cte: false) + order = Gitlab::Pagination::Keyset::Order.build([column_order_relative_position, column_order_highest_priority, column_order_id_desc]) + order_labels_priority(with_cte: with_cte) - .reorder(Gitlab::Database.nulls_last_order('relative_position', 'ASC'), - Gitlab::Database.nulls_last_order('highest_priority', 'ASC'), - "id DESC") + .reorder(order) + end + + def self.column_order_relative_position + Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( + attribute_name: 'relative_position', + column_expression: arel_table[:relative_position], + order_expression: Gitlab::Database.nulls_last_order('issues.relative_position', 'ASC'), + reversed_order_expression: Gitlab::Database.nulls_last_order('issues.relative_position', 'DESC'), + order_direction: :asc, + nullable: :nulls_last, + distinct: false + ) + end + + def self.column_order_highest_priority + Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( + attribute_name: 'highest_priority', + column_expression: Arel.sql('highest_priorities.label_priority'), + order_expression: Gitlab::Database.nulls_last_order('highest_priorities.label_priority', 'ASC'), + reversed_order_expression: Gitlab::Database.nulls_last_order('highest_priorities.label_priority', 'DESC'), + order_direction: :asc, + nullable: :nulls_last, + distinct: false + ) + end + + def self.column_order_id_desc + Gitlab::Pagination::Keyset::ColumnOrderDefinition.new( + attribute_name: 'id', + order_expression: arel_table[:id].desc + ) end # Temporary disable moving null elements because of performance problems @@ -394,8 +438,15 @@ class Issue < ApplicationRecord end def check_for_spam? - publicly_visible? && - (title_changed? || description_changed? || confidential_changed?) + # content created via support bots is always checked for spam, EVEN if + # the issue is not publicly visible and/or confidential + return true if author.support_bot? && spammable_attribute_changed? 
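# The keyset order assembled in order_by_position_and_priority above still
# reduces to the same SQL ordering as the raw reorder it replaces, roughly:
#
#   ORDER BY issues.relative_position ASC NULLS LAST,
#            highest_priorities.label_priority ASC NULLS LAST,
#            issues.id DESC
#
# but describing each column through ColumnOrderDefinition (including the
# reversed_order_expression) is what lets Gitlab::Pagination::Keyset derive
# cursor conditions for paging forwards and backwards instead of parsing SQL.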
+ + # Only check for spam on issues which are publicly visible (and thus indexed in search engines) + return false unless publicly_visible? + + # Only check for spam if certain attributes have changed + spammable_attribute_changed? end def as_json(options = {}) @@ -481,8 +532,21 @@ class Issue < ApplicationRecord issue_assignees.pluck(:user_id) end + def update_upvotes_count + self.lock! + self.update_column(:upvotes_count, self.upvotes) + end + private + def spammable_attribute_changed? + title_changed? || + description_changed? || + # NOTE: We need to check them for spam when issues are made non-confidential, because spam + # may have been added while they were confidential and thus not being checked for spam. + confidential_changed?(from: true, to: false) + end + # Ensure that the metrics association is safely created and respecting the unique constraint on issue_id override :ensure_metrics def ensure_metrics diff --git a/app/models/member.rb b/app/models/member.rb index 0636c3c2d4e..14c886e3ab8 100644 --- a/app/models/member.rb +++ b/app/models/member.rb @@ -156,7 +156,7 @@ class Member < ApplicationRecord distinct_members = select('DISTINCT ON (user_id, invite_email) *') .order('user_id, invite_email, access_level DESC, expires_at DESC, created_at ASC') - from(distinct_members, :members) + unscoped.from(distinct_members, :members) end scope :order_name_asc, -> { left_join_users.reorder(Gitlab::Database.nulls_last_order('users.name', 'ASC')) } @@ -232,140 +232,9 @@ class Member < ApplicationRecord find_by(invite_token: invite_token) end - def add_user(source, user, access_level, existing_members: nil, current_user: nil, expires_at: nil, ldap: false) - # rubocop: disable CodeReuse/ServiceClass - # `user` can be either a User object, User ID or an email to be invited - member = retrieve_member(source, user, existing_members) - access_level = retrieve_access_level(access_level) - - return member unless can_update_member?(current_user, member) - - set_member_attributes( - member, - access_level, - current_user: current_user, - expires_at: expires_at, - ldap: ldap - ) - - if member.request? - ::Members::ApproveAccessRequestService.new( - current_user, - access_level: access_level - ).execute( - member, - skip_authorization: ldap, - skip_log_audit_event: ldap - ) - else - member.save - end - - member - # rubocop: enable CodeReuse/ServiceClass - end - - # Populates the attributes of a member. - # - # This logic resides in a separate method so that EE can extend this logic, - # without having to patch the `add_user` method directly. - def set_member_attributes(member, access_level, current_user: nil, expires_at: nil, ldap: false) - member.attributes = { - created_by: member.created_by || current_user, - access_level: access_level, - expires_at: expires_at - } - end - - def add_users(source, users, access_level, current_user: nil, expires_at: nil) - return [] unless users.present? - - emails, users, existing_members = parse_users_list(source, users) - - self.transaction do - (emails + users).map! 
do |user| - add_user( - source, - user, - access_level, - existing_members: existing_members, - current_user: current_user, - expires_at: expires_at - ) - end - end - end - - def access_levels - Gitlab::Access.sym_options - end - def valid_email?(email) Devise.email_regexp.match?(email) end - - private - - def parse_users_list(source, list) - emails = [] - user_ids = [] - users = [] - existing_members = {} - - list.each do |item| - case item - when User - users << item - when Integer - user_ids << item - when /\A\d+\Z/ - user_ids << item.to_i - when Devise.email_regexp - emails << item - end - end - - if user_ids.present? - users.concat(User.where(id: user_ids)) - # the below will automatically discard invalid user_ids - existing_members = source.members_and_requesters.where(user_id: user_ids).index_by(&:user_id) - end - - [emails, users, existing_members] - end - - # This method is used to find users that have been entered into the "Add members" field. - # These can be the User objects directly, their IDs, their emails, or new emails to be invited. - def retrieve_user(user) - return user if user.is_a?(User) - - return User.find_by(id: user) if user.is_a?(Integer) - - User.find_by_any_email(user) || user - end - - def retrieve_member(source, user, existing_members) - user = retrieve_user(user) - - if user.is_a?(User) - if existing_members - existing_members[user.id] || source.members.build(user_id: user.id) - else - source.members_and_requesters.find_or_initialize_by(user_id: user.id) - end - else - source.members.build(invite_email: user) - end - end - - def retrieve_access_level(access_level) - access_levels.fetch(access_level) { access_level.to_i } - end - - def can_update_member?(current_user, member) - # There is no current user for bulk actions, in which case anything is allowed - !current_user || current_user.can?(:"update_#{member.type.underscore}", member) - end end def real_source_type @@ -570,7 +439,7 @@ class Member < ApplicationRecord def update_highest_role? return unless user_id.present? - previous_changes[:access_level].present? + previous_changes[:access_level].present? || destroyed? end def update_highest_role_attribute diff --git a/app/models/members/group_member.rb b/app/models/members/group_member.rb index c7bc31cde5d..cf5906a4cbf 100644 --- a/app/models/members/group_member.rb +++ b/app/models/members/group_member.rb @@ -28,14 +28,12 @@ class GroupMember < Member attr_accessor :last_owner, :last_blocked_owner + self.enumerate_columns_in_select_statements = true + def self.access_level_roles Gitlab::Access.options_with_owner end - def self.access_levels - Gitlab::Access.sym_options_with_owner - end - def self.pluck_user_ids pluck(:user_id) end diff --git a/app/models/members/project_member.rb b/app/models/members/project_member.rb index 41ecc4cbf01..5040879e177 100644 --- a/app/models/members/project_member.rb +++ b/app/models/members/project_member.rb @@ -48,7 +48,7 @@ class ProjectMember < Member project_ids.each do |project_id| project = Project.find(project_id) - add_users( + Members::Projects::CreatorService.add_users( # rubocop:todo CodeReuse/ServiceClass project, users, access_level, @@ -80,12 +80,6 @@ class ProjectMember < Member def access_level_roles Gitlab::Access.options end - - private - - def can_update_member?(current_user, member) - super || (member.owner? && member.new_record?) 
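# Call sites now go through service objects instead of the Member class
# methods removed above; the arguments mirror the keyword lists shown in the
# Group and ProjectMember hunks, while the concrete values are illustrative:
#
#   Members::Groups::CreatorService
#     .new(group, user, :developer, current_user: current_user, expires_at: 1.month.from_now)
#     .execute
#
#   Members::Projects::CreatorService
#     .add_users(project, [user.id, 'new-member@example.com'], :maintainer, current_user: current_user)
#
# The user/email parsing, member building and access-request approval that
# used to live in Member.add_user and Member.add_users belongs to these
# services now.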
- end end def project diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb index 68fb957759d..7ca83d1d68c 100644 --- a/app/models/merge_request.rb +++ b/app/models/merge_request.rb @@ -300,6 +300,11 @@ class MergeRequest < ApplicationRecord query = joins(:metrics) + if !target_project_id && self.where_values_hash["target_project_id"] + target_project_id = self.where_values_hash["target_project_id"] + query = query.unscope(where: :target_project_id) + end + project_condition = if target_project_id MergeRequest::Metrics.arel_table[:target_project_id].eq(target_project_id) else @@ -360,7 +365,7 @@ class MergeRequest < ApplicationRecord scope :preload_approved_by_users, -> { preload(:approved_by_users) } scope :preload_metrics, -> (relation) { preload(metrics: relation) } scope :preload_project_and_latest_diff, -> { preload(:source_project, :latest_merge_request_diff) } - scope :preload_latest_diff_commit, -> { preload(latest_merge_request_diff: :merge_request_diff_commits) } + scope :preload_latest_diff_commit, -> { preload(latest_merge_request_diff: { merge_request_diff_commits: [:commit_author, :committer] }) } scope :preload_milestoneish_associations, -> { preload_routables.preload(:assignees, :labels) } scope :with_web_entity_associations, -> { preload(:author, target_project: [:project_feature, group: [:route, :parent], namespace: :route]) } @@ -1340,7 +1345,7 @@ class MergeRequest < ApplicationRecord def has_ci? return false if has_no_commits? - !!(head_pipeline_id || all_pipelines.any? || source_project&.ci_service) + !!(head_pipeline_id || all_pipelines.any? || source_project&.ci_integration) end def branch_missing? @@ -1551,8 +1556,6 @@ class MergeRequest < ApplicationRecord end def has_codequality_mr_diff_report? - return false unless ::Gitlab::Ci::Features.display_quality_on_mr_diff?(project) - actual_head_pipeline&.has_codequality_mr_diff_report? 
end diff --git a/app/models/merge_request/cleanup_schedule.rb b/app/models/merge_request/cleanup_schedule.rb index 79817269be2..35194b2b318 100644 --- a/app/models/merge_request/cleanup_schedule.rb +++ b/app/models/merge_request/cleanup_schedule.rb @@ -1,14 +1,61 @@ # frozen_string_literal: true class MergeRequest::CleanupSchedule < ApplicationRecord + STATUSES = { + unstarted: 0, + running: 1, + completed: 2, + failed: 3 + }.freeze + belongs_to :merge_request, inverse_of: :cleanup_schedule validates :scheduled_at, presence: true - def self.scheduled_merge_request_ids(limit) - where('completed_at IS NULL AND scheduled_at <= NOW()') + state_machine :status, initial: :unstarted do + state :unstarted, value: STATUSES[:unstarted] + state :running, value: STATUSES[:running] + state :completed, value: STATUSES[:completed] + state :failed, value: STATUSES[:failed] + + event :run do + transition unstarted: :running + end + + event :retry do + transition running: :unstarted + end + + event :complete do + transition running: :completed + end + + event :mark_as_failed do + transition running: :failed + end + + before_transition to: [:completed] do |cleanup_schedule, _transition| + cleanup_schedule.completed_at = Time.current + end + + before_transition from: :running, to: [:unstarted, :failed] do |cleanup_schedule, _transition| + cleanup_schedule.failed_count += 1 + end + end + + scope :scheduled_and_unstarted, -> { + where('completed_at IS NULL AND scheduled_at <= NOW() AND status = ?', STATUSES[:unstarted]) .order('scheduled_at DESC') - .limit(limit) - .pluck(:merge_request_id) + } + + def self.start_next + MergeRequest::CleanupSchedule.transaction do + cleanup_schedule = scheduled_and_unstarted.lock('FOR UPDATE SKIP LOCKED').first + + next if cleanup_schedule.blank? + + cleanup_schedule.run! + cleanup_schedule + end end end diff --git a/app/models/merge_request/diff_commit_user.rb b/app/models/merge_request/diff_commit_user.rb new file mode 100644 index 00000000000..3fc5c9318a4 --- /dev/null +++ b/app/models/merge_request/diff_commit_user.rb @@ -0,0 +1,95 @@ +# frozen_string_literal: true + +class MergeRequest::DiffCommitUser < ApplicationRecord + validates :name, length: { maximum: 512 } + validates :email, length: { maximum: 512 } + validates :name, presence: true, unless: :email + validates :email, presence: true, unless: :name + + # Prepares a value to be inserted into a column in the table + # `merge_request_diff_commit_users`. Values in this table are limited to + # 512 characters. + # + # We treat empty strings as NULL values, as there's no point in (for + # example) storing a row where both the name and Email are an empty + # string. In addition, if we treated them differently we could end up with + # two rows: one where field X is NULL, and one where field X is an empty + # string. This is redundant, so we avoid storing such data. + def self.prepare(value) + value.present? ? value[0..511] : nil + end + + # Creates a new row, or returns an existing one if a row already exists. + def self.find_or_create(name, email) + find_or_create_by!(name: name, email: email) + rescue ActiveRecord::RecordNotUnique + retry + end + + # Finds many (name, email) pairs in bulk. + def self.bulk_find(pairs) + queries = {} + rows = [] + + pairs.each do |(name, email)| + queries[[name, email]] = where(name: name, email: email).to_sql + end + + # We may end up having to query many users. To ensure we don't hit any + # query size limits, we get a fixed number of users at a time. 
+ queries.values.each_slice(1_000).map do |slice| + rows.concat(from("(#{slice.join("\nUNION ALL\n")}) #{table_name}").to_a) + end + + rows + end + + # Finds or creates rows for the given pairs of names and Emails. + # + # The `names_and_emails` argument must be an Array/Set of tuples like so: + # + # [ + # [name, email], + # [name, email], + # ... + # ] + # + # This method expects that the names and Emails have already been trimmed to + # at most 512 characters. + # + # The return value is a Hash that maps these tuples to instances of this + # model. + def self.bulk_find_or_create(pairs) + mapping = {} + create = [] + + # Over time, fewer new rows need to be created. We take advantage of that + # here by first finding all rows that already exist, using a limited number + # of queries (in most cases only one query will be needed). + bulk_find(pairs).each do |row| + mapping[[row.name, row.email]] = row + end + + pairs.each do |(name, email)| + create << { name: name, email: email } unless mapping[[name, email]] + end + + return mapping if create.empty? + + # Sometimes we may need to insert new users into the table. We do this in + # bulk, so we only need one INSERT for all missing users. + insert_all(create, returning: %w[id name email]).each do |row| + mapping[[row['name'], row['email']]] = + new(id: row['id'], name: row['name'], email: row['email']) + end + + # It's possible for (name, email) pairs to be inserted concurrently, + # resulting in the above insert not returning anything. Here we get any + # remaining users that were created concurrently. + bulk_find(pairs.reject { |pair| mapping.key?(pair) }).each do |row| + mapping[[row.name, row.email]] = row + end + + mapping + end +end diff --git a/app/models/merge_request_diff.rb b/app/models/merge_request_diff.rb index f58d7788432..d2ea663551d 100644 --- a/app/models/merge_request_diff.rb +++ b/app/models/merge_request_diff.rb @@ -701,7 +701,7 @@ class MergeRequestDiff < ApplicationRecord end def load_commits(limit: nil) - commits = merge_request_diff_commits.limit(limit) + commits = merge_request_diff_commits.with_users.limit(limit) .map { |commit| Commit.from_hash(commit.to_hash, project) } CommitCollection diff --git a/app/models/merge_request_diff_commit.rb b/app/models/merge_request_diff_commit.rb index ed398e0d2e0..466d28301c0 100644 --- a/app/models/merge_request_diff_commit.rb +++ b/app/models/merge_request_diff_commit.rb @@ -9,21 +9,51 @@ class MergeRequestDiffCommit < ApplicationRecord belongs_to :merge_request_diff + # This relation is called `commit_author` and not `author`, as the project + # import/export logic treats relations named `author` as instances of the + # `User` class. + # + # NOTE: these columns are _not_ indexed, nor do they use foreign keys. + # + # This is deliberate, as creating these indexes on GitLab.com takes a _very_ + # long time. In addition, there's no real need for them either based on how + # this data is used. 
+ # + # For more information, refer to the following: + # + # - https://gitlab.com/gitlab-com/gl-infra/production/-/issues/5038#note_614592881 + # - https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63669 + belongs_to :commit_author, class_name: 'MergeRequest::DiffCommitUser' + belongs_to :committer, class_name: 'MergeRequest::DiffCommitUser' + sha_attribute :sha alias_attribute :id, :sha serialize :trailers, Serializers::Json # rubocop:disable Cop/ActiveRecordSerialize validates :trailers, json_schema: { filename: 'git_trailers' } + scope :with_users, -> { preload(:commit_author, :committer) } + + # A list of keys of which their values need to be trimmed before they can be + # inserted into the merge_request_diff_commit_users table. + TRIM_USER_KEYS = + %i[author_name author_email committer_name committer_email].freeze + # Deprecated; use `bulk_insert!` from `BulkInsertSafe` mixin instead. # cf. https://gitlab.com/gitlab-org/gitlab/issues/207989 for progress def self.create_bulk(merge_request_diff_id, commits) - rows = commits.map.with_index do |commit, index| - # See #parent_ids. - commit_hash = commit.to_hash.except(:parent_ids) + commit_hashes, user_tuples = prepare_commits_for_bulk_insert(commits) + users = MergeRequest::DiffCommitUser.bulk_find_or_create(user_tuples) + + rows = commit_hashes.map.with_index do |commit_hash, index| sha = commit_hash.delete(:id) + author = users[[commit_hash[:author_name], commit_hash[:author_email]]] + committer = + users[[commit_hash[:committer_name], commit_hash[:committer_email]]] commit_hash.merge( + commit_author_id: author&.id, + committer_id: committer&.id, merge_request_diff_id: merge_request_diff_id, relative_order: index, sha: Gitlab::Database::ShaAttribute.serialize(sha), # rubocop:disable Cop/ActiveRecordSerialize @@ -36,6 +66,24 @@ class MergeRequestDiffCommit < ApplicationRecord Gitlab::Database.bulk_insert(self.table_name, rows) # rubocop:disable Gitlab/BulkInsert end + def self.prepare_commits_for_bulk_insert(commits) + user_tuples = Set.new + hashes = commits.map do |commit| + hash = commit.to_hash.except(:parent_ids) + + TRIM_USER_KEYS.each do |key| + hash[key] = MergeRequest::DiffCommitUser.prepare(hash[key]) + end + + user_tuples << [hash[:author_name], hash[:author_email]] + user_tuples << [hash[:committer_name], hash[:committer_email]] + + hash + end + + [hashes, user_tuples] + end + def self.oldest_merge_request_id_per_commit(project_id, shas) # This method is defined here and not on MergeRequest, otherwise the SHA # values used in the WHERE below won't be encoded correctly. @@ -54,4 +102,20 @@ class MergeRequestDiffCommit < ApplicationRecord ) .group(:sha) end + + def author_name + commit_author_id ? commit_author.name : super + end + + def author_email + commit_author_id ? commit_author.email : super + end + + def committer_name + committer_id ? committer.name : super + end + + def committer_email + committer_id ? 
committer.email : super + end end diff --git a/app/models/milestone.rb b/app/models/milestone.rb index 9ed6c106e45..2168d57693e 100644 --- a/app/models/milestone.rb +++ b/app/models/milestone.rb @@ -120,6 +120,19 @@ class Milestone < ApplicationRecord sorted.with_order_id_desc end + def self.sort_with_expired_last(method) + # NOTE: this is a custom ordering of milestones + # to prioritize displaying non-expired milestones and milestones without due dates + sorted = reorder(Arel.sql("(CASE WHEN due_date IS NULL THEN 1 WHEN due_date >= CURRENT_DATE THEN 0 ELSE 2 END) ASC")) + sorted = if method.to_s == 'expired_last_due_date_desc' + sorted.order(due_date: :desc) + else + sorted.order(due_date: :asc) + end + + sorted.with_order_id_desc + end + def self.states_count(projects, groups = nil) return STATE_COUNT_HASH unless projects || groups diff --git a/app/models/namespace.rb b/app/models/namespace.rb index 5f41441058b..5524fec5324 100644 --- a/app/models/namespace.rb +++ b/app/models/namespace.rb @@ -116,6 +116,14 @@ class Namespace < ApplicationRecord ) end + scope :sorted_by_similarity_and_parent_id_desc, -> (search) do + order_expression = Gitlab::Database::SimilarityScore.build_expression(search: search, rules: [ + { column: arel_table["path"], multiplier: 1 }, + { column: arel_table["name"], multiplier: 0.7 } + ]) + reorder(order_expression.desc, Namespace.arel_table['parent_id'].desc.nulls_last, Namespace.arel_table['id'].desc) + end + # Make sure that the name is same as strong_memoize name in root_ancestor # method attr_writer :root_ancestor, :emails_disabled_memoized @@ -272,7 +280,7 @@ class Namespace < ApplicationRecord # that belongs to this namespace def all_projects if Feature.enabled?(:recursive_approach_for_all_projects, default_enabled: :yaml) - namespace = user? ? self : self_and_descendants + namespace = user? ? self : self_and_descendant_ids Project.where(namespace: namespace) else Project.inside_path(full_path) diff --git a/app/models/namespace_setting.rb b/app/models/namespace_setting.rb index 600abc33471..fc890bf687c 100644 --- a/app/models/namespace_setting.rb +++ b/app/models/namespace_setting.rb @@ -15,7 +15,7 @@ class NamespaceSetting < ApplicationRecord NAMESPACE_SETTINGS_PARAMS = [:default_branch_name, :delayed_project_removal, :lock_delayed_project_removal, :resource_access_token_creation_allowed, - :prevent_sharing_groups_outside_hierarchy].freeze + :prevent_sharing_groups_outside_hierarchy, :new_user_signups_cap].freeze self.primary_key = :namespace_id diff --git a/app/models/namespaces/traversal/linear.rb b/app/models/namespaces/traversal/linear.rb index d0281f4d974..3d78f384634 100644 --- a/app/models/namespaces/traversal/linear.rb +++ b/app/models/namespaces/traversal/linear.rb @@ -64,15 +64,28 @@ module Namespaces traversal_ids.present? end - def root_ancestor - return super if parent.nil? - return super unless persisted? + def use_traversal_ids_for_ancestors? + return false unless use_traversal_ids? + return false unless Feature.enabled?(:use_traversal_ids_for_ancestors, root_ancestor, default_enabled: :yaml) + + traversal_ids.present? + end + + def use_traversal_ids_for_root_ancestor? + return false unless Feature.enabled?(:use_traversal_ids_for_root_ancestor, default_enabled: :yaml) - return super if traversal_ids.blank? - return super unless Feature.enabled?(:use_traversal_ids_for_root_ancestor, default_enabled: :yaml) + traversal_ids.present? + end + + def root_ancestor + return super unless use_traversal_ids_for_root_ancestor? 
strong_memoize(:root_ancestor) do - Namespace.find_by(id: traversal_ids.first) + if parent.nil? + self + else + Namespace.find_by(id: traversal_ids.first) + end end end @@ -95,14 +108,33 @@ module Namespaces end def ancestors(hierarchy_order: nil) - return super() unless use_traversal_ids? - return super() unless Feature.enabled?(:use_traversal_ids_for_ancestors, root_ancestor, default_enabled: :yaml) + return super unless use_traversal_ids_for_ancestors? return self.class.none if parent_id.blank? lineage(bottom: parent, hierarchy_order: hierarchy_order) end + def ancestor_ids(hierarchy_order: nil) + return super unless use_traversal_ids_for_ancestors? + + hierarchy_order == :desc ? traversal_ids[0..-2] : traversal_ids[0..-2].reverse + end + + def self_and_ancestors(hierarchy_order: nil) + return super unless use_traversal_ids_for_ancestors? + + return self.class.where(id: id) if parent_id.blank? + + lineage(bottom: self, hierarchy_order: hierarchy_order) + end + + def self_and_ancestor_ids(hierarchy_order: nil) + return super unless use_traversal_ids_for_ancestors? + + hierarchy_order == :desc ? traversal_ids : traversal_ids.reverse + end + private # Update the traversal_ids for the full hierarchy. @@ -112,8 +144,7 @@ module Namespaces # Clear any previously memoized root_ancestor as our ancestors have changed. clear_memoization(:root_ancestor) - # We cannot rely on Namespaces::Traversal::Linear#root_ancestor because it might be stale - Namespace::TraversalHierarchy.for_namespace(recursive_root_ancestor).sync_traversal_ids! + Namespace::TraversalHierarchy.for_namespace(self).sync_traversal_ids! end # Lock the root of the hierarchy we just left, and lock the root of the hierarchy diff --git a/app/models/namespaces/traversal/recursive.rb b/app/models/namespaces/traversal/recursive.rb index 5a1a9d24117..d9e8743aa50 100644 --- a/app/models/namespaces/traversal/recursive.rb +++ b/app/models/namespaces/traversal/recursive.rb @@ -10,7 +10,7 @@ module Namespaces if persisted? strong_memoize(:root_ancestor) do - self_and_ancestors.reorder(nil).find_by(parent_id: nil) + recursive_self_and_ancestors.reorder(nil).find_by(parent_id: nil) end else parent.root_ancestor @@ -26,14 +26,19 @@ module Namespaces alias_method :recursive_self_and_hierarchy, :self_and_hierarchy # Returns all the ancestors of the current namespaces. - def ancestors + def ancestors(hierarchy_order: nil) return self.class.none unless parent_id object_hierarchy(self.class.where(id: parent_id)) - .base_and_ancestors + .base_and_ancestors(hierarchy_order: hierarchy_order) end alias_method :recursive_ancestors, :ancestors + def ancestor_ids(hierarchy_order: nil) + recursive_ancestors(hierarchy_order: hierarchy_order).pluck(:id) + end + alias_method :recursive_ancestor_ids, :ancestor_ids + # returns all ancestors upto but excluding the given namespace # when no namespace is given, all ancestors upto the top are returned def ancestors_upto(top = nil, hierarchy_order: nil) @@ -49,6 +54,11 @@ module Namespaces end alias_method :recursive_self_and_ancestors, :self_and_ancestors + def self_and_ancestor_ids(hierarchy_order: nil) + recursive_self_and_ancestors(hierarchy_order: hierarchy_order).pluck(:id) + end + alias_method :recursive_self_and_ancestor_ids, :self_and_ancestor_ids + # Returns all the descendants of the current namespace. 
def descendants object_hierarchy(self.class.where(parent_id: id)) @@ -63,12 +73,12 @@ module Namespaces alias_method :recursive_self_and_descendants, :self_and_descendants def self_and_descendant_ids - self_and_descendants.select(:id) + recursive_self_and_descendants.select(:id) end alias_method :recursive_self_and_descendant_ids, :self_and_descendant_ids def object_hierarchy(ancestors_base) - Gitlab::ObjectHierarchy.new(ancestors_base, options: { use_distinct: Feature.enabled?(:use_distinct_in_object_hierarchy, self) }) + Gitlab::ObjectHierarchy.new(ancestors_base) end end end diff --git a/app/models/note.rb b/app/models/note.rb index d1a59394ba1..ed341e58436 100644 --- a/app/models/note.rb +++ b/app/models/note.rb @@ -500,6 +500,13 @@ class Note < ApplicationRecord refs end + def bump_updated_at + # Instead of calling touch which is throttled via ThrottledTouch concern, + # we bump the updated_at column directly. This also prevents executing + # after_commit callbacks that we don't need. + update_column(:updated_at, Time.current) + end + def expire_etag_cache noteable&.expire_note_etag_cache end diff --git a/app/models/operations/feature_flag.rb b/app/models/operations/feature_flag.rb index 8b052f80395..450a5970ad8 100644 --- a/app/models/operations/feature_flag.rb +++ b/app/models/operations/feature_flag.rb @@ -80,7 +80,7 @@ module Operations end def link_reference_pattern - @link_reference_pattern ||= super("feature_flags", /(?<feature_flag>\d+)\/edit/) + @link_reference_pattern ||= super("feature_flags", %r{(?<feature_flag>\d+)/edit}) end def reference_postfix diff --git a/app/models/packages/debian.rb b/app/models/packages/debian.rb index f7f7f9f95e9..e20f1b8244a 100644 --- a/app/models/packages/debian.rb +++ b/app/models/packages/debian.rb @@ -2,6 +2,10 @@ module Packages module Debian + DISTRIBUTION_REGEX = %r{[a-z0-9][a-z0-9.-]*}i.freeze + COMPONENT_REGEX = DISTRIBUTION_REGEX.freeze + ARCHITECTURE_REGEX = %r{[a-z0-9][-a-z0-9]*}.freeze + def self.table_name_prefix 'packages_debian_' end diff --git a/app/models/packages/event.rb b/app/models/packages/event.rb index 98c9d5246db..a1eb7120117 100644 --- a/app/models/packages/event.rb +++ b/app/models/packages/event.rb @@ -3,7 +3,7 @@ class Packages::Event < ApplicationRecord belongs_to :package, optional: true - UNIQUE_EVENTS_ALLOWED = %i[push_package delete_package pull_package].freeze + UNIQUE_EVENTS_ALLOWED = %i[push_package delete_package pull_package pull_symbol_package push_symbol_package].freeze EVENT_SCOPES = ::Packages::Package.package_types.merge(container: 1000, tag: 1001).freeze EVENT_PREFIX = "i_package" @@ -21,7 +21,9 @@ class Packages::Event < ApplicationRecord delete_tag: 7, delete_tag_bulk: 8, list_tags: 9, - cli_metadata: 10 + cli_metadata: 10, + pull_symbol_package: 11, + push_symbol_package: 12 } enum originator_type: { user: 0, deploy_token: 1, guest: 2 } diff --git a/app/models/packages/go/module.rb b/app/models/packages/go/module.rb index 00d51c21881..a029437c82d 100644 --- a/app/models/packages/go/module.rb +++ b/app/models/packages/go/module.rb @@ -33,7 +33,7 @@ module Packages end def path_valid?(major) - m = /\/v(\d+)$/i.match(@name) + m = %r{/v(\d+)$}i.match(@name) case major when 0, 1 diff --git a/app/models/packages/helm.rb b/app/models/packages/helm.rb index e021b997bf5..b34ccf907dd 100644 --- a/app/models/packages/helm.rb +++ b/app/models/packages/helm.rb @@ -2,6 +2,8 @@ module Packages module Helm + TEMPORARY_PACKAGE_NAME = 'Helm.Temporary.Package' + def self.table_name_prefix 'packages_helm_' end diff 
--git a/app/models/packages/nuget.rb b/app/models/packages/nuget.rb index f152eedb8fc..6bedd488c8a 100644 --- a/app/models/packages/nuget.rb +++ b/app/models/packages/nuget.rb @@ -2,6 +2,7 @@ module Packages module Nuget TEMPORARY_PACKAGE_NAME = 'NuGet.Temporary.Package' + TEMPORARY_SYMBOL_PACKAGE_NAME = 'NuGet.Temporary.SymbolPackage' def self.table_name_prefix 'packages_nuget_' diff --git a/app/models/packages/package.rb b/app/models/packages/package.rb index b040c98ef09..d2e4f46898c 100644 --- a/app/models/packages/package.rb +++ b/app/models/packages/package.rb @@ -158,8 +158,6 @@ class Packages::Package < ApplicationRecord joins(:project).reorder(keyset_order) end - after_commit :update_composer_cache, on: :destroy, if: -> { composer? && Feature.disabled?(:disable_composer_callback) } - def self.only_maven_packages_with_path(path, use_cte: false) if use_cte # This is an optimization fence which assumes that looking up the Metadatum record by path (globally) @@ -295,12 +293,6 @@ class Packages::Package < ApplicationRecord private - def update_composer_cache - return unless composer? - - ::Packages::Composer::CacheUpdateWorker.perform_async(project_id, name, composer_metadatum.version_cache_sha) # rubocop:disable CodeReuse/Worker - end - def composer_tag_version? composer? && !Gitlab::Regex.composer_dev_version_regex.match(version.to_s) end diff --git a/app/models/packages/package_file.rb b/app/models/packages/package_file.rb index 3ef30c035e8..799242a639a 100644 --- a/app/models/packages/package_file.rb +++ b/app/models/packages/package_file.rb @@ -27,10 +27,12 @@ class Packages::PackageFile < ApplicationRecord validates :file_name, uniqueness: { scope: :package }, if: -> { package&.pypi? } scope :recent, -> { order(id: :desc) } + scope :limit_recent, ->(limit) { recent.limit(limit) } scope :for_package_ids, ->(ids) { where(package_id: ids) } scope :with_file_name, ->(file_name) { where(file_name: file_name) } scope :with_file_name_like, ->(file_name) { where(arel_table[:file_name].matches(file_name)) } scope :with_files_stored_locally, -> { where(file_store: ::Packages::PackageFileUploader::Store::LOCAL) } + scope :with_format, ->(format) { where(::Packages::PackageFile.arel_table[:file_name].matches("%.#{format}")) } scope :preload_conan_file_metadata, -> { preload(:conan_file_metadatum) } scope :preload_debian_file_metadata, -> { preload(:debian_file_metadatum) } scope :preload_helm_file_metadata, -> { preload(:helm_file_metadatum) } diff --git a/app/models/plan.rb b/app/models/plan.rb index f3ef04315f8..e16ecb4c629 100644 --- a/app/models/plan.rb +++ b/app/models/plan.rb @@ -14,7 +14,7 @@ class Plan < ApplicationRecord Gitlab::SafeRequestStore.fetch(:plan_default) do # find_by allows us to find object (cheaply) against replica DB # safe_find_or_create_by does stick to primary DB - find_by(name: DEFAULT) || safe_find_or_create_by(name: DEFAULT) + find_by(name: DEFAULT) || safe_find_or_create_by(name: DEFAULT) { |plan| plan.title = DEFAULT.titleize } end end diff --git a/app/models/plan_limits.rb b/app/models/plan_limits.rb index f17078c0cab..bf08da6a1e1 100644 --- a/app/models/plan_limits.rb +++ b/app/models/plan_limits.rb @@ -1,6 +1,10 @@ # frozen_string_literal: true class PlanLimits < ApplicationRecord + include IgnorableColumns + + ignore_column :ci_max_artifact_size_running_container_scanning, remove_with: '14.3', remove_after: '2021-08-22' + LimitUndefinedError = Class.new(StandardError) belongs_to :plan diff --git a/app/models/project.rb b/app/models/project.rb index 
95ba0973321..9e6e29aadda 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -147,11 +147,7 @@ class Project < ApplicationRecord has_many :boards def self.integration_association_name(name) - if ::Integration.renamed?(name) - "#{name}_integration" - else - "#{name}_service" - end + "#{name}_integration" end # Project integrations @@ -172,25 +168,25 @@ class Project < ApplicationRecord has_one :flowdock_integration, class_name: 'Integrations::Flowdock' has_one :hangouts_chat_integration, class_name: 'Integrations::HangoutsChat' has_one :irker_integration, class_name: 'Integrations::Irker' - has_one :jenkins_service, class_name: 'Integrations::Jenkins' - has_one :jira_service, class_name: 'Integrations::Jira' - has_one :mattermost_service, class_name: 'Integrations::Mattermost' - has_one :mattermost_slash_commands_service, class_name: 'Integrations::MattermostSlashCommands' - has_one :microsoft_teams_service, class_name: 'Integrations::MicrosoftTeams' - has_one :mock_ci_service, class_name: 'Integrations::MockCi' - has_one :packagist_service, class_name: 'Integrations::Packagist' - has_one :pipelines_email_service, class_name: 'Integrations::PipelinesEmail' - has_one :pivotaltracker_service, class_name: 'Integrations::Pivotaltracker' - has_one :pushover_service, class_name: 'Integrations::Pushover' - has_one :redmine_service, class_name: 'Integrations::Redmine' - has_one :slack_service, class_name: 'Integrations::Slack' - has_one :slack_slash_commands_service, class_name: 'Integrations::SlackSlashCommands' - has_one :teamcity_service, class_name: 'Integrations::Teamcity' - has_one :unify_circuit_service, class_name: 'Integrations::UnifyCircuit' - has_one :webex_teams_service, class_name: 'Integrations::WebexTeams' - has_one :youtrack_service, class_name: 'Integrations::Youtrack' - has_one :prometheus_service, inverse_of: :project - has_one :mock_monitoring_service + has_one :jenkins_integration, class_name: 'Integrations::Jenkins' + has_one :jira_integration, class_name: 'Integrations::Jira' + has_one :mattermost_integration, class_name: 'Integrations::Mattermost' + has_one :mattermost_slash_commands_integration, class_name: 'Integrations::MattermostSlashCommands' + has_one :microsoft_teams_integration, class_name: 'Integrations::MicrosoftTeams' + has_one :mock_ci_integration, class_name: 'Integrations::MockCi' + has_one :mock_monitoring_integration, class_name: 'Integrations::MockMonitoring' + has_one :packagist_integration, class_name: 'Integrations::Packagist' + has_one :pipelines_email_integration, class_name: 'Integrations::PipelinesEmail' + has_one :pivotaltracker_integration, class_name: 'Integrations::Pivotaltracker' + has_one :prometheus_integration, class_name: 'Integrations::Prometheus', inverse_of: :project + has_one :pushover_integration, class_name: 'Integrations::Pushover' + has_one :redmine_integration, class_name: 'Integrations::Redmine' + has_one :slack_integration, class_name: 'Integrations::Slack' + has_one :slack_slash_commands_integration, class_name: 'Integrations::SlackSlashCommands' + has_one :teamcity_integration, class_name: 'Integrations::Teamcity' + has_one :unify_circuit_integration, class_name: 'Integrations::UnifyCircuit' + has_one :webex_teams_integration, class_name: 'Integrations::WebexTeams' + has_one :youtrack_integration, class_name: 'Integrations::Youtrack' has_one :root_of_fork_network, foreign_key: 'root_project_id', @@ -381,6 +377,8 @@ class Project < ApplicationRecord has_one :operations_feature_flags_client, class_name: 
'Operations::FeatureFlagsClient' has_many :operations_feature_flags_user_lists, class_name: 'Operations::FeatureFlags::UserList' + has_many :error_tracking_errors, inverse_of: :project, class_name: 'ErrorTracking::Error' + has_many :timelogs accepts_nested_attributes_for :variables, allow_destroy: true @@ -400,7 +398,7 @@ class Project < ApplicationRecord accepts_nested_attributes_for :error_tracking_setting, update_only: true accepts_nested_attributes_for :metrics_setting, update_only: true, allow_destroy: true accepts_nested_attributes_for :grafana_integration, update_only: true, allow_destroy: true - accepts_nested_attributes_for :prometheus_service, update_only: true + accepts_nested_attributes_for :prometheus_integration, update_only: true accepts_nested_attributes_for :alerting_setting, update_only: true delegate :feature_available?, :builds_enabled?, :wiki_enabled?, @@ -410,36 +408,37 @@ class Project < ApplicationRecord :wiki_access_level, :snippets_access_level, :builds_access_level, :repository_access_level, :pages_access_level, :metrics_dashboard_access_level, :analytics_access_level, :operations_enabled?, :operations_access_level, :security_and_compliance_access_level, - :container_registry_access_level, + :container_registry_access_level, :container_registry_enabled?, to: :project_feature, allow_nil: true + alias_method :container_registry_enabled, :container_registry_enabled? delegate :show_default_award_emojis, :show_default_award_emojis=, :show_default_award_emojis?, to: :project_setting, allow_nil: true delegate :scheduled?, :started?, :in_progress?, :failed?, :finished?, prefix: :import, to: :import_state, allow_nil: true delegate :squash_always?, :squash_never?, :squash_enabled_by_default?, :squash_readonly?, to: :project_setting - delegate :squash_option, to: :project_setting + delegate :squash_option, :squash_option=, to: :project_setting + delegate :previous_default_branch, :previous_default_branch=, to: :project_setting delegate :no_import?, to: :import_state, allow_nil: true delegate :name, to: :owner, allow_nil: true, prefix: true delegate :members, to: :team, prefix: true delegate :add_user, :add_users, to: :team delegate :add_guest, :add_reporter, :add_developer, :add_maintainer, :add_role, to: :team - delegate :group_runners_enabled, :group_runners_enabled=, :group_runners_enabled?, to: :ci_cd_settings + delegate :group_runners_enabled, :group_runners_enabled=, to: :ci_cd_settings, allow_nil: true delegate :root_ancestor, to: :namespace, allow_nil: true delegate :last_pipeline, to: :commit, allow_nil: true delegate :external_dashboard_url, to: :metrics_setting, allow_nil: true, prefix: true delegate :dashboard_timezone, to: :metrics_setting, allow_nil: true, prefix: true delegate :default_git_depth, :default_git_depth=, to: :ci_cd_settings, prefix: :ci, allow_nil: true - delegate :forward_deployment_enabled, :forward_deployment_enabled=, :forward_deployment_enabled?, to: :ci_cd_settings, prefix: :ci, allow_nil: true - delegate :job_token_scope_enabled, :job_token_scope_enabled=, :job_token_scope_enabled?, to: :ci_cd_settings, prefix: :ci - delegate :keep_latest_artifact, :keep_latest_artifact=, :keep_latest_artifact?, :keep_latest_artifacts_available?, to: :ci_cd_settings, allow_nil: true - delegate :restrict_user_defined_variables, :restrict_user_defined_variables=, :restrict_user_defined_variables?, - to: :ci_cd_settings, allow_nil: true + delegate :forward_deployment_enabled, :forward_deployment_enabled=, to: :ci_cd_settings, prefix: :ci, allow_nil: true + 
delegate :job_token_scope_enabled, :job_token_scope_enabled=, to: :ci_cd_settings, prefix: :ci, allow_nil: true + delegate :keep_latest_artifact, :keep_latest_artifact=, to: :ci_cd_settings, allow_nil: true + delegate :restrict_user_defined_variables, :restrict_user_defined_variables=, to: :ci_cd_settings, allow_nil: true delegate :actual_limits, :actual_plan_name, to: :namespace, allow_nil: true delegate :allow_merge_on_skipped_pipeline, :allow_merge_on_skipped_pipeline?, :allow_merge_on_skipped_pipeline=, :has_confluence?, :allow_editing_commit_messages?, to: :project_setting - delegate :active?, to: :prometheus_service, allow_nil: true, prefix: true + delegate :active?, to: :prometheus_integration, allow_nil: true, prefix: true delegate :log_jira_dvcs_integration_usage, :jira_dvcs_server_last_sync_at, :jira_dvcs_cloud_last_sync_at, to: :feature_usage @@ -542,7 +541,7 @@ class Project < ApplicationRecord scope :for_milestones, ->(ids) { joins(:milestones).where('milestones.id' => ids).distinct } scope :with_push, -> { joins(:events).merge(Event.pushed_action) } scope :with_project_feature, -> { joins('LEFT JOIN project_features ON projects.id = project_features.project_id') } - scope :with_active_jira_services, -> { joins(:integrations).merge(::Integrations::Jira.active) } + scope :with_active_jira_integrations, -> { joins(:integrations).merge(::Integrations::Jira.active) } scope :with_jira_dvcs_cloud, -> { joins(:feature_usage).merge(ProjectFeatureUsage.with_jira_dvcs_integration_enabled(cloud: true)) } scope :with_jira_dvcs_server, -> { joins(:feature_usage).merge(ProjectFeatureUsage.with_jira_dvcs_integration_enabled(cloud: false)) } scope :inc_routes, -> { includes(:route, namespace: :route) } @@ -550,9 +549,8 @@ class Project < ApplicationRecord scope :with_namespace, -> { includes(:namespace) } scope :with_import_state, -> { includes(:import_state) } scope :include_project_feature, -> { includes(:project_feature) } - scope :with_service, ->(service) { joins(service).eager_load(service) } + scope :with_integration, ->(integration) { joins(integration).eager_load(integration) } scope :with_shared_runners, -> { where(shared_runners_enabled: true) } - scope :with_container_registry, -> { where(container_registry_enabled: true) } scope :inside_path, ->(path) do # We need routes alias rs for JOIN so it does not conflict with # includes(:route) which we use in ProjectsFinder. 
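The project.rb hunks above and below rename the legacy *_service associations, scopes, and helpers to their *_integration equivalents. A minimal usage sketch of the renamed API, assuming an existing project record (project_id is a placeholder):

    # Association names are now derived uniformly; the Integration.renamed? branch is gone.
    Project.integration_association_name('jira')     # => "jira_integration"

    project = Project.find(project_id)                # placeholder id, for illustration only
    jira    = project.jira_integration                # was project.jira_service

    # Scopes follow the same renaming:
    Project.with_active_jira_integrations             # was with_active_jira_services
    Project.with_integration(:jira_integration)       # was with_service(:jira_service)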
@@ -1398,24 +1396,22 @@ class Project < ApplicationRecord @external_wiki ||= integrations.external_wikis.first end - def find_or_initialize_services - available_services_names = Integration.available_services_names - disabled_services - - available_services_names.map do |service_name| - find_or_initialize_service(service_name) - end.sort_by(&:title) + def find_or_initialize_integrations + Integration + .available_integration_names + .difference(disabled_integrations) + .map { find_or_initialize_integration(_1) } + .sort_by(&:title) end - def disabled_services - return %w[datadog] unless Feature.enabled?(:datadog_ci_integration, self) - + def disabled_integrations [] end - def find_or_initialize_service(name) - return if disabled_services.include?(name) + def find_or_initialize_integration(name) + return if disabled_integrations.include?(name) - find_service(integrations, name) || build_from_instance_or_template(name) || build_service(name) + find_integration(integrations, name) || build_from_instance_or_template(name) || build_integration(name) end # rubocop: disable CodeReuse/ServiceClass @@ -1428,20 +1424,12 @@ class Project < ApplicationRecord end # rubocop: enable CodeReuse/ServiceClass - def ci_services + def ci_integrations integrations.where(category: :ci) end - def ci_service - @ci_service ||= ci_services.reorder(nil).find_by(active: true) - end - - def monitoring_services - integrations.where(category: :monitoring) - end - - def monitoring_service - @monitoring_service ||= monitoring_services.reorder(nil).find_by(active: true) + def ci_integration + @ci_integration ||= ci_integrations.reorder(nil).find_by(active: true) end def avatar_in_git @@ -1512,7 +1500,7 @@ class Project < ApplicationRecord end # rubocop: enable CodeReuse/ServiceClass - def execute_services(data, hooks_scope = :push_hooks) + def execute_integrations(data, hooks_scope = :push_hooks) # Call only service hooks that are active for this scope run_after_commit_or_now do integrations.public_send(hooks_scope).each do |integration| # rubocop:disable GitlabSecurity/PublicSend @@ -1525,7 +1513,7 @@ class Project < ApplicationRecord hooks.hooks_for(hooks_scope).any? || SystemHook.hooks_for(hooks_scope).any? || Gitlab::FileHook.any? end - def has_active_services?(hooks_scope = :push_hooks) + def has_active_integrations?(hooks_scope = :push_hooks) integrations.public_send(hooks_scope).any? # rubocop:disable GitlabSecurity/PublicSend end @@ -2629,14 +2617,41 @@ class Project < ApplicationRecord !!read_attribute(:merge_requests_author_approval) end - def container_registry_enabled - if Feature.enabled?(:read_container_registry_access_level, self.namespace, default_enabled: :yaml) - project_feature.container_registry_enabled? - else - read_attribute(:container_registry_enabled) - end + def ci_forward_deployment_enabled? + return false unless ci_cd_settings + + ci_cd_settings.forward_deployment_enabled? + end + + def ci_job_token_scope_enabled? + return false unless ci_cd_settings + + ci_cd_settings.job_token_scope_enabled? + end + + def restrict_user_defined_variables? + return false unless ci_cd_settings + + ci_cd_settings.restrict_user_defined_variables? + end + + def keep_latest_artifacts_available? + return false unless ci_cd_settings + + ci_cd_settings.keep_latest_artifacts_available? + end + + def keep_latest_artifact? + return false unless ci_cd_settings + + ci_cd_settings.keep_latest_artifact? + end + + def group_runners_enabled? + return false unless ci_cd_settings + + ci_cd_settings.group_runners_enabled? 
end - alias_method :container_registry_enabled?, :container_registry_enabled private @@ -2654,28 +2669,28 @@ class Project < ApplicationRecord project_feature.update!(container_registry_access_level: access_level) end - def find_service(services, name) - services.find { |service| service.to_param == name } + def find_integration(integrations, name) + integrations.find { _1.to_param == name } end def build_from_instance_or_template(name) - instance = find_service(services_instances, name) + instance = find_integration(integration_instances, name) return Integration.build_from_integration(instance, project_id: id) if instance - template = find_service(services_templates, name) + template = find_integration(integration_templates, name) return Integration.build_from_integration(template, project_id: id) if template end - def build_service(name) + def build_integration(name) Integration.integration_name_to_model(name).new(project_id: id) end - def services_templates - @services_templates ||= Integration.for_template + def integration_templates + @integration_templates ||= Integration.for_template end - def services_instances - @services_instances ||= Integration.for_instance + def integration_instances + @integration_instances ||= Integration.for_instance end def closest_namespace_setting(name) diff --git a/app/models/project_ci_cd_setting.rb b/app/models/project_ci_cd_setting.rb index b025326c6f8..c0c2ea42d46 100644 --- a/app/models/project_ci_cd_setting.rb +++ b/app/models/project_ci_cd_setting.rb @@ -16,7 +16,6 @@ class ProjectCiCdSetting < ApplicationRecord allow_nil: true default_value_for :forward_deployment_enabled, true - default_value_for :job_token_scope_enabled, true def forward_deployment_enabled? super && ::Feature.enabled?(:forward_deployment_enabled, project, default_enabled: true) diff --git a/app/models/project_services/mock_monitoring_service.rb b/app/models/project_services/mock_monitoring_service.rb deleted file mode 100644 index 25ae0f6b60d..00000000000 --- a/app/models/project_services/mock_monitoring_service.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -class MockMonitoringService < MonitoringService - def title - 'Mock monitoring' - end - - def description - 'Mock monitoring service' - end - - def self.to_param - 'mock_monitoring' - end - - def metrics(environment) - Gitlab::Json.parse(File.read(Rails.root + 'spec/fixtures/metrics.json')) - end - - def can_test? - false - end -end diff --git a/app/models/project_services/monitoring_service.rb b/app/models/project_services/monitoring_service.rb deleted file mode 100644 index ea65a200027..00000000000 --- a/app/models/project_services/monitoring_service.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -# Base class for monitoring services -# -# These services integrate with a deployment solution like Prometheus -# to provide additional features for environments. -class MonitoringService < Integration - default_value_for :category, 'monitoring' - - def self.supported_events - %w() - end - - def can_query? 
- raise NotImplementedError - end - - def query(_, *_) - raise NotImplementedError - end -end diff --git a/app/models/project_services/prometheus_service.rb b/app/models/project_services/prometheus_service.rb deleted file mode 100644 index a289c1c2afb..00000000000 --- a/app/models/project_services/prometheus_service.rb +++ /dev/null @@ -1,203 +0,0 @@ -# frozen_string_literal: true - -class PrometheusService < MonitoringService - include PrometheusAdapter - - # Access to prometheus is directly through the API - prop_accessor :api_url - prop_accessor :google_iap_service_account_json - prop_accessor :google_iap_audience_client_id - boolean_accessor :manual_configuration - - # We need to allow the self-monitoring project to connect to the internal - # Prometheus instance. - # Since the internal Prometheus instance is usually a localhost URL, we need - # to allow localhost URLs when the following conditions are true: - # 1. project is the self-monitoring project. - # 2. api_url is the internal Prometheus URL. - with_options presence: true do - validates :api_url, public_url: true, if: ->(object) { object.manual_configuration? && !object.allow_local_api_url? } - validates :api_url, url: true, if: ->(object) { object.manual_configuration? && object.allow_local_api_url? } - end - - before_save :synchronize_service_state - - after_save :clear_reactive_cache! - - after_commit :track_events - - after_create_commit :create_default_alerts - - scope :preload_project, -> { preload(:project) } - scope :with_clusters_with_cilium, -> { joins(project: [:clusters]).merge(Clusters::Cluster.with_available_cilium) } - - def initialize_properties - if properties.nil? - self.properties = {} - end - end - - def show_active_box? - false - end - - def title - 'Prometheus' - end - - def description - s_('PrometheusService|Monitor application health with Prometheus metrics and dashboards') - end - - def self.to_param - 'prometheus' - end - - def fields - [ - { - type: 'checkbox', - name: 'manual_configuration', - title: s_('PrometheusService|Active'), - help: s_('PrometheusService|Select this checkbox to override the auto configuration settings with your own settings.'), - required: true - }, - { - type: 'text', - name: 'api_url', - title: 'API URL', - placeholder: s_('PrometheusService|https://prometheus.example.com/'), - help: s_('PrometheusService|The Prometheus API base URL.'), - required: true - }, - { - type: 'text', - name: 'google_iap_audience_client_id', - title: 'Google IAP Audience Client ID', - placeholder: s_('PrometheusService|IAP_CLIENT_ID.apps.googleusercontent.com'), - help: s_('PrometheusService|PrometheusService|The ID of the IAP-secured resource.'), - autocomplete: 'off', - required: false - }, - { - type: 'textarea', - name: 'google_iap_service_account_json', - title: 'Google IAP Service Account JSON', - placeholder: s_('PrometheusService|{ "type": "service_account", "project_id": ... }'), - help: s_('PrometheusService|The contents of the credentials.json file of your service account.'), - required: false - } - ] - end - - # Check we can connect to the Prometheus API - def test(*args) - prometheus_client.ping - { success: true, result: 'Checked API endpoint' } - rescue Gitlab::PrometheusClient::Error => err - { success: false, result: err } - end - - def prometheus_client - return unless should_return_client? - - options = prometheus_client_default_options.merge( - allow_local_requests: allow_local_api_url? - ) - - if behind_iap? 
- # Adds the Authorization header - options[:headers] = iap_client.apply({}) - end - - Gitlab::PrometheusClient.new(api_url, options) - end - - def prometheus_available? - return false if template? - return false unless project - - project.all_clusters.enabled.eager_load(:integration_prometheus).any? do |cluster| - cluster.integration_prometheus_available? - end - end - - def allow_local_api_url? - allow_local_requests_from_web_hooks_and_services? || - (self_monitoring_project? && internal_prometheus_url?) - end - - def configured? - should_return_client? - end - - private - - def self_monitoring_project? - project && project.id == current_settings.self_monitoring_project_id - end - - def internal_prometheus_url? - api_url.present? && api_url == ::Gitlab::Prometheus::Internal.uri - end - - def allow_local_requests_from_web_hooks_and_services? - current_settings.allow_local_requests_from_web_hooks_and_services? - end - - def should_return_client? - api_url.present? && manual_configuration? && active? && valid? - end - - def current_settings - Gitlab::CurrentSettings.current_application_settings - end - - def synchronize_service_state - self.active = prometheus_available? || manual_configuration? - - true - end - - def track_events - if enabled_manual_prometheus? - Gitlab::Tracking.event('cluster:services:prometheus', 'enabled_manual_prometheus') - elsif disabled_manual_prometheus? - Gitlab::Tracking.event('cluster:services:prometheus', 'disabled_manual_prometheus') - end - - true - end - - def enabled_manual_prometheus? - manual_configuration_changed? && manual_configuration? - end - - def disabled_manual_prometheus? - manual_configuration_changed? && !manual_configuration? - end - - def create_default_alerts - return unless project_id - - Prometheus::CreateDefaultAlertsWorker.perform_async(project_id) - end - - def behind_iap? - manual_configuration? && google_iap_audience_client_id.present? && google_iap_service_account_json.present? 
- end - - def clean_google_iap_service_account - return unless google_iap_service_account_json - - google_iap_service_account_json - .then { |json| Gitlab::Json.parse(json) } - .except('token_credential_uri') - end - - def iap_client - @iap_client ||= Google::Auth::Credentials - .new(clean_google_iap_service_account, target_audience: google_iap_audience_client_id) - .client - end -end diff --git a/app/models/project_team.rb b/app/models/project_team.rb index a85afada901..4586aa2b4b4 100644 --- a/app/models/project_team.rb +++ b/app/models/project_team.rb @@ -42,7 +42,7 @@ class ProjectTeam end def add_users(users, access_level, current_user: nil, expires_at: nil) - ProjectMember.add_users( + Members::Projects::CreatorService.add_users( # rubocop:todo CodeReuse/ServiceClass project, users, access_level, @@ -52,13 +52,12 @@ class ProjectTeam end def add_user(user, access_level, current_user: nil, expires_at: nil) - ProjectMember.add_user( - project, - user, - access_level, - current_user: current_user, - expires_at: expires_at - ) + Members::Projects::CreatorService.new(project, # rubocop:todo CodeReuse/ServiceClass + user, + access_level, + current_user: current_user, + expires_at: expires_at) + .execute end # Remove all users from project team diff --git a/app/models/repository_language.rb b/app/models/repository_language.rb index b7a96211fb1..2816aa4cc5b 100644 --- a/app/models/repository_language.rb +++ b/app/models/repository_language.rb @@ -8,6 +8,10 @@ class RepositoryLanguage < ApplicationRecord default_scope { includes(:programming_language) } # rubocop:disable Cop/DefaultScope + scope :with_programming_language, ->(name) do + joins(:programming_language).merge(ProgrammingLanguage.with_name_case_insensitive(name)) + end + validates :project, presence: true validates :share, inclusion: { in: 0..100, message: "The share of a language is between 0 and 100" } validates :programming_language, uniqueness: { scope: :project_id } diff --git a/app/models/service_desk_setting.rb b/app/models/service_desk_setting.rb index c5203354b9d..1c854cc9941 100644 --- a/app/models/service_desk_setting.rb +++ b/app/models/service_desk_setting.rb @@ -8,7 +8,10 @@ class ServiceDeskSetting < ApplicationRecord validate :valid_issue_template validate :valid_project_key validates :outgoing_name, length: { maximum: 255 }, allow_blank: true - validates :project_key, length: { maximum: 255 }, allow_blank: true, format: { with: /\A[a-z0-9_]+\z/ } + validates :project_key, + length: { maximum: 255 }, + allow_blank: true, + format: { with: /\A[a-z0-9_]+\z/, message: -> (setting, data) { _("can contain only lowercase letters, digits, and '_'.") } } scope :with_project_key, ->(key) { where(project_key: key) } diff --git a/app/models/timelog.rb b/app/models/timelog.rb index 96fd485b797..3f0e827cf61 100644 --- a/app/models/timelog.rb +++ b/app/models/timelog.rb @@ -6,6 +6,7 @@ class Timelog < ApplicationRecord before_save :set_project validates :time_spent, :user, presence: true + validates :summary, length: { maximum: 255 } validate :issuable_id_is_present, unless: :importing? 
belongs_to :issue, touch: true diff --git a/app/models/user.rb b/app/models/user.rb index ce702131151..80b8c9173d1 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -375,6 +375,10 @@ class User < ApplicationRecord Ci::DropPipelineService.new.execute_async_for_all(user.pipelines, :user_blocked, user) Ci::DisableUserPipelineSchedulesService.new.execute(user) end + + after_transition any => :deactivated do |user| + NotificationService.new.user_deactivated(user.name, user.notification_email) + end # rubocop: enable CodeReuse/ServiceClass end @@ -430,6 +434,7 @@ class User < ApplicationRecord scope :by_id_and_login, ->(id, login) { where(id: id).where('username = LOWER(:login) OR email = LOWER(:login)', login: login) } scope :dormant, -> { active.where('last_activity_on <= ?', MINIMUM_INACTIVE_DAYS.day.ago.to_date) } scope :with_no_activity, -> { active.where(last_activity_on: nil) } + scope :by_provider_and_extern_uid, ->(provider, extern_uid) { joins(:identities).merge(Identity.with_extern_uid(provider, extern_uid)) } def preferred_language read_attribute('preferred_language') || @@ -554,10 +559,6 @@ class User < ApplicationRecord end end - def for_github_id(id) - joins(:identities).merge(Identity.with_extern_uid(:github, id)) - end - # Find a User by their primary email or any associated secondary email def find_by_any_email(email, confirmed: false) return unless email @@ -809,6 +810,10 @@ class User < ApplicationRecord # Instance methods # + def default_dashboard? + dashboard == self.class.column_defaults['dashboard'] + end + def full_path username end @@ -1231,7 +1236,7 @@ class User < ApplicationRecord end def matches_identity?(provider, extern_uid) - identities.where(provider: provider, extern_uid: extern_uid).exists? + identities.with_extern_uid(provider, extern_uid).exists? end def project_deploy_keys @@ -1300,6 +1305,10 @@ class User < ApplicationRecord save if notification_email_changed? || public_email_changed? || commit_email_changed? end + def admin_unsubscribe! + update_column :admin_email_unsubscribed_at, Time.current + end + def set_projects_limit # `User.select(:id)` raises # `ActiveModel::MissingAttributeError: missing attribute: projects_limit` @@ -1882,9 +1891,11 @@ class User < ApplicationRecord end def password_expired_if_applicable? + return false if bot? + return false unless password_expired? && password_automatically_set? return false unless allow_password_authentication? - password_expired? + true end def can_be_deactivated? 
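The user.rb hunks above swap the GitHub-specific for_github_id class method for a generic by_provider_and_extern_uid scope and reuse Identity.with_extern_uid inside matches_identity?. A rough sketch of equivalent lookups under the new scope; the provider and extern_uid values below are placeholders:

    # Previously: User.for_github_id(github_id)
    user = User.by_provider_and_extern_uid(:github, github_id).first

    # The same scope covers any provider backed by the identities table.
    User.by_provider_and_extern_uid('google_oauth2', extern_uid).take

    # matches_identity? now goes through the same Identity.with_extern_uid query:
    user.matches_identity?('google_oauth2', extern_uid)   # => true or false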
diff --git a/app/models/user_callout.rb b/app/models/user_callout.rb index 2e8ff1b7b49..854992dcd1e 100644 --- a/app/models/user_callout.rb +++ b/app/models/user_callout.rb @@ -31,7 +31,11 @@ class UserCallout < ApplicationRecord pipeline_needs_banner: 29, pipeline_needs_hover_tip: 30, web_ide_ci_environments_guidance: 31, - security_configuration_upgrade_banner: 32 + security_configuration_upgrade_banner: 32, + cloud_licensing_subscription_activation_banner: 33, # EE-only + trial_status_reminder_d14: 34, # EE-only + trial_status_reminder_d3: 35, # EE-only + security_configuration_devops_alert: 36 # EE-only } validates :user, presence: true diff --git a/app/models/wiki.rb b/app/models/wiki.rb index 7fc01f373c8..e114e30d589 100644 --- a/app/models/wiki.rb +++ b/app/models/wiki.rb @@ -7,6 +7,8 @@ class Wiki include Gitlab::Utils::StrongMemoize include GlobalID::Identification + extend ActiveModel::Naming + MARKUPS = { # rubocop:disable Style/MultilineIfModifier 'Markdown' => :markdown, 'RDoc' => :rdoc, @@ -86,6 +88,7 @@ class Wiki def create_wiki_repository repository.create_if_not_exists + change_head_to_default_branch raise CouldNotCreateWikiError unless repository_exists? rescue StandardError => err @@ -172,6 +175,7 @@ class Wiki commit = commit_details(:created, message, title) wiki.write_page(title, format.to_sym, content, commit) + repository.expire_status_cache if repository.empty? after_wiki_activity true @@ -246,7 +250,7 @@ class Wiki override :default_branch def default_branch - wiki.class.default_ref + super || Gitlab::Git::Wiki.default_ref(container) end def wiki_base_path @@ -273,6 +277,19 @@ class Wiki @repository = nil end + def capture_git_error(action, &block) + yield block + rescue Gitlab::Git::Index::IndexError, + Gitlab::Git::CommitError, + Gitlab::Git::PreReceiveError, + Gitlab::Git::CommandError, + ArgumentError => error + + Gitlab::ErrorTracking.log_exception(error, action: action, wiki_id: id) + + false + end + private def multi_commit_options(action, message = nil, title = nil) @@ -306,17 +323,14 @@ class Wiki "#{user.username} #{action} page: #{title}" end - def capture_git_error(action, &block) - yield block - rescue Gitlab::Git::Index::IndexError, - Gitlab::Git::CommitError, - Gitlab::Git::PreReceiveError, - Gitlab::Git::CommandError, - ArgumentError => error - - Gitlab::ErrorTracking.log_exception(error, action: action, wiki_id: id) + def change_head_to_default_branch + # If the wiki has commits in the 'HEAD' branch means that the current + # HEAD is pointing to the right branch. If not, it could mean that either + # the repo has just been created or that 'HEAD' is pointing + # to the wrong branch and we need to rewrite it + return if repository.raw_repository.commit_count('HEAD') != 0 - false + repository.raw_repository.write_ref('HEAD', "refs/heads/#{default_branch}") end end diff --git a/app/models/wiki_page.rb b/app/models/wiki_page.rb index 9ae5a870323..25438581f2f 100644 --- a/app/models/wiki_page.rb +++ b/app/models/wiki_page.rb @@ -40,7 +40,6 @@ class WikiPage end validates :title, presence: true - validates :content, presence: true validate :validate_path_limits, if: :title_changed? validate :validate_content_size_limit, if: :content_changed? |
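The wiki.rb changes above make capture_git_error public and point a newly created wiki repository's HEAD at the default branch. A minimal sketch, assuming wiki is any Wiki instance, of how a caller might rely on the now-public error capture:

    # capture_git_error yields the block and returns its value; if one of the listed
    # Gitlab::Git errors is raised, it logs to error tracking and returns false instead.
    result = wiki.capture_git_error(:created) do
      # ... any operation that writes to the wiki's git repository ...
      true
    end

    # On repository creation, create_wiki_repository now also rewrites HEAD when the
    # repository has no commits yet:
    #   repository.raw_repository.write_ref('HEAD', "refs/heads/#{default_branch}")
    result   # => true, or false if a git error was captured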