Diffstat (limited to 'app/models')
113 files changed, 1507 insertions, 712 deletions
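Note on the app/models/application_record.rb hunk below: safe_find_or_create_by and safe_find_or_create_by! now forward a block through to find_or_create_by, so callers can initialize attributes on the record that gets built when no match exists. A minimal usage sketch follows; the model and attribute names are hypothetical and only illustrate the block-forwarding behavior shown in the diff:

    # The block is yielded the newly built record only when no matching row
    # is found, inside the safe_ensure_unique retry guard.
    SomeModel.safe_find_or_create_by(key: 'value') do |record|
      record.some_default = 'initialized on create' # hypothetical attribute
    end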
diff --git a/app/models/alert_management/alert.rb b/app/models/alert_management/alert.rb index fb166fb56b7..75581805b49 100644 --- a/app/models/alert_management/alert.rb +++ b/app/models/alert_management/alert.rb @@ -118,6 +118,7 @@ module AlertManagement end delegate :iid, to: :issue, prefix: true, allow_nil: true + delegate :metrics_dashboard_url, :runbook, :details_url, to: :present scope :for_iid, -> (iid) { where(iid: iid) } scope :for_status, -> (status) { where(status: status) } @@ -136,6 +137,7 @@ module AlertManagement # Descending sort order sorts severity from more critical to less critical. # https://gitlab.com/gitlab-org/gitlab/-/issues/221242#what-is-the-expected-correct-behavior scope :order_severity, -> (sort_order) { order(severity: sort_order == :asc ? :desc : :asc) } + scope :order_severity_with_open_prometheus_alert, -> { open.with_prometheus_alert.order(severity: :asc, started_at: :desc) } # Ascending sort order sorts statuses: Ignored > Resolved > Acknowledged > Triggered # Descending sort order sorts statuses: Triggered > Acknowledged > Resolved > Ignored diff --git a/app/models/application_record.rb b/app/models/application_record.rb index 9ec407a10a4..91b8bfedcbb 100644 --- a/app/models/application_record.rb +++ b/app/models/application_record.rb @@ -42,15 +42,15 @@ class ApplicationRecord < ActiveRecord::Base limit(count) end - def self.safe_find_or_create_by!(*args) - safe_find_or_create_by(*args).tap do |record| + def self.safe_find_or_create_by!(*args, &block) + safe_find_or_create_by(*args, &block).tap do |record| record.validate! unless record.persisted? end end - def self.safe_find_or_create_by(*args) + def self.safe_find_or_create_by(*args, &block) safe_ensure_unique(retries: 1) do - find_or_create_by(*args) + find_or_create_by(*args, &block) end end diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb index 25b81aef45f..661b10019ad 100644 --- a/app/models/application_setting.rb +++ b/app/models/application_setting.rb @@ -5,11 +5,14 @@ class ApplicationSetting < ApplicationRecord include CacheMarkdownField include TokenAuthenticatable include ChronicDurationAttribute + include IgnorableColumns + + ignore_column :namespace_storage_size_limit, remove_with: '13.5', remove_after: '2020-09-22' GRAFANA_URL_ERROR_MESSAGE = 'Please check your Grafana URL setting in ' \ 'Admin Area > Settings > Metrics and profiling > Metrics - Grafana' - add_authentication_token_field :runners_registration_token, encrypted: -> { Feature.enabled?(:application_settings_tokens_optional_encryption, default_enabled: true) ? :optional : :required } + add_authentication_token_field :runners_registration_token, encrypted: -> { Feature.enabled?(:application_settings_tokens_optional_encryption) ? 
:optional : :required } add_authentication_token_field :health_check_access_token add_authentication_token_field :static_objects_external_storage_auth_token @@ -272,6 +275,7 @@ class ApplicationSetting < ApplicationRecord numericality: { greater_than_or_equal_to: 0 } validates :snippet_size_limit, numericality: { only_integer: true, greater_than: 0 } + validates :wiki_page_max_content_bytes, numericality: { only_integer: true, greater_than_or_equal_to: 1.kilobytes } validates :email_restrictions, untrusted_regexp: true @@ -362,10 +366,6 @@ class ApplicationSetting < ApplicationRecord length: { maximum: 255 }, allow_blank: true - validates :namespace_storage_size_limit, - presence: true, - numericality: { only_integer: true, greater_than_or_equal_to: 0 } - validates :issues_create_limit, numericality: { only_integer: true, greater_than_or_equal_to: 0 } diff --git a/app/models/application_setting_implementation.rb b/app/models/application_setting_implementation.rb index 73554ee8457..8bdb80a65b1 100644 --- a/app/models/application_setting_implementation.rb +++ b/app/models/application_setting_implementation.rb @@ -37,8 +37,8 @@ module ApplicationSettingImplementation { after_sign_up_text: nil, akismet_enabled: false, - allow_local_requests_from_web_hooks_and_services: false, allow_local_requests_from_system_hooks: true, + allow_local_requests_from_web_hooks_and_services: false, asset_proxy_enabled: false, authorized_keys_enabled: true, # TODO default to false if the instance is configured to use AuthorizedKeysCommand commit_email_hostname: default_commit_email_hostname, @@ -47,10 +47,11 @@ module ApplicationSettingImplementation container_registry_token_expire_delay: 5, container_registry_vendor: '', container_registry_version: '', + custom_http_clone_url_root: nil, default_artifacts_expire_in: '30 days', + default_branch_name: nil, default_branch_protection: Settings.gitlab['default_branch_protection'], default_ci_config_path: nil, - default_branch_name: nil, default_group_visibility: Settings.gitlab.default_projects_features['visibility_level'], default_project_creation: Settings.gitlab['default_project_creation'], default_project_visibility: Settings.gitlab.default_projects_features['visibility_level'], @@ -63,9 +64,9 @@ module ApplicationSettingImplementation dsa_key_restriction: 0, ecdsa_key_restriction: 0, ed25519_key_restriction: 0, - eks_integration_enabled: false, - eks_account_id: nil, eks_access_key_id: nil, + eks_account_id: nil, + eks_integration_enabled: false, eks_secret_access_key: nil, email_restrictions_enabled: false, email_restrictions: nil, @@ -74,6 +75,9 @@ module ApplicationSettingImplementation gitaly_timeout_fast: 10, gitaly_timeout_medium: 30, gravatar_enabled: Settings.gravatar['enabled'], + group_download_export_limit: 1, + group_export_limit: 6, + group_import_limit: 6, help_page_hide_commercial_content: false, help_page_text: nil, hide_third_party_offers: false, @@ -83,46 +87,57 @@ module ApplicationSettingImplementation housekeeping_gc_period: 200, housekeeping_incremental_repack_period: 10, import_sources: Settings.gitlab['import_sources'], + instance_statistics_visibility_private: false, issues_create_limit: 300, local_markdown_version: 0, + login_recaptcha_protection_enabled: false, max_artifacts_size: Settings.artifacts['max_size'], max_attachment_size: Settings.gitlab['max_attachment_size'], max_import_size: 50, + minimum_password_length: DEFAULT_MINIMUM_PASSWORD_LENGTH, mirror_available: true, notify_on_unknown_sign_in: true, 
outbound_local_requests_whitelist: [], password_authentication_enabled_for_git: true, password_authentication_enabled_for_web: Settings.gitlab['signin_enabled'], performance_bar_allowed_group_id: nil, - rsa_key_restriction: 0, plantuml_enabled: false, plantuml_url: nil, polling_interval_multiplier: 1, + productivity_analytics_start_date: Time.current, + project_download_export_limit: 1, project_export_enabled: true, + project_export_limit: 6, + project_import_limit: 6, protected_ci_variables: true, - push_event_hooks_limit: 3, + protected_paths: DEFAULT_PROTECTED_PATHS, push_event_activities_limit: 3, + push_event_hooks_limit: 3, raw_blob_request_limit: 300, recaptcha_enabled: false, - login_recaptcha_protection_enabled: false, repository_checks_enabled: true, - repository_storages: ['default'], repository_storages_weighted: { default: 100 }, + repository_storages: ['default'], require_two_factor_authentication: false, restricted_visibility_levels: Settings.gitlab['restricted_visibility_levels'], - session_expire_delay: Settings.gitlab['session_expire_delay'], + rsa_key_restriction: 0, send_user_confirmation_email: false, + session_expire_delay: Settings.gitlab['session_expire_delay'], shared_runners_enabled: Settings.gitlab_ci['shared_runners_enabled'], shared_runners_text: nil, sign_in_text: nil, signup_enabled: Settings.gitlab['signup_enabled'], + snippet_size_limit: 50.megabytes, + snowplow_app_id: nil, + snowplow_collector_hostname: nil, + snowplow_cookie_domain: nil, + snowplow_enabled: false, + snowplow_iglu_registry_url: nil, sourcegraph_enabled: false, - sourcegraph_url: nil, sourcegraph_public_only: true, + sourcegraph_url: nil, spam_check_endpoint_enabled: false, spam_check_endpoint_url: nil, - minimum_password_length: DEFAULT_MINIMUM_PASSWORD_LENGTH, - namespace_storage_size_limit: 0, terminal_max_session_time: 0, throttle_authenticated_api_enabled: false, throttle_authenticated_api_period_in_seconds: 3600, @@ -130,41 +145,26 @@ module ApplicationSettingImplementation throttle_authenticated_web_enabled: false, throttle_authenticated_web_period_in_seconds: 3600, throttle_authenticated_web_requests_per_period: 7200, - throttle_unauthenticated_enabled: false, - throttle_unauthenticated_period_in_seconds: 3600, - throttle_unauthenticated_requests_per_period: 3600, + throttle_incident_management_notification_enabled: false, + throttle_incident_management_notification_per_period: 3600, + throttle_incident_management_notification_period_in_seconds: 3600, throttle_protected_paths_enabled: false, throttle_protected_paths_in_seconds: 10, throttle_protected_paths_per_period: 60, - protected_paths: DEFAULT_PROTECTED_PATHS, - throttle_incident_management_notification_enabled: false, - throttle_incident_management_notification_period_in_seconds: 3600, - throttle_incident_management_notification_per_period: 3600, + throttle_unauthenticated_enabled: false, + throttle_unauthenticated_period_in_seconds: 3600, + throttle_unauthenticated_requests_per_period: 3600, time_tracking_limit_to_hours: false, two_factor_grace_period: 48, unique_ips_limit_enabled: false, unique_ips_limit_per_user: 10, unique_ips_limit_time_window: 3600, usage_ping_enabled: Settings.gitlab['usage_ping_enabled'], - instance_statistics_visibility_private: false, + usage_stats_set_by_user_id: nil, user_default_external: false, user_default_internal_regex: nil, user_show_add_ssh_key_message: true, - usage_stats_set_by_user_id: nil, - snowplow_collector_hostname: nil, - snowplow_cookie_domain: nil, - snowplow_enabled: false, - 
snowplow_app_id: nil, - snowplow_iglu_registry_url: nil, - custom_http_clone_url_root: nil, - productivity_analytics_start_date: Time.current, - snippet_size_limit: 50.megabytes, - project_import_limit: 6, - project_export_limit: 6, - project_download_export_limit: 1, - group_import_limit: 6, - group_export_limit: 6, - group_download_export_limit: 1 + wiki_page_max_content_bytes: 50.megabytes } end diff --git a/app/models/audit_event.rb b/app/models/audit_event.rb index 13fc2514f0c..e7cfa30a892 100644 --- a/app/models/audit_event.rb +++ b/app/models/audit_event.rb @@ -5,7 +5,7 @@ class AuditEvent < ApplicationRecord include IgnorableColumns include BulkInsertSafe - PARALLEL_PERSISTENCE_COLUMNS = [:author_name, :entity_path].freeze + PARALLEL_PERSISTENCE_COLUMNS = [:author_name, :entity_path, :target_details].freeze ignore_column :updated_at, remove_with: '13.4', remove_after: '2020-09-22' @@ -58,6 +58,12 @@ class AuditEvent < ApplicationRecord end end + def as_json(options = {}) + super(options).tap do |json| + json['ip_address'] = self.ip_address.to_s + end + end + private def default_author_value diff --git a/app/models/audit_event_partitioned.rb b/app/models/audit_event_partitioned.rb new file mode 100644 index 00000000000..672daebd14a --- /dev/null +++ b/app/models/audit_event_partitioned.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +# This model is not yet intended to be used. +# It is in a transitioning phase while we are partitioning +# the table on the database-side. +# Please refer to https://gitlab.com/groups/gitlab-org/-/epics/3206 +# for details. +class AuditEventPartitioned < ApplicationRecord + include PartitionedTable + + self.table_name = 'audit_events_part_5fc467ac26' + + partitioned_by :created_at, strategy: :monthly +end diff --git a/app/models/blob.rb b/app/models/blob.rb index 874bf58530e..8a9db8b45ea 100644 --- a/app/models/blob.rb +++ b/app/models/blob.rb @@ -6,6 +6,8 @@ class Blob < SimpleDelegator include BlobLanguageFromGitAttributes include BlobActiveModel + MODE_SYMLINK = '120000' # The STRING 120000 is the git-reported octal filemode for a symlink + CACHE_TIME = 60 # Cache raw blobs referred to by a (mutable) ref for 1 minute CACHE_TIME_IMMUTABLE = 3600 # Cache blobs referred to by an immutable reference for 1 hour diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb index 6c90645e997..af4e6bb0494 100644 --- a/app/models/ci/build.rb +++ b/app/models/ci/build.rb @@ -73,8 +73,7 @@ module Ci return unless has_environment? strong_memoize(:persisted_environment) do - deployment&.environment || - Environment.find_by(name: expanded_environment_name, project: project) + Environment.find_by(name: expanded_environment_name, project: project) end end @@ -351,7 +350,7 @@ module Ci after_transition any => [:failed] do |build| next unless build.project - if build.retry_failure? + if build.auto_retry_allowed? begin Ci::Build.retry(build, build.user) rescue Gitlab::Access::AccessDeniedError => ex @@ -373,6 +372,10 @@ module Ci end end + def auto_retry_allowed? + auto_retry.allowed? + end + def detailed_status(current_user) Gitlab::Ci::Status::Build::Factory .new(self, current_user) @@ -439,27 +442,6 @@ module Ci pipeline.builds.retried.where(name: self.name).count end - def retry_failure? - max_allowed_retries = nil - max_allowed_retries ||= options_retry_max if retry_on_reason_or_always? 
- max_allowed_retries ||= DEFAULT_RETRIES.fetch(failure_reason.to_sym, 0) - - max_allowed_retries > 0 && retries_count < max_allowed_retries - end - - def options_retry_max - options_retry[:max] - end - - def options_retry_when - options_retry.fetch(:when, ['always']) - end - - def retry_on_reason_or_always? - options_retry_when.include?(failure_reason.to_s) || - options_retry_when.include?('always') - end - def any_unmet_prerequisites? prerequisites.present? end @@ -474,8 +456,7 @@ module Ci strong_memoize(:expanded_environment_name) do # We're using a persisted expanded environment name in order to avoid # variable expansion per request. - if Feature.enabled?(:ci_persisted_expanded_environment_name, project, default_enabled: true) && - metadata&.expanded_environment_name.present? + if metadata&.expanded_environment_name.present? metadata.expanded_environment_name else ExpandVariables.expand(environment, -> { simple_variables }) @@ -543,8 +524,6 @@ module Ci end end - CI_REGISTRY_USER = 'gitlab-ci-token' - def persisted_variables Gitlab::Ci::Variables::Collection.new.tap do |variables| break variables unless persisted? @@ -556,7 +535,7 @@ module Ci .append(key: 'CI_JOB_TOKEN', value: token.to_s, public: false, masked: true) .append(key: 'CI_BUILD_ID', value: id.to_s) .append(key: 'CI_BUILD_TOKEN', value: token.to_s, public: false, masked: true) - .append(key: 'CI_REGISTRY_USER', value: CI_REGISTRY_USER) + .append(key: 'CI_REGISTRY_USER', value: ::Gitlab::Auth::CI_JOB_USER) .append(key: 'CI_REGISTRY_PASSWORD', value: token.to_s, public: false, masked: true) .append(key: 'CI_REPOSITORY_URL', value: repo_url.to_s, public: false) .concat(deploy_token_variables) @@ -615,7 +594,7 @@ module Ci def repo_url return unless token - auth = "gitlab-ci-token:#{token}@" + auth = "#{::Gitlab::Auth::CI_JOB_USER}:#{token}@" project.http_url_to_repo.sub(%r{^https?://}) do |prefix| prefix + auth end @@ -668,6 +647,13 @@ module Ci !artifacts_expired? && artifacts_file&.exists? end + # This method is similar to #artifacts? but it includes the artifacts + # locking mechanics. A new method was created to prevent breaking existing + # behavior and avoid introducing N+1s. + def available_artifacts? + (!artifacts_expired? || pipeline.artifacts_locked?) && job_artifacts_archive&.exists? + end + def artifacts_metadata? artifacts? && artifacts_metadata&.exists? end @@ -878,8 +864,7 @@ module Ci end def multi_build_steps? - options.dig(:release)&.any? && - Gitlab::Ci::Features.release_generation_enabled? + options.dig(:release)&.any? end def hide_secrets(trace) @@ -962,6 +947,12 @@ module Ci private + def auto_retry + strong_memoize(:auto_retry) do + Gitlab::Ci::Build::AutoRetry.new(self) + end + end + def dependencies strong_memoize(:dependencies) do Ci::BuildDependencies.new(self) @@ -1017,19 +1008,6 @@ module Ci end end - # The format of the retry option changed in GitLab 11.5: Before it was - # integer only, after it is a hash. New builds are created with the new - # format, but builds created before GitLab 11.5 and saved in database still - # have the old integer only format. This method returns the retry option - # normalized as a hash in 11.5+ format. - def options_retry - strong_memoize(:options_retry) do - value = options&.dig(:retry) - value = value.is_a?(Integer) ? { max: value } : value.to_h - value.with_indifferent_access - end - end - def has_expiring_artifacts? artifacts_expire_at.present? 
&& artifacts_expire_at > Time.current end diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 0a7a0e0772b..407802baf09 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -75,18 +75,16 @@ module Ci def append(new_data, offset) raise ArgumentError, 'New data is missing' unless new_data - raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 + raise ArgumentError, 'Offset is out of range' if offset < 0 || offset > size raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize) - in_lock(*lock_params) do # Write operation is atomic - unsafe_set_data!(data.byteslice(0, offset) + new_data) - end + in_lock(*lock_params) { unsafe_append_data!(new_data, offset) } schedule_to_persist if full? end def size - data&.bytesize.to_i + @size ||= current_store.size(self) || data&.bytesize end def start_offset @@ -118,7 +116,7 @@ module Ci raise FailedToPersistDataError, 'Data is not fulfilled in a bucket' end - old_store_class = self.class.get_store_class(data_store) + old_store_class = current_store self.raw_data = nil self.data_store = new_store @@ -128,16 +126,33 @@ module Ci end def get_data - self.class.get_store_class(data_store).data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default - rescue Excon::Error::NotFound - # If the data store is :fog and the file does not exist in the object storage, this method returns nil. + current_store.data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default end def unsafe_set_data!(value) raise ArgumentError, 'New data size exceeds chunk size' if value.bytesize > CHUNK_SIZE - self.class.get_store_class(data_store).set_data(self, value) + current_store.set_data(self, value) + @data = value + @size = value.bytesize + + save! if changed? + end + + def unsafe_append_data!(value, offset) + new_size = value.bytesize + offset + + if new_size > CHUNK_SIZE + raise ArgumentError, 'New data size exceeds chunk size' + end + + current_store.append_data(self, value, offset).then do |stored| + raise ArgumentError, 'Trace appended incorrectly' if stored != new_size + end + + @data = nil + @size = new_size save! if changed? 
end @@ -156,6 +171,10 @@ module Ci size == CHUNK_SIZE end + def current_store + self.class.get_store_class(data_store) + end + def lock_params ["trace_write:#{build_id}:chunks:#{chunk_index}", { ttl: WRITE_LOCK_TTL, diff --git a/app/models/ci/build_trace_chunks/database.rb b/app/models/ci/build_trace_chunks/database.rb index 73cb8abf381..3b8e23510d9 100644 --- a/app/models/ci/build_trace_chunks/database.rb +++ b/app/models/ci/build_trace_chunks/database.rb @@ -19,8 +19,22 @@ module Ci model.raw_data end - def set_data(model, data) - model.raw_data = data + def set_data(model, new_data) + model.raw_data = new_data + end + + def append_data(model, new_data, offset) + if offset > 0 + truncated_data = data(model).to_s.byteslice(0, offset) + new_data = truncated_data + new_data + end + + model.raw_data = new_data + model.raw_data.to_s.bytesize + end + + def size(model) + data(model).to_s.bytesize end def delete_data(model) diff --git a/app/models/ci/build_trace_chunks/fog.rb b/app/models/ci/build_trace_chunks/fog.rb index a849bd08427..b1e9fd1faeb 100644 --- a/app/models/ci/build_trace_chunks/fog.rb +++ b/app/models/ci/build_trace_chunks/fog.rb @@ -9,10 +9,26 @@ module Ci def data(model) connection.get_object(bucket_name, key(model))[:body] + rescue Excon::Error::NotFound + # If the object does not exist in the object storage, this method returns nil. end - def set_data(model, data) - connection.put_object(bucket_name, key(model), data) + def set_data(model, new_data) + connection.put_object(bucket_name, key(model), new_data) + end + + def append_data(model, new_data, offset) + if offset > 0 + truncated_data = data(model).to_s.byteslice(0, offset) + new_data = truncated_data + new_data + end + + set_data(model, new_data) + new_data.bytesize + end + + def size(model) + data(model).to_s.bytesize end def delete_data(model) diff --git a/app/models/ci/build_trace_chunks/redis.rb b/app/models/ci/build_trace_chunks/redis.rb index c3864f78b01..0ae563f6ce8 100644 --- a/app/models/ci/build_trace_chunks/redis.rb +++ b/app/models/ci/build_trace_chunks/redis.rb @@ -4,6 +4,32 @@ module Ci module BuildTraceChunks class Redis CHUNK_REDIS_TTL = 1.week + LUA_APPEND_CHUNK = <<~EOS.freeze + local key, new_data, offset = KEYS[1], ARGV[1], ARGV[2] + local length = new_data:len() + local expire = #{CHUNK_REDIS_TTL.seconds} + local current_size = redis.call("strlen", key) + offset = tonumber(offset) + + if offset == 0 then + -- overwrite everything + redis.call("set", key, new_data, "ex", expire) + return redis.call("strlen", key) + elseif offset > current_size then + -- offset range violation + return -1 + elseif offset + length >= current_size then + -- efficiently append or overwrite and append + redis.call("expire", key, expire) + return redis.call("setrange", key, offset, new_data) + else + -- append and truncate + local current_data = redis.call("get", key) + new_data = current_data:sub(1, offset) .. new_data + redis.call("set", key, new_data, "ex", expire) + return redis.call("strlen", key) + end + EOS def available? 
true @@ -21,6 +47,18 @@ module Ci end end + def append_data(model, new_data, offset) + Gitlab::Redis::SharedState.with do |redis| + redis.eval(LUA_APPEND_CHUNK, keys: [key(model)], argv: [new_data, offset]) + end + end + + def size(model) + Gitlab::Redis::SharedState.with do |redis| + redis.strlen(key(model)) + end + end + def delete_data(model) delete_keys([[model.build_id, model.chunk_index]]) end diff --git a/app/models/ci/group.rb b/app/models/ci/group.rb index 779c6c0396f..f0c035635b9 100644 --- a/app/models/ci/group.rb +++ b/app/models/ci/group.rb @@ -24,15 +24,9 @@ module Ci def status strong_memoize(:status) do - if ::Gitlab::Ci::Features.composite_status?(project) - Gitlab::Ci::Status::Composite - .new(@jobs) - .status - else - CommitStatus - .where(id: @jobs) - .legacy_status - end + Gitlab::Ci::Status::Composite + .new(@jobs) + .status end end diff --git a/app/models/ci/instance_variable.rb b/app/models/ci/instance_variable.rb index 628749b32cb..e083caa8751 100644 --- a/app/models/ci/instance_variable.rb +++ b/app/models/ci/instance_variable.rb @@ -14,12 +14,14 @@ module Ci alias_attribute :secret_value, :value validates :key, uniqueness: { - message: "(%{value}) has already been taken" + message: -> (object, data) { _("(%{value}) has already been taken") } } - validates :encrypted_value, length: { - maximum: 1024, - too_long: 'The encrypted value of the provided variable exceeds %{count} bytes. Variables over 700 characters risk exceeding the limit.' + validates :value, length: { + maximum: 10_000, + too_long: -> (object, data) do + _('The value of the provided variable exceeds the %{count} character limit') + end } scope :unprotected, -> { where(protected: false) } diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb index dbeba1ece31..75c3ce98c95 100644 --- a/app/models/ci/job_artifact.rb +++ b/app/models/ci/job_artifact.rb @@ -8,6 +8,8 @@ module Ci include UsageStatistics include Sortable include IgnorableColumns + include Artifactable + include FileStoreMounter extend Gitlab::Ci::Model NotSupportedAdapterError = Class.new(StandardError) @@ -114,7 +116,7 @@ module Ci belongs_to :project belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id - mount_uploader :file, JobArtifactUploader + mount_file_store_uploader JobArtifactUploader validates :file_format, presence: true, unless: :trace?, on: :create validate :validate_supported_file_format!, on: :create @@ -123,8 +125,6 @@ module Ci update_project_statistics project_statistics_name: :build_artifacts_size - after_save :update_file_store, if: :saved_change_to_file? - scope :not_expired, -> { where('expire_at IS NULL OR expire_at > ?', Time.current) } scope :with_files_stored_locally, -> { where(file_store: ::JobArtifactUploader::Store::LOCAL) } scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) } @@ -200,12 +200,6 @@ module Ci load_performance: 25 ## EE-specific } - enum file_format: { - raw: 1, - zip: 2, - gzip: 3 - }, _suffix: true - # `file_location` indicates where actual files are stored. # Ideally, actual files should be stored in the same directory, and use the same # convention to generate its path. However, sometimes we can't do so due to backward-compatibility. @@ -220,11 +214,6 @@ module Ci hashed_path: 2 } - FILE_FORMAT_ADAPTERS = { - gzip: Gitlab::Ci::Build::Artifacts::Adapters::GzipStream, - raw: Gitlab::Ci::Build::Artifacts::Adapters::RawStream - }.freeze - def validate_supported_file_format! 
return if Feature.disabled?(:drop_license_management_artifact, project, default_enabled: true) @@ -239,12 +228,6 @@ module Ci end end - def update_file_store - # The file.object_store is set during `uploader.store!` - # which happens after object is inserted/updated - self.update_column(:file_store, file.object_store) - end - def self.associated_file_types_for(file_type) return unless file_types.include?(file_type) @@ -284,7 +267,7 @@ module Ci def expire_in=(value) self.expire_at = if value - ChronicDuration.parse(value)&.seconds&.from_now + ::Gitlab::Ci::Build::Artifacts::ExpireInParser.new(value).seconds_from_now end end @@ -303,16 +286,12 @@ module Ci end def self.max_artifact_size(type:, project:) - max_size = if Feature.enabled?(:ci_max_artifact_size_per_type, project, default_enabled: false) - limit_name = "#{PLAN_LIMIT_PREFIX}#{type}" - - project.actual_limits.limit_for( - limit_name, - alternate_limit: -> { project.closest_setting(:max_artifacts_size) } - ) - else - project.closest_setting(:max_artifacts_size) - end + limit_name = "#{PLAN_LIMIT_PREFIX}#{type}" + + max_size = project.actual_limits.limit_for( + limit_name, + alternate_limit: -> { project.closest_setting(:max_artifacts_size) } + ) max_size&.megabytes.to_i end diff --git a/app/models/ci/legacy_stage.rb b/app/models/ci/legacy_stage.rb index 250306e2be4..df4368eccd5 100644 --- a/app/models/ci/legacy_stage.rb +++ b/app/models/ci/legacy_stage.rb @@ -32,7 +32,7 @@ module Ci end def status - @status ||= statuses.latest.slow_composite_status(project: project) + @status ||= statuses.latest.composite_status end def detailed_status(current_user) diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb index d4b439d648f..7762328d274 100644 --- a/app/models/ci/pipeline.rb +++ b/app/models/ci/pipeline.rb @@ -57,12 +57,12 @@ module Ci # the merge request's latest commit. 
has_many :merge_requests_as_head_pipeline, foreign_key: "head_pipeline_id", class_name: 'MergeRequest' - has_many :pending_builds, -> { pending }, foreign_key: :commit_id, class_name: 'Ci::Build' + has_many :pending_builds, -> { pending }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline has_many :failed_builds, -> { latest.failed }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline - has_many :retryable_builds, -> { latest.failed_or_canceled.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build' + has_many :retryable_builds, -> { latest.failed_or_canceled.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline has_many :cancelable_statuses, -> { cancelable }, foreign_key: :commit_id, class_name: 'CommitStatus' - has_many :manual_actions, -> { latest.manual_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build' - has_many :scheduled_actions, -> { latest.scheduled_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build' + has_many :manual_actions, -> { latest.manual_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline + has_many :scheduled_actions, -> { latest.scheduled_actions.includes(:project) }, foreign_key: :commit_id, class_name: 'Ci::Build', inverse_of: :pipeline has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id' has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id' @@ -83,6 +83,7 @@ module Ci has_many :daily_build_group_report_results, class_name: 'Ci::DailyBuildGroupReportResult', foreign_key: :last_pipeline_id has_many :latest_builds_report_results, through: :latest_builds, source: :report_results + has_many :pipeline_artifacts, class_name: 'Ci::PipelineArtifact', inverse_of: :pipeline, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent accepts_nested_attributes_for :variables, reject_if: :persisted? @@ -249,14 +250,6 @@ module Ci pipeline.run_after_commit { AutoDevops::DisableWorker.perform_async(pipeline.id) } end - - after_transition any => [:success] do |pipeline| - next unless Gitlab::Ci::Features.keep_latest_artifacts_for_ref_enabled?(pipeline.project) - - pipeline.run_after_commit do - Ci::PipelineSuccessUnlockArtifactsWorker.perform_async(pipeline.id) - end - end end scope :internal, -> { where(source: internal_sources) } @@ -416,7 +409,7 @@ module Ci def legacy_stage(name) stage = Ci::LegacyStage.new(self, name: name) - stage unless stage.statuses_count.zero? + stage unless stage.statuses_count == 0 end def ref_exists? @@ -425,40 +418,6 @@ module Ci false end - def ordered_stages - if ::Gitlab::Ci::Features.atomic_processing?(project) - # The `Ci::Stage` contains all up-to date data - # as atomic processing updates all data in-bulk - stages - elsif complete? - # The `Ci::Stage` contains up-to date data only for `completed` pipelines - # this is due to asynchronous processing of pipeline, and stages possibly - # not updated inline with processing of pipeline - stages - else - # In other cases, we need to calculate stages dynamically - legacy_stages - end - end - - def legacy_stages_using_sql - # TODO, this needs refactoring, see gitlab-foss#26481. 
- stages_query = statuses - .group('stage').select(:stage).order('max(stage_idx)') - - status_sql = statuses.latest.where('stage=sg.stage').legacy_status_sql - - warnings_sql = statuses.latest.select('COUNT(*)') - .where('stage=sg.stage').failed_but_allowed.to_sql - - stages_with_statuses = CommitStatus.from(stages_query, :sg) - .pluck('sg.stage', Arel.sql(status_sql), Arel.sql("(#{warnings_sql})")) - - stages_with_statuses.map do |stage| - Ci::LegacyStage.new(self, Hash[%i[name status warnings].zip(stage)]) - end - end - def legacy_stages_using_composite_status stages = latest_statuses_ordered_by_stage.group_by(&:stage) @@ -477,12 +436,9 @@ module Ci triggered_pipelines.preload(:source_job) end + # TODO: Remove usage of this method in templates def legacy_stages - if ::Gitlab::Ci::Features.composite_status?(project) - legacy_stages_using_composite_status - else - legacy_stages_using_sql - end + legacy_stages_using_composite_status end def valid_commit_sha @@ -665,7 +621,7 @@ module Ci end def has_warnings? - number_of_warnings.positive? + number_of_warnings > 0 end def number_of_warnings @@ -755,10 +711,6 @@ module Ci end end - def update_legacy_status - set_status(latest_builds_status.to_s) - end - def protected_ref? strong_memoize(:protected_ref) { project.protected_for?(git_ref) } end @@ -828,7 +780,7 @@ module Ci return unless started_at seconds = (started_at - created_at).to_i - seconds unless seconds.zero? + seconds unless seconds == 0 end def update_duration @@ -922,12 +874,6 @@ module Ci end end - def test_reports_count - Rails.cache.fetch(['project', project.id, 'pipeline', id, 'test_reports_count'], force: false) do - test_reports.total_count - end - end - def accessibility_reports Gitlab::Ci::Reports::AccessibilityReports.new.tap do |accessibility_reports| builds.latest.with_reports(Ci::JobArtifact.accessibility_reports).each do |build| @@ -1061,10 +1007,6 @@ module Ci @persistent_ref ||= PersistentRef.new(pipeline: self) end - def find_successful_build_ids_by_names(names) - statuses.latest.success.where(name: names).pluck(:id) - end - def cacheable? Ci::PipelineEnums.ci_config_sources.key?(config_source.to_sym) end @@ -1084,8 +1026,6 @@ module Ci end def ensure_ci_ref! - return unless Gitlab::Ci::Features.pipeline_fixed_notifications? - self.ci_ref = Ci::Ref.ensure_for(self) end @@ -1123,12 +1063,6 @@ module Ci end end - def latest_builds_status - return 'failed' unless yaml_errors.blank? 
- - statuses.latest.slow_composite_status(project: project) || 'skipped' - end - def keep_around_commits return unless project diff --git a/app/models/ci/pipeline_artifact.rb b/app/models/ci/pipeline_artifact.rb new file mode 100644 index 00000000000..e7f51977ccd --- /dev/null +++ b/app/models/ci/pipeline_artifact.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +# This class is being used to persist additional artifacts after a pipeline completes, which is a great place to cache a computed result in object storage + +module Ci + class PipelineArtifact < ApplicationRecord + extend Gitlab::Ci::Model + include Artifactable + include FileStoreMounter + + FILE_STORE_SUPPORTED = [ + ObjectStorage::Store::LOCAL, + ObjectStorage::Store::REMOTE + ].freeze + + FILE_SIZE_LIMIT = 10.megabytes.freeze + + belongs_to :project, class_name: "Project", inverse_of: :pipeline_artifacts + belongs_to :pipeline, class_name: "Ci::Pipeline", inverse_of: :pipeline_artifacts + + validates :pipeline, :project, :file_format, :file, presence: true + validates :file_store, presence: true, inclusion: { in: FILE_STORE_SUPPORTED } + validates :size, presence: true, numericality: { less_than_or_equal_to: FILE_SIZE_LIMIT } + validates :file_type, presence: true + + mount_file_store_uploader Ci::PipelineArtifactUploader + before_save :set_size, if: :file_changed? + + enum file_type: { + code_coverage: 1 + } + + def set_size + self.size = file.size + end + end +end diff --git a/app/models/ci/pipeline_enums.rb b/app/models/ci/pipeline_enums.rb index 352dc56aac7..9d108ff0fa4 100644 --- a/app/models/ci/pipeline_enums.rb +++ b/app/models/ci/pipeline_enums.rb @@ -63,6 +63,10 @@ module Ci def self.ci_config_sources_values ci_config_sources.values end + + def self.non_ci_config_source_values + config_sources.values - ci_config_sources.values + end end end diff --git a/app/models/ci/ref.rb b/app/models/ci/ref.rb index 29b44575d65..3d8823728e7 100644 --- a/app/models/ci/ref.rb +++ b/app/models/ci/ref.rb @@ -3,6 +3,7 @@ module Ci class Ref < ApplicationRecord extend Gitlab::Ci::Model + include AfterCommitQueue include Gitlab::OptimisticLocking FAILING_STATUSES = %w[failed broken still_failing].freeze @@ -15,6 +16,7 @@ module Ci transition unknown: :success transition fixed: :success transition %i[failed broken still_failing] => :fixed + transition success: same end event :do_fail do @@ -29,6 +31,14 @@ module Ci state :fixed, value: 3 state :broken, value: 4 state :still_failing, value: 5 + + after_transition any => [:fixed, :success] do |ci_ref| + next unless ::Gitlab::Ci::Features.keep_latest_artifacts_for_ref_enabled?(ci_ref.project) + + ci_ref.run_after_commit do + Ci::PipelineSuccessUnlockArtifactsWorker.perform_async(ci_ref.last_finished_pipeline_id) + end + end end class << self @@ -47,8 +57,6 @@ module Ci end def update_status_by!(pipeline) - return unless Gitlab::Ci::Features.pipeline_fixed_notifications? - retry_lock(self) do next unless last_finished_pipeline_id == pipeline.id diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb index 1cd6c64841b..00ee45740bd 100644 --- a/app/models/ci/runner.rb +++ b/app/models/ci/runner.rb @@ -10,7 +10,7 @@ module Ci include TokenAuthenticatable include IgnorableColumns - add_authentication_token_field :token, encrypted: -> { Feature.enabled?(:ci_runners_tokens_optional_encryption, default_enabled: true) ? :optional : :required } + add_authentication_token_field :token, encrypted: -> { Feature.enabled?(:ci_runners_tokens_optional_encryption) ? 
:optional : :required } enum access_level: { not_protected: 0, diff --git a/app/models/ci/stage.rb b/app/models/ci/stage.rb index 41215601704..cc6bd1870b9 100644 --- a/app/models/ci/stage.rb +++ b/app/models/ci/stage.rb @@ -113,7 +113,7 @@ module Ci end def has_warnings? - number_of_warnings.positive? + number_of_warnings > 0 end def number_of_warnings @@ -138,7 +138,7 @@ module Ci end def latest_stage_status - statuses.latest.slow_composite_status(project: project) || 'skipped' + statuses.latest.composite_status || 'skipped' end end end diff --git a/app/models/clusters/agent.rb b/app/models/clusters/agent.rb new file mode 100644 index 00000000000..c21759a3c3b --- /dev/null +++ b/app/models/clusters/agent.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module Clusters + class Agent < ApplicationRecord + self.table_name = 'cluster_agents' + + belongs_to :project, class_name: '::Project' # Otherwise, it will load ::Clusters::Project + + has_many :agent_tokens, class_name: 'Clusters::AgentToken' + + validates :name, + presence: true, + length: { maximum: 63 }, + uniqueness: { scope: :project_id }, + format: { + with: Gitlab::Regex.cluster_agent_name_regex, + message: Gitlab::Regex.cluster_agent_name_regex_message + } + end +end diff --git a/app/models/clusters/agent_token.rb b/app/models/clusters/agent_token.rb new file mode 100644 index 00000000000..e9f1ee4e033 --- /dev/null +++ b/app/models/clusters/agent_token.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +module Clusters + class AgentToken < ApplicationRecord + include TokenAuthenticatable + add_authentication_token_field :token, encrypted: :required + + self.table_name = 'cluster_agent_tokens' + + belongs_to :agent, class_name: 'Clusters::Agent' + + before_save :ensure_token + end +end diff --git a/app/models/clusters/applications/cert_manager.rb b/app/models/clusters/applications/cert_manager.rb index 53c90fa56d5..1efa44c39c5 100644 --- a/app/models/clusters/applications/cert_manager.rb +++ b/app/models/clusters/applications/cert_manager.rb @@ -38,8 +38,7 @@ module Clusters chart: chart, files: files.merge(cluster_issuer_file), preinstall: pre_install_script, - postinstall: post_install_script, - local_tiller_enabled: cluster.local_tiller_enabled? + postinstall: post_install_script ) end @@ -48,8 +47,7 @@ module Clusters name: 'certmanager', rbac: cluster.platform_kubernetes_rbac?, files: files, - postdelete: post_delete_script, - local_tiller_enabled: cluster.local_tiller_enabled? + postdelete: post_delete_script ) end diff --git a/app/models/clusters/applications/crossplane.rb b/app/models/clusters/applications/crossplane.rb index 2e5a8210b3c..420e56c1742 100644 --- a/app/models/clusters/applications/crossplane.rb +++ b/app/models/clusters/applications/crossplane.rb @@ -35,8 +35,7 @@ module Clusters version: VERSION, rbac: cluster.platform_kubernetes_rbac?, chart: chart, - files: files, - local_tiller_enabled: cluster.local_tiller_enabled? + files: files ) end diff --git a/app/models/clusters/applications/elastic_stack.rb b/app/models/clusters/applications/elastic_stack.rb index 58ac0c1f188..77996748b81 100644 --- a/app/models/clusters/applications/elastic_stack.rb +++ b/app/models/clusters/applications/elastic_stack.rb @@ -34,8 +34,7 @@ module Clusters repository: repository, files: files, preinstall: migrate_to_3_script, - postinstall: post_install_script, - local_tiller_enabled: cluster.local_tiller_enabled? 
+ postinstall: post_install_script ) end @@ -44,8 +43,7 @@ module Clusters name: 'elastic-stack', rbac: cluster.platform_kubernetes_rbac?, files: files, - postdelete: post_delete_script, - local_tiller_enabled: cluster.local_tiller_enabled? + postdelete: post_delete_script ) end @@ -121,8 +119,7 @@ module Clusters Gitlab::Kubernetes::Helm::DeleteCommand.new( name: 'elastic-stack', rbac: cluster.platform_kubernetes_rbac?, - files: files, - local_tiller_enabled: cluster.local_tiller_enabled? + files: files ).delete_command, Gitlab::Kubernetes::KubectlCmd.delete("pvc", "--selector", "release=elastic-stack", "--namespace", Gitlab::Kubernetes::Helm::NAMESPACE) ] diff --git a/app/models/clusters/applications/fluentd.rb b/app/models/clusters/applications/fluentd.rb index 1bcd39618f6..3fd6e870edc 100644 --- a/app/models/clusters/applications/fluentd.rb +++ b/app/models/clusters/applications/fluentd.rb @@ -32,8 +32,7 @@ module Clusters version: VERSION, rbac: cluster.platform_kubernetes_rbac?, chart: chart, - files: files, - local_tiller_enabled: cluster.local_tiller_enabled? + files: files ) end diff --git a/app/models/clusters/applications/helm.rb b/app/models/clusters/applications/helm.rb index 226a9c26db0..4a1bcac4bb7 100644 --- a/app/models/clusters/applications/helm.rb +++ b/app/models/clusters/applications/helm.rb @@ -52,8 +52,7 @@ module Clusters Gitlab::Kubernetes::Helm::InitCommand.new( name: name, files: files, - rbac: cluster.platform_kubernetes_rbac?, - local_tiller_enabled: cluster.local_tiller_enabled? + rbac: cluster.platform_kubernetes_rbac? ) end @@ -61,8 +60,7 @@ module Clusters Gitlab::Kubernetes::Helm::ResetCommand.new( name: name, files: files, - rbac: cluster.platform_kubernetes_rbac?, - local_tiller_enabled: cluster.local_tiller_enabled? + rbac: cluster.platform_kubernetes_rbac? ) end diff --git a/app/models/clusters/applications/ingress.rb b/app/models/clusters/applications/ingress.rb index a44450ec7a9..1d08f38a2f1 100644 --- a/app/models/clusters/applications/ingress.rb +++ b/app/models/clusters/applications/ingress.rb @@ -3,7 +3,7 @@ module Clusters module Applications class Ingress < ApplicationRecord - VERSION = '1.29.7' + VERSION = '1.40.2' INGRESS_CONTAINER_NAME = 'nginx-ingress-controller' MODSECURITY_LOG_CONTAINER_NAME = 'modsecurity-log' MODSECURITY_MODE_LOGGING = "DetectionOnly" @@ -63,8 +63,7 @@ module Clusters version: VERSION, rbac: cluster.platform_kubernetes_rbac?, chart: chart, - files: files, - local_tiller_enabled: cluster.local_tiller_enabled? + files: files ) end diff --git a/app/models/clusters/applications/jupyter.rb b/app/models/clusters/applications/jupyter.rb index b737f0f962f..056ea355de6 100644 --- a/app/models/clusters/applications/jupyter.rb +++ b/app/models/clusters/applications/jupyter.rb @@ -45,8 +45,7 @@ module Clusters rbac: cluster.platform_kubernetes_rbac?, chart: chart, files: files, - repository: repository, - local_tiller_enabled: cluster.local_tiller_enabled? + repository: repository ) end diff --git a/app/models/clusters/applications/knative.rb b/app/models/clusters/applications/knative.rb index b55fc3c45fc..3047da12dd9 100644 --- a/app/models/clusters/applications/knative.rb +++ b/app/models/clusters/applications/knative.rb @@ -77,8 +77,7 @@ module Clusters chart: chart, files: files, repository: REPOSITORY, - postinstall: install_knative_metrics, - local_tiller_enabled: cluster.local_tiller_enabled? 
+ postinstall: install_knative_metrics ) end @@ -100,8 +99,7 @@ module Clusters rbac: cluster.platform_kubernetes_rbac?, files: files, predelete: delete_knative_services_and_metrics, - postdelete: delete_knative_istio_leftovers, - local_tiller_enabled: cluster.local_tiller_enabled? + postdelete: delete_knative_istio_leftovers ) end diff --git a/app/models/clusters/applications/prometheus.rb b/app/models/clusters/applications/prometheus.rb index 101d782db3a..216bbbc1c5a 100644 --- a/app/models/clusters/applications/prometheus.rb +++ b/app/models/clusters/applications/prometheus.rb @@ -69,8 +69,7 @@ module Clusters rbac: cluster.platform_kubernetes_rbac?, chart: chart, files: files, - postinstall: install_knative_metrics, - local_tiller_enabled: cluster.local_tiller_enabled? + postinstall: install_knative_metrics ) end @@ -80,8 +79,7 @@ module Clusters version: version, rbac: cluster.platform_kubernetes_rbac?, chart: chart, - files: files_with_replaced_values(values), - local_tiller_enabled: cluster.local_tiller_enabled? + files: files_with_replaced_values(values) ) end @@ -90,8 +88,7 @@ module Clusters name: name, rbac: cluster.platform_kubernetes_rbac?, files: files, - predelete: delete_knative_istio_metrics, - local_tiller_enabled: cluster.local_tiller_enabled? + predelete: delete_knative_istio_metrics ) end diff --git a/app/models/clusters/applications/runner.rb b/app/models/clusters/applications/runner.rb index 3f0b4edde35..c041f605e6c 100644 --- a/app/models/clusters/applications/runner.rb +++ b/app/models/clusters/applications/runner.rb @@ -3,7 +3,7 @@ module Clusters module Applications class Runner < ApplicationRecord - VERSION = '0.18.2' + VERSION = '0.19.2' self.table_name = 'clusters_applications_runners' @@ -36,8 +36,7 @@ module Clusters rbac: cluster.platform_kubernetes_rbac?, chart: chart, files: files, - repository: repository, - local_tiller_enabled: cluster.local_tiller_enabled? + repository: repository ) end diff --git a/app/models/clusters/cluster.rb b/app/models/clusters/cluster.rb index 7641b6d2a4b..63aebdf1bdb 100644 --- a/app/models/clusters/cluster.rb +++ b/app/models/clusters/cluster.rb @@ -218,6 +218,24 @@ module Clusters provider&.status_name || connection_status.presence || :created end + def connection_error + with_reactive_cache do |data| + data[:connection_error] + end + end + + def node_connection_error + with_reactive_cache do |data| + data[:node_connection_error] + end + end + + def metrics_connection_error + with_reactive_cache do |data| + data[:metrics_connection_error] + end + end + def connection_status with_reactive_cache do |data| data[:connection_status] @@ -233,9 +251,7 @@ module Clusters def calculate_reactive_cache return unless enabled? - gitlab_kubernetes_nodes = Gitlab::Kubernetes::Node.new(self) - - { connection_status: retrieve_connection_status, nodes: gitlab_kubernetes_nodes.all.presence } + connection_data.merge(Gitlab::Kubernetes::Node.new(self).all) end def persisted_applications @@ -341,10 +357,6 @@ module Clusters end end - def local_tiller_enabled? 
- Feature.enabled?(:managed_apps_local_tiller, clusterable, default_enabled: true) - end - def prometheus_adapter application_prometheus end @@ -395,9 +407,10 @@ module Clusters @instance_domain ||= Gitlab::CurrentSettings.auto_devops_domain end - def retrieve_connection_status + def connection_data result = ::Gitlab::Kubernetes::KubeClient.graceful_request(id) { kubeclient.core_client.discover } - result[:status] + + { connection_status: result[:status], connection_error: result[:connection_error] }.compact end # To keep backward compatibility with AUTO_DEVOPS_DOMAIN diff --git a/app/models/clusters/concerns/application_core.rb b/app/models/clusters/concerns/application_core.rb index c1f63758906..760576ea1eb 100644 --- a/app/models/clusters/concerns/application_core.rb +++ b/app/models/clusters/concerns/application_core.rb @@ -15,7 +15,7 @@ module Clusters def set_initial_status return unless not_installable? - self.status = status_states[:installable] if cluster&.application_helm_available? || cluster&.local_tiller_enabled? + self.status = status_states[:installable] end def can_uninstall? diff --git a/app/models/clusters/concerns/application_data.rb b/app/models/clusters/concerns/application_data.rb index ade27e69642..22e597e9747 100644 --- a/app/models/clusters/concerns/application_data.rb +++ b/app/models/clusters/concerns/application_data.rb @@ -7,8 +7,7 @@ module Clusters Gitlab::Kubernetes::Helm::DeleteCommand.new( name: name, rbac: cluster.platform_kubernetes_rbac?, - files: files, - local_tiller_enabled: cluster.local_tiller_enabled? + files: files ) end @@ -21,23 +20,11 @@ module Clusters end def files - @files ||= begin - files = { 'values.yaml': values } - - files.merge!(certificate_files) if use_tiller_ssl? - - files - end + @files ||= { 'values.yaml': values } end private - def use_tiller_ssl? - return false if cluster.local_tiller_enabled? - - cluster.application_helm.has_ssl? - end - def certificate_files { 'ca.pem': ca_cert, diff --git a/app/models/clusters/concerns/application_status.rb b/app/models/clusters/concerns/application_status.rb index 86d74ed7b1c..95ac95448dd 100644 --- a/app/models/clusters/concerns/application_status.rb +++ b/app/models/clusters/concerns/application_status.rb @@ -79,7 +79,7 @@ module Clusters transition [:scheduled] => :uninstalling end - before_transition any => [:scheduled] do |application, _| + before_transition any => [:scheduled, :installed, :uninstalled] do |application, _| application.status_reason = nil end @@ -97,24 +97,6 @@ module Clusters application.status_reason = status_reason if status_reason end - before_transition any => [:installed, :updated] do |application, transition| - unless application.cluster.local_tiller_enabled? || application.is_a?(Clusters::Applications::Helm) - if transition.event == :make_externally_installed - # If an application is externally installed - # We assume the helm application is externally installed too - helm = application.cluster.application_helm || application.cluster.build_application_helm - - helm.make_externally_installed! - else - # When installing any application we are also performing an update - # of tiller (see Gitlab::Kubernetes::Helm::ClientCommand) so - # therefore we need to reflect that in the database. 
- - application.cluster.application_helm.update!(version: Gitlab::Kubernetes::Helm::HELM_VERSION) - end - end - end - after_transition any => [:uninstalling], :use_transactions => false do |application, _| application.prepare_uninstall end diff --git a/app/models/clusters/providers/aws.rb b/app/models/clusters/providers/aws.rb index faf587fb83d..86869361ed8 100644 --- a/app/models/clusters/providers/aws.rb +++ b/app/models/clusters/providers/aws.rb @@ -5,6 +5,9 @@ module Clusters class Aws < ApplicationRecord include Gitlab::Utils::StrongMemoize include Clusters::Concerns::ProviderStatus + include IgnorableColumns + + ignore_column :created_by_user_id, remove_with: '13.4', remove_after: '2020-08-22' self.table_name = 'cluster_providers_aws' diff --git a/app/models/commit.rb b/app/models/commit.rb index 53bcdf8165f..4f18ece9e50 100644 --- a/app/models/commit.rb +++ b/app/models/commit.rb @@ -21,7 +21,6 @@ class Commit participant :committer participant :notes_with_associations - attr_accessor :author attr_accessor :redacted_description_html attr_accessor :redacted_title_html attr_accessor :redacted_full_title_html diff --git a/app/models/commit_collection.rb b/app/models/commit_collection.rb index b8653f47392..07c49ed48e6 100644 --- a/app/models/commit_collection.rb +++ b/app/models/commit_collection.rb @@ -47,7 +47,10 @@ class CommitCollection pipelines = project.ci_pipelines.latest_pipeline_per_commit(map(&:id), ref) each do |commit| - commit.set_latest_pipeline_for_ref(ref, pipelines[commit.id]) + pipeline = pipelines[commit.id] + pipeline&.number_of_warnings # preload number of warnings + + commit.set_latest_pipeline_for_ref(ref, pipeline) end self diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb index c85292feb25..8aba74bedbc 100644 --- a/app/models/commit_status.rb +++ b/app/models/commit_status.rb @@ -100,9 +100,7 @@ class CommitStatus < ApplicationRecord # will not be refreshed to pick the change self.processed_will_change! - if !::Gitlab::Ci::Features.atomic_processing?(project) - self.processed = nil - elsif latest? + if latest? self.processed = false # force refresh of all dependent ones elsif retried? self.processed = true # retried are considered to be already processed @@ -164,8 +162,7 @@ class CommitStatus < ApplicationRecord next unless commit_status.project commit_status.run_after_commit do - schedule_stage_and_pipeline_update - + PipelineProcessWorker.perform_async(pipeline_id) ExpireJobCacheWorker.perform_async(id) end end @@ -186,14 +183,6 @@ class CommitStatus < ApplicationRecord select(:name) end - def self.status_for_prior_stages(index, project:) - before_stage(index).latest.slow_composite_status(project: project) || 'success' - end - - def self.status_for_names(names, project:) - where(name: names).latest.slow_composite_status(project: project) || 'success' - end - def self.update_as_processed! # Marks items as processed # we do not increase `lock_version`, as we are the one @@ -286,21 +275,6 @@ class CommitStatus < ApplicationRecord def unrecoverable_failure? script_failure? || missing_dependency_failure? || archived_failure? || scheduler_failure? || data_integrity_failure? end - - def schedule_stage_and_pipeline_update - if ::Gitlab::Ci::Features.atomic_processing?(project) - # Atomic Processing requires only single Worker - PipelineProcessWorker.perform_async(pipeline_id, [id]) - else - if complete? || manual? 
- PipelineProcessWorker.perform_async(pipeline_id, [id]) - else - PipelineUpdateWorker.perform_async(pipeline_id) - end - - StageUpdateWorker.perform_async(stage_id) - end - end end CommitStatus.prepend_if_ee('::EE::CommitStatus') diff --git a/app/models/commit_status_enums.rb b/app/models/commit_status_enums.rb index caebff91022..ad90929b8fa 100644 --- a/app/models/commit_status_enums.rb +++ b/app/models/commit_status_enums.rb @@ -23,7 +23,8 @@ module CommitStatusEnums downstream_bridge_project_not_found: 1_002, invalid_bridge_trigger: 1_003, bridge_pipeline_is_child_pipeline: 1_006, - downstream_pipeline_creation_failed: 1_007 + downstream_pipeline_creation_failed: 1_007, + secrets_provider_not_found: 1_008 } end end diff --git a/app/models/concerns/avatarable.rb b/app/models/concerns/avatarable.rb index 60de20c3b31..0dd55ab67b5 100644 --- a/app/models/concerns/avatarable.rb +++ b/app/models/concerns/avatarable.rb @@ -3,6 +3,17 @@ module Avatarable extend ActiveSupport::Concern + ALLOWED_IMAGE_SCALER_WIDTHS = [ + 400, + 200, + 64, + 48, + 40, + 26, + 20, + 16 + ].freeze + included do prepend ShadowMethods include ObjectStorage::BackgroundMove diff --git a/app/models/concerns/cache_markdown_field.rb b/app/models/concerns/cache_markdown_field.rb index 04eb4659469..49fc780f372 100644 --- a/app/models/concerns/cache_markdown_field.rb +++ b/app/models/concerns/cache_markdown_field.rb @@ -39,6 +39,10 @@ module CacheMarkdownField context[:markdown_engine] = :common_mark + if Feature.enabled?(:personal_snippet_reference_filters, context[:author]) + context[:user] = self.parent_user + end + context end @@ -132,6 +136,10 @@ module CacheMarkdownField end end + def parent_user + nil + end + included do cattr_reader :cached_markdown_fields do Gitlab::MarkdownCache::FieldData.new diff --git a/app/models/concerns/ci/artifactable.rb b/app/models/concerns/ci/artifactable.rb new file mode 100644 index 00000000000..54fb9021f2f --- /dev/null +++ b/app/models/concerns/ci/artifactable.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module Ci + module Artifactable + extend ActiveSupport::Concern + + FILE_FORMAT_ADAPTERS = { + gzip: Gitlab::Ci::Build::Artifacts::Adapters::GzipStream, + raw: Gitlab::Ci::Build::Artifacts::Adapters::RawStream + }.freeze + + included do + enum file_format: { + raw: 1, + zip: 2, + gzip: 3 + }, _suffix: true + end + end +end diff --git a/app/models/concerns/ci/contextable.rb b/app/models/concerns/ci/contextable.rb index 10df5e1a8dc..c8b55e7b39f 100644 --- a/app/models/concerns/ci/contextable.rb +++ b/app/models/concerns/ci/contextable.rb @@ -9,7 +9,7 @@ module Ci ## # Variables in the environment name scope. 
# - def scoped_variables(environment: expanded_environment_name) + def scoped_variables(environment: expanded_environment_name, dependencies: true) Gitlab::Ci::Variables::Collection.new.tap do |variables| variables.concat(predefined_variables) variables.concat(project.predefined_variables) @@ -18,7 +18,7 @@ module Ci variables.concat(deployment_variables(environment: environment)) variables.concat(yaml_variables) variables.concat(user_variables) - variables.concat(dependency_variables) + variables.concat(dependency_variables) if dependencies variables.concat(secret_instance_variables) variables.concat(secret_group_variables) variables.concat(secret_project_variables(environment: environment)) @@ -45,6 +45,12 @@ module Ci end end + def simple_variables_without_dependencies + strong_memoize(:variables_without_dependencies) do + scoped_variables(environment: nil, dependencies: false).to_runner_variables + end + end + def user_variables Gitlab::Ci::Variables::Collection.new.tap do |variables| break variables if user.blank? @@ -64,7 +70,7 @@ module Ci variables.append(key: 'CI_PIPELINE_TRIGGERED', value: 'true') if trigger_request variables.append(key: 'CI_NODE_INDEX', value: self.options[:instance].to_s) if self.options&.include?(:instance) - variables.append(key: 'CI_NODE_TOTAL', value: (self.options&.dig(:parallel) || 1).to_s) + variables.append(key: 'CI_NODE_TOTAL', value: ci_node_total_value.to_s) # legacy variables variables.append(key: 'CI_BUILD_NAME', value: name) @@ -96,5 +102,13 @@ module Ci def secret_project_variables(environment: persisted_environment) project.ci_variables_for(ref: git_ref, environment: environment) end + + private + + def ci_node_total_value + parallel = self.options&.dig(:parallel) + parallel = parallel.dig(:total) if parallel.is_a?(Hash) + parallel || 1 + end end end diff --git a/app/models/concerns/ci/has_status.rb b/app/models/concerns/ci/has_status.rb index c52807ec501..1cc2e8a51e3 100644 --- a/app/models/concerns/ci/has_status.rb +++ b/app/models/concerns/ci/has_status.rb @@ -20,60 +20,10 @@ module Ci UnknownStatusError = Class.new(StandardError) class_methods do - def legacy_status_sql - scope_relevant = respond_to?(:exclude_ignored) ? exclude_ignored : all - scope_warnings = respond_to?(:failed_but_allowed) ? 
failed_but_allowed : none - - builds = scope_relevant.select('count(*)').to_sql - created = scope_relevant.created.select('count(*)').to_sql - success = scope_relevant.success.select('count(*)').to_sql - manual = scope_relevant.manual.select('count(*)').to_sql - scheduled = scope_relevant.scheduled.select('count(*)').to_sql - preparing = scope_relevant.preparing.select('count(*)').to_sql - waiting_for_resource = scope_relevant.waiting_for_resource.select('count(*)').to_sql - pending = scope_relevant.pending.select('count(*)').to_sql - running = scope_relevant.running.select('count(*)').to_sql - skipped = scope_relevant.skipped.select('count(*)').to_sql - canceled = scope_relevant.canceled.select('count(*)').to_sql - warnings = scope_warnings.select('count(*) > 0').to_sql.presence || 'false' - - Arel.sql( - "(CASE - WHEN (#{builds})=(#{skipped}) AND (#{warnings}) THEN 'success' - WHEN (#{builds})=(#{skipped}) THEN 'skipped' - WHEN (#{builds})=(#{success}) THEN 'success' - WHEN (#{builds})=(#{created}) THEN 'created' - WHEN (#{builds})=(#{preparing}) THEN 'preparing' - WHEN (#{builds})=(#{success})+(#{skipped}) THEN 'success' - WHEN (#{builds})=(#{success})+(#{skipped})+(#{canceled}) THEN 'canceled' - WHEN (#{builds})=(#{created})+(#{skipped})+(#{pending}) THEN 'pending' - WHEN (#{running})+(#{pending})>0 THEN 'running' - WHEN (#{waiting_for_resource})>0 THEN 'waiting_for_resource' - WHEN (#{manual})>0 THEN 'manual' - WHEN (#{scheduled})>0 THEN 'scheduled' - WHEN (#{preparing})>0 THEN 'preparing' - WHEN (#{created})>0 THEN 'running' - ELSE 'failed' - END)" - ) - end - - def legacy_status - all.pluck(legacy_status_sql).first - end - - # This method should not be used. - # This method performs expensive calculation of status: - # 1. By plucking all related objects, - # 2. Or executes expensive SQL query - def slow_composite_status(project:) - if ::Gitlab::Ci::Features.composite_status?(project) - Gitlab::Ci::Status::Composite - .new(all, with_allow_failure: columns_hash.key?('allow_failure')) - .status - else - legacy_status - end + def composite_status + Gitlab::Ci::Status::Composite + .new(all, with_allow_failure: columns_hash.key?('allow_failure')) + .status end def started_at diff --git a/app/models/concerns/counter_attribute.rb b/app/models/concerns/counter_attribute.rb new file mode 100644 index 00000000000..a5c7393e8f7 --- /dev/null +++ b/app/models/concerns/counter_attribute.rb @@ -0,0 +1,143 @@ +# frozen_string_literal: true + +# Add capabilities to increment a numeric model attribute efficiently by +# using Redis and flushing the increments asynchronously to the database +# after a period of time (10 minutes). +# When an attribute is incremented by a value, the increment is added +# to a Redis key. Then, FlushCounterIncrementsWorker will execute +# `flush_increments_to_database!` which removes increments from Redis for a +# given model attribute and updates the values in the database. 
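#
# The flush is driven by FlushCounterIncrementsWorker, which this concern
# schedules from `delayed_increment_counter`. A rough sketch of that call
# (the worker itself lives outside this file):
#
#   ProjectStatistics.find(id).flush_increments_to_database!(:commit_count)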
+# +# @example: +# +# class ProjectStatistics +# include CounterAttribute +# +# counter_attribute :commit_count +# counter_attribute :storage_size +# end +# +# To increment the counter we can use the method: +# delayed_increment_counter(:commit_count, 3) +# +module CounterAttribute + extend ActiveSupport::Concern + extend AfterCommitQueue + include Gitlab::ExclusiveLeaseHelpers + + LUA_STEAL_INCREMENT_SCRIPT = <<~EOS.freeze + local increment_key, flushed_key = KEYS[1], KEYS[2] + local increment_value = redis.call("get", increment_key) or 0 + local flushed_value = redis.call("incrby", flushed_key, increment_value) + if flushed_value == 0 then + redis.call("del", increment_key, flushed_key) + else + redis.call("del", increment_key) + end + return flushed_value + EOS + + WORKER_DELAY = 10.minutes + WORKER_LOCK_TTL = 10.minutes + + class_methods do + def counter_attribute(attribute) + counter_attributes << attribute + end + + def counter_attributes + @counter_attributes ||= Set.new + end + end + + # This method must only be called by FlushCounterIncrementsWorker + # because it should run asynchronously and with exclusive lease. + # This will + # 1. temporarily move the pending increment for a given attribute + # to a relative "flushed" Redis key, delete the increment key and return + # the value. If new increments are performed at this point, the increment + # key is recreated as part of `delayed_increment_counter`. + # The "flushed" key is used to ensure that we can keep incrementing + # counters in Redis while flushing existing values. + # 2. then the value is used to update the counter in the database. + # 3. finally the "flushed" key is deleted. + def flush_increments_to_database!(attribute) + lock_key = counter_lock_key(attribute) + + with_exclusive_lease(lock_key) do + increment_key = counter_key(attribute) + flushed_key = counter_flushed_key(attribute) + increment_value = steal_increments(increment_key, flushed_key) + + next if increment_value == 0 + + transaction do + unsafe_update_counters(id, attribute => increment_value) + redis_state { |redis| redis.del(flushed_key) } + end + end + end + + def delayed_increment_counter(attribute, increment) + return if increment == 0 + + run_after_commit_or_now do + if counter_attribute_enabled?(attribute) + redis_state do |redis| + redis.incrby(counter_key(attribute), increment) + end + + FlushCounterIncrementsWorker.perform_in(WORKER_DELAY, self.class.name, self.id, attribute) + else + legacy_increment!(attribute, increment) + end + end + + true + end + + def counter_key(attribute) + "project:{#{project_id}}:counters:#{self.class}:#{id}:#{attribute}" + end + + def counter_flushed_key(attribute) + counter_key(attribute) + ':flushed' + end + + def counter_lock_key(attribute) + counter_key(attribute) + ':lock' + end + + private + + def counter_attribute_enabled?(attribute) + Feature.enabled?(:efficient_counter_attribute, project) && + self.class.counter_attributes.include?(attribute) + end + + def steal_increments(increment_key, flushed_key) + redis_state do |redis| + redis.eval(LUA_STEAL_INCREMENT_SCRIPT, keys: [increment_key, flushed_key]) + end + end + + def legacy_increment!(attribute, increment) + increment!(attribute, increment) + end + + def unsafe_update_counters(id, increments) + self.class.update_counters(id, increments) + end + + def redis_state(&block) + Gitlab::Redis::SharedState.with(&block) + end + + def with_exclusive_lease(lock_key) + in_lock(lock_key, ttl: WORKER_LOCK_TTL) do + yield + end + rescue 
Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError + # a worker is already updating the counters + end +end diff --git a/app/models/concerns/file_store_mounter.rb b/app/models/concerns/file_store_mounter.rb new file mode 100644 index 00000000000..9d4463e5297 --- /dev/null +++ b/app/models/concerns/file_store_mounter.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +module FileStoreMounter + extend ActiveSupport::Concern + + class_methods do + def mount_file_store_uploader(uploader) + mount_uploader(:file, uploader) + + after_save :update_file_store, if: :saved_change_to_file? + end + end + + private + + def update_file_store + # The file.object_store is set during `uploader.store!` + # which happens after object is inserted/updated + self.update_column(:file_store, file.object_store) + end +end diff --git a/app/models/concerns/has_wiki.rb b/app/models/concerns/has_wiki.rb index 4dd72216e77..3e7cb940a62 100644 --- a/app/models/concerns/has_wiki.rb +++ b/app/models/concerns/has_wiki.rb @@ -17,7 +17,7 @@ module HasWiki def wiki strong_memoize(:wiki) do - Wiki.for_container(self, self.owner) + Wiki.for_container(self, self.default_owner) end end diff --git a/app/models/concerns/issuable.rb b/app/models/concerns/issuable.rb index 715cbd15d93..dd5aedbb760 100644 --- a/app/models/concerns/issuable.rb +++ b/app/models/concerns/issuable.rb @@ -61,11 +61,13 @@ module Issuable end end + has_many :note_authors, -> { distinct }, through: :notes, source: :author + has_many :label_links, as: :target, dependent: :destroy, inverse_of: :target # rubocop:disable Cop/ActiveRecordDependent has_many :labels, through: :label_links has_many :todos, as: :target, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent - has_one :metrics + has_one :metrics, inverse_of: model_name.singular.to_sym, autosave: true delegate :name, :email, diff --git a/app/models/concerns/milestoneable.rb b/app/models/concerns/milestoneable.rb index 8f8494a9678..ccb334343ff 100644 --- a/app/models/concerns/milestoneable.rb +++ b/app/models/concerns/milestoneable.rb @@ -15,7 +15,7 @@ module Milestoneable validate :milestone_is_valid scope :of_milestones, ->(ids) { where(milestone_id: ids) } - scope :any_milestone, -> { where('milestone_id IS NOT NULL') } + scope :any_milestone, -> { where.not(milestone_id: nil) } scope :with_milestone, ->(title) { left_joins_milestones.where(milestones: { title: title }) } scope :without_particular_milestone, ->(title) { left_outer_joins(:milestone).where("milestones.title != ? OR milestone_id IS NULL", title) } scope :any_release, -> { joins_milestone_releases } diff --git a/app/models/concerns/relative_positioning.rb b/app/models/concerns/relative_positioning.rb index 1d89a4497d9..d1f04609693 100644 --- a/app/models/concerns/relative_positioning.rb +++ b/app/models/concerns/relative_positioning.rb @@ -3,11 +3,15 @@ # This module makes it possible to handle items as a list, where the order of items can be easily altered # Requirements: # -# - Only works for ActiveRecord models -# - relative_position integer field must present on the model -# - This module uses GROUP BY: the model should have a parent relation, example: project -> issues, project is the parent relation (issues table has a parent_id column) +# The model must have the following named columns: +# - id: integer +# - relative_position: integer # -# Setup like this in the body of your class: +# The model must support a concept of siblings via a child->parent relationship, +# to enable rebalancing and `GROUP BY` in queries. 
+# - example: project -> issues, project is the parent relation (issues table has a parent_id column) +# +# Two class methods must be defined when including this concern: # # include RelativePositioning # @@ -24,53 +28,167 @@ module RelativePositioning extend ActiveSupport::Concern - MIN_POSITION = 0 - START_POSITION = Gitlab::Database::MAX_INT_VALUE / 2 + STEPS = 10 + IDEAL_DISTANCE = 2**(STEPS - 1) + 1 + + MIN_POSITION = Gitlab::Database::MIN_INT_VALUE + START_POSITION = 0 MAX_POSITION = Gitlab::Database::MAX_INT_VALUE - IDEAL_DISTANCE = 500 - class_methods do - def move_nulls_to_end(objects) - objects = objects.reject(&:relative_position) + MAX_GAP = IDEAL_DISTANCE * 2 + MIN_GAP = 2 - return if objects.empty? + NoSpaceLeft = Class.new(StandardError) - max_relative_position = objects.first.max_relative_position + class_methods do + def move_nulls_to_end(objects) + move_nulls(objects, at_end: true) + end - self.transaction do - objects.each do |object| - relative_position = position_between(max_relative_position || START_POSITION, MAX_POSITION) - object.relative_position = relative_position - max_relative_position = relative_position - object.save(touch: false) - end - end + def move_nulls_to_start(objects) + move_nulls(objects, at_end: false) end # This method takes two integer values (positions) and # calculates the position between them. The range is huge as - # the maximum integer value is 2147483647. We are incrementing position by IDEAL_DISTANCE * 2 every time - # when we have enough space. If distance is less than IDEAL_DISTANCE, we are calculating an average number. + # the maximum integer value is 2147483647. + # + # We avoid open ranges by clamping the range to [MIN_POSITION, MAX_POSITION]. + # + # Then we handle one of three cases: + # - If the gap is too small, we raise NoSpaceLeft + # - If the gap is larger than MAX_GAP, we place the new position at most + # IDEAL_DISTANCE from the edge of the gap. + # - otherwise we place the new position at the midpoint. + # + # The new position will always satisfy: pos_before <= midpoint <= pos_after + # + # As a precondition, the gap between pos_before and pos_after MUST be >= 2. + # If the gap is too small, NoSpaceLeft is raised. + # + # This class method should only be called by instance methods of this module, which + # include handling for minimum gap size. + # + # @raises NoSpaceLeft + # @api private def position_between(pos_before, pos_after) pos_before ||= MIN_POSITION pos_after ||= MAX_POSITION pos_before, pos_after = [pos_before, pos_after].sort - halfway = (pos_after + pos_before) / 2 - distance_to_halfway = pos_after - halfway + gap_width = pos_after - pos_before + midpoint = [pos_after - 1, pos_before + (gap_width / 2)].min - if distance_to_halfway < IDEAL_DISTANCE - halfway - else + if gap_width < MIN_GAP + raise NoSpaceLeft + elsif gap_width > MAX_GAP if pos_before == MIN_POSITION pos_after - IDEAL_DISTANCE elsif pos_after == MAX_POSITION pos_before + IDEAL_DISTANCE else - halfway + midpoint + end + else + midpoint + end + end + + private + + # @api private + def gap_size(object, gaps:, at_end:, starting_from:) + total_width = IDEAL_DISTANCE * gaps + size = if at_end && starting_from + total_width >= MAX_POSITION + (MAX_POSITION - starting_from) / gaps + elsif !at_end && starting_from - total_width <= MIN_POSITION + (starting_from - MIN_POSITION) / gaps + else + IDEAL_DISTANCE + end + + # Shift max elements leftwards if there isn't enough space + return [size, starting_from] if size >= MIN_GAP + + order = at_end ? 
:desc : :asc + terminus = object + .send(:relative_siblings) # rubocop:disable GitlabSecurity/PublicSend + .where('relative_position IS NOT NULL') + .order(relative_position: order) + .first + + if at_end + terminus.move_sequence_before(true) + max_relative_position = terminus.reset.relative_position + [[(MAX_POSITION - max_relative_position) / gaps, IDEAL_DISTANCE].min, max_relative_position] + else + terminus.move_sequence_after(true) + min_relative_position = terminus.reset.relative_position + [[(min_relative_position - MIN_POSITION) / gaps, IDEAL_DISTANCE].min, min_relative_position] + end + end + + # @api private + # @param [Array<RelativePositioning>] objects The objects to give positions to. The relative + # order will be preserved (i.e. when this method returns, + # objects.first.relative_position < objects.last.relative_position) + # @param [Boolean] at_end: The placement. + # If `true`, then all objects with `null` positions are placed _after_ + # all siblings with positions. If `false`, all objects with `null` + # positions are placed _before_ all siblings with positions. + # @returns [Number] The number of moved records. + def move_nulls(objects, at_end:) + objects = objects.reject(&:relative_position) + return 0 if objects.empty? + + representative = objects.first + number_of_gaps = objects.size + 1 # 1 at left, one between each, and one at right + position = if at_end + representative.max_relative_position + else + representative.min_relative_position + end + + position ||= START_POSITION # If there are no positioned siblings, start from START_POSITION + + gap, position = gap_size(representative, gaps: number_of_gaps, at_end: at_end, starting_from: position) + + # Raise if we could not make enough space + raise NoSpaceLeft if gap < MIN_GAP + + indexed = objects.each_with_index.to_a + starting_from = at_end ? position : position - (gap * number_of_gaps) + + # Some classes are polymorphic, and not all siblings are in the same table. + by_model = indexed.group_by { |pair| pair.first.class } + + by_model.each do |model, pairs| + model.transaction do + pairs.each_slice(100) do |batch| + # These are known to be integers, one from the DB, and the other + # calculated by us, and thus safe to interpolate + values = batch.map do |obj, i| + pos = starting_from + gap * (i + 1) + obj.relative_position = pos + "(#{obj.id}, #{pos})" + end.join(', ') + + model.connection.exec_query(<<~SQL, "UPDATE #{model.table_name} positions") + WITH cte(cte_id, new_pos) AS ( + SELECT * + FROM (VALUES #{values}) as t (id, pos) + ) + UPDATE #{model.table_name} + SET relative_position = cte.new_pos + FROM cte + WHERE cte_id = id + SQL + end end end + + objects.size end end @@ -82,11 +200,12 @@ module RelativePositioning calculate_relative_position('MAX', &block) end - def prev_relative_position + def prev_relative_position(ignoring: nil) prev_pos = nil if self.relative_position prev_pos = max_relative_position do |relation| + relation = relation.id_not_in(ignoring.id) if ignoring.present? relation.where('relative_position < ?', self.relative_position) end end @@ -94,11 +213,12 @@ module RelativePositioning prev_pos end - def next_relative_position + def next_relative_position(ignoring: nil) next_pos = nil if self.relative_position next_pos = min_relative_position do |relation| + relation = relation.id_not_in(ignoring.id) if ignoring.present? 
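        # Nearest positioned sibling after the current position; the record
        # passed via `ignoring:` (the item being moved, see #move_after) is skipped.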
relation.where('relative_position > ?', self.relative_position) end end @@ -110,24 +230,44 @@ module RelativePositioning return move_after(before) unless after return move_before(after) unless before - # If there is no place to insert an item we need to create one by moving the item - # before this and all preceding items until there is a gap before, after = after, before if after.relative_position < before.relative_position - if (after.relative_position - before.relative_position) < 2 - after.move_sequence_before - before.reset + + pos_left = before.relative_position + pos_right = after.relative_position + + if pos_right - pos_left < MIN_GAP + # Not enough room! Make space by shifting all previous elements to the left + # if there is enough space, else to the right + gap = after.send(:find_next_gap_before) # rubocop:disable GitlabSecurity/PublicSend + + if gap.present? + after.move_sequence_before(next_gap: gap) + pos_left -= optimum_delta_for_gap(gap) + else + before.move_sequence_after + pos_right = after.reset.relative_position + end end - self.relative_position = self.class.position_between(before.relative_position, after.relative_position) + new_position = self.class.position_between(pos_left, pos_right) + + self.relative_position = new_position end def move_after(before = self) pos_before = before.relative_position - pos_after = before.next_relative_position + pos_after = before.next_relative_position(ignoring: self) + + if pos_before == MAX_POSITION || gap_too_small?(pos_after, pos_before) + gap = before.send(:find_next_gap_after) # rubocop:disable GitlabSecurity/PublicSend - if pos_after && (pos_after - pos_before) < 2 - before.move_sequence_after - pos_after = before.next_relative_position + if gap.nil? + before.move_sequence_before(true) + pos_before = before.reset.relative_position + else + before.move_sequence_after(next_gap: gap) + pos_after += optimum_delta_for_gap(gap) + end end self.relative_position = self.class.position_between(pos_before, pos_after) @@ -135,80 +275,186 @@ module RelativePositioning def move_before(after = self) pos_after = after.relative_position - pos_before = after.prev_relative_position + pos_before = after.prev_relative_position(ignoring: self) + + if pos_after == MIN_POSITION || gap_too_small?(pos_before, pos_after) + gap = after.send(:find_next_gap_before) # rubocop:disable GitlabSecurity/PublicSend - if pos_before && (pos_after - pos_before) < 2 - after.move_sequence_before - pos_before = after.prev_relative_position + if gap.nil? + after.move_sequence_after(true) + pos_after = after.reset.relative_position + else + after.move_sequence_before(next_gap: gap) + pos_before -= optimum_delta_for_gap(gap) + end end self.relative_position = self.class.position_between(pos_before, pos_after) end def move_to_end - self.relative_position = self.class.position_between(max_relative_position || START_POSITION, MAX_POSITION) + max_pos = max_relative_position + + if max_pos.nil? 
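      # No positioned siblings yet: place this item at START_POSITION.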
+ self.relative_position = START_POSITION + elsif gap_too_small?(max_pos, MAX_POSITION) + max = relative_siblings.order(Gitlab::Database.nulls_last_order('relative_position', 'DESC')).first + max.move_sequence_before(true) + max.reset + self.relative_position = self.class.position_between(max.relative_position, MAX_POSITION) + else + self.relative_position = self.class.position_between(max_pos, MAX_POSITION) + end end def move_to_start - self.relative_position = self.class.position_between(min_relative_position || START_POSITION, MIN_POSITION) + min_pos = min_relative_position + + if min_pos.nil? + self.relative_position = START_POSITION + elsif gap_too_small?(min_pos, MIN_POSITION) + min = relative_siblings.order(Gitlab::Database.nulls_last_order('relative_position', 'ASC')).first + min.move_sequence_after(true) + min.reset + self.relative_position = self.class.position_between(MIN_POSITION, min.relative_position) + else + self.relative_position = self.class.position_between(MIN_POSITION, min_pos) + end end # Moves the sequence before the current item to the middle of the next gap - # For example, we have 5 11 12 13 14 15 and the current item is 15 - # This moves the sequence 11 12 13 14 to 8 9 10 11 - def move_sequence_before - next_gap = find_next_gap_before + # For example, we have + # + # 5 . . . . . 11 12 13 14 [15] 16 . 17 + # ----------- + # + # This moves the sequence [11 12 13 14] to [8 9 10 11], so we have: + # + # 5 . . 8 9 10 11 . . . [15] 16 . 17 + # --------- + # + # Creating a gap to the left of the current item. We can understand this as + # dividing the 5 spaces between 5 and 11 into two smaller gaps of 2 and 3. + # + # If `include_self` is true, the current item will also be moved, creating a + # gap to the right of the current item: + # + # 5 . . 8 9 10 11 [14] . . . 16 . 17 + # -------------- + # + # As an optimization, the gap can be precalculated and passed to this method. + # + # @api private + # @raises NoSpaceLeft if the sequence cannot be moved + def move_sequence_before(include_self = false, next_gap: find_next_gap_before) + raise NoSpaceLeft unless next_gap.present? + delta = optimum_delta_for_gap(next_gap) - move_sequence(next_gap[:start], relative_position, -delta) + move_sequence(next_gap[:start], relative_position, -delta, include_self) end # Moves the sequence after the current item to the middle of the next gap - # For example, we have 11 12 13 14 15 21 and the current item is 11 - # This moves the sequence 12 13 14 15 to 15 16 17 18 - def move_sequence_after - next_gap = find_next_gap_after + # For example, we have: + # + # 8 . 10 [11] 12 13 14 15 . . . . . 21 + # ----------- + # + # This moves the sequence [12 13 14 15] to [15 16 17 18], so we have: + # + # 8 . 10 [11] . . . 15 16 17 18 . . 21 + # ----------- + # + # Creating a gap to the right of the current item. We can understand this as + # dividing the 5 spaces between 15 and 21 into two smaller gaps of 3 and 2. + # + # If `include_self` is true, the current item will also be moved, creating a + # gap to the left of the current item: + # + # 8 . 10 . . . [14] 15 16 17 18 . . 21 + # ---------------- + # + # As an optimization, the gap can be precalculated and passed to this method. + # + # @api private + # @raises NoSpaceLeft if the sequence cannot be moved + def move_sequence_after(include_self = false, next_gap: find_next_gap_after) + raise NoSpaceLeft unless next_gap.present? 
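    # Shift the following items towards the next gap, by at most IDEAL_DISTANCE.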
+ delta = optimum_delta_for_gap(next_gap) - move_sequence(relative_position, next_gap[:start], delta) + move_sequence(relative_position, next_gap[:start], delta, include_self) end private - # Supposing that we have a sequence of items: 1 5 11 12 13 and the current item is 13 - # This would return: `{ start: 11, end: 5 }` + def gap_too_small?(pos_a, pos_b) + return false unless pos_a && pos_b + + (pos_a - pos_b).abs < MIN_GAP + end + + # Find the first suitable gap to the left of the current position. + # + # Satisfies the relations: + # - gap[:start] <= relative_position + # - abs(gap[:start] - gap[:end]) >= MIN_GAP + # - MIN_POSITION <= gap[:start] <= MAX_POSITION + # - MIN_POSITION <= gap[:end] <= MAX_POSITION + # + # Supposing that the current item is 13, and we have a sequence of items: + # + # 1 . . . 5 . . . . 11 12 [13] 14 . . 17 + # ^---------^ + # + # Then we return: `{ start: 11, end: 5 }` + # + # Here start refers to the end of the gap closest to the current item. def find_next_gap_before items_with_next_pos = scoped_items .select('relative_position AS pos, LEAD(relative_position) OVER (ORDER BY relative_position DESC) AS next_pos') .where('relative_position <= ?', relative_position) .order(relative_position: :desc) - find_next_gap(items_with_next_pos).tap do |gap| - gap[:end] ||= MIN_POSITION - end + find_next_gap(items_with_next_pos, MIN_POSITION) end - # Supposing that we have a sequence of items: 13 14 15 20 24 and the current item is 13 - # This would return: `{ start: 15, end: 20 }` + # Find the first suitable gap to the right of the current position. + # + # Satisfies the relations: + # - gap[:start] >= relative_position + # - abs(gap[:start] - gap[:end]) >= MIN_GAP + # - MIN_POSITION <= gap[:start] <= MAX_POSITION + # - MIN_POSITION <= gap[:end] <= MAX_POSITION + # + # Supposing the current item is 13, and that we have a sequence of items: + # + # 9 . . . [13] 14 15 . . . . 20 . . . 24 + # ^---------^ + # + # Then we return: `{ start: 15, end: 20 }` + # + # Here start refers to the end of the gap closest to the current item. def find_next_gap_after items_with_next_pos = scoped_items .select('relative_position AS pos, LEAD(relative_position) OVER (ORDER BY relative_position ASC) AS next_pos') .where('relative_position >= ?', relative_position) .order(:relative_position) - find_next_gap(items_with_next_pos).tap do |gap| - gap[:end] ||= MAX_POSITION - end + find_next_gap(items_with_next_pos, MAX_POSITION) end - def find_next_gap(items_with_next_pos) - gap = self.class.from(items_with_next_pos, :items_with_next_pos) - .where('ABS(pos - next_pos) > 1 OR next_pos IS NULL') - .limit(1) - .pluck(:pos, :next_pos) - .first + def find_next_gap(items_with_next_pos, end_is_nil) + gap = self.class + .from(items_with_next_pos, :items) + .where('next_pos IS NULL OR ABS(pos::bigint - next_pos::bigint) >= ?', MIN_GAP) + .limit(1) + .pluck(:pos, :next_pos) + .first + + return if gap.nil? || gap.first == end_is_nil - { start: gap[0], end: gap[1] } + { start: gap.first, end: gap.second || end_is_nil } end def optimum_delta_for_gap(gap) @@ -217,9 +463,10 @@ module RelativePositioning [delta, IDEAL_DISTANCE].min end - def move_sequence(start_pos, end_pos, delta) - scoped_items - .where.not(id: self.id) + def move_sequence(start_pos, end_pos, delta, include_self = false) + relation = include_self ? scoped_items : relative_siblings + + relation .where('relative_position BETWEEN ? 
AND ?', start_pos, end_pos) .update_all("relative_position = relative_position + #{delta}") end @@ -240,6 +487,10 @@ module RelativePositioning .first&.last end + def relative_siblings(relation = scoped_items) + relation.id_not_in(id) + end + def scoped_items self.class.relative_positioning_query_base(self) end diff --git a/app/models/concerns/sha_attribute.rb b/app/models/concerns/sha_attribute.rb index c807dcbf418..cbac6a210c7 100644 --- a/app/models/concerns/sha_attribute.rb +++ b/app/models/concerns/sha_attribute.rb @@ -7,7 +7,7 @@ module ShaAttribute def sha_attribute(name) return if ENV['STATIC_VERIFICATION'] - validate_binary_column_exists!(name) unless Rails.env.production? + validate_binary_column_exists!(name) if Rails.env.development? attribute(name, Gitlab::Database::ShaAttribute.new) end @@ -17,18 +17,11 @@ module ShaAttribute # See https://gitlab.com/gitlab-org/gitlab/merge_requests/5502 for more discussion def validate_binary_column_exists!(name) return unless database_exists? - - unless table_exists? - warn "WARNING: sha_attribute #{name.inspect} is invalid since the table doesn't exist - you may need to run database migrations" - return - end + return unless table_exists? column = columns.find { |c| c.name == name.to_s } - unless column - warn "WARNING: sha_attribute #{name.inspect} is invalid since the column doesn't exist - you may need to run database migrations" - return - end + return unless column unless column.type == :binary raise ArgumentError.new("sha_attribute #{name.inspect} is invalid since the column type is not :binary") diff --git a/app/models/concerns/time_trackable.rb b/app/models/concerns/time_trackable.rb index dddf96837b7..a1e7d06b1c1 100644 --- a/app/models/concerns/time_trackable.rb +++ b/app/models/concerns/time_trackable.rb @@ -26,6 +26,7 @@ module TimeTrackable # rubocop:disable Gitlab/ModuleWithInstanceVariables def spend_time(options) @time_spent = options[:duration] + @time_spent_note_id = options[:note_id] @time_spent_user = User.find(options[:user_id]) @spent_at = options[:spent_at] @original_total_time_spent = nil @@ -67,6 +68,7 @@ module TimeTrackable def add_or_subtract_spent_time timelogs.new( time_spent: time_spent, + note_id: @time_spent_note_id, user: @time_spent_user, spent_at: @spent_at ) diff --git a/app/models/concerns/triggerable_hooks.rb b/app/models/concerns/triggerable_hooks.rb index c52baa0524c..b64a9e4f70b 100644 --- a/app/models/concerns/triggerable_hooks.rb +++ b/app/models/concerns/triggerable_hooks.rb @@ -12,7 +12,8 @@ module TriggerableHooks merge_request_hooks: :merge_requests_events, job_hooks: :job_events, pipeline_hooks: :pipeline_events, - wiki_page_hooks: :wiki_page_events + wiki_page_hooks: :wiki_page_events, + deployment_hooks: :deployment_events }.freeze extend ActiveSupport::Concern diff --git a/app/models/concerns/update_project_statistics.rb b/app/models/concerns/update_project_statistics.rb index c0fa14d3369..a7028e18451 100644 --- a/app/models/concerns/update_project_statistics.rb +++ b/app/models/concerns/update_project_statistics.rb @@ -75,7 +75,7 @@ module UpdateProjectStatistics end def schedule_update_project_statistic(delta) - return if delta.zero? + return if delta == 0 return if project.nil? 
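      # Defer the statistics update until the surrounding transaction commits.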
run_after_commit do diff --git a/app/models/deployment.rb b/app/models/deployment.rb index aa3e3a8f66d..d6508ffceba 100644 --- a/app/models/deployment.rb +++ b/app/models/deployment.rb @@ -148,6 +148,7 @@ class Deployment < ApplicationRecord def execute_hooks deployment_data = Gitlab::DataBuilder::Deployment.build(self) + project.execute_hooks(deployment_data, :deployment_hooks) if Feature.enabled?(:deployment_webhooks, project, default_enabled: true) project.execute_services(deployment_data, :deployment_hooks) end diff --git a/app/models/design_management/design.rb b/app/models/design_management/design.rb index 0dca6333fa1..deda814d689 100644 --- a/app/models/design_management/design.rb +++ b/app/models/design_management/design.rb @@ -9,6 +9,7 @@ module DesignManagement include Referable include Mentionable include WhereComposite + include RelativePositioning belongs_to :project, inverse_of: :designs belongs_to :issue @@ -75,9 +76,23 @@ module DesignManagement join = designs.join(actions) .on(actions[:design_id].eq(designs[:id])) - joins(join.join_sources).where(actions[:event].not_eq(deletion)).order(:id) + joins(join.join_sources).where(actions[:event].not_eq(deletion)) end + scope :ordered, -> (project) do + # TODO: Always order by relative position after the feature flag is removed + # https://gitlab.com/gitlab-org/gitlab/-/issues/34382 + if Feature.enabled?(:reorder_designs, project, default_enabled: true) + # We need to additionally sort by `id` to support keyset pagination. + # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/17788/diffs#note_230875678 + order(:relative_position, :id) + else + in_creation_order + end + end + + scope :in_creation_order, -> { reorder(:id) } + scope :with_filename, -> (filenames) { where(filename: filenames) } scope :on_issue, ->(issue) { where(issue_id: issue) } @@ -87,6 +102,14 @@ module DesignManagement # A design is current if the most recent event is not a deletion scope :current, -> { visible_at_version(nil) } + def self.relative_positioning_query_base(design) + default_scoped.on_issue(design.issue_id) + end + + def self.relative_positioning_parent_column + :issue_id + end + def status if new_design? :new @@ -196,6 +219,17 @@ module DesignManagement project end + def immediately_before?(next_design) + return false if next_design.relative_position <= relative_position + + interloper = self.class.on_issue(issue).where( + "relative_position <@ int4range(?, ?, '()')", + *[self, next_design].map(&:relative_position) + ) + + !interloper.exists? 
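      # True when no other design on this issue sits strictly between the two positions.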
+ end + private def head_version diff --git a/app/models/design_management/design_collection.rb b/app/models/design_management/design_collection.rb index 18d1541e9c7..96d5f4c2419 100644 --- a/app/models/design_management/design_collection.rb +++ b/app/models/design_management/design_collection.rb @@ -10,6 +10,10 @@ module DesignManagement @issue = issue end + def ==(other) + other.is_a?(self.class) && issue == other.issue + end + def find_or_create_design!(filename:) designs.find { |design| design.filename == filename } || designs.safe_find_or_create_by!(project: project, filename: filename) diff --git a/app/models/discussion.rb b/app/models/discussion.rb index e928bb0959a..adcb2217d85 100644 --- a/app/models/discussion.rb +++ b/app/models/discussion.rb @@ -23,6 +23,7 @@ class Discussion :resolved_by_id, :system_note_with_references_visible_for?, :resource_parent, + :save, to: :first_note diff --git a/app/models/environment.rb b/app/models/environment.rb index bddc84f10b5..c6a08c996da 100644 --- a/app/models/environment.rb +++ b/app/models/environment.rb @@ -29,6 +29,7 @@ class Environment < ApplicationRecord has_one :last_visible_deployment, -> { visible.distinct_on_environment }, inverse_of: :environment, class_name: 'Deployment' has_one :last_visible_deployable, through: :last_visible_deployment, source: 'deployable', source_type: 'CommitStatus' has_one :last_visible_pipeline, through: :last_visible_deployable, source: 'pipeline' + has_one :latest_opened_most_severe_alert, -> { order_severity_with_open_prometheus_alert }, class_name: 'AlertManagement::Alert', inverse_of: :environment before_validation :nullify_external_url before_validation :generate_slug, if: ->(env) { env.slug.blank? } @@ -291,6 +292,10 @@ class Environment < ApplicationRecord !!ENV['USE_SAMPLE_METRICS'] end + def has_opened_alert? + latest_opened_most_severe_alert.present? + end + def metrics prometheus_adapter.query(:environment, self) if has_metrics_and_can_query? end diff --git a/app/models/event.rb b/app/models/event.rb index 56d7742c51a..92609144576 100644 --- a/app/models/event.rb +++ b/app/models/event.rb @@ -8,6 +8,7 @@ class Event < ApplicationRecord include CreatedAtFilterable include Gitlab::Utils::StrongMemoize include UsageStatistics + include ShaAttribute default_scope { reorder(nil) } # rubocop:disable Cop/DefaultScope @@ -48,6 +49,8 @@ class Event < ApplicationRecord RESET_PROJECT_ACTIVITY_INTERVAL = 1.hour REPOSITORY_UPDATED_AT_INTERVAL = 5.minutes + sha_attribute :fingerprint + enum action: ACTIONS, _suffix: true delegate :name, :email, :public_email, :username, to: :author, prefix: true, allow_nil: true @@ -82,6 +85,10 @@ class Event < ApplicationRecord scope :recent, -> { reorder(id: :desc) } scope :for_wiki_page, -> { where(target_type: 'WikiPage::Meta') } scope :for_design, -> { where(target_type: 'DesignManagement::Design') } + scope :for_fingerprint, ->(fingerprint) do + fingerprint.present? ? 
where(fingerprint: fingerprint) : none + end + scope :for_action, ->(action) { where(action: action) } scope :with_associations, -> do # We're using preload for "push_event_payload" as otherwise the association diff --git a/app/models/experiment.rb b/app/models/experiment.rb new file mode 100644 index 00000000000..25640385536 --- /dev/null +++ b/app/models/experiment.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +class Experiment < ApplicationRecord + has_many :experiment_users + has_many :users, through: :experiment_users + has_many :control_group_users, -> { merge(ExperimentUser.control) }, through: :experiment_users, source: :user + has_many :experimental_group_users, -> { merge(ExperimentUser.experimental) }, through: :experiment_users, source: :user + + validates :name, presence: true, uniqueness: true, length: { maximum: 255 } + + def self.add_user(name, group_type, user) + experiment = find_or_create_by(name: name) + + return unless experiment + return if experiment.experiment_users.where(user: user).exists? + + group_type == ::Gitlab::Experimentation::GROUP_CONTROL ? experiment.add_control_user(user) : experiment.add_experimental_user(user) + end + + def add_control_user(user) + control_group_users << user + end + + def add_experimental_user(user) + experimental_group_users << user + end +end diff --git a/app/models/experiment_user.rb b/app/models/experiment_user.rb new file mode 100644 index 00000000000..1571b0c3439 --- /dev/null +++ b/app/models/experiment_user.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +class ExperimentUser < ApplicationRecord + belongs_to :experiment + belongs_to :user + + enum group_type: { control: 0, experimental: 1 } + + validates :group_type, presence: true +end diff --git a/app/models/external_pull_request.rb b/app/models/external_pull_request.rb index 9c6d05f773a..1487a6387f0 100644 --- a/app/models/external_pull_request.rb +++ b/app/models/external_pull_request.rb @@ -63,6 +63,8 @@ class ExternalPullRequest < ApplicationRecord def predefined_variables Gitlab::Ci::Variables::Collection.new.tap do |variables| variables.append(key: 'CI_EXTERNAL_PULL_REQUEST_IID', value: pull_request_iid.to_s) + variables.append(key: 'CI_EXTERNAL_PULL_REQUEST_SOURCE_REPOSITORY', value: source_repository) + variables.append(key: 'CI_EXTERNAL_PULL_REQUEST_TARGET_REPOSITORY', value: target_repository) variables.append(key: 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_SHA', value: source_sha) variables.append(key: 'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_SHA', value: target_sha) variables.append(key: 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_NAME', value: source_branch) diff --git a/app/models/group.rb b/app/models/group.rb index c38ddbdf6fb..f8cbaa2495c 100644 --- a/app/models/group.rb +++ b/app/models/group.rb @@ -64,6 +64,8 @@ class Group < Namespace has_one :import_state, class_name: 'GroupImportState', inverse_of: :group + has_many :group_deploy_keys_groups, inverse_of: :group + has_many :group_deploy_keys, through: :group_deploy_keys_groups has_many :group_deploy_tokens has_many :deploy_tokens, through: :group_deploy_tokens @@ -172,6 +174,10 @@ class Group < Namespace notification_settings(hierarchy_order: hierarchy_order).where(user: user) end + def packages_feature_enabled? 
+ ::Gitlab.config.packages.enabled + end + def notification_email_for(user) # Finds the closest notification_setting with a `notification_email` notification_settings = notification_settings_for(user, hierarchy_order: :asc) @@ -557,6 +563,10 @@ class Group < Namespace all_projects.update_all(shared_runners_enabled: false) end + def default_owner + owners.first || parent&.default_owner || owner + end + private def update_two_factor_requirement diff --git a/app/models/group_deploy_key.rb b/app/models/group_deploy_key.rb index d1f1aa544cd..160ac28b33b 100644 --- a/app/models/group_deploy_key.rb +++ b/app/models/group_deploy_key.rb @@ -3,9 +3,31 @@ class GroupDeployKey < Key self.table_name = 'group_deploy_keys' + has_many :group_deploy_keys_groups, inverse_of: :group_deploy_key, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent + has_many :groups, through: :group_deploy_keys_groups + validates :user, presence: true def type 'DeployKey' end + + def group_deploy_keys_group_for(group) + group_deploy_keys_groups.find_by(group: group) + end + + def can_be_edited_for?(user, group) + Ability.allowed?(user, :update_group_deploy_key, self) || + Ability.allowed?( + user, + :update_group_deploy_key_for_group, + group_deploy_keys_group_for(group) + ) + end + + def group_deploy_keys_groups_for_user(user) + group_deploy_keys_groups.select do |group_deploy_keys_group| + Ability.allowed?(user, :read_group, group_deploy_keys_group.group) + end + end end diff --git a/app/models/group_deploy_keys_group.rb b/app/models/group_deploy_keys_group.rb new file mode 100644 index 00000000000..2fbfd2983b4 --- /dev/null +++ b/app/models/group_deploy_keys_group.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +class GroupDeployKeysGroup < ApplicationRecord + belongs_to :group, inverse_of: :group_deploy_keys_groups + belongs_to :group_deploy_key, inverse_of: :group_deploy_keys_groups + + validates :group_deploy_key, presence: true + validates :group_deploy_key_id, uniqueness: { scope: [:group_id], message: "already exists in group" } + validates :group_id, presence: true +end diff --git a/app/models/hooks/project_hook.rb b/app/models/hooks/project_hook.rb index 71494b6de4d..2d1bdecc770 100644 --- a/app/models/hooks/project_hook.rb +++ b/app/models/hooks/project_hook.rb @@ -17,7 +17,8 @@ class ProjectHook < WebHook :merge_request_hooks, :job_hooks, :pipeline_hooks, - :wiki_page_hooks + :wiki_page_hooks, + :deployment_hooks ] belongs_to :project diff --git a/app/models/individual_note_discussion.rb b/app/models/individual_note_discussion.rb index bdfa6dcc6bd..c3ccf44d27e 100644 --- a/app/models/individual_note_discussion.rb +++ b/app/models/individual_note_discussion.rb @@ -17,12 +17,8 @@ class IndividualNoteDiscussion < Discussion noteable.supports_replying_to_individual_notes? end - def convert_to_discussion!(save: false) - first_note.becomes!(Discussion.note_class).to_discussion.tap do - # Save needs to be called on first_note instead of the transformed note - # because of https://gitlab.com/gitlab-org/gitlab-foss/issues/57324 - first_note.save if save - end + def convert_to_discussion! 
+ first_note.becomes!(Discussion.note_class).to_discussion end def reply_attributes diff --git a/app/models/issue.rb b/app/models/issue.rb index 619555f369d..a0003df87e1 100644 --- a/app/models/issue.rb +++ b/app/models/issue.rb @@ -30,6 +30,8 @@ class Issue < ApplicationRecord SORTING_PREFERENCE_FIELD = :issues_sort belongs_to :project + has_one :namespace, through: :project + belongs_to :duplicated_to, class_name: 'Issue' belongs_to :closed_by, class_name: 'User' belongs_to :iteration, foreign_key: 'sprint_id' @@ -66,6 +68,12 @@ class Issue < ApplicationRecord accepts_nested_attributes_for :sentry_issue validates :project, presence: true + validates :issue_type, presence: true + + enum issue_type: { + issue: 0, + incident: 1 + } alias_attribute :parent_ids, :project_id alias_method :issuing_parent, :project @@ -87,11 +95,18 @@ class Issue < ApplicationRecord scope :order_created_at_desc, -> { reorder(created_at: :desc) } scope :preload_associated_models, -> { preload(:assignees, :labels, project: :namespace) } + scope :with_web_entity_associations, -> { preload(:author, :project) } scope :with_api_entity_associations, -> { preload(:timelogs, :assignees, :author, :notes, :labels, project: [:route, { namespace: :route }] ) } scope :with_label_attributes, ->(label_attributes) { joins(:labels).where(labels: label_attributes) } scope :with_alert_management_alerts, -> { joins(:alert_management_alert) } scope :with_prometheus_alert_events, -> { joins(:issues_prometheus_alert_events) } scope :with_self_managed_prometheus_alert_events, -> { joins(:issues_self_managed_prometheus_alert_events) } + scope :with_api_entity_associations, -> { + preload(:timelogs, :closed_by, :assignees, :author, :notes, :labels, + milestone: { project: [:route, { namespace: :route }] }, + project: [:route, { namespace: :route }]) + } + scope :with_issue_type, ->(types) { where(issue_type: types) } scope :public_only, -> { where(confidential: false) } scope :confidential_only, -> { where(confidential: true) } @@ -146,10 +161,6 @@ class Issue < ApplicationRecord issue.closed_at = nil issue.closed_by = nil end - - after_transition any => :closed do |issue| - issue.resolve_associated_alert_management_alert - end end # Alias to state machine .with_state_id method @@ -363,18 +374,6 @@ class Issue < ApplicationRecord @design_collection ||= ::DesignManagement::DesignCollection.new(self) end - def resolve_associated_alert_management_alert - return unless alert_management_alert - return if alert_management_alert.resolve - - Gitlab::AppLogger.warn( - message: 'Cannot resolve an associated Alert Management alert', - issue_id: id, - alert_id: alert_management_alert.id, - alert_errors: alert_management_alert.errors.messages - ) - end - def from_service_desk? author.id == User.support_bot.id end diff --git a/app/models/iteration.rb b/app/models/iteration.rb index 0b59cf047f7..3495f099064 100644 --- a/app/models/iteration.rb +++ b/app/models/iteration.rb @@ -4,6 +4,7 @@ class Iteration < ApplicationRecord self.table_name = 'sprints' attr_accessor :skip_future_date_validation + attr_accessor :skip_project_validation STATE_ENUM_MAP = { upcoming: 1, @@ -24,6 +25,7 @@ class Iteration < ApplicationRecord validate :dates_do_not_overlap, if: :start_or_due_dates_changed? 
validate :future_date, if: :start_or_due_dates_changed?, unless: :skip_future_date_validation + validate :no_project, unless: :skip_project_validation scope :upcoming, -> { with_state(:upcoming) } scope :started, -> { with_state(:started) } @@ -113,6 +115,12 @@ class Iteration < ApplicationRecord errors.add(:due_date, s_("Iteration|cannot be more than 500 years in the future")) if due_date > 500.years.from_now end end + + def no_project + return unless project_id.present? + + errors.add(:project_id, s_("is not allowed. We do not currently support project-level iterations")) + end end Iteration.prepend_if_ee('EE::Iteration') diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb index 3761484b15d..d60baa299cb 100644 --- a/app/models/lfs_object.rb +++ b/app/models/lfs_object.rb @@ -5,6 +5,7 @@ class LfsObject < ApplicationRecord include Checksummable include EachBatch include ObjectStorage::BackgroundMove + include FileStoreMounter has_many :lfs_objects_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent has_many :projects, -> { distinct }, through: :lfs_objects_projects @@ -15,21 +16,13 @@ class LfsObject < ApplicationRecord validates :oid, presence: true, uniqueness: true - mount_uploader :file, LfsObjectUploader - - after_save :update_file_store, if: :saved_change_to_file? + mount_file_store_uploader LfsObjectUploader def self.not_linked_to_project(project) where('NOT EXISTS (?)', project.lfs_objects_projects.select(1).where('lfs_objects_projects.lfs_object_id = lfs_objects.id')) end - def update_file_store - # The file.object_store is set during `uploader.store!` - # which happens after object is inserted/updated - self.update_column(:file_store, file.object_store) - end - def project_allowed_access?(project) if project.fork_network_member lfs_objects_projects diff --git a/app/models/member.rb b/app/models/member.rb index 36f9741ce01..2c62ea55785 100644 --- a/app/models/member.rb +++ b/app/models/member.rb @@ -86,6 +86,7 @@ class Member < ApplicationRecord scope :owners, -> { active.where(access_level: OWNER) } scope :owners_and_maintainers, -> { active.where(access_level: [OWNER, MAINTAINER]) } scope :with_user, -> (user) { where(user: user) } + scope :preload_user_and_notification_settings, -> { preload(user: :notification_settings) } scope :with_source_id, ->(source_id) { where(source_id: source_id) } scope :including_source, -> { includes(:source) } diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb index b7885771781..f4c2d568b4d 100644 --- a/app/models/merge_request.rb +++ b/app/models/merge_request.rb @@ -40,7 +40,7 @@ class MergeRequest < ApplicationRecord has_internal_id :iid, scope: :target_project, track_if: -> { !importing? 
}, init: ->(s) { s&.target_project&.merge_requests&.maximum(:iid) } has_many :merge_request_diffs - has_many :merge_request_context_commits + has_many :merge_request_context_commits, inverse_of: :merge_request has_many :merge_request_context_commit_diff_files, through: :merge_request_context_commits, source: :diff_files has_one :merge_request_diff, @@ -251,17 +251,12 @@ class MergeRequest < ApplicationRecord end scope :join_project, -> { joins(:target_project) } scope :references_project, -> { references(:target_project) } - - PROJECT_ROUTE_AND_NAMESPACE_ROUTE = [ - target_project: [:route, { namespace: :route }], - source_project: [:route, { namespace: :route }] - ].freeze - scope :with_api_entity_associations, -> { - preload(:assignees, :author, :unresolved_notes, :labels, :milestone, - :timelogs, :latest_merge_request_diff, - *PROJECT_ROUTE_AND_NAMESPACE_ROUTE, - metrics: [:latest_closed_by, :merged_by]) + preload_routables + .preload(:assignees, :author, :unresolved_notes, :labels, :milestone, + :timelogs, :latest_merge_request_diff, + target_project: :project_feature, + metrics: [:latest_closed_by, :merged_by]) } scope :by_target_branch_wildcard, ->(wildcard_branch_name) do @@ -269,6 +264,14 @@ class MergeRequest < ApplicationRecord end scope :by_target_branch, ->(branch_name) { where(target_branch: branch_name) } scope :preload_source_project, -> { preload(:source_project) } + scope :preload_target_project, -> { preload(:target_project) } + scope :preload_routables, -> do + preload(target_project: [:route, { namespace: :route }], + source_project: [:route, { namespace: :route }]) + end + scope :preload_author, -> { preload(:author) } + scope :preload_approved_by_users, -> { preload(:approved_by_users) } + scope :preload_metrics, -> (relation) { preload(metrics: relation) } scope :with_auto_merge_enabled, -> do with_state(:opened).where(auto_merge_enabled: true) @@ -428,7 +431,7 @@ class MergeRequest < ApplicationRecord end def context_commits(limit: nil) - @context_commits ||= merge_request_context_commits.limit(limit).map(&:to_commit) + @context_commits ||= merge_request_context_commits.order_by_committed_date_desc.limit(limit).map(&:to_commit) end def recent_context_commits @@ -1181,12 +1184,12 @@ class MergeRequest < ApplicationRecord end def can_be_merged_by?(user) - access = ::Gitlab::UserAccess.new(user, project: project) + access = ::Gitlab::UserAccess.new(user, container: project) access.can_update_branch?(target_branch) end def can_be_merged_via_command_line_by?(user) - access = ::Gitlab::UserAccess.new(user, project: project) + access = ::Gitlab::UserAccess.new(user, container: project) access.can_push_to_branch?(target_branch) end @@ -1608,7 +1611,12 @@ class MergeRequest < ApplicationRecord override :ensure_metrics def ensure_metrics - MergeRequest::Metrics.safe_find_or_create_by(merge_request_id: id).tap do |metrics_record| + # Backward compatibility: some merge request metrics records will not have target_project_id filled in. + # In that case the first `safe_find_or_create_by` will return false. + # The second finder call will be eliminated in https://gitlab.com/gitlab-org/gitlab/-/issues/233507 + metrics_record = MergeRequest::Metrics.safe_find_or_create_by(merge_request_id: id, target_project_id: target_project_id) || MergeRequest::Metrics.safe_find_or_create_by(merge_request_id: id) + + metrics_record.tap do |metrics_record| # Make sure we refresh the loaded association object with the newly created/loaded item. 
# This is needed in order to have the exact functionality than before. # @@ -1618,6 +1626,8 @@ class MergeRequest < ApplicationRecord # merge_request.ensure_metrics # merge_request.metrics # should return the metrics record and not nil # merge_request.metrics.merge_request # should return the same MR record + + metrics_record.target_project_id = target_project_id metrics_record.association(:merge_request).target = self association(:metrics).target = metrics_record end diff --git a/app/models/merge_request/metrics.rb b/app/models/merge_request/metrics.rb index ba363019c72..66bff3f5982 100644 --- a/app/models/merge_request/metrics.rb +++ b/app/models/merge_request/metrics.rb @@ -1,10 +1,21 @@ # frozen_string_literal: true class MergeRequest::Metrics < ApplicationRecord - belongs_to :merge_request + belongs_to :merge_request, inverse_of: :metrics belongs_to :pipeline, class_name: 'Ci::Pipeline', foreign_key: :pipeline_id belongs_to :latest_closed_by, class_name: 'User' belongs_to :merged_by, class_name: 'User' + + before_save :ensure_target_project_id + + scope :merged_after, ->(date) { where(arel_table[:merged_at].gteq(date)) } + scope :merged_before, ->(date) { where(arel_table[:merged_at].lteq(date)) } + + private + + def ensure_target_project_id + self.target_project_id ||= merge_request.target_project_id + end end MergeRequest::Metrics.prepend_if_ee('EE::MergeRequest::Metrics') diff --git a/app/models/merge_request_context_commit.rb b/app/models/merge_request_context_commit.rb index de97fc33f8d..a2982a5dd73 100644 --- a/app/models/merge_request_context_commit.rb +++ b/app/models/merge_request_context_commit.rb @@ -12,6 +12,9 @@ class MergeRequestContextCommit < ApplicationRecord validates :sha, presence: true validates :sha, uniqueness: { message: 'has already been added' } + # Sort by committed date in descending order to ensure latest commits comes on the top + scope :order_by_committed_date_desc, -> { order('committed_date DESC') } + # delete all MergeRequestContextCommit & MergeRequestContextCommitDiffFile for given merge_request & commit SHAs def self.delete_bulk(merge_request, commits) commit_ids = commits.map(&:sha) diff --git a/app/models/merge_request_diff.rb b/app/models/merge_request_diff.rb index eb5250d5cf6..b70340a98cd 100644 --- a/app/models/merge_request_diff.rb +++ b/app/models/merge_request_diff.rb @@ -51,14 +51,16 @@ class MergeRequestDiff < ApplicationRecord scope :by_commit_sha, ->(sha) do joins(:merge_request_diff_commits).where(merge_request_diff_commits: { sha: sha }).reorder(nil) end - scope :has_diff_files, -> { where(id: MergeRequestDiffFile.select(:merge_request_diff_id)) } scope :by_project_id, -> (project_id) do joins(:merge_request).where(merge_requests: { target_project_id: project_id }) end scope :recent, -> { order(id: :desc).limit(100) } - scope :files_in_database, -> { has_diff_files.where(stored_externally: [false, nil]) } + + scope :files_in_database, -> do + where(stored_externally: [false, nil]).where(arel_table[:files_count].gt(0)) + end scope :not_latest_diffs, -> do merge_requests = MergeRequest.arel_table @@ -100,14 +102,25 @@ class MergeRequestDiff < ApplicationRecord joins(merge_request: :metrics).where(condition) end - def self.ids_for_external_storage_migration(limit:) - # No point doing any work unless the feature is enabled - return [] unless Gitlab.config.external_diffs.enabled + class << self + def ids_for_external_storage_migration(limit:) + return [] unless Gitlab.config.external_diffs.enabled - case Gitlab.config.external_diffs.when 
- when 'always' + case Gitlab.config.external_diffs.when + when 'always' + ids_for_external_storage_migration_strategy_always(limit: limit) + when 'outdated' + ids_for_external_storage_migration_strategy_outdated(limit: limit) + else + [] + end + end + + def ids_for_external_storage_migration_strategy_always(limit:) files_in_database.limit(limit).pluck(:id) - when 'outdated' + end + + def ids_for_external_storage_migration_strategy_outdated(limit:) # Outdated is too complex to be a single SQL query, so split into three before = EXTERNAL_DIFF_CUTOFF.ago @@ -129,8 +142,6 @@ class MergeRequestDiff < ApplicationRecord .not_latest_diffs .limit(limit - ids.size) .pluck(:id) - else - [] end end @@ -139,6 +150,7 @@ class MergeRequestDiff < ApplicationRecord # All diff information is collected from repository after object is created. # It allows you to override variables like head_commit_sha before getting diff. after_create :save_git_content, unless: :importing? + after_create :set_count_columns after_create_commit :set_as_latest_diff, unless: :importing? after_save :update_external_diff_store @@ -621,7 +633,7 @@ class MergeRequestDiff < ApplicationRecord def save_diffs new_attributes = {} - if compare.commits.size.zero? + if compare.commits.empty? new_attributes[:state] = :empty else diff_collection = compare.diffs(Commit.max_diff_options) @@ -632,6 +644,7 @@ class MergeRequestDiff < ApplicationRecord rows = build_merge_request_diff_files(diff_collection) create_merge_request_diff_files(rows) + self.class.uncached { merge_request_diff_files.reset } end # Set our state to 'overflow' to make the #empty? and #collected? @@ -647,12 +660,14 @@ class MergeRequestDiff < ApplicationRecord def save_commits MergeRequestDiffCommit.create_bulk(self.id, compare.commits.reverse) + self.class.uncached { merge_request_diff_commits.reset } + end - # merge_request_diff_commits.reset is preferred way to reload associated - # objects but it returns cached result for some reason in this case - # we can circumvent that by specifying that we need an uncached reload - commits = self.class.uncached { merge_request_diff_commits.reset } - self.commits_count = commits.size + def set_count_columns + update_columns( + commits_count: merge_request_diff_commits.size, + files_count: merge_request_diff_files.size + ) end def repository diff --git a/app/models/milestone.rb b/app/models/milestone.rb index 58adfd5f70b..55326b9a282 100644 --- a/app/models/milestone.rb +++ b/app/models/milestone.rb @@ -127,7 +127,7 @@ class Milestone < ApplicationRecord end def can_be_closed? - active? && issues.opened.count.zero? + active? && issues.opened.count == 0 end def author_id diff --git a/app/models/network/graph.rb b/app/models/network/graph.rb index 6b5ea0fc3fc..9da454125eb 100644 --- a/app/models/network/graph.rb +++ b/app/models/network/graph.rb @@ -211,7 +211,7 @@ module Network # Visit branching chains leaves.each do |l| - parents = l.parents(@map).select {|p| p.space.zero?} + parents = l.parents(@map).select {|p| p.space == 0} parents.each do |p| place_chain(p, l.time) end @@ -266,14 +266,14 @@ module Network def take_left_leaves(raw_commit) commit = @map[raw_commit.id] leaves = [] - leaves.push(commit) if commit.space.zero? + leaves.push(commit) if commit.space == 0 loop do - return leaves if commit.parents(@map).count.zero? + return leaves if commit.parents(@map).count == 0 commit = commit.parents(@map).first - return leaves unless commit.space.zero? 
+ return leaves unless commit.space == 0 leaves.push(commit) end diff --git a/app/models/note.rb b/app/models/note.rb index 2db7e4e406d..e1fc16818b3 100644 --- a/app/models/note.rb +++ b/app/models/note.rb @@ -61,7 +61,7 @@ class Note < ApplicationRecord attr_accessor :commands_changes # A special role that may be displayed on issuable's discussions - attr_accessor :special_role + attr_reader :special_role default_value_for :system, false @@ -417,7 +417,7 @@ class Note < ApplicationRecord end def can_create_todo? - # Skip system notes, and notes on project snippet + # Skip system notes, and notes on snippets !system? && !for_snippet? end @@ -559,6 +559,10 @@ class Note < ApplicationRecord (!system_note_with_references? || all_referenced_mentionables_allowed?(user)) && system_note_viewable_by?(user) end + def parent_user + noteable.author if for_personal_snippet? + end + private # Using this method followed by a call to `save` may result in ActiveRecord::RecordNotUnique exception diff --git a/app/models/notification_recipient.rb b/app/models/notification_recipient.rb index 36b7cd64c73..6a6b2bb1b58 100644 --- a/app/models/notification_recipient.rb +++ b/app/models/notification_recipient.rb @@ -52,10 +52,9 @@ class NotificationRecipient when :mention @type == :mention when :participating - %i[failed_pipeline fixed_pipeline].include?(@custom_action) || - %i[participating mention].include?(@type) + participating_custom_action? || participating_or_mention? when :custom - custom_enabled? || %i[participating mention].include?(@type) + custom_enabled? || participating_or_mention? when :watch !excluded_watcher_action? else @@ -175,4 +174,12 @@ class NotificationRecipient .where.not(level: NotificationSetting.levels[:global]) .first end + + def participating_custom_action? + %i[failed_pipeline fixed_pipeline moved_project].include?(@custom_action) + end + + def participating_or_mention? + %i[participating mention].include?(@type) + end end diff --git a/app/models/notification_setting.rb b/app/models/notification_setting.rb index c8c1f47c182..c003a20f0fc 100644 --- a/app/models/notification_setting.rb +++ b/app/models/notification_setting.rb @@ -46,7 +46,8 @@ class NotificationSetting < ApplicationRecord :merge_merge_request, :failed_pipeline, :fixed_pipeline, - :success_pipeline + :success_pipeline, + :moved_project ].freeze # Update unfound_translations.rb when events are changed @@ -96,7 +97,11 @@ class NotificationSetting < ApplicationRecord alias_method :fixed_pipeline?, :fixed_pipeline def event_enabled?(event) - respond_to?(event) && !!public_send(event) # rubocop:disable GitlabSecurity/PublicSend + # We override these two attributes, so we can't use read_attribute + return failed_pipeline if event.to_sym == :failed_pipeline + return fixed_pipeline if event.to_sym == :fixed_pipeline + + has_attribute?(event) && !!read_attribute(event) end def owns_notification_email diff --git a/app/models/packages/package_file.rb b/app/models/packages/package_file.rb index 567b5a14603..4ebd96797db 100644 --- a/app/models/packages/package_file.rb +++ b/app/models/packages/package_file.rb @@ -15,6 +15,8 @@ class Packages::PackageFile < ApplicationRecord validates :file, presence: true validates :file_name, presence: true + validates :file_name, uniqueness: { scope: :package }, if: -> { package&.pypi? 
} + scope :recent, -> { order(id: :desc) } scope :with_file_name, ->(file_name) { where(file_name: file_name) } scope :with_file_name_like, ->(file_name) { where(arel_table[:file_name].matches(file_name)) } @@ -37,20 +39,27 @@ class Packages::PackageFile < ApplicationRecord update_project_statistics project_statistics_name: :packages_size + before_save :update_size_from_file + def update_file_metadata # The file.object_store is set during `uploader.store!` # which happens after object is inserted/updated self.update_column(:file_store, file.object_store) - self.update_column(:size, file.size) unless file.size == self.size end def download_path - Gitlab::Routing.url_helpers.download_project_package_file_path(project, self) if ::Gitlab.ee? + Gitlab::Routing.url_helpers.download_project_package_file_path(project, self) end def local? file_store == ::Packages::PackageFileUploader::Store::LOCAL end + + private + + def update_size_from_file + self.size ||= file.size + end end -Packages::PackageFile.prepend_if_ee('EE::Packages::PackageFileGeo') +Packages::PackageFile.prepend_if_ee('EE::Packages::PackageFile') diff --git a/app/models/pages_domain.rb b/app/models/pages_domain.rb index 856496f0941..d071d2d3c89 100644 --- a/app/models/pages_domain.rb +++ b/app/models/pages_domain.rb @@ -3,6 +3,7 @@ class PagesDomain < ApplicationRecord include Presentable include FromUnion + include AfterCommitQueue VERIFICATION_KEY = 'gitlab-pages-verification-code' VERIFICATION_THRESHOLD = 3.days.freeze @@ -222,6 +223,8 @@ class PagesDomain < ApplicationRecord private def pages_deployed? + return false unless project + # TODO: remove once `pages_metadatum` is migrated # https://gitlab.com/gitlab-org/gitlab/issues/33106 unless project.pages_metadatum @@ -244,8 +247,13 @@ class PagesDomain < ApplicationRecord # rubocop: disable CodeReuse/ServiceClass def update_daemon return if usage_serverless? + return unless pages_deployed? - ::Projects::UpdatePagesConfigurationService.new(project).execute + if Feature.enabled?(:async_update_pages_config, project) + run_after_commit { PagesUpdateConfigurationWorker.perform_async(project_id) } + else + Projects::UpdatePagesConfigurationService.new(project).execute + end end # rubocop: enable CodeReuse/ServiceClass diff --git a/app/models/personal_access_token.rb b/app/models/personal_access_token.rb index 488ebd531a8..e01cb0530a5 100644 --- a/app/models/personal_access_token.rb +++ b/app/models/personal_access_token.rb @@ -19,6 +19,7 @@ class PersonalAccessToken < ApplicationRecord scope :active, -> { where("revoked = false AND (expires_at >= CURRENT_DATE OR expires_at IS NULL)") } scope :expiring_and_not_notified, ->(date) { where(["revoked = false AND expire_notification_delivered = false AND expires_at >= CURRENT_DATE AND expires_at <= ?", date]) } + scope :expired_today_and_not_notified, -> { where(["revoked = false AND expires_at = CURRENT_DATE AND after_expiry_notification_delivered = false"]) } scope :inactive, -> { where("revoked = true OR expires_at < CURRENT_DATE") } scope :with_impersonation, -> { where(impersonation: true) } scope :without_impersonation, -> { where(impersonation: false) } diff --git a/app/models/personal_snippet.rb b/app/models/personal_snippet.rb index 197795dccfe..0915278fb65 100644 --- a/app/models/personal_snippet.rb +++ b/app/models/personal_snippet.rb @@ -3,6 +3,10 @@ class PersonalSnippet < Snippet include WithUploads + def parent_user + author + end + def skip_project_check? 
true end diff --git a/app/models/postgresql/replication_slot.rb b/app/models/postgresql/replication_slot.rb index 7a123deb719..a4370eda5ba 100644 --- a/app/models/postgresql/replication_slot.rb +++ b/app/models/postgresql/replication_slot.rb @@ -33,7 +33,7 @@ module Postgresql # If too many replicas are falling behind too much, the availability of a # GitLab instance might suffer. To prevent this from happening we require # at least 1 replica to have data recent enough. - if sizes.any? && too_great.positive? + if sizes.any? && too_great > 0 (sizes.length - too_great) <= 1 else false diff --git a/app/models/product_analytics_event.rb b/app/models/product_analytics_event.rb index 95a2e7a26c4..579ea88c272 100644 --- a/app/models/product_analytics_event.rb +++ b/app/models/product_analytics_event.rb @@ -19,4 +19,12 @@ class ProductAnalyticsEvent < ApplicationRecord scope :timerange, ->(duration, today = Time.zone.today) { where('collector_tstamp BETWEEN ? AND ? ', today - duration + 1, today + 1) } + + def self.count_by_graph(graph, days) + group(graph).timerange(days).count + end + + def as_json_wo_empty + as_json.compact + end end diff --git a/app/models/project.rb b/app/models/project.rb index 3aa0db56404..e1b6a9c41dd 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -109,7 +109,6 @@ class Project < ApplicationRecord after_update :update_forks_visibility_level before_destroy :remove_private_deploy_keys - before_destroy :cleanup_chat_names use_fast_destroy :build_trace_chunks @@ -168,7 +167,6 @@ class Project < ApplicationRecord has_one :youtrack_service has_one :custom_issue_tracker_service has_one :bugzilla_service - has_one :gitlab_issue_tracker_service, inverse_of: :project has_one :confluence_service has_one :external_wiki_service has_one :prometheus_service, inverse_of: :project @@ -261,6 +259,7 @@ class Project < ApplicationRecord has_many :clusters, through: :cluster_project, class_name: 'Clusters::Cluster' has_many :kubernetes_namespaces, class_name: 'Clusters::KubernetesNamespace' has_many :management_clusters, class_name: 'Clusters::Cluster', foreign_key: :management_project_id, inverse_of: :management_project + has_many :cluster_agents, class_name: 'Clusters::Agent' has_many :prometheus_metrics has_many :prometheus_alerts, inverse_of: :project @@ -300,6 +299,7 @@ class Project < ApplicationRecord has_many :build_trace_chunks, class_name: 'Ci::BuildTraceChunk', through: :builds, source: :trace_chunks has_many :build_report_results, class_name: 'Ci::BuildReportResult', inverse_of: :project has_many :job_artifacts, class_name: 'Ci::JobArtifact' + has_many :pipeline_artifacts, class_name: 'Ci::PipelineArtifact', inverse_of: :project has_many :runner_projects, class_name: 'Ci::RunnerProject', inverse_of: :project has_many :runners, through: :runner_projects, source: :runner, class_name: 'Ci::Runner' has_many :variables, class_name: 'Ci::Variable' @@ -339,6 +339,10 @@ class Project < ApplicationRecord has_many :webide_pipelines, -> { webide_source }, class_name: 'Ci::Pipeline', inverse_of: :project has_many :reviews, inverse_of: :project + # Can be too many records. We need to implement delete_all in batches. 
+ # Issue https://gitlab.com/gitlab-org/gitlab/-/issues/228637 + has_many :product_analytics_events, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent + accepts_nested_attributes_for :variables, allow_destroy: true accepts_nested_attributes_for :project_feature, update_only: true accepts_nested_attributes_for :project_setting, update_only: true @@ -450,6 +454,16 @@ class Project < ApplicationRecord # Sometimes queries (e.g. using CTEs) require explicit disambiguation with table name scope :projects_order_id_desc, -> { reorder(self.arel_table['id'].desc) } + scope :sorted_by_similarity_desc, -> (search) do + order_expression = Gitlab::Database::SimilarityScore.build_expression(search: search, rules: [ + { column: arel_table["path"], multiplier: 1 }, + { column: arel_table["name"], multiplier: 0.7 }, + { column: arel_table["description"], multiplier: 0.2 } + ]) + + reorder(order_expression.desc, arel_table['id'].desc) + end + scope :with_packages, -> { joins(:packages) } scope :in_namespace, ->(namespace_ids) { where(namespace_id: namespace_ids) } scope :personal, ->(user) { where(namespace_id: user.namespace_id) } @@ -637,6 +651,8 @@ class Project < ApplicationRecord scope :joins_import_state, -> { joins("INNER JOIN project_mirror_data import_state ON import_state.project_id = projects.id") } scope :for_group, -> (group) { where(group: group) } scope :for_group_and_its_subgroups, ->(group) { where(namespace_id: group.self_and_descendants.select(:id)) } + scope :for_repository_storage, -> (repository_storage) { where(repository_storage: repository_storage) } + scope :excluding_repository_storage, -> (repository_storage) { where.not(repository_storage: repository_storage) } class << self # Searches for a list of projects based on the query given in `query`. @@ -838,6 +854,10 @@ class Project < ApplicationRecord auto_devops_config[:scope] != :project && !auto_devops_config[:status] end + def has_packages?(package_type) + packages.where(package_type: package_type).exists? + end + def first_auto_devops_config return namespace.first_auto_devops_config if auto_devops&.enabled.nil? @@ -1103,7 +1123,7 @@ class Project < ApplicationRecord limit = creator.projects_limit error = - if limit.zero? + if limit == 0 _('Personal project creation is not allowed. Please contact your administrator with questions') else _('Your project limit is %{limit} projects! Please contact your administrator to increase it') @@ -1375,6 +1395,16 @@ class Project < ApplicationRecord group || namespace.try(:owner) end + def default_owner + obj = owner + + if obj.respond_to?(:default_owner) + obj.default_owner + else + obj + end + end + def to_ability_name model_name.singular end @@ -1725,7 +1755,7 @@ class Project < ApplicationRecord end def pages_deployed? - Dir.exist?(public_pages_path) + pages_metadatum&.deployed? end def pages_group_url @@ -1758,10 +1788,6 @@ class Project < ApplicationRecord File.join(Settings.pages.path, full_path) end - def public_pages_path - File.join(pages_path, 'public') - end - def pages_available? Gitlab.config.pages.enabled end @@ -1788,7 +1814,6 @@ class Project < ApplicationRecord return unless namespace mark_pages_as_not_deployed unless destroyed? - ::Projects::UpdatePagesConfigurationService.new(self).execute # 1. We rename pages to temporary directory # 2. We wait 5 minutes, due to NFS caching @@ -1926,17 +1951,6 @@ class Project < ApplicationRecord import_export_upload&.export_file end - # Before 12.9 we did not correctly clean up chat names and this causes issues. 
- # In 12.9, we add a foreign key relationship, but this code is used ensure the chat names are cleaned up while a post - # migration enables the foreign key relationship. - # - # This should be removed in 13.0. - # - # https://gitlab.com/gitlab-org/gitlab/issues/204787 - def cleanup_chat_names - ChatName.where(service: services.select(:id)).delete_all - end - def full_path_slug Gitlab::Utils.slugify(full_path.to_s) end @@ -2466,6 +2480,10 @@ class Project < ApplicationRecord alias_method :service_desk_enabled?, :service_desk_enabled def service_desk_address + service_desk_custom_address || service_desk_incoming_address + end + + def service_desk_incoming_address return unless service_desk_enabled? config = Gitlab.config.incoming_email @@ -2474,6 +2492,16 @@ class Project < ApplicationRecord config.address&.gsub(wildcard, "#{full_path_slug}-#{id}-issue-") end + def service_desk_custom_address + return unless ::Gitlab::ServiceDeskEmail.enabled? + return unless ::Feature.enabled?(:service_desk_custom_address, self) + + key = service_desk_setting&.project_key + return unless key.present? + + ::Gitlab::ServiceDeskEmail.address_for_key("#{full_path_slug}-#{key}") + end + def root_namespace if namespace.has_parent? namespace.root_ancestor @@ -2578,6 +2606,8 @@ class Project < ApplicationRecord namespace != from.namespace when Namespace namespace != from + when User + true end end diff --git a/app/models/project_repository_storage_move.rb b/app/models/project_repository_storage_move.rb index b18d9765a57..2b74d9ccd88 100644 --- a/app/models/project_repository_storage_move.rb +++ b/app/models/project_repository_storage_move.rb @@ -29,12 +29,17 @@ class ProjectRepositoryStorageMove < ApplicationRecord transition scheduled: :started end - event :finish do - transition started: :finished + event :finish_replication do + transition started: :replicated + end + + event :finish_cleanup do + transition replicated: :finished end event :do_fail do transition [:initial, :scheduled, :started] => :failed + transition replicated: :cleanup_failed end after_transition initial: :scheduled do |storage_move| @@ -49,7 +54,7 @@ class ProjectRepositoryStorageMove < ApplicationRecord end end - after_transition started: :finished do |storage_move| + after_transition started: :replicated do |storage_move| storage_move.project.update_columns( repository_read_only: false, repository_storage: storage_move.destination_storage_name @@ -65,6 +70,8 @@ class ProjectRepositoryStorageMove < ApplicationRecord state :started, value: 3 state :finished, value: 4 state :failed, value: 5 + state :replicated, value: 6 + state :cleanup_failed, value: 7 end scope :order_created_at_desc, -> { order(created_at: :desc) } diff --git a/app/models/project_services/buildkite_service.rb b/app/models/project_services/buildkite_service.rb index fc7a0180786..53bb7b47b41 100644 --- a/app/models/project_services/buildkite_service.rb +++ b/app/models/project_services/buildkite_service.rb @@ -8,13 +8,32 @@ class BuildkiteService < CiService ENDPOINT = "https://buildkite.com" prop_accessor :project_url, :token - boolean_accessor :enable_ssl_verification validates :project_url, presence: true, public_url: true, if: :activated? validates :token, presence: true, if: :activated? after_save :compose_service_hook, if: :activated? 
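The Buildkite hunks that follow pin SSL verification on: the reader becomes a stub that always returns true, the writer only discards the stale stored property, and compose_service_hook hard-codes verified SSL on the web hook. A minimal console sketch of the resulting behaviour; the project.buildkite_service accessor and an already configured integration are assumptions here, while the method names come from the diff itself:

  service = project.buildkite_service      # assumed has_one accessor
  service.enable_ssl_verification          # => true, whatever was stored before
  service.enable_ssl_verification = false  # no-op apart from deleting the old property key
  service.compose_service_hook             # normally runs via the after_save callback
  service.service_hook.enable_ssl_verification  # => true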
+ def self.supported_events + %w(push merge_request tag_push) + end + + # This is a stub method to work with deprecated API response + # TODO: remove enable_ssl_verification after 14.0 + # https://gitlab.com/gitlab-org/gitlab/-/issues/222808 + def enable_ssl_verification + true + end + + # Since SSL verification will always be enabled for Buildkite, + # we no longer needs to store the boolean. + # This is a stub method to work with deprecated API param. + # TODO: remove enable_ssl_verification after 14.0 + # https://gitlab.com/gitlab-org/gitlab/-/issues/222808 + def enable_ssl_verification=(_value) + self.properties.delete('enable_ssl_verification') # Remove unused key + end + def webhook_url "#{buildkite_endpoint('webhook')}/deliver/#{webhook_token}" end @@ -22,7 +41,7 @@ class BuildkiteService < CiService def compose_service_hook hook = service_hook || build_service_hook hook.url = webhook_url - hook.enable_ssl_verification = !!enable_ssl_verification + hook.enable_ssl_verification = true hook.save end @@ -49,7 +68,7 @@ class BuildkiteService < CiService end def description - 'Continuous integration and deployments' + 'Buildkite is a platform for running fast, secure, and scalable continuous integration pipelines on your own infrastructure' end def self.to_param @@ -60,15 +79,15 @@ class BuildkiteService < CiService [ { type: 'text', name: 'token', - placeholder: 'Buildkite project GitLab token', required: true }, + title: 'Integration Token', + help: 'This token will be provided when you create a Buildkite pipeline with a GitLab repository', + required: true }, { type: 'text', name: 'project_url', - placeholder: "#{ENDPOINT}/example/project", required: true }, - - { type: 'checkbox', - name: 'enable_ssl_verification', - title: "Enable SSL verification" } + title: 'Pipeline URL', + placeholder: "#{ENDPOINT}/acme-inc/test-pipeline", + required: true } ] end diff --git a/app/models/project_services/gitlab_issue_tracker_service.rb b/app/models/project_services/gitlab_issue_tracker_service.rb deleted file mode 100644 index b3f44e040bc..00000000000 --- a/app/models/project_services/gitlab_issue_tracker_service.rb +++ /dev/null @@ -1,45 +0,0 @@ -# frozen_string_literal: true - -class GitlabIssueTrackerService < IssueTrackerService - include Gitlab::Routing - - validates :project_url, :issues_url, :new_issue_url, presence: true, public_url: true, if: :activated? - - default_value_for :default, true - - def title - 'GitLab' - end - - def description - s_('IssueTracker|GitLab issue tracker') - end - - def self.to_param - 'gitlab' - end - - def project_url - project_issues_url(project) - end - - def new_issue_url - new_project_issue_url(project) - end - - def issue_url(iid) - project_issue_url(project, id: iid) - end - - def issue_tracker_path - project_issues_path(project) - end - - def new_issue_path - new_project_issue_path(project) - end - - def issue_path(iid) - project_issue_path(project, id: iid) - end -end diff --git a/app/models/project_services/jira_service.rb b/app/models/project_services/jira_service.rb index 4ea2ec10f11..36d7026de30 100644 --- a/app/models/project_services/jira_service.rb +++ b/app/models/project_services/jira_service.rb @@ -8,6 +8,12 @@ class JiraService < IssueTrackerService PROJECTS_PER_PAGE = 50 + # TODO: use jira_service.deployment_type enum when https://gitlab.com/gitlab-org/gitlab/-/merge_requests/37003 is merged + DEPLOYMENT_TYPES = { + server: 'SERVER', + cloud: 'CLOUD' + }.freeze + validates :url, public_url: true, presence: true, if: :activated? 
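The DEPLOYMENT_TYPES constant above maps onto the deployment_type enum added to JiraTrackerData further down in this diff (unknown: 0, server: 1, cloud: 2, with the :deployment prefix). A rough sketch of the helpers a standard ActiveRecord enum generates for it; the jira_tracker_data association name is an assumption:

  data = jira_service.jira_tracker_data  # assumed association to the data-fields record
  data.deployment_server!                # persists deployment_type: 1 ('server')
  data.deployment_server?                # => true
  data.deployment_cloud?                 # => false
  JiraTrackerData.deployment_cloud       # enum scope over records detected as cloud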
validates :api_url, public_url: true, allow_blank: true validates :username, presence: true, if: :activated? @@ -375,7 +381,6 @@ class JiraService < IssueTrackerService def build_entity_url(noteable_type, entity_id) polymorphic_url( [ - self.project.namespace.becomes(Namespace), self.project, noteable_type.to_sym ], diff --git a/app/models/project_services/jira_tracker_data.rb b/app/models/project_services/jira_tracker_data.rb index f24ba8877d2..00b6ab6a70f 100644 --- a/app/models/project_services/jira_tracker_data.rb +++ b/app/models/project_services/jira_tracker_data.rb @@ -7,4 +7,6 @@ class JiraTrackerData < ApplicationRecord attr_encrypted :api_url, encryption_options attr_encrypted :username, encryption_options attr_encrypted :password, encryption_options + + enum deployment_type: { unknown: 0, server: 1, cloud: 2 }, _prefix: :deployment end diff --git a/app/models/project_services/prometheus_service.rb b/app/models/project_services/prometheus_service.rb index 997c6eba91a..950cd4f6859 100644 --- a/app/models/project_services/prometheus_service.rb +++ b/app/models/project_services/prometheus_service.rb @@ -97,7 +97,13 @@ class PrometheusService < MonitoringService def prometheus_client return unless should_return_client? - options = { allow_local_requests: allow_local_api_url? } + options = { + allow_local_requests: allow_local_api_url?, + # We should choose more conservative timeouts, but some queries we run are now busting our + # default timeouts, which are stricter. We should make those queries faster instead. + # See https://gitlab.com/gitlab-org/gitlab/-/issues/233109 + timeout: 60 + } if behind_iap? # Adds the Authorization header diff --git a/app/models/prometheus_alert.rb b/app/models/prometheus_alert.rb index 32f9809e538..f0441d4a3cb 100644 --- a/app/models/prometheus_alert.rb +++ b/app/models/prometheus_alert.rb @@ -3,6 +3,7 @@ class PrometheusAlert < ApplicationRecord include Sortable include UsageStatistics + include Presentable OPERATORS_MAP = { lt: "<", @@ -21,7 +22,9 @@ class PrometheusAlert < ApplicationRecord after_save :clear_prometheus_adapter_cache! after_destroy :clear_prometheus_adapter_cache! - validates :environment, :project, :prometheus_metric, presence: true + validates :environment, :project, :prometheus_metric, :threshold, :operator, presence: true + validates :runbook_url, length: { maximum: 255 }, allow_blank: true, + addressable_url: { enforce_sanitization: true, ascii_only: true } validate :require_valid_environment_project! validate :require_valid_metric_project! @@ -59,6 +62,9 @@ class PrometheusAlert < ApplicationRecord "gitlab" => "hook", "gitlab_alert_id" => prometheus_metric_id, "gitlab_prometheus_alert_id" => id + }, + "annotations" => { + "runbook" => runbook_url } } end diff --git a/app/models/raw_usage_data.rb b/app/models/raw_usage_data.rb new file mode 100644 index 00000000000..18cee55d06e --- /dev/null +++ b/app/models/raw_usage_data.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +class RawUsageData < ApplicationRecord + validates :payload, presence: true + validates :recorded_at, presence: true, uniqueness: true + + def update_sent_at! 
+ self.update_column(:sent_at, Time.current) if Feature.enabled?(:save_raw_usage_data) + end +end diff --git a/app/models/release.rb b/app/models/release.rb index a0245105cd9..4c9d89105d7 100644 --- a/app/models/release.rb +++ b/app/models/release.rb @@ -18,12 +18,10 @@ class Release < ApplicationRecord has_many :milestones, through: :milestone_releases has_many :evidences, inverse_of: :release, class_name: 'Releases::Evidence' - default_value_for :released_at, allows_nil: false do - Time.zone.now - end - accepts_nested_attributes_for :links, allow_destroy: true + before_create :set_released_at + validates :project, :tag, presence: true validates_associated :milestone_releases, message: -> (_, obj) { obj[:value].map(&:errors).map(&:full_messages).join(",") } @@ -90,6 +88,10 @@ class Release < ApplicationRecord repository.find_tag(tag) end end + + def set_released_at + self.released_at ||= created_at + end end Release.prepend_if_ee('EE::Release') diff --git a/app/models/releases/link.rb b/app/models/releases/link.rb index dc7e78a85a9..e1dc3b904b9 100644 --- a/app/models/releases/link.rb +++ b/app/models/releases/link.rb @@ -6,7 +6,7 @@ module Releases belongs_to :release - FILEPATH_REGEX = /\A\/([\-\.\w]+\/?)*[\da-zA-Z]+\z/.freeze + FILEPATH_REGEX = %r{\A/(?:[\-\.\w]+/?)*[\da-zA-Z]+\z}.freeze validates :url, presence: true, addressable_url: { schemes: %w(http https ftp) }, uniqueness: { scope: :release } validates :name, presence: true, uniqueness: { scope: :release } diff --git a/app/models/repository.rb b/app/models/repository.rb index 48e96d4c193..07122db36b3 100644 --- a/app/models/repository.rb +++ b/app/models/repository.rb @@ -43,7 +43,7 @@ class Repository gitlab_ci_yml branch_names tag_names branch_count tag_count avatar exists? root_ref merged_branch_names has_visible_content? issue_template_names merge_request_template_names - metrics_dashboard_paths xcode_project?).freeze + user_defined_metrics_dashboard_paths xcode_project? has_ambiguous_refs?).freeze # Methods that use cache_method but only memoize the value MEMOIZED_CACHED_METHODS = %i(license).freeze @@ -61,7 +61,7 @@ class Repository avatar: :avatar, issue_template: :issue_template_names, merge_request_template: :merge_request_template_names, - metrics_dashboard: :metrics_dashboard_paths, + metrics_dashboard: :user_defined_metrics_dashboard_paths, xcode_config: :xcode_project? }.freeze @@ -196,6 +196,32 @@ class Repository tag_exists?(ref) && branch_exists?(ref) end + # It's possible for a tag name to be a prefix (including slash) of a branch + # name, or vice versa. For instance, a tag named `foo` means we can't create a + # tag `foo/bar`, but we _can_ create a branch `foo/bar`. + # + # If we know a repository has no refs of this type (which is the common case) + # then separating refs from paths - as in ExtractsRef - can be faster. + # + # This method only checks one level deep, so only prefixes that contain no + # slashes are considered. If a repository has a tag `foo/bar` and a branch + # `foo/bar/baz`, it will return false. + def has_ambiguous_refs? + return false unless branch_names.present? && tag_names.present? + + with_slash, no_slash = (branch_names + tag_names).partition { |ref| ref.include?('/') } + + return false if with_slash.empty? + + prefixes = no_slash.map { |ref| Regexp.escape(ref) }.join('|') + prefix_regex = %r{^#{prefixes}/} + + with_slash.any? do |ref| + prefix_regex.match?(ref) + end + end + cache_method :has_ambiguous_refs? 
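A short sketch of what has_ambiguous_refs? reports, using the same cases as the comment above; the ref names are purely illustrative:

  # a tag without a slash ('foo') prefixes a branch with one ('foo/bar') => ambiguous
  repository.tag_names            # => ['foo', 'v1.0.0']
  repository.branch_names         # => ['master', 'foo/bar']
  repository.has_ambiguous_refs?  # => true

  # only one level deep is checked: a repository whose only refs are
  # tag 'foo/bar' and branch 'foo/bar/baz' is not flagged
  repository.tag_names            # => ['foo/bar']
  repository.branch_names         # => ['foo/bar/baz']
  repository.has_ambiguous_refs?  # => false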
+ def expand_ref(ref) if tag_exists?(ref) Gitlab::Git::TAG_REF_PREFIX + ref @@ -286,14 +312,16 @@ class Repository end def expire_tags_cache - expire_method_caches(%i(tag_names tag_count)) + expire_method_caches(%i(tag_names tag_count has_ambiguous_refs?)) @tags = nil + @tag_names_include = nil end def expire_branches_cache - expire_method_caches(%i(branch_names merged_branch_names branch_count has_visible_content?)) + expire_method_caches(%i(branch_names merged_branch_names branch_count has_visible_content? has_ambiguous_refs?)) @local_branches = nil @branch_exists_memo = nil + @branch_names_include = nil end def expire_statistics_caches @@ -576,10 +604,10 @@ class Repository end cache_method :merge_request_template_names, fallback: [] - def metrics_dashboard_paths - Gitlab::Metrics::Dashboard::Finder.find_all_paths_from_source(project) + def user_defined_metrics_dashboard_paths + Gitlab::Metrics::Dashboard::RepoDashboardFinder.list_dashboards(project) end - cache_method :metrics_dashboard_paths + cache_method :user_defined_metrics_dashboard_paths, fallback: [] def readme head_tree&.readme @@ -852,7 +880,7 @@ class Repository def revert( user, commit, branch_name, message, - start_branch_name: nil, start_project: project) + start_branch_name: nil, start_project: project, dry_run: false) with_cache_hooks do raw_repository.revert( @@ -861,14 +889,15 @@ class Repository branch_name: branch_name, message: message, start_branch_name: start_branch_name, - start_repository: start_project.repository.raw_repository + start_repository: start_project.repository.raw_repository, + dry_run: dry_run ) end end def cherry_pick( user, commit, branch_name, message, - start_branch_name: nil, start_project: project) + start_branch_name: nil, start_project: project, dry_run: false) with_cache_hooks do raw_repository.cherry_pick( @@ -877,7 +906,8 @@ class Repository branch_name: branch_name, message: message, start_branch_name: start_branch_name, - start_repository: start_project.repository.raw_repository + start_repository: start_project.repository.raw_repository, + dry_run: dry_run ) end end diff --git a/app/models/resource_iteration_event.rb b/app/models/resource_iteration_event.rb new file mode 100644 index 00000000000..78d85ea8b95 --- /dev/null +++ b/app/models/resource_iteration_event.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class ResourceIterationEvent < ResourceTimeboxEvent + belongs_to :iteration +end diff --git a/app/models/resource_milestone_event.rb b/app/models/resource_milestone_event.rb index 36068cf508b..5fd71612de0 100644 --- a/app/models/resource_milestone_event.rb +++ b/app/models/resource_milestone_event.rb @@ -1,30 +1,17 @@ # frozen_string_literal: true -class ResourceMilestoneEvent < ResourceEvent +class ResourceMilestoneEvent < ResourceTimeboxEvent include IgnorableColumns - include IssueResourceEvent - include MergeRequestResourceEvent belongs_to :milestone - validate :exactly_one_issuable - scope :include_relations, -> { includes(:user, milestone: [:project, :group]) } - enum action: { - add: 1, - remove: 2 - } - # state is used for issue and merge request states. 
enum state: Issue.available_states.merge(MergeRequest.available_states) ignore_columns %i[reference reference_html cached_markdown_version], remove_with: '13.1', remove_after: '2020-06-22' - def self.issuable_attrs - %i(issue merge_request).freeze - end - def milestone_title milestone&.title end @@ -32,8 +19,4 @@ class ResourceMilestoneEvent < ResourceEvent def milestone_parent milestone&.parent end - - def issuable - issue || merge_request - end end diff --git a/app/models/resource_timebox_event.rb b/app/models/resource_timebox_event.rb new file mode 100644 index 00000000000..44f48915425 --- /dev/null +++ b/app/models/resource_timebox_event.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +class ResourceTimeboxEvent < ResourceEvent + self.abstract_class = true + + include IssueResourceEvent + include MergeRequestResourceEvent + + validate :exactly_one_issuable + + enum action: { + add: 1, + remove: 2 + } + + def self.issuable_attrs + %i(issue merge_request).freeze + end + + def issuable + issue || merge_request + end +end diff --git a/app/models/service.rb b/app/models/service.rb index 89bde61bfe1..40e7e5552d1 100644 --- a/app/models/service.rb +++ b/app/models/service.rb @@ -10,6 +10,7 @@ class Service < ApplicationRecord include IgnorableColumns ignore_columns %i[title description], remove_with: '13.4', remove_after: '2020-09-22' + ignore_columns %i[default], remove_with: '13.5', remove_after: '2020-10-22' SERVICE_NAMES = %w[ alerts asana assembla bamboo bugzilla buildkite campfire confluence custom_issue_tracker discord @@ -47,19 +48,20 @@ class Service < ApplicationRecord belongs_to :project, inverse_of: :services has_one :service_hook - validates :project_id, presence: true, unless: -> { template? || instance? } - validates :project_id, absence: true, if: -> { template? || instance? } - validates :type, uniqueness: { scope: :project_id }, unless: -> { template? || instance? }, on: :create + validates :project_id, presence: true, unless: -> { template? || instance? || group_id } + validates :group_id, presence: true, unless: -> { template? || instance? || project_id } + validates :project_id, :group_id, absence: true, if: -> { template? || instance? } + validates :type, uniqueness: { scope: :project_id }, unless: -> { template? || instance? || group_id }, on: :create + validates :type, uniqueness: { scope: :group_id }, unless: -> { template? || instance? || project_id } validates :type, presence: true validates :template, uniqueness: { scope: :type }, if: -> { template? } validates :instance, uniqueness: { scope: :type }, if: -> { instance? 
} validate :validate_is_instance_or_template + validate :validate_belongs_to_project_or_group - scope :visible, -> { where.not(type: 'GitlabIssueTrackerService') } - scope :issue_trackers, -> { where(category: 'issue_tracker') } + scope :external_issue_trackers, -> { where(category: 'issue_tracker').active } scope :external_wikis, -> { where(type: 'ExternalWikiService').active } scope :active, -> { where(active: true) } - scope :without_defaults, -> { where(default: false) } scope :by_type, -> (type) { where(type: type) } scope :by_active_flag, -> (flag) { where(active: flag) } scope :templates, -> { where(template: true, type: available_services_types) } @@ -77,7 +79,6 @@ class Service < ApplicationRecord scope :wiki_page_hooks, -> { where(wiki_page_events: true, active: true) } scope :deployment_hooks, -> { where(deployment_events: true, active: true) } scope :alert_hooks, -> { where(alert_events: true, active: true) } - scope :external_issue_trackers, -> { issue_trackers.active.without_defaults } scope :deployment, -> { where(category: 'deployment') } default_value_for :category, 'common' @@ -379,6 +380,10 @@ class Service < ApplicationRecord errors.add(:template, 'The service should be a service template or instance-level integration') if template? && instance? end + def validate_belongs_to_project_or_group + errors.add(:project_id, 'The service cannot belong to both a project and a group') if project_id && group_id + end + def cache_project_has_external_issue_tracker if project && !project.destroyed? project.cache_has_external_issue_tracker diff --git a/app/models/suggestion.rb b/app/models/suggestion.rb index 94f3a140098..8c72bd5ae7e 100644 --- a/app/models/suggestion.rb +++ b/app/models/suggestion.rb @@ -43,12 +43,12 @@ class Suggestion < ApplicationRecord def inapplicable_reason(cached: true) strong_memoize("inapplicable_reason_#{cached}") do - next :applied if applied? - next :merge_request_merged if noteable.merged? - next :merge_request_closed if noteable.closed? - next :source_branch_deleted unless noteable.source_branch_exists? - next :outdated if outdated?(cached: cached) || !note.active? - next :same_content unless different_content? + next _("Can't apply this suggestion.") if applied? + next _("This merge request was merged. To apply this suggestion, edit this file directly.") if noteable.merged? + next _("This merge request is closed. To apply this suggestion, edit this file directly.") if noteable.closed? + next _("Can't apply as the source branch was deleted.") unless noteable.source_branch_exists? + next outdated_reason if outdated?(cached: cached) || !note.active? + next _("This suggestion already matches its content.") unless different_content? end end @@ -61,7 +61,7 @@ class Suggestion < ApplicationRecord end def single_line? - lines_above.zero? && lines_below.zero? + lines_above == 0 && lines_below == 0 end def target_line @@ -73,4 +73,12 @@ class Suggestion < ApplicationRecord def different_content? from_content != to_content end + + def outdated_reason + if single_line? 
+ _("Can't apply as this line was changed in a more recent version.") + else + _("Can't apply as these lines were changed in a more recent version.") + end + end end diff --git a/app/models/terraform/state.rb b/app/models/terraform/state.rb index 6ed074b2190..c50b9da1310 100644 --- a/app/models/terraform/state.rb +++ b/app/models/terraform/state.rb @@ -3,6 +3,7 @@ module Terraform class State < ApplicationRecord include UsageStatistics + include FileStoreMounter DEFAULT = '{"version":1}'.freeze HEX_REGEXP = %r{\A\h+\z}.freeze @@ -17,24 +18,22 @@ module Terraform default_value_for(:uuid, allows_nil: false) { SecureRandom.hex(UUID_LENGTH / 2) } - after_save :update_file_store, if: :saved_change_to_file? - - mount_uploader :file, StateUploader + mount_file_store_uploader StateUploader default_value_for(:file) { CarrierWaveStringFile.new(DEFAULT) } - def update_file_store - # The file.object_store is set during `uploader.store!` - # which happens after object is inserted/updated - self.update_column(:file_store, file.object_store) - end - def file_store super || StateUploader.default_store end + def local? + file_store == ObjectStorage::Store::LOCAL + end + def locked? self.lock_xid.present? end end end + +Terraform::State.prepend_if_ee('EE::Terraform::State') diff --git a/app/models/user.rb b/app/models/user.rb index 643b759e6f4..1a67116c1f2 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -104,6 +104,7 @@ class User < ApplicationRecord # Profile has_many :keys, -> { regular_keys }, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent has_many :deploy_keys, -> { where(type: 'DeployKey') }, dependent: :nullify # rubocop:disable Cop/ActiveRecordDependent + has_many :group_deploy_keys has_many :gpg_keys has_many :emails, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent @@ -350,11 +351,25 @@ class User < ApplicationRecord .without_impersonation .expiring_and_not_notified(at).select(1)) end + scope :with_personal_access_tokens_expired_today, -> do + where('EXISTS (?)', + ::PersonalAccessToken + .select(1) + .where('personal_access_tokens.user_id = users.id') + .without_impersonation + .expired_today_and_not_notified) + end scope :order_recent_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'DESC')) } scope :order_oldest_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'ASC')) } scope :order_recent_last_activity, -> { reorder(Gitlab::Database.nulls_last_order('last_activity_on', 'DESC')) } scope :order_oldest_last_activity, -> { reorder(Gitlab::Database.nulls_first_order('last_activity_on', 'ASC')) } + def preferred_language + read_attribute('preferred_language') || + I18n.default_locale.to_s.presence_in(Gitlab::I18n::AVAILABLE_LANGUAGES.keys) || + 'en' + end + def active_for_authentication? super && can?(:log_in) end @@ -948,7 +963,7 @@ class User < ApplicationRecord def require_ssh_key? count = Users::KeysCountService.new(self).count - count.zero? 
&& Gitlab::ProtocolAccess.allowed?('ssh') + count == 0 && Gitlab::ProtocolAccess.allowed?('ssh') end # rubocop: enable CodeReuse/ServiceClass diff --git a/app/models/user_callout_enums.rb b/app/models/user_callout_enums.rb index 226c8cd9ab5..5b64befd284 100644 --- a/app/models/user_callout_enums.rb +++ b/app/models/user_callout_enums.rb @@ -18,7 +18,9 @@ module UserCalloutEnums tabs_position_highlight: 10, webhooks_moved: 13, admin_integrations_moved: 15, - personal_access_token_expiry: 21 # EE-only + personal_access_token_expiry: 21, # EE-only + suggest_pipeline: 22, + customize_homepage: 23 } end end diff --git a/app/models/wiki.rb b/app/models/wiki.rb index 4c497cc304c..30273d646cf 100644 --- a/app/models/wiki.rb +++ b/app/models/wiki.rb @@ -35,6 +35,7 @@ class Wiki def initialize(container, user = nil) @container = container @user = user + raise ArgumentError, "user must be a User, got #{user.class}" if user && !user.is_a?(User) end def path diff --git a/app/models/wiki_page.rb b/app/models/wiki_page.rb index 3dc90edb331..faf3d19d936 100644 --- a/app/models/wiki_page.rb +++ b/app/models/wiki_page.rb @@ -65,6 +65,7 @@ class WikiPage validates :title, presence: true validates :content, presence: true validate :validate_path_limits, if: :title_changed? + validate :validate_content_size_limit, if: :content_changed? # The GitLab Wiki instance. attr_reader :wiki @@ -97,6 +98,7 @@ class WikiPage def slug attributes[:slug].presence || wiki.wiki.preview_slug(title, format) end + alias_method :id, :slug # required to use build_stubbed alias_method :to_param, :slug @@ -264,8 +266,8 @@ class WikiPage '../shared/wikis/wiki_page' end - def id - page.version.to_s + def sha + page.version&.sha end def title_changed? @@ -282,6 +284,17 @@ class WikiPage end end + def content_changed? + if persisted? + # gollum-lib always converts CRLFs to LFs in Gollum::Wiki#normalize, + # so we need to do the same here. + # Also see https://gitlab.com/gitlab-org/gitlab/-/issues/21431 + raw_content.delete("\r") != page&.text_data + else + raw_content.present? + end + end + # Updates the current @attributes hash by merging a hash of params def update_attributes(attrs) attrs[:title] = process_title(attrs[:title]) if attrs[:title].present? @@ -391,4 +404,15 @@ class WikiPage }) end end + + def validate_content_size_limit + current_value = raw_content.to_s.bytesize + max_size = Gitlab::CurrentSettings.wiki_page_max_content_bytes + return if current_value <= max_size + + errors.add(:content, _('is too long (%{current_value}). The maximum size is %{max_size}.') % { + current_value: ActiveSupport::NumberHelper.number_to_human_size(current_value), + max_size: ActiveSupport::NumberHelper.number_to_human_size(max_size) + }) + end end |
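The new content validation above leans on the Gitlab::CurrentSettings.wiki_page_max_content_bytes lookup and only runs when the content actually changed. A minimal sketch of the failure mode, assuming the instance limit is 1 KB and page is an unsaved WikiPage whose attributes carry roughly 2 KB of content:

  Gitlab::CurrentSettings.wiki_page_max_content_bytes  # => 1024 in this example
  page.valid?                                          # => false
  page.errors[:content]
  # => ["is too long (2 KB). The maximum size is 1 KB."]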