author    GitLab Bot <gitlab-bot@gitlab.com>  2020-09-19 01:45:44 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-09-19 01:45:44 +0000
commit    85dc423f7090da0a52c73eb66faf22ddb20efff9 (patch)
tree      9160f299afd8c80c038f08e1545be119f5e3f1e1 /app/models
parent    15c2c8c66dbe422588e5411eee7e68f1fa440bb8 (diff)
download  gitlab-ce-85dc423f7090da0a52c73eb66faf22ddb20efff9.tar.gz
Add latest changes from gitlab-org/gitlab@13-4-stable-ee
Diffstat (limited to 'app/models')
-rw-r--r--  app/models/alert_management/alert.rb | 43
-rw-r--r--  app/models/analytics/instance_statistics.rb | 9
-rw-r--r--  app/models/analytics/instance_statistics/measurement.rb | 31
-rw-r--r--  app/models/application_record.rb | 4
-rw-r--r--  app/models/application_setting.rb | 19
-rw-r--r--  app/models/application_setting_implementation.rb | 7
-rw-r--r--  app/models/atlassian/identity.rb | 26
-rw-r--r--  app/models/audit_event.rb | 12
-rw-r--r--  app/models/authentication_event.rb | 12
-rw-r--r--  app/models/blob_viewer/dependency_manager.rb | 4
-rw-r--r--  app/models/blob_viewer/metrics_dashboard_yml.rb | 26
-rw-r--r--  app/models/ci/bridge.rb | 12
-rw-r--r--  app/models/ci/build.rb | 52
-rw-r--r--  app/models/ci/build_pending_state.rb | 12
-rw-r--r--  app/models/ci/build_trace_chunk.rb | 88
-rw-r--r--  app/models/ci/build_trace_chunks/database.rb | 2
-rw-r--r--  app/models/ci/build_trace_chunks/redis.rb | 4
-rw-r--r--  app/models/ci/daily_build_group_report_result.rb | 2
-rw-r--r--  app/models/ci/job_artifact.rb | 17
-rw-r--r--  app/models/ci/pipeline.rb | 98
-rw-r--r--  app/models/ci/pipeline_artifact.rb | 29
-rw-r--r--  app/models/ci/pipeline_enums.rb | 73
-rw-r--r--  app/models/ci/ref.rb | 2
-rw-r--r--  app/models/ci/runner.rb | 2
-rw-r--r--  app/models/ci_platform_metric.rb | 39
-rw-r--r--  app/models/clusters/agent.rb | 2
-rw-r--r--  app/models/clusters/applications/prometheus.rb | 6
-rw-r--r--  app/models/clusters/applications/runner.rb | 2
-rw-r--r--  app/models/clusters/cluster.rb | 1
-rw-r--r--  app/models/clusters/instance.rb | 2
-rw-r--r--  app/models/clusters/providers/aws.rb | 2
-rw-r--r--  app/models/commit.rb | 6
-rw-r--r--  app/models/commit_status.rb | 15
-rw-r--r--  app/models/commit_status_enums.rb | 32
-rw-r--r--  app/models/concerns/admin_changed_password_notifier.rb | 60
-rw-r--r--  app/models/concerns/bulk_member_access_load.rb | 2
-rw-r--r--  app/models/concerns/checksummable.rb | 6
-rw-r--r--  app/models/concerns/ci/artifactable.rb | 20
-rw-r--r--  app/models/concerns/discussion_on_diff.rb | 2
-rw-r--r--  app/models/concerns/enums/ci/pipeline.rb | 74
-rw-r--r--  app/models/concerns/enums/commit_status.rb | 35
-rw-r--r--  app/models/concerns/enums/internal_id.rb (renamed from app/models/internal_id_enums.rb) | 14
-rw-r--r--  app/models/concerns/enums/prometheus_metric.rb | 91
-rw-r--r--  app/models/concerns/from_except.rb | 37
-rw-r--r--  app/models/concerns/from_intersect.rb | 37
-rw-r--r--  app/models/concerns/from_set_operator.rb | 19
-rw-r--r--  app/models/concerns/from_union.rb | 16
-rw-r--r--  app/models/concerns/has_wiki.rb | 4
-rw-r--r--  app/models/concerns/id_in_ordered.rb | 20
-rw-r--r--  app/models/concerns/issuable.rb | 39
-rw-r--r--  app/models/concerns/loaded_in_group_list.rb | 3
-rw-r--r--  app/models/concerns/mentionable/reference_regexes.rb | 2
-rw-r--r--  app/models/concerns/milestoneable.rb | 2
-rw-r--r--  app/models/concerns/optimized_issuable_label_filter.rb | 107
-rw-r--r--  app/models/concerns/prometheus_adapter.rb | 11
-rw-r--r--  app/models/concerns/relative_positioning.rb | 424
-rw-r--r--  app/models/concerns/resolvable_discussion.rb | 1
-rw-r--r--  app/models/concerns/storage/legacy_namespace.rb | 16
-rw-r--r--  app/models/concerns/timebox.rb | 8
-rw-r--r--  app/models/cycle_analytics/level_base.rb | 2
-rw-r--r--  app/models/data_list.rb | 10
-rw-r--r--  app/models/deployment.rb | 2
-rw-r--r--  app/models/design_management/design.rb | 14
-rw-r--r--  app/models/design_management/design_collection.rb | 60
-rw-r--r--  app/models/dev_ops_report/card.rb (renamed from app/models/dev_ops_score/card.rb) | 2
-rw-r--r--  app/models/dev_ops_report/idea_to_production_step.rb (renamed from app/models/dev_ops_score/idea_to_production_step.rb) | 2
-rw-r--r--  app/models/dev_ops_report/metric.rb (renamed from app/models/dev_ops_score/metric.rb) | 2
-rw-r--r--  app/models/diff_discussion.rb | 1
-rw-r--r--  app/models/discussion.rb | 1
-rw-r--r--  app/models/environment.rb | 2
-rw-r--r--  app/models/group.rb | 23
-rw-r--r--  app/models/group_deploy_key.rb | 4
-rw-r--r--  app/models/internal_id.rb | 2
-rw-r--r--  app/models/issuable_severity.rb | 18
-rw-r--r--  app/models/issue.rb | 42
-rw-r--r--  app/models/issue_link.rb | 38
-rw-r--r--  app/models/iteration.rb | 8
-rw-r--r--  app/models/jira_connect_installation.rb | 22
-rw-r--r--  app/models/jira_connect_subscription.rb | 12
-rw-r--r--  app/models/jira_import_state.rb | 4
-rw-r--r--  app/models/lfs_objects_project.rb | 1
-rw-r--r--  app/models/member.rb | 13
-rw-r--r--  app/models/members/group_member.rb | 9
-rw-r--r--  app/models/members_preloader.rb | 1
-rw-r--r--  app/models/merge_request.rb | 79
-rw-r--r--  app/models/merge_request_assignee.rb | 2
-rw-r--r--  app/models/merge_request_diff.rb | 23
-rw-r--r--  app/models/merge_request_diff_file.rb | 12
-rw-r--r--  app/models/merge_request_reviewer.rb | 6
-rw-r--r--  app/models/namespace.rb | 8
-rw-r--r--  app/models/namespace/root_storage_statistics.rb | 14
-rw-r--r--  app/models/note.rb | 45
-rw-r--r--  app/models/operations/feature_flag.rb | 101
-rw-r--r--  app/models/operations/feature_flag_scope.rb | 62
-rw-r--r--  app/models/operations/feature_flags/scope.rb | 13
-rw-r--r--  app/models/operations/feature_flags/strategy.rb | 94
-rw-r--r--  app/models/operations/feature_flags/strategy_user_list.rb | 12
-rw-r--r--  app/models/operations/feature_flags/user_list.rb | 36
-rw-r--r--  app/models/operations/feature_flags_client.rb | 25
-rw-r--r--  app/models/packages/conan/file_metadatum.rb | 3
-rw-r--r--  app/models/packages/package.rb | 14
-rw-r--r--  app/models/packages/pypi/metadatum.rb | 1
-rw-r--r--  app/models/pages/lookup_path.rb | 32
-rw-r--r--  app/models/pages_deployment.rb | 11
-rw-r--r--  app/models/pages_domain.rb | 6
-rw-r--r--  app/models/performance_monitoring/prometheus_dashboard.rb | 11
-rw-r--r--  app/models/product_analytics_event.rb | 9
-rw-r--r--  app/models/project.rb | 147
-rw-r--r--  app/models/project_feature_usage.rb | 31
-rw-r--r--  app/models/project_pages_metadatum.rb | 1
-rw-r--r--  app/models/project_services/chat_message/merge_message.rb | 16
-rw-r--r--  app/models/project_services/ewm_service.rb | 29
-rw-r--r--  app/models/project_services/jira_service.rb | 32
-rw-r--r--  app/models/project_services/prometheus_service.rb | 10
-rw-r--r--  app/models/project_statistics.rb | 12
-rw-r--r--  app/models/project_team.rb | 34
-rw-r--r--  app/models/project_wiki.rb | 17
-rw-r--r--  app/models/prometheus_metric.rb | 6
-rw-r--r--  app/models/prometheus_metric_enums.rb | 84
-rw-r--r--  app/models/protected_branch.rb | 4
-rw-r--r--  app/models/remote_mirror.rb | 4
-rw-r--r--  app/models/repository.rb | 2
-rw-r--r--  app/models/resource_iteration_event.rb | 5
-rw-r--r--  app/models/resource_state_event.rb | 2
-rw-r--r--  app/models/security_event.rb | 4
-rw-r--r--  app/models/service.rb | 352
-rw-r--r--  app/models/service_list.rb | 14
-rw-r--r--  app/models/snippet.rb | 17
-rw-r--r--  app/models/snippet_input_action.rb | 2
-rw-r--r--  app/models/snippet_repository.rb | 4
-rw-r--r--  app/models/snippet_statistics.rb | 2
-rw-r--r--  app/models/system_note_metadata.rb | 3
-rw-r--r--  app/models/terraform/state.rb | 24
-rw-r--r--  app/models/terraform/state_version.rb | 18
-rw-r--r--  app/models/timelog.rb | 1
-rw-r--r--  app/models/todo.rb | 6
-rw-r--r--  app/models/user.rb | 43
-rw-r--r--  app/models/user_callout.rb | 27
-rw-r--r--  app/models/user_callout_enums.rb | 28
-rw-r--r--  app/models/vulnerability.rb | 17
-rw-r--r--  app/models/wiki.rb | 26
141 files changed, 2520 insertions, 1161 deletions
diff --git a/app/models/alert_management/alert.rb b/app/models/alert_management/alert.rb
index 75581805b49..e9b89af45c6 100644
--- a/app/models/alert_management/alert.rb
+++ b/app/models/alert_management/alert.rb
@@ -11,6 +11,8 @@ module AlertManagement
include Noteable
include Gitlab::SQL::Pattern
include Presentable
+ include Gitlab::Utils::StrongMemoize
+ include Referable
STATUSES = {
triggered: 0,
@@ -31,8 +33,6 @@ module AlertManagement
:acknowledged
].freeze
- DETAILS_IGNORED_PARAMS = %w(start_time).freeze
-
belongs_to :project
belongs_to :issue, optional: true
belongs_to :prometheus_alert, optional: true
@@ -118,7 +118,7 @@ module AlertManagement
end
delegate :iid, to: :issue, prefix: true, allow_nil: true
- delegate :metrics_dashboard_url, :runbook, :details_url, to: :present
+ delegate :details_url, to: :present
scope :for_iid, -> (iid) { where(iid: iid) }
scope :for_status, -> (status) { where(status: status) }
@@ -171,10 +171,23 @@ module AlertManagement
with_prometheus_alert.where(id: ids)
end
- def details
- details_payload = payload.except(*attributes.keys, *DETAILS_IGNORED_PARAMS)
+ def self.reference_prefix
+ '^alert#'
+ end
- Gitlab::Utils::InlineHash.merge_keys(details_payload)
+ def self.reference_pattern
+ @reference_pattern ||= %r{
+ (#{Project.reference_pattern})?
+ #{Regexp.escape(reference_prefix)}(?<alert>\d+)
+ }x
+ end
+
+ def self.link_reference_pattern
+ @link_reference_pattern ||= super("alert_management", /(?<alert>\d+)\/details(\#)?/)
+ end
+
+ def self.reference_valid?(reference)
+ reference.to_i > 0 && reference.to_i <= Gitlab::Database::MAX_INT_VALUE
end
def prometheus?
@@ -185,10 +198,10 @@ module AlertManagement
increment!(:events)
end
- # required for todos (typically contains an identifier like issue iid)
- # no-op; we could use iid, but we don't have a reference prefix
- def to_reference(_from = nil, full: false)
- ''
+ def to_reference(from = nil, full: false)
+ reference = "#{self.class.reference_prefix}#{iid}"
+
+ "#{project.to_reference_base(from, full: full)}#{reference}"
end
def execute_services
@@ -197,10 +210,12 @@ module AlertManagement
project.execute_services(hook_data, :alert_hooks)
end
- def present
- return super(presenter_class: AlertManagement::PrometheusAlertPresenter) if prometheus?
-
- super
+ # Representation of the alert's payload. Avoid accessing
+ # #payload attribute directly.
+ def parsed_payload
+ strong_memoize(:parsed_payload) do
+ Gitlab::AlertManagement::Payload.parse(project, payload, monitoring_tool: monitoring_tool)
+ end
end
private
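The new Referable support above replaces the old empty to_reference. A minimal usage sketch (the record, iid, and project path below are hypothetical, not taken from this commit):

alert = AlertManagement::Alert.last
alert.to_reference                            # e.g. "^alert#3" within the same project
alert.to_reference(other_project)             # e.g. "my-group/my-project^alert#3"
AlertManagement::Alert.reference_pattern.match("^alert#3")[:alert]   # => "3"
AlertManagement::Alert.reference_valid?("3")  # => true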
diff --git a/app/models/analytics/instance_statistics.rb b/app/models/analytics/instance_statistics.rb
new file mode 100644
index 00000000000..df7b26e4fa6
--- /dev/null
+++ b/app/models/analytics/instance_statistics.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module Analytics
+ module InstanceStatistics
+ def self.table_name_prefix
+ 'analytics_instance_statistics_'
+ end
+ end
+end
diff --git a/app/models/analytics/instance_statistics/measurement.rb b/app/models/analytics/instance_statistics/measurement.rb
new file mode 100644
index 00000000000..eaaf9e999b3
--- /dev/null
+++ b/app/models/analytics/instance_statistics/measurement.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Analytics
+ module InstanceStatistics
+ class Measurement < ApplicationRecord
+ enum identifier: {
+ projects: 1,
+ users: 2,
+ issues: 3,
+ merge_requests: 4,
+ groups: 5,
+ pipelines: 6
+ }
+
+ IDENTIFIER_QUERY_MAPPING = {
+ identifiers[:projects] => -> { Project },
+ identifiers[:users] => -> { User },
+ identifiers[:issues] => -> { Issue },
+ identifiers[:merge_requests] => -> { MergeRequest },
+ identifiers[:groups] => -> { Group },
+ identifiers[:pipelines] => -> { Ci::Pipeline }
+ }.freeze
+
+ validates :recorded_at, :identifier, :count, presence: true
+ validates :recorded_at, uniqueness: { scope: :identifier }
+
+ scope :order_by_latest, -> { order(recorded_at: :desc) }
+ scope :with_identifier, -> (identifier) { where(identifier: identifier) }
+ end
+ end
+end
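A short usage sketch for the new Measurement model above, assuming the backing analytics_instance_statistics_measurements table has been migrated (the values are illustrative):

Analytics::InstanceStatistics::Measurement.create!(
  identifier: :projects,
  count: Project.count,
  recorded_at: Time.zone.now
)
Analytics::InstanceStatistics::Measurement.with_identifier(:projects).order_by_latest.first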
diff --git a/app/models/application_record.rb b/app/models/application_record.rb
index 91b8bfedcbb..6ffb9b7642a 100644
--- a/app/models/application_record.rb
+++ b/app/models/application_record.rb
@@ -13,6 +13,10 @@ class ApplicationRecord < ActiveRecord::Base
where(id: ids)
end
+ def self.primary_key_in(values)
+ where(primary_key => values)
+ end
+
def self.iid_in(iids)
where(iid: iids)
end
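A quick sketch of the new ApplicationRecord.primary_key_in scope above; the model and ids are arbitrary examples and the generated SQL is approximate:

Project.primary_key_in([1, 2, 3]).to_sql
# => SELECT "projects".* FROM "projects" WHERE "projects"."id" IN (1, 2, 3)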
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index eb46be65858..e9a3dcf39df 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -8,6 +8,8 @@ class ApplicationSetting < ApplicationRecord
include IgnorableColumns
ignore_column :namespace_storage_size_limit, remove_with: '13.5', remove_after: '2020-09-22'
+ ignore_column :instance_statistics_visibility_private, remove_with: '13.6', remove_after: '2020-10-22'
+ ignore_column :snowplow_iglu_registry_url, remove_with: '13.6', remove_after: '2020-11-22'
GRAFANA_URL_ERROR_MESSAGE = 'Please check your Grafana URL setting in ' \
'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
@@ -20,7 +22,9 @@ class ApplicationSetting < ApplicationRecord
belongs_to :push_rule
alias_attribute :self_monitoring_project_id, :instance_administration_project_id
- belongs_to :instance_administrators_group, class_name: "Group"
+ belongs_to :instance_group, class_name: "Group", foreign_key: 'instance_administrators_group_id'
+ alias_attribute :instance_group_id, :instance_administrators_group_id
+ alias_attribute :instance_administrators_group, :instance_group
def self.repository_storages_weighted_attributes
@repository_storages_weighted_atributes ||= Gitlab.config.repositories.storages.keys.map { |k| "repository_storages_weighted_#{k}".to_sym }.freeze
@@ -128,16 +132,16 @@ class ApplicationSetting < ApplicationRecord
presence: true,
if: :sourcegraph_enabled
+ validates :gitpod_url,
+ presence: true,
+ addressable_url: { enforce_sanitization: true },
+ if: :gitpod_enabled
+
validates :snowplow_collector_hostname,
presence: true,
hostname: true,
if: :snowplow_enabled
- validates :snowplow_iglu_registry_url,
- addressable_url: true,
- allow_blank: true,
- if: :snowplow_enabled
-
validates :max_attachment_size,
presence: true,
numericality: { only_integer: true, greater_than: 0 }
@@ -281,6 +285,9 @@ class ApplicationSetting < ApplicationRecord
validates :hashed_storage_enabled, inclusion: { in: [true], message: _("Hashed storage can't be disabled anymore for new projects") }
+ validates :container_registry_delete_tags_service_timeout,
+ numericality: { only_integer: true, greater_than_or_equal_to: 0 }
+
SUPPORTED_KEY_TYPES.each do |type|
validates :"#{type}_key_restriction", presence: true, key_restriction: { type: type }
end
diff --git a/app/models/application_setting_implementation.rb b/app/models/application_setting_implementation.rb
index 8bdb80a65b1..7a869d16a31 100644
--- a/app/models/application_setting_implementation.rb
+++ b/app/models/application_setting_implementation.rb
@@ -74,6 +74,8 @@ module ApplicationSettingImplementation
gitaly_timeout_default: 55,
gitaly_timeout_fast: 10,
gitaly_timeout_medium: 30,
+ gitpod_enabled: false,
+ gitpod_url: 'https://gitpod.io/',
gravatar_enabled: Settings.gravatar['enabled'],
group_download_export_limit: 1,
group_export_limit: 6,
@@ -87,7 +89,6 @@ module ApplicationSettingImplementation
housekeeping_gc_period: 200,
housekeeping_incremental_repack_period: 10,
import_sources: Settings.gitlab['import_sources'],
- instance_statistics_visibility_private: false,
issues_create_limit: 300,
local_markdown_version: 0,
login_recaptcha_protection_enabled: false,
@@ -132,7 +133,6 @@ module ApplicationSettingImplementation
snowplow_collector_hostname: nil,
snowplow_cookie_domain: nil,
snowplow_enabled: false,
- snowplow_iglu_registry_url: nil,
sourcegraph_enabled: false,
sourcegraph_public_only: true,
sourcegraph_url: nil,
@@ -164,7 +164,8 @@ module ApplicationSettingImplementation
user_default_external: false,
user_default_internal_regex: nil,
user_show_add_ssh_key_message: true,
- wiki_page_max_content_bytes: 50.megabytes
+ wiki_page_max_content_bytes: 50.megabytes,
+ container_registry_delete_tags_service_timeout: 100
}
end
diff --git a/app/models/atlassian/identity.rb b/app/models/atlassian/identity.rb
new file mode 100644
index 00000000000..906f2be0fbf
--- /dev/null
+++ b/app/models/atlassian/identity.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Atlassian
+ class Identity < ApplicationRecord
+ self.table_name = 'atlassian_identities'
+
+ belongs_to :user
+
+ validates :extern_uid, presence: true, uniqueness: true
+ validates :user, presence: true, uniqueness: true
+
+ attr_encrypted :token,
+ mode: :per_attribute_iv,
+ key: Settings.attr_encrypted_db_key_base_truncated,
+ algorithm: 'aes-256-gcm',
+ encode: false,
+ encode_iv: false
+
+ attr_encrypted :refresh_token,
+ mode: :per_attribute_iv,
+ key: Settings.attr_encrypted_db_key_base_truncated,
+ algorithm: 'aes-256-gcm',
+ encode: false,
+ encode_iv: false
+ end
+end
diff --git a/app/models/audit_event.rb b/app/models/audit_event.rb
index e7cfa30a892..f46803be057 100644
--- a/app/models/audit_event.rb
+++ b/app/models/audit_event.rb
@@ -5,9 +5,9 @@ class AuditEvent < ApplicationRecord
include IgnorableColumns
include BulkInsertSafe
- PARALLEL_PERSISTENCE_COLUMNS = [:author_name, :entity_path, :target_details].freeze
+ PARALLEL_PERSISTENCE_COLUMNS = [:author_name, :entity_path, :target_details, :target_type].freeze
- ignore_column :updated_at, remove_with: '13.4', remove_after: '2020-09-22'
+ ignore_column :type, remove_with: '13.6', remove_after: '2020-11-22'
serialize :details, Hash # rubocop:disable Cop/ActiveRecordSerialize
@@ -29,6 +29,14 @@ class AuditEvent < ApplicationRecord
# https://gitlab.com/groups/gitlab-org/-/epics/2765
after_validation :parallel_persist
+ # Note: After loading records, do not attempt to type cast objects it finds.
+ # We are in the process of deprecating STI (i.e. SecurityEvent) out of AuditEvent.
+ #
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/216845
+ def self.inheritance_column
+ :_type_disabled
+ end
+
def self.order_by(method)
case method.to_s
when 'created_asc'
diff --git a/app/models/authentication_event.rb b/app/models/authentication_event.rb
new file mode 100644
index 00000000000..1ac3c5fbd9c
--- /dev/null
+++ b/app/models/authentication_event.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+class AuthenticationEvent < ApplicationRecord
+ belongs_to :user, optional: true
+
+ validates :provider, :user_name, :result, presence: true
+
+ enum result: {
+ failed: 0,
+ success: 1
+ }
+end
diff --git a/app/models/blob_viewer/dependency_manager.rb b/app/models/blob_viewer/dependency_manager.rb
index a851f22bfcd..1be7120a955 100644
--- a/app/models/blob_viewer/dependency_manager.rb
+++ b/app/models/blob_viewer/dependency_manager.rb
@@ -33,8 +33,8 @@ module BlobViewer
@json_data ||= begin
prepare!
Gitlab::Json.parse(blob.data)
- rescue
- {}
+ rescue
+ {}
end
end
diff --git a/app/models/blob_viewer/metrics_dashboard_yml.rb b/app/models/blob_viewer/metrics_dashboard_yml.rb
index c05fb5d88d6..88643253d3d 100644
--- a/app/models/blob_viewer/metrics_dashboard_yml.rb
+++ b/app/models/blob_viewer/metrics_dashboard_yml.rb
@@ -25,20 +25,30 @@ module BlobViewer
private
def parse_blob_data
- yaml = ::Gitlab::Config::Loader::Yaml.new(blob.data).load_raw!
+ if Feature.enabled?(:metrics_dashboard_exhaustive_validations, project)
+ exhaustive_metrics_dashboard_validation
+ else
+ old_metrics_dashboard_validation
+ end
+ end
+ def old_metrics_dashboard_validation
+ yaml = ::Gitlab::Config::Loader::Yaml.new(blob.data).load_raw!
::PerformanceMonitoring::PrometheusDashboard.from_json(yaml)
- nil
+ []
rescue Gitlab::Config::Loader::FormatError => error
- wrap_yml_syntax_error(error)
+ ["YAML syntax: #{error.message}"]
rescue ActiveModel::ValidationError => invalid
- invalid.model.errors
+ invalid.model.errors.messages.map { |messages| messages.join(': ') }
end
- def wrap_yml_syntax_error(error)
- ::PerformanceMonitoring::PrometheusDashboard.new.errors.tap do |errors|
- errors.add(:'YAML syntax', error.message)
- end
+ def exhaustive_metrics_dashboard_validation
+ yaml = ::Gitlab::Config::Loader::Yaml.new(blob.data).load_raw!
+ Gitlab::Metrics::Dashboard::Validator
+ .errors(yaml, dashboard_path: blob.path, project: project)
+ .map(&:message)
+ rescue Gitlab::Config::Loader::FormatError => error
+ [error.message]
end
end
end
diff --git a/app/models/ci/bridge.rb b/app/models/ci/bridge.rb
index 58c26e8c806..1697067f633 100644
--- a/app/models/ci/bridge.rb
+++ b/app/models/ci/bridge.rb
@@ -35,6 +35,10 @@ module Ci
end
end
+ event :pending do
+ transition all => :pending
+ end
+
event :manual do
transition all => :manual
end
@@ -48,6 +52,14 @@ module Ci
raise NotImplementedError
end
+ def self.with_preloads
+ preload(
+ :metadata,
+ downstream_pipeline: [project: [:route, { namespace: :route }]],
+ project: [:namespace]
+ )
+ end
+
def schedule_downstream_pipeline!
raise InvalidBridgeTypeError unless downstream_project
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index af4e6bb0494..99580a52e96 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -38,14 +38,17 @@ module Ci
has_one :deployment, as: :deployable, class_name: 'Deployment'
has_one :resource, class_name: 'Ci::Resource', inverse_of: :build
+ has_one :pending_state, class_name: 'Ci::BuildPendingState', inverse_of: :build
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
- has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id
+ has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id, inverse_of: :build
has_many :report_results, class_name: 'Ci::BuildReportResult', inverse_of: :build
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id
has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_job_id
+ has_many :pages_deployments, inverse_of: :ci_build
+
Ci::JobArtifact.file_types.each do |key, value|
has_one :"job_artifacts_#{key}", -> { where(file_type: value) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
end
@@ -90,9 +93,9 @@ module Ci
Ci::BuildMetadata.scoped_build.with_interruptible.select(:id))
end
- scope :unstarted, ->() { where(runner_id: nil) }
- scope :ignore_failures, ->() { where(allow_failure: false) }
- scope :with_downloadable_artifacts, ->() do
+ scope :unstarted, -> { where(runner_id: nil) }
+ scope :ignore_failures, -> { where(allow_failure: false) }
+ scope :with_downloadable_artifacts, -> do
where('EXISTS (?)',
Ci::JobArtifact.select(1)
.where('ci_builds.id = ci_job_artifacts.job_id')
@@ -104,11 +107,11 @@ module Ci
where('EXISTS (?)', ::Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').merge(query))
end
- scope :with_archived_trace, ->() do
+ scope :with_archived_trace, -> do
with_existing_job_artifacts(Ci::JobArtifact.trace)
end
- scope :without_archived_trace, ->() do
+ scope :without_archived_trace, -> do
where('NOT EXISTS (?)', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').trace)
end
@@ -139,11 +142,11 @@ module Ci
.includes(:metadata, :job_artifacts_metadata)
end
- scope :with_artifacts_not_expired, ->() { with_downloadable_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.current) }
- scope :with_expired_artifacts, ->() { with_downloadable_artifacts.where('artifacts_expire_at < ?', Time.current) }
- scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
- scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + %i[manual]) }
- scope :scheduled_actions, ->() { where(when: :delayed, status: COMPLETED_STATUSES + %i[scheduled]) }
+ scope :with_artifacts_not_expired, -> { with_downloadable_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.current) }
+ scope :with_expired_artifacts, -> { with_downloadable_artifacts.where('artifacts_expire_at < ?', Time.current) }
+ scope :last_month, -> { where('created_at > ?', Date.today - 1.month) }
+ scope :manual_actions, -> { where(when: :manual, status: COMPLETED_STATUSES + %i[manual]) }
+ scope :scheduled_actions, -> { where(when: :delayed, status: COMPLETED_STATUSES + %i[scheduled]) }
scope :ref_protected, -> { where(protected: true) }
scope :with_live_trace, -> { where('EXISTS (?)', Ci::BuildTraceChunk.where('ci_builds.id = ci_build_trace_chunks.build_id').select(1)) }
scope :with_stale_live_trace, -> { with_live_trace.finished_before(12.hours.ago) }
@@ -175,7 +178,6 @@ module Ci
end
scope :queued_before, ->(time) { where(arel_table[:queued_at].lt(time)) }
- scope :order_id_desc, -> { order('ci_builds.id DESC') }
scope :preload_project_and_pipeline_project, -> do
preload(Ci::Pipeline::PROJECT_ROUTE_AND_NAMESPACE_ROUTE,
@@ -213,6 +215,10 @@ module Ci
.execute(build)
# rubocop: enable CodeReuse/ServiceClass
end
+
+ def with_preloads
+ preload(:job_artifacts_archive, :job_artifacts, project: [:namespace])
+ end
end
state_machine :status do
@@ -647,6 +653,10 @@ module Ci
!artifacts_expired? && artifacts_file&.exists?
end
+ def locked_artifacts?
+ pipeline.artifacts_locked? && artifacts_file&.exists?
+ end
+
# This method is similar to #artifacts? but it includes the artifacts
# locking mechanics. A new method was created to prevent breaking existing
# behavior and avoid introducing N+1s.
@@ -867,13 +877,17 @@ module Ci
options.dig(:release)&.any?
end
- def hide_secrets(trace)
+ def hide_secrets(data, metrics = ::Gitlab::Ci::Trace::Metrics.new)
return unless trace
- trace = trace.dup
- Gitlab::Ci::MaskSecret.mask!(trace, project.runners_token) if project
- Gitlab::Ci::MaskSecret.mask!(trace, token) if token
- trace
+ data.dup.tap do |trace|
+ Gitlab::Ci::MaskSecret.mask!(trace, project.runners_token) if project
+ Gitlab::Ci::MaskSecret.mask!(trace, token) if token
+
+ if trace != data
+ metrics.increment_trace_operation(operation: :mutated)
+ end
+ end
end
def serializable_hash(options = {})
@@ -945,6 +959,10 @@ module Ci
var[:value]&.to_i if var
end
+ def remove_pending_state!
+ pending_state.try(:delete)
+ end
+
private
def auto_retry
diff --git a/app/models/ci/build_pending_state.rb b/app/models/ci/build_pending_state.rb
new file mode 100644
index 00000000000..45f323adec2
--- /dev/null
+++ b/app/models/ci/build_pending_state.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+class Ci::BuildPendingState < ApplicationRecord
+ extend Gitlab::Ci::Model
+
+ belongs_to :build, class_name: 'Ci::Build', foreign_key: :build_id
+
+ enum state: Ci::Stage.statuses
+ enum failure_reason: CommitStatus.failure_reasons
+
+ validates :build, presence: true
+end
diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb
index 407802baf09..444742062d9 100644
--- a/app/models/ci/build_trace_chunk.rb
+++ b/app/models/ci/build_trace_chunk.rb
@@ -2,14 +2,17 @@
module Ci
class BuildTraceChunk < ApplicationRecord
- include FastDestroyAll
+ extend ::Gitlab::Ci::Model
+ include ::FastDestroyAll
+ include ::Checksummable
include ::Gitlab::ExclusiveLeaseHelpers
- extend Gitlab::Ci::Model
belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id
default_value_for :data_store, :redis
+ after_create { metrics.increment_trace_operation(operation: :chunked) }
+
CHUNK_SIZE = 128.kilobytes
WRITE_LOCK_RETRY = 10
WRITE_LOCK_SLEEP = 0.01.seconds
@@ -25,6 +28,8 @@ module Ci
fog: 3
}
+ scope :live, -> { redis }
+
class << self
def all_stores
@all_stores ||= self.data_stores.keys
@@ -60,8 +65,6 @@ module Ci
end
end
- ##
- # Data is memoized for optimizing #size and #end_offset
def data
@data ||= get_data.to_s
end
@@ -80,11 +83,11 @@ module Ci
in_lock(*lock_params) { unsafe_append_data!(new_data, offset) }
- schedule_to_persist if full?
+ schedule_to_persist! if full?
end
def size
- @size ||= current_store.size(self) || data&.bytesize
+ @size ||= @data&.bytesize || current_store.size(self) || data&.bytesize
end
def start_offset
@@ -100,35 +103,68 @@ module Ci
end
def persist_data!
- in_lock(*lock_params) do # Write operation is atomic
- unsafe_persist_to!(self.class.persistable_store)
- end
+ in_lock(*lock_params) { unsafe_persist_data! }
+ end
+
+ def schedule_to_persist!
+ return if persisted?
+
+ Ci::BuildTraceChunkFlushWorker.perform_async(id)
+ end
+
+ def persisted?
+ !redis?
+ end
+
+ def live?
+ redis?
+ end
+
+ ##
+ # A build trace chunk is final (the last one that we do not expect to ever
+ # become full) when a runner has submitted a build pending state and there is
+ # no chunk with a higher index in the database.
+ #
+ def final?
+ build.pending_state.present? &&
+ build.trace_chunks.maximum(:chunk_index).to_i == chunk_index
end
private
- def unsafe_persist_to!(new_store)
+ def get_data
+ # Redis / database return UTF-8 encoded string by default
+ current_store.data(self)&.force_encoding(Encoding::BINARY)
+ end
+
+ def unsafe_persist_data!(new_store = self.class.persistable_store)
return if data_store == new_store.to_s
- current_data = get_data
+ current_data = data
+ old_store_class = current_store
+ current_size = current_data&.bytesize.to_i
- unless current_data&.bytesize.to_i == CHUNK_SIZE
+ unless current_size == CHUNK_SIZE || final?
raise FailedToPersistDataError, 'Data is not fulfilled in a bucket'
end
- old_store_class = current_store
-
self.raw_data = nil
self.data_store = new_store
+ self.checksum = crc32(current_data)
+
+ ##
+ # We need to persist the data and then save the new store identifier before we
+ # remove data from the previous store to make this operation
+ # transaction-safe. `unsafe_set_data!` calls `save!` for this
+ # reason.
+ #
+ # TODO consider using callbacks and state machine to remove old data
+ #
unsafe_set_data!(current_data)
old_store_class.delete_data(self)
end
- def get_data
- current_store.data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
- end
-
def unsafe_set_data!(value)
raise ArgumentError, 'New data size exceeds chunk size' if value.bytesize > CHUNK_SIZE
@@ -148,6 +184,8 @@ module Ci
end
current_store.append_data(self, value, offset).then do |stored|
+ metrics.increment_trace_operation(operation: :appended)
+
raise ArgumentError, 'Trace appended incorrectly' if stored != new_size
end
@@ -157,16 +195,6 @@ module Ci
save! if changed?
end
- def schedule_to_persist
- return if data_persisted?
-
- Ci::BuildTraceChunkFlushWorker.perform_async(id)
- end
-
- def data_persisted?
- !redis?
- end
-
def full?
size == CHUNK_SIZE
end
@@ -181,5 +209,9 @@ module Ci
retries: WRITE_LOCK_RETRY,
sleep_sec: WRITE_LOCK_SLEEP }]
end
+
+ def metrics
+ @metrics ||= ::Gitlab::Ci::Trace::Metrics.new
+ end
end
end
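A sketch of the chunk life cycle after the BuildTraceChunk changes above (the build and chunk records are hypothetical):

chunk = build.trace_chunks.last
chunk.live?          # true while the data still lives in Redis
chunk.final?         # true once the runner submitted a pending state and this is the highest chunk_index
chunk.persist_data!  # moves the data to the persistable store and records a CRC32 checksum
chunk.persisted?     # true once the chunk has left the Redis store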
diff --git a/app/models/ci/build_trace_chunks/database.rb b/app/models/ci/build_trace_chunks/database.rb
index 3b8e23510d9..ea8072099c6 100644
--- a/app/models/ci/build_trace_chunks/database.rb
+++ b/app/models/ci/build_trace_chunks/database.rb
@@ -29,7 +29,7 @@ module Ci
new_data = truncated_data + new_data
end
- model.raw_data = new_data
+ set_data(model, new_data)
model.raw_data.to_s.bytesize
end
diff --git a/app/models/ci/build_trace_chunks/redis.rb b/app/models/ci/build_trace_chunks/redis.rb
index 0ae563f6ce8..58d50b39c11 100644
--- a/app/models/ci/build_trace_chunks/redis.rb
+++ b/app/models/ci/build_trace_chunks/redis.rb
@@ -41,9 +41,9 @@ module Ci
end
end
- def set_data(model, data)
+ def set_data(model, new_data)
Gitlab::Redis::SharedState.with do |redis|
- redis.set(key(model), data, ex: CHUNK_REDIS_TTL)
+ redis.set(key(model), new_data, ex: CHUNK_REDIS_TTL)
end
end
diff --git a/app/models/ci/daily_build_group_report_result.rb b/app/models/ci/daily_build_group_report_result.rb
index d6617b8c2eb..e6f02f2e4f3 100644
--- a/app/models/ci/daily_build_group_report_result.rb
+++ b/app/models/ci/daily_build_group_report_result.rb
@@ -11,6 +11,8 @@ module Ci
validates :data, json_schema: { filename: "daily_build_group_report_result_data" }
+ scope :with_included_projects, -> { includes(:project) }
+
def self.upsert_reports(data)
upsert_all(data, unique_by: :index_daily_build_group_report_results_unique_columns) if data.any?
end
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 75c3ce98c95..8bbb92e319f 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -12,8 +12,6 @@ module Ci
include FileStoreMounter
extend Gitlab::Ci::Model
- NotSupportedAdapterError = Class.new(StandardError)
-
ignore_columns :locked, remove_after: '2020-07-22', remove_with: '13.4'
TEST_REPORT_FILE_TYPES = %w[junit].freeze
@@ -163,7 +161,6 @@ module Ci
where(file_type: types)
end
- scope :expired, -> (limit) { where('expire_at < ?', Time.current).limit(limit) }
scope :downloadable, -> { where(file_type: DOWNLOADABLE_TYPES) }
scope :unlocked, -> { joins(job: :pipeline).merge(::Ci::Pipeline.unlocked).order(expire_at: :desc) }
@@ -271,16 +268,6 @@ module Ci
end
end
- def each_blob(&blk)
- unless file_format_adapter_class
- raise NotSupportedAdapterError, 'This file format requires a dedicated adapter'
- end
-
- file.open do |stream|
- file_format_adapter_class.new(stream).each_blob(&blk)
- end
- end
-
def self.archived_trace_exists_for?(job_id)
where(job_id: job_id).trace.take&.file&.file&.exists?
end
@@ -298,10 +285,6 @@ module Ci
private
- def file_format_adapter_class
- FILE_FORMAT_ADAPTERS[file_format.to_sym]
- end
-
def set_size
self.size = file.size
end
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 7762328d274..47eba685afe 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -19,6 +19,8 @@ module Ci
PROJECT_ROUTE_AND_NAMESPACE_ROUTE = {
project: [:project_feature, :route, { namespace: :route }]
}.freeze
+ CONFIG_EXTENSION = '.gitlab-ci.yml'
+ DEFAULT_CONFIG_PATH = CONFIG_EXTENSION
BridgeStatusError = Class.new(StandardError)
@@ -104,15 +106,15 @@ module Ci
after_create :keep_around_commits, unless: :importing?
- # We use `Ci::PipelineEnums.sources` here so that EE can more easily extend
+ # We use `Enums::Ci::Pipeline.sources` here so that EE can more easily extend
# this `Hash` with new values.
- enum_with_nil source: ::Ci::PipelineEnums.sources
+ enum_with_nil source: Enums::Ci::Pipeline.sources
- enum_with_nil config_source: ::Ci::PipelineEnums.config_sources
+ enum_with_nil config_source: Enums::Ci::Pipeline.config_sources
- # We use `Ci::PipelineEnums.failure_reasons` here so that EE can more easily
+ # We use `Enums::Ci::Pipeline.failure_reasons` here so that EE can more easily
# extend this `Hash` with new values.
- enum failure_reason: ::Ci::PipelineEnums.failure_reasons
+ enum failure_reason: Enums::Ci::Pipeline.failure_reasons
enum locked: { unlocked: 0, artifacts_locked: 1 }
@@ -229,7 +231,12 @@ module Ci
end
after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
- next unless pipeline.bridge_triggered?
+ pipeline.run_after_commit do
+ ::Ci::Pipelines::CreateArtifactWorker.perform_async(pipeline.id)
+ end
+ end
+
+ after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
next unless pipeline.bridge_waiting?
pipeline.run_after_commit do
@@ -254,7 +261,7 @@ module Ci
scope :internal, -> { where(source: internal_sources) }
scope :no_child, -> { where.not(source: :parent_pipeline) }
- scope :ci_sources, -> { where(config_source: ::Ci::PipelineEnums.ci_config_sources_values) }
+ scope :ci_sources, -> { where(source: Enums::Ci::Pipeline.ci_sources.values) }
scope :for_user, -> (user) { where(user: user) }
scope :for_sha, -> (sha) { where(sha: sha) }
scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
@@ -483,6 +490,12 @@ module Ci
end
end
+ def git_commit_timestamp
+ strong_memoize(:git_commit_timestamp) do
+ commit.try(:timestamp)
+ end
+ end
+
def before_sha
super || Gitlab::Git::BLANK_SHA
end
@@ -539,12 +552,6 @@ module Ci
end
# rubocop: enable CodeReuse/ServiceClass
- def mark_as_processable_after_stage(stage_idx)
- builds.skipped.after_stage(stage_idx).find_each do |build|
- Gitlab::OptimisticLocking.retry_lock(build, &:process)
- end
- end
-
def lazy_ref_commit
return unless ::Gitlab::Ci::Features.pipeline_latest?
@@ -647,7 +654,7 @@ module Ci
def config_path
return unless repository_source? || unknown_source?
- project.ci_config_path.presence || '.gitlab-ci.yml'
+ project.ci_config_path_or_default
end
def has_yaml_errors?
@@ -669,8 +676,10 @@ module Ci
messages.select(&:error?)
end
- def warning_messages
- messages.select(&:warning?)
+ def warning_messages(limit: nil)
+ messages.select(&:warning?).tap do |warnings|
+ break warnings.take(limit) if limit
+ end
end
# Manually set the notes for a Ci::Pipeline
@@ -766,6 +775,7 @@ module Ci
variables.append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title.to_s)
variables.append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description.to_s)
variables.append(key: 'CI_COMMIT_REF_PROTECTED', value: (!!protected_ref?).to_s)
+ variables.append(key: 'CI_COMMIT_TIMESTAMP', value: git_commit_timestamp.to_s)
# legacy variables
variables.append(key: 'CI_BUILD_REF', value: sha)
@@ -810,11 +820,17 @@ module Ci
all_merge_requests.order(id: :desc)
end
- # If pipeline is a child of another pipeline, include the parent
- # and the siblings, otherwise return only itself and children.
def same_family_pipeline_ids
- parent = parent_pipeline || self
- [parent.id] + parent.child_pipelines.pluck(:id)
+ if ::Gitlab::Ci::Features.child_of_child_pipeline_enabled?(project)
+ ::Gitlab::Ci::PipelineObjectHierarchy.new(
+ base_and_ancestors(same_project: true), options: { same_project: true }
+ ).base_and_descendants.select(:id)
+ else
+ # If pipeline is a child of another pipeline, include the parent
+ # and the siblings, otherwise return only itself and children.
+ parent = parent_pipeline || self
+ [parent.id] + parent.child_pipelines.pluck(:id)
+ end
end
def bridge_triggered?
@@ -858,12 +874,26 @@ module Ci
builds.latest.with_reports(reports_scope)
end
+ def builds_with_coverage
+ builds.with_coverage
+ end
+
def has_reports?(reports_scope)
complete? && latest_report_builds(reports_scope).exists?
end
+ def has_coverage_reports?
+ pipeline_artifacts&.has_code_coverage?
+ end
+
+ def can_generate_coverage_reports?
+ has_reports?(Ci::JobArtifact.coverage_reports)
+ end
+
def test_report_summary
- Gitlab::Ci::Reports::TestReportSummary.new(latest_builds_report_results)
+ strong_memoize(:test_report_summary) do
+ Gitlab::Ci::Reports::TestReportSummary.new(latest_builds_report_results)
+ end
end
def test_reports
@@ -1008,7 +1038,11 @@ module Ci
end
def cacheable?
- Ci::PipelineEnums.ci_config_sources.key?(config_source.to_sym)
+ !dangling?
+ end
+
+ def dangling?
+ Enums::Ci::Pipeline.dangling_sources.key?(source.to_sym)
end
def source_ref_path
@@ -1029,6 +1063,26 @@ module Ci
self.ci_ref = Ci::Ref.ensure_for(self)
end
+ def base_and_ancestors(same_project: false)
+ # Without using `unscoped`, the caller's scope is also included in the query.
+ # Using `unscoped` here will be redundant after Rails 6.1
+ ::Gitlab::Ci::PipelineObjectHierarchy
+ .new(self.class.unscoped.where(id: id), options: { same_project: same_project })
+ .base_and_ancestors
+ end
+
+ # We need `base_and_ancestors` in a specific order to "break" when needed.
+ # If we use `find_each`, then the order is broken.
+ # rubocop:disable Rails/FindEach
+ def reset_ancestor_bridges!
+ base_and_ancestors.includes(:source_bridge).each do |pipeline|
+ break unless pipeline.bridge_waiting?
+
+ pipeline.source_bridge.pending!
+ end
+ end
+ # rubocop:enable Rails/FindEach
+
private
def add_message(severity, content)
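A sketch of the new pipeline hierarchy helpers above, assuming the child_of_child_pipeline feature flag is enabled for the project (the record is hypothetical):

pipeline = Ci::Pipeline.last
pipeline.same_family_pipeline_ids   # ancestor and descendant pipeline ids within the same project
pipeline.reset_ancestor_bridges!    # walks up the chain, re-pending any waiting source bridges
pipeline.dangling?                  # true for webide, parent_pipeline and ondemand_dast_scan sources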
diff --git a/app/models/ci/pipeline_artifact.rb b/app/models/ci/pipeline_artifact.rb
index e7f51977ccd..b6db8cad667 100644
--- a/app/models/ci/pipeline_artifact.rb
+++ b/app/models/ci/pipeline_artifact.rb
@@ -5,33 +5,44 @@
module Ci
class PipelineArtifact < ApplicationRecord
extend Gitlab::Ci::Model
+ include UpdateProjectStatistics
include Artifactable
include FileStoreMounter
-
- FILE_STORE_SUPPORTED = [
- ObjectStorage::Store::LOCAL,
- ObjectStorage::Store::REMOTE
- ].freeze
+ include Presentable
FILE_SIZE_LIMIT = 10.megabytes.freeze
+ EXPIRATION_DATE = 1.week.freeze
+
+ DEFAULT_FILE_NAMES = {
+ code_coverage: 'code_coverage.json'
+ }.freeze
belongs_to :project, class_name: "Project", inverse_of: :pipeline_artifacts
belongs_to :pipeline, class_name: "Ci::Pipeline", inverse_of: :pipeline_artifacts
validates :pipeline, :project, :file_format, :file, presence: true
- validates :file_store, presence: true, inclusion: { in: FILE_STORE_SUPPORTED }
+ validates :file_store, presence: true, inclusion: { in: ObjectStorage::SUPPORTED_STORES }
validates :size, presence: true, numericality: { less_than_or_equal_to: FILE_SIZE_LIMIT }
validates :file_type, presence: true
mount_file_store_uploader Ci::PipelineArtifactUploader
- before_save :set_size, if: :file_changed?
+
+ update_project_statistics project_statistics_name: :pipeline_artifacts_size
enum file_type: {
code_coverage: 1
}
- def set_size
- self.size = file.size
+ def self.has_code_coverage?
+ where(file_type: :code_coverage).exists?
+ end
+
+ def self.find_with_code_coverage
+ find_by(file_type: :code_coverage)
+ end
+
+ def present
+ super(presenter_class: "Ci::PipelineArtifacts::#{self.file_type.camelize}Presenter".constantize)
end
end
end
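A minimal sketch of the new code-coverage lookups above, assuming a pipeline that produced a coverage report artifact (the records are hypothetical):

pipeline.pipeline_artifacts.has_code_coverage?       # => true
pipeline.pipeline_artifacts.find_with_code_coverage  # => the Ci::PipelineArtifact with file_type "code_coverage"
pipeline.has_coverage_reports?                       # delegates to the check above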
diff --git a/app/models/ci/pipeline_enums.rb b/app/models/ci/pipeline_enums.rb
deleted file mode 100644
index 9d108ff0fa4..00000000000
--- a/app/models/ci/pipeline_enums.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- module PipelineEnums
- # Returns the `Hash` to use for creating the `failure_reason` enum for
- # `Ci::Pipeline`.
- def self.failure_reasons
- {
- unknown_failure: 0,
- config_error: 1,
- external_validation_failure: 2
- }
- end
-
- # Returns the `Hash` to use for creating the `sources` enum for
- # `Ci::Pipeline`.
- def self.sources
- {
- unknown: nil,
- push: 1,
- web: 2,
- trigger: 3,
- schedule: 4,
- api: 5,
- external: 6,
- # TODO: Rename `pipeline` to `cross_project_pipeline` in 13.0
- # https://gitlab.com/gitlab-org/gitlab/issues/195991
- pipeline: 7,
- chat: 8,
- webide: 9,
- merge_request_event: 10,
- external_pull_request_event: 11,
- parent_pipeline: 12,
- ondemand_dast_scan: 13
- }
- end
-
- # Returns the `Hash` to use for creating the `config_sources` enum for
- # `Ci::Pipeline`.
- def self.config_sources
- {
- unknown_source: nil,
- repository_source: 1,
- auto_devops_source: 2,
- webide_source: 3,
- remote_source: 4,
- external_project_source: 5,
- bridge_source: 6,
- parameter_source: 7
- }
- end
-
- def self.ci_config_sources
- config_sources.slice(
- :unknown_source,
- :repository_source,
- :auto_devops_source,
- :remote_source,
- :external_project_source
- )
- end
-
- def self.ci_config_sources_values
- ci_config_sources.values
- end
-
- def self.non_ci_config_source_values
- config_sources.values - ci_config_sources.values
- end
- end
-end
-
-Ci::PipelineEnums.prepend_if_ee('EE::Ci::PipelineEnums')
diff --git a/app/models/ci/ref.rb b/app/models/ci/ref.rb
index 3d8823728e7..6e9b8416c10 100644
--- a/app/models/ci/ref.rb
+++ b/app/models/ci/ref.rb
@@ -33,8 +33,6 @@ module Ci
state :still_failing, value: 5
after_transition any => [:fixed, :success] do |ci_ref|
- next unless ::Gitlab::Ci::Features.keep_latest_artifacts_for_ref_enabled?(ci_ref.project)
-
ci_ref.run_after_commit do
Ci::PipelineSuccessUnlockArtifactsWorker.perform_async(ci_ref.last_finished_pipeline_id)
end
diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb
index 00ee45740bd..86879b9dc68 100644
--- a/app/models/ci/runner.rb
+++ b/app/models/ci/runner.rb
@@ -52,7 +52,7 @@ module Ci
has_many :runner_namespaces, inverse_of: :runner
has_many :groups, through: :runner_namespaces
- has_one :last_build, ->() { order('id DESC') }, class_name: 'Ci::Build'
+ has_one :last_build, -> { order('id DESC') }, class_name: 'Ci::Build'
before_save :ensure_token
diff --git a/app/models/ci_platform_metric.rb b/app/models/ci_platform_metric.rb
new file mode 100644
index 00000000000..5e6e3eddce9
--- /dev/null
+++ b/app/models/ci_platform_metric.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+class CiPlatformMetric < ApplicationRecord
+ include BulkInsertSafe
+
+ PLATFORM_TARGET_MAX_LENGTH = 255
+
+ validates :recorded_at, presence: true
+ validates :platform_target,
+ exclusion: [nil], # allow '' (the empty string), but not nil
+ length: { maximum: PLATFORM_TARGET_MAX_LENGTH }
+ validates :count,
+ presence: true,
+ numericality: { only_integer: true, greater_than: 0 }
+
+ CI_VARIABLE_KEY = 'AUTO_DEVOPS_PLATFORM_TARGET'
+ ALLOWED_TARGETS = %w[ECS FARGATE].freeze
+
+ def self.insert_auto_devops_platform_targets!
+ recorded_at = Time.zone.now
+
+ # This work can NOT be done in-database because the value is encrypted.
+ # However, for 'AUTO_DEVOPS_PLATFORM_TARGET', these values are only
+ # encrypted as a matter of course, rather than as a need for secrecy.
+ # So this is not a security risk, but exposing other keys possibly could be.
+ variables = Ci::Variable.by_key(CI_VARIABLE_KEY)
+
+ counts = variables.group_by(&:value).map do |value, variables|
+ # While this value is, in theory, not secret, a user could accidentally
+ # put a secret in here, so we need to make sure we filter invalid values.
+ next unless ALLOWED_TARGETS.include?(value)
+
+ count = variables.count
+ self.new(recorded_at: recorded_at, platform_target: value, count: count)
+ end.compact
+
+ bulk_insert!(counts, validate: true)
+ end
+end
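A sketch of how the aggregation above might be invoked and inspected, for example from a scheduled worker (the invocation context is an assumption, not part of this commit):

CiPlatformMetric.insert_auto_devops_platform_targets!
CiPlatformMetric.order(recorded_at: :desc).limit(2).pluck(:platform_target, :count)
# e.g. [["ECS", 12], ["FARGATE", 3]]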
diff --git a/app/models/clusters/agent.rb b/app/models/clusters/agent.rb
index c21759a3c3b..874415e7bf4 100644
--- a/app/models/clusters/agent.rb
+++ b/app/models/clusters/agent.rb
@@ -8,6 +8,8 @@ module Clusters
has_many :agent_tokens, class_name: 'Clusters::AgentToken'
+ scope :with_name, -> (name) { where(name: name) }
+
validates :name,
presence: true,
length: { maximum: 63 },
diff --git a/app/models/clusters/applications/prometheus.rb b/app/models/clusters/applications/prometheus.rb
index 216bbbc1c5a..dd6a4144608 100644
--- a/app/models/clusters/applications/prometheus.rb
+++ b/app/models/clusters/applications/prometheus.rb
@@ -5,7 +5,7 @@ module Clusters
class Prometheus < ApplicationRecord
include PrometheusAdapter
- VERSION = '9.5.2'
+ VERSION = '10.4.1'
self.table_name = 'clusters_applications_prometheus'
@@ -106,7 +106,9 @@ module Clusters
proxy_url = kube_client.proxy_url('service', service_name, service_port, Gitlab::Kubernetes::Helm::NAMESPACE)
# ensures headers containing auth data are appended to original k8s client options
- options = kube_client.rest_client.options.merge(headers: kube_client.headers)
+ options = kube_client.rest_client.options
+ .merge(prometheus_client_default_options)
+ .merge(headers: kube_client.headers)
Gitlab::PrometheusClient.new(proxy_url, options)
rescue Kubeclient::HttpError, Errno::ECONNRESET, Errno::ECONNREFUSED, Errno::ENETUNREACH
# If users have mistakenly set parameters or removed the depended clusters,
diff --git a/app/models/clusters/applications/runner.rb b/app/models/clusters/applications/runner.rb
index e99ed03852a..4983de83800 100644
--- a/app/models/clusters/applications/runner.rb
+++ b/app/models/clusters/applications/runner.rb
@@ -3,7 +3,7 @@
module Clusters
module Applications
class Runner < ApplicationRecord
- VERSION = '0.19.3'
+ VERSION = '0.20.1'
self.table_name = 'clusters_applications_runners'
diff --git a/app/models/clusters/cluster.rb b/app/models/clusters/cluster.rb
index 63aebdf1bdb..b94ec3c6dea 100644
--- a/app/models/clusters/cluster.rb
+++ b/app/models/clusters/cluster.rb
@@ -99,6 +99,7 @@ module Clusters
delegate :available?, to: :application_ingress, prefix: true, allow_nil: true
delegate :available?, to: :application_prometheus, prefix: true, allow_nil: true
delegate :available?, to: :application_knative, prefix: true, allow_nil: true
+ delegate :available?, to: :application_elastic_stack, prefix: true, allow_nil: true
delegate :external_ip, to: :application_ingress, prefix: true, allow_nil: true
delegate :external_hostname, to: :application_ingress, prefix: true, allow_nil: true
diff --git a/app/models/clusters/instance.rb b/app/models/clusters/instance.rb
index 8c9d9ab9ab1..94fadace01c 100644
--- a/app/models/clusters/instance.rb
+++ b/app/models/clusters/instance.rb
@@ -7,7 +7,7 @@ module Clusters
end
def feature_available?(feature)
- ::Feature.enabled?(feature, default_enabled: true)
+ ::Feature.enabled?(feature, type: :licensed, default_enabled: true)
end
def flipper_id
diff --git a/app/models/clusters/providers/aws.rb b/app/models/clusters/providers/aws.rb
index 86869361ed8..35e8b751b3d 100644
--- a/app/models/clusters/providers/aws.rb
+++ b/app/models/clusters/providers/aws.rb
@@ -37,7 +37,7 @@ module Clusters
greater_than: 0
}
- validates :key_name, :region, :instance_type, :security_group_id, length: { in: 1..255 }
+ validates :kubernetes_version, :key_name, :region, :instance_type, :security_group_id, length: { in: 1..255 }
validates :subnet_ids, presence: true
def nullify_credentials
diff --git a/app/models/commit.rb b/app/models/commit.rb
index 4f18ece9e50..5e0fceb23a4 100644
--- a/app/models/commit.rb
+++ b/app/models/commit.rb
@@ -221,12 +221,16 @@ class Commit
description.present?
end
+ def timestamp
+ committed_date.xmlschema
+ end
+
def hook_attrs(with_changed_files: false)
data = {
id: id,
message: safe_message,
title: title,
- timestamp: committed_date.xmlschema,
+ timestamp: timestamp,
url: Gitlab::UrlBuilder.build(self),
author: {
name: author_name,
diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb
index 8aba74bedbc..2f0596c93cc 100644
--- a/app/models/commit_status.rb
+++ b/app/models/commit_status.rb
@@ -32,6 +32,8 @@ class CommitStatus < ApplicationRecord
where(allow_failure: true, status: [:failed, :canceled])
end
+ scope :order_id_desc, -> { order('ci_builds.id DESC') }
+
scope :exclude_ignored, -> do
# We want to ignore failed but allowed to fail jobs.
#
@@ -77,9 +79,9 @@ class CommitStatus < ApplicationRecord
merge(or_conditions)
end
- # We use `CommitStatusEnums.failure_reasons` here so that EE can more easily
+ # We use `Enums::CommitStatus.failure_reasons` here so that EE can more easily
# extend this `Hash` with new values.
- enum_with_nil failure_reason: ::CommitStatusEnums.failure_reasons
+ enum_with_nil failure_reason: Enums::CommitStatus.failure_reasons
##
# We still create some CommitStatuses outside of CreatePipelineService.
@@ -199,7 +201,14 @@ class CommitStatus < ApplicationRecord
end
def group_name
- name.to_s.gsub(%r{\d+[\s:/\\]+\d+\s*}, '').strip
+ # 'rspec:linux: 1/10' => 'rspec:linux'
+ common_name = name.to_s.gsub(%r{\d+[\s:\/\\]+\d+\s*}, '')
+
+ # 'rspec:linux: [aws, max memory]' => 'rspec:linux'
+ common_name.gsub!(%r{: \[.*, .*\]\s*\z}, '')
+
+ common_name.strip!
+ common_name
end
def failed_but_allowed?
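The two normalizations in #group_name above can be exercised with in-memory statuses (purely illustrative):

CommitStatus.new(name: 'rspec:linux 1/10').group_name                # => "rspec:linux"
CommitStatus.new(name: 'rspec:linux: [aws, max memory]').group_name  # => "rspec:linux"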
diff --git a/app/models/commit_status_enums.rb b/app/models/commit_status_enums.rb
deleted file mode 100644
index ad90929b8fa..00000000000
--- a/app/models/commit_status_enums.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-module CommitStatusEnums
- # Returns the Hash to use for creating the `failure_reason` enum for
- # `CommitStatus`.
- def self.failure_reasons
- {
- unknown_failure: nil,
- script_failure: 1,
- api_failure: 2,
- stuck_or_timeout_failure: 3,
- runner_system_failure: 4,
- missing_dependency_failure: 5,
- runner_unsupported: 6,
- stale_schedule: 7,
- job_execution_timeout: 8,
- archived_failure: 9,
- unmet_prerequisites: 10,
- scheduler_failure: 11,
- data_integrity_failure: 12,
- forward_deployment_failure: 13,
- insufficient_bridge_permissions: 1_001,
- downstream_bridge_project_not_found: 1_002,
- invalid_bridge_trigger: 1_003,
- bridge_pipeline_is_child_pipeline: 1_006,
- downstream_pipeline_creation_failed: 1_007,
- secrets_provider_not_found: 1_008
- }
- end
-end
-
-CommitStatusEnums.prepend_if_ee('EE::CommitStatusEnums')
diff --git a/app/models/concerns/admin_changed_password_notifier.rb b/app/models/concerns/admin_changed_password_notifier.rb
new file mode 100644
index 00000000000..f6c2abc7e0f
--- /dev/null
+++ b/app/models/concerns/admin_changed_password_notifier.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module AdminChangedPasswordNotifier
+ # This module is responsible for triggering the `Password changed by administrator` emails
+ # when a GitLab administrator changes the password of another user.
+
+ # Usage
+ # These emails are disabled by default and are never triggered after updating the password, unless
+ # explicitly specified.
+
+ # To explicitly trigger this email, the `send_only_admin_changed_your_password_notification!`
+ # method should be called, like so:
+
+ # user = User.find_by(email: 'hello@example.com')
+ # user.send_only_admin_changed_your_password_notification!
+ # user.password = user.password_confirmation = 'new_password'
+ # user.save!
+
+ # The `send_only_admin_changed_your_password_notification!` method has two responsibilities:
+ # It prevents triggering Devise's default `Password changed` email.
+ # It triggers the `Password changed by administrator` email.
+
+ # It is important to skip sending the default Devise email when sending out `Password changed by administrator`
+ # email because we should not be sending 2 emails for the same event,
+ # hence the only public API made available from this module is `send_only_admin_changed_your_password_notification!`
+
+ # There is no public API made available to send the `Password changed by administrator` email,
+ # *without* skipping the default `Password changed` email, to prevent the problem mentioned above.
+
+ extend ActiveSupport::Concern
+
+ included do
+ after_update :send_admin_changed_your_password_notification, if: :send_admin_changed_your_password_notification?
+ end
+
+ def initialize(*args, &block)
+ @allow_admin_changed_your_password_notification = false # These emails are off by default
+ super
+ end
+
+ def send_only_admin_changed_your_password_notification!
+ skip_password_change_notification! # skip sending the default Devise 'password changed' notification
+ allow_admin_changed_your_password_notification!
+ end
+
+ private
+
+ def send_admin_changed_your_password_notification
+ send_devise_notification(:password_change_by_admin)
+ end
+
+ def allow_admin_changed_your_password_notification!
+ @allow_admin_changed_your_password_notification = true # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ def send_admin_changed_your_password_notification?
+ self.class.send_password_change_notification && saved_change_to_encrypted_password? &&
+ @allow_admin_changed_your_password_notification # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+end
diff --git a/app/models/concerns/bulk_member_access_load.rb b/app/models/concerns/bulk_member_access_load.rb
index 041ed3755e0..f44ad474cd5 100644
--- a/app/models/concerns/bulk_member_access_load.rb
+++ b/app/models/concerns/bulk_member_access_load.rb
@@ -22,7 +22,7 @@ module BulkMemberAccessLoad
end
# Look up only the IDs we need
- resource_ids = resource_ids - access.keys
+ resource_ids -= access.keys
return access if resource_ids.empty?
diff --git a/app/models/concerns/checksummable.rb b/app/models/concerns/checksummable.rb
index 1f76eb87aa5..d6d17bfc604 100644
--- a/app/models/concerns/checksummable.rb
+++ b/app/models/concerns/checksummable.rb
@@ -3,9 +3,13 @@
module Checksummable
extend ActiveSupport::Concern
+ def crc32(data)
+ Zlib.crc32(data)
+ end
+
class_methods do
def hexdigest(path)
- Digest::SHA256.file(path).hexdigest
+ ::Digest::SHA256.file(path).hexdigest
end
end
end
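A small sketch of the Checksummable change above: #crc32 is now an instance helper (used by Ci::BuildTraceChunk when persisting chunks), while .hexdigest remains a class method. The including class below is hypothetical:

class ChecksummedBlob
  include Checksummable
end

ChecksummedBlob.new.crc32("abc")         # => 891568578 (Zlib.crc32)
ChecksummedBlob.hexdigest("/tmp/a.txt")  # SHA256 hex digest of the file at that path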
diff --git a/app/models/concerns/ci/artifactable.rb b/app/models/concerns/ci/artifactable.rb
index 54fb9021f2f..24df86dbc3c 100644
--- a/app/models/concerns/ci/artifactable.rb
+++ b/app/models/concerns/ci/artifactable.rb
@@ -4,6 +4,8 @@ module Ci
module Artifactable
extend ActiveSupport::Concern
+ NotSupportedAdapterError = Class.new(StandardError)
+
FILE_FORMAT_ADAPTERS = {
gzip: Gitlab::Ci::Build::Artifacts::Adapters::GzipStream,
raw: Gitlab::Ci::Build::Artifacts::Adapters::RawStream
@@ -15,6 +17,24 @@ module Ci
zip: 2,
gzip: 3
}, _suffix: true
+
+ scope :expired, -> (limit) { where('expire_at < ?', Time.current).limit(limit) }
+ end
+
+ def each_blob(&blk)
+ unless file_format_adapter_class
+ raise NotSupportedAdapterError, 'This file format requires a dedicated adapter'
+ end
+
+ file.open do |stream|
+ file_format_adapter_class.new(stream).each_blob(&blk)
+ end
+ end
+
+ private
+
+ def file_format_adapter_class
+ FILE_FORMAT_ADAPTERS[file_format.to_sym]
end
end
end
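With #each_blob moved from Ci::JobArtifact onto the shared Ci::Artifactable concern above, any artifact record with a gzip or raw file_format can stream its contents; a hedged sketch (the record is hypothetical):

artifact = Ci::JobArtifact.trace.last
artifact.each_blob { |blob| puts blob.bytesize }
# raises Ci::Artifactable::NotSupportedAdapterError for formats without an adapter (e.g. zip)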
diff --git a/app/models/concerns/discussion_on_diff.rb b/app/models/concerns/discussion_on_diff.rb
index 8542c48f366..40891073738 100644
--- a/app/models/concerns/discussion_on_diff.rb
+++ b/app/models/concerns/discussion_on_diff.rb
@@ -13,14 +13,12 @@ module DiscussionOnDiff
:diff_line,
:active?,
:created_at_diff?,
-
to: :first_note
delegate :file_path,
:blob,
:highlighted_diff_lines,
:diff_lines,
-
to: :diff_file,
allow_nil: true
end
diff --git a/app/models/concerns/enums/ci/pipeline.rb b/app/models/concerns/enums/ci/pipeline.rb
new file mode 100644
index 00000000000..f1bc43a12d8
--- /dev/null
+++ b/app/models/concerns/enums/ci/pipeline.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module Enums
+ module Ci
+ module Pipeline
+ # Returns the `Hash` to use for creating the `failure_reason` enum for
+ # `Ci::Pipeline`.
+ def self.failure_reasons
+ {
+ unknown_failure: 0,
+ config_error: 1,
+ external_validation_failure: 2
+ }
+ end
+
+ # Returns the `Hash` to use for creating the `sources` enum for
+ # `Ci::Pipeline`.
+ def self.sources
+ {
+ unknown: nil,
+ push: 1,
+ web: 2,
+ trigger: 3,
+ schedule: 4,
+ api: 5,
+ external: 6,
+ # TODO: Rename `pipeline` to `cross_project_pipeline` in 13.0
+ # https://gitlab.com/gitlab-org/gitlab/issues/195991
+ pipeline: 7,
+ chat: 8,
+ webide: 9,
+ merge_request_event: 10,
+ external_pull_request_event: 11,
+ parent_pipeline: 12,
+ ondemand_dast_scan: 13
+ }
+ end
+
+      # Dangling sources are those events that generate pipelines which should
+      # not directly affect the ref CI status.

+ # - when a webide pipeline fails it does not change the ref CI status to failed
+ # - when a child pipeline (from parent_pipeline source) fails it affects its
+ # parent pipeline. It's up to the parent to affect the ref CI status
+      #   - when an ondemand_dast_scan pipeline runs, it is for testing purposes and should
+ # not affect the ref CI status.
+ def self.dangling_sources
+ sources.slice(:webide, :parent_pipeline, :ondemand_dast_scan)
+ end
+
+ # CI sources are those pipeline events that affect the CI status of the ref
+ # they run for. By definition it excludes dangling pipelines.
+ def self.ci_sources
+ sources.except(*dangling_sources.keys)
+ end
+
+ # Returns the `Hash` to use for creating the `config_sources` enum for
+ # `Ci::Pipeline`.
+ def self.config_sources
+ {
+ unknown_source: nil,
+ repository_source: 1,
+ auto_devops_source: 2,
+ webide_source: 3,
+ remote_source: 4,
+ external_project_source: 5,
+ bridge_source: 6,
+ parameter_source: 7
+ }
+ end
+ end
+ end
+end
+
+Enums::Ci::Pipeline.prepend_if_ee('EE::Enums::Ci::Pipeline')
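
These hashes are meant to be handed straight to ActiveRecord's `enum` macro, and the EE module prepended on the last line can extend them. A sketch of the intended consumption; the exact declarations in Ci::Pipeline sit outside this excerpt:

  class Ci::Pipeline < ApplicationRecord
    enum source: ::Enums::Ci::Pipeline.sources
    enum config_source: ::Enums::Ci::Pipeline.config_sources
    enum failure_reason: ::Enums::Ci::Pipeline.failure_reasons
  end
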
diff --git a/app/models/concerns/enums/commit_status.rb b/app/models/concerns/enums/commit_status.rb
new file mode 100644
index 00000000000..faeed7276ab
--- /dev/null
+++ b/app/models/concerns/enums/commit_status.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Enums
+ module CommitStatus
+ # Returns the Hash to use for creating the `failure_reason` enum for
+ # `CommitStatus`.
+ def self.failure_reasons
+ {
+ unknown_failure: nil,
+ script_failure: 1,
+ api_failure: 2,
+ stuck_or_timeout_failure: 3,
+ runner_system_failure: 4,
+ missing_dependency_failure: 5,
+ runner_unsupported: 6,
+ stale_schedule: 7,
+ job_execution_timeout: 8,
+ archived_failure: 9,
+ unmet_prerequisites: 10,
+ scheduler_failure: 11,
+ data_integrity_failure: 12,
+ forward_deployment_failure: 13,
+ insufficient_bridge_permissions: 1_001,
+ downstream_bridge_project_not_found: 1_002,
+ invalid_bridge_trigger: 1_003,
+ bridge_pipeline_is_child_pipeline: 1_006, # not used anymore, but cannot be deleted because of old data
+ downstream_pipeline_creation_failed: 1_007,
+ secrets_provider_not_found: 1_008,
+ reached_max_descendant_pipelines_depth: 1_009
+ }
+ end
+ end
+end
+
+Enums::CommitStatus.prepend_if_ee('EE::Enums::CommitStatus')
diff --git a/app/models/internal_id_enums.rb b/app/models/concerns/enums/internal_id.rb
index 125ae7573b6..2d51d232e93 100644
--- a/app/models/internal_id_enums.rb
+++ b/app/models/concerns/enums/internal_id.rb
@@ -1,9 +1,10 @@
# frozen_string_literal: true
-module InternalIdEnums
- def self.usage_resources
- # when adding new resource, make sure it doesn't conflict with EE usage_resources
- {
+module Enums
+ module InternalId
+ def self.usage_resources
+      # when adding a new resource, make sure it doesn't conflict with EE usage_resources
+ {
issues: 0,
merge_requests: 1,
deployments: 2,
@@ -14,8 +15,9 @@ module InternalIdEnums
operations_user_lists: 7,
alert_management_alerts: 8,
sprints: 9 # iterations
- }
+ }
+ end
end
end
-InternalIdEnums.prepend_if_ee('EE::InternalIdEnums')
+Enums::InternalId.prepend_if_ee('EE::Enums::InternalId')
diff --git a/app/models/concerns/enums/prometheus_metric.rb b/app/models/concerns/enums/prometheus_metric.rb
new file mode 100644
index 00000000000..e65a01990a3
--- /dev/null
+++ b/app/models/concerns/enums/prometheus_metric.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+module Enums
+ module PrometheusMetric
+ def self.groups
+ {
+ # built-in groups
+ nginx_ingress_vts: -1,
+ ha_proxy: -2,
+ aws_elb: -3,
+ nginx: -4,
+ kubernetes: -5,
+ nginx_ingress: -6,
+ cluster_health: -100
+ }.merge(custom_groups).freeze
+ end
+
+ # custom/user groups
+ def self.custom_groups
+ {
+ business: 0,
+ response: 1,
+ system: 2,
+ custom: 3
+ }.freeze
+ end
+
+ def self.group_details
+ {
+ # built-in groups
+ nginx_ingress_vts: {
+ group_title: _('Response metrics (NGINX Ingress VTS)'),
+ required_metrics: %w(nginx_upstream_responses_total nginx_upstream_response_msecs_avg),
+ priority: 10
+ }.freeze,
+ nginx_ingress: {
+ group_title: _('Response metrics (NGINX Ingress)'),
+ required_metrics: %w(nginx_ingress_controller_requests nginx_ingress_controller_ingress_upstream_latency_seconds_sum),
+ priority: 10
+ }.freeze,
+ ha_proxy: {
+ group_title: _('Response metrics (HA Proxy)'),
+ required_metrics: %w(haproxy_frontend_http_requests_total haproxy_frontend_http_responses_total),
+ priority: 10
+ }.freeze,
+ aws_elb: {
+ group_title: _('Response metrics (AWS ELB)'),
+ required_metrics: %w(aws_elb_request_count_sum aws_elb_latency_average aws_elb_httpcode_backend_5_xx_sum),
+ priority: 10
+ }.freeze,
+ nginx: {
+ group_title: _('Response metrics (NGINX)'),
+ required_metrics: %w(nginx_server_requests nginx_server_requestMsec),
+ priority: 10
+ }.freeze,
+ kubernetes: {
+ group_title: _('System metrics (Kubernetes)'),
+ required_metrics: %w(container_memory_usage_bytes container_cpu_usage_seconds_total),
+ priority: 5
+ }.freeze,
+ cluster_health: {
+ group_title: _('Cluster Health'),
+ required_metrics: %w(container_memory_usage_bytes container_cpu_usage_seconds_total),
+ priority: 10
+ }.freeze
+ }.merge(custom_group_details).freeze
+ end
+
+ # custom/user groups
+ def self.custom_group_details
+ {
+ business: {
+ group_title: _('Business metrics (Custom)'),
+ priority: 0
+ }.freeze,
+ response: {
+ group_title: _('Response metrics (Custom)'),
+ priority: -5
+ }.freeze,
+ system: {
+ group_title: _('System metrics (Custom)'),
+ priority: -10
+ }.freeze,
+ custom: {
+ group_title: _('Custom metrics'),
+ priority: 0
+ }
+ }.freeze
+ end
+ end
+end
diff --git a/app/models/concerns/from_except.rb b/app/models/concerns/from_except.rb
new file mode 100644
index 00000000000..b9ca9dda4b0
--- /dev/null
+++ b/app/models/concerns/from_except.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module FromExcept
+ extend ActiveSupport::Concern
+
+ class_methods do
+ # Produces a query that uses a FROM to select data using an EXCEPT.
+ #
+ # Example:
+ # groups = Group.from_except([group1.self_and_hierarchy, group2.self_and_hierarchy])
+ #
+ # This would produce the following SQL query:
+ #
+ # SELECT *
+ # FROM (
+ # SELECT "namespaces". *
+ # ...
+ #
+ # EXCEPT
+ #
+ # SELECT "namespaces". *
+ # ...
+ # ) groups;
+ #
+ # members - An Array of ActiveRecord::Relation objects to use in the except.
+ #
+ # remove_duplicates - A boolean indicating if duplicate entries should be
+ # removed. Defaults to true.
+ #
+ # alias_as - The alias to use for the sub query. Defaults to the name of the
+ # table of the current model.
+ # rubocop: disable Gitlab/Except
+ extend FromSetOperator
+ define_set_operator Gitlab::SQL::Except
+ # rubocop: enable Gitlab/Except
+ end
+end
diff --git a/app/models/concerns/from_intersect.rb b/app/models/concerns/from_intersect.rb
new file mode 100644
index 00000000000..428e63eb45e
--- /dev/null
+++ b/app/models/concerns/from_intersect.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module FromIntersect
+ extend ActiveSupport::Concern
+
+ class_methods do
+ # Produces a query that uses a FROM to select data using an INTERSECT.
+ #
+ # Example:
+ # groups = Group.from_intersect([group1.self_and_hierarchy, group2.self_and_hierarchy])
+ #
+ # This would produce the following SQL query:
+ #
+ # SELECT *
+ # FROM (
+ # SELECT "namespaces". *
+ # ...
+ #
+ # INTERSECT
+ #
+ # SELECT "namespaces". *
+ # ...
+ # ) groups;
+ #
+ # members - An Array of ActiveRecord::Relation objects to use in the intersect.
+ #
+ # remove_duplicates - A boolean indicating if duplicate entries should be
+ # removed. Defaults to true.
+ #
+ # alias_as - The alias to use for the sub query. Defaults to the name of the
+ # table of the current model.
+ # rubocop: disable Gitlab/Intersect
+ extend FromSetOperator
+ define_set_operator Gitlab::SQL::Intersect
+ # rubocop: enable Gitlab/Intersect
+ end
+end
diff --git a/app/models/concerns/from_set_operator.rb b/app/models/concerns/from_set_operator.rb
new file mode 100644
index 00000000000..593fd251c5c
--- /dev/null
+++ b/app/models/concerns/from_set_operator.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module FromSetOperator
+ # Define a high level method to more easily work with the SQL set operations
+ # of UNION, INTERSECT, and EXCEPT as defined by Gitlab::SQL::Union,
+ # Gitlab::SQL::Intersect, and Gitlab::SQL::Except respectively.
+ def define_set_operator(operator)
+ method_name = 'from_' + operator.name.demodulize.downcase
+ method_name = method_name.to_sym
+
+ raise "Trying to redefine method '#{method(method_name)}'" if methods.include?(method_name)
+
+ define_method(method_name) do |members, remove_duplicates: true, alias_as: table_name|
+ operator_sql = operator.new(members, remove_duplicates: remove_duplicates).to_sql
+
+ from(Arel.sql("(#{operator_sql}) #{alias_as}"))
+ end
+ end
+end
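
`define_set_operator` derives the method name from the operator class, so `define_set_operator Gitlab::SQL::Union` defines `from_union`, and the EXCEPT/INTERSECT concerns above get `from_except` and `from_intersect` the same way. Expanded by hand, the generated method is equivalent to this sketch:

  def from_union(members, remove_duplicates: true, alias_as: table_name)
    # Build "(query1 UNION query2 ...)" and select from it under the alias.
    sql = Gitlab::SQL::Union.new(members, remove_duplicates: remove_duplicates).to_sql
    from(Arel.sql("(#{sql}) #{alias_as}"))
  end
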
diff --git a/app/models/concerns/from_union.rb b/app/models/concerns/from_union.rb
index e28dee34815..e25d603b802 100644
--- a/app/models/concerns/from_union.rb
+++ b/app/models/concerns/from_union.rb
@@ -35,13 +35,29 @@ module FromUnion
# alias_as - The alias to use for the sub query. Defaults to the name of the
# table of the current model.
# rubocop: disable Gitlab/Union
+ extend FromSetOperator
+ define_set_operator Gitlab::SQL::Union
+
+ alias_method :from_union_set_operator, :from_union
def from_union(members, remove_duplicates: true, alias_as: table_name)
+ if Feature.enabled?(:sql_set_operators)
+ from_union_set_operator(members, remove_duplicates: remove_duplicates, alias_as: alias_as)
+ else
+ # The original from_union method.
+ standard_from_union(members, remove_duplicates: remove_duplicates, alias_as: alias_as)
+ end
+ end
+
+ private
+
+ def standard_from_union(members, remove_duplicates: true, alias_as: table_name)
union = Gitlab::SQL::Union
.new(members, remove_duplicates: remove_duplicates)
.to_sql
from(Arel.sql("(#{union}) #{alias_as}"))
end
+
# rubocop: enable Gitlab/Union
end
end
diff --git a/app/models/concerns/has_wiki.rb b/app/models/concerns/has_wiki.rb
index 3e7cb940a62..df7bbe4dc08 100644
--- a/app/models/concerns/has_wiki.rb
+++ b/app/models/concerns/has_wiki.rb
@@ -25,10 +25,6 @@ module HasWiki
wiki.repository_exists?
end
- def after_wiki_activity
- true
- end
-
private
def check_wiki_path_conflict
diff --git a/app/models/concerns/id_in_ordered.rb b/app/models/concerns/id_in_ordered.rb
new file mode 100644
index 00000000000..b89409e6841
--- /dev/null
+++ b/app/models/concerns/id_in_ordered.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module IdInOrdered
+ extend ActiveSupport::Concern
+
+ included do
+ scope :id_in_ordered, -> (ids) do
+ raise ArgumentError, "ids must be an array of integers" unless ids.is_a?(Enumerable) && ids.all? { |id| id.is_a?(Integer) }
+
+      # No need to sort when there is at most one id; the sorting code also
+      # does not work with an empty array
+ return id_in(ids) unless ids.count > 1
+
+ id_attribute = arel_table[:id]
+ id_in(ids)
+ .order(
+ Arel.sql("array_position(ARRAY[#{ids.join(',')}], #{id_attribute.relation.name}.#{id_attribute.name})"))
+ end
+ end
+end
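
A usage sketch: the scope returns records in the caller-supplied order by delegating the sort to PostgreSQL's array_position(). Issue and MergeRequest include the concern further down in this commit.

  Issue.id_in_ordered([10, 2, 7])
  # => the three issues, ordered 10, 2, 7 (raises ArgumentError for non-integer ids)
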
diff --git a/app/models/concerns/issuable.rb b/app/models/concerns/issuable.rb
index dd5aedbb760..888e1b384a2 100644
--- a/app/models/concerns/issuable.rb
+++ b/app/models/concerns/issuable.rb
@@ -177,10 +177,41 @@ module Issuable
assignees.count > 1
end
- def supports_weight?
+ def allows_reviewers?
false
end
+ def supports_time_tracking?
+ is_a?(TimeTrackable) && !incident?
+ end
+
+ def supports_severity?
+ incident?
+ end
+
+ def incident?
+ is_a?(Issue) && super
+ end
+
+ def supports_issue_type?
+ is_a?(Issue)
+ end
+
+ def severity
+ return IssuableSeverity::DEFAULT unless incident?
+
+ issuable_severity&.severity || IssuableSeverity::DEFAULT
+ end
+
+ def update_severity(severity)
+ return unless incident?
+
+ severity = severity.to_s.downcase
+ severity = IssuableSeverity::DEFAULT unless IssuableSeverity.severities.key?(severity)
+
+ (issuable_severity || build_issuable_severity(issue_id: id)).update(severity: severity)
+ end
+
private
def description_max_length_for_new_records_is_valid
@@ -377,8 +408,12 @@ module Issuable
Date.today == created_at.to_date
end
+ def created_hours_ago
+ (Time.now.utc.to_i - created_at.utc.to_i) / 3600
+ end
+
def new?
- today? && created_at == updated_at
+ created_hours_ago < 24
end
def open?
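
A short sketch of the new severity helpers, assuming `issue` is an incident-type Issue; non-incidents always report the default severity and ignore updates:

  issue.severity                 # => 'unknown' (IssuableSeverity::DEFAULT) until set
  issue.update_severity('high')  # creates or updates the issuable_severity record
  issue.update_severity('bogus') # unknown values fall back to the default severity
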
diff --git a/app/models/concerns/loaded_in_group_list.rb b/app/models/concerns/loaded_in_group_list.rb
index 79ff82d9f99..e624b9aa356 100644
--- a/app/models/concerns/loaded_in_group_list.rb
+++ b/app/models/concerns/loaded_in_group_list.rb
@@ -54,6 +54,7 @@ module LoadedInGroupList
.where(members[:source_type].eq(Namespace.name))
.where(members[:source_id].eq(namespaces[:id]))
.where(members[:requested_at].eq(nil))
+ .where(members[:access_level].gt(Gitlab::Access::MINIMAL_ACCESS))
end
end
@@ -70,7 +71,7 @@ module LoadedInGroupList
end
def member_count
- @member_count ||= try(:preloaded_member_count) || users.count
+ @member_count ||= try(:preloaded_member_count) || members.count
end
end
diff --git a/app/models/concerns/mentionable/reference_regexes.rb b/app/models/concerns/mentionable/reference_regexes.rb
index f44a674b3c9..307d58a3a3c 100644
--- a/app/models/concerns/mentionable/reference_regexes.rb
+++ b/app/models/concerns/mentionable/reference_regexes.rb
@@ -30,7 +30,7 @@ module Mentionable
def self.external_pattern
strong_memoize(:external_pattern) do
issue_pattern = IssueTrackerService.reference_pattern
- link_patterns = URI.regexp(%w(http https))
+ link_patterns = URI::DEFAULT_PARSER.make_regexp(%w(http https))
reference_pattern(link_patterns, issue_pattern)
end
end
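
Both calls build the same pattern for http/https links; `URI.regexp` is obsolete (flagged by RuboCop's Lint/UriRegexp), so the parser-based form is used instead. Illustrative comparison:

  URI::DEFAULT_PARSER.make_regexp(%w(http https)) # preferred
  URI.regexp(%w(http https))                      # obsolete equivalent
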
diff --git a/app/models/concerns/milestoneable.rb b/app/models/concerns/milestoneable.rb
index ccb334343ff..b1698bc2ee3 100644
--- a/app/models/concerns/milestoneable.rb
+++ b/app/models/concerns/milestoneable.rb
@@ -51,7 +51,7 @@ module Milestoneable
# Overridden on EE module
#
def supports_milestone?
- respond_to?(:milestone_id)
+ respond_to?(:milestone_id) && !incident?
end
end
diff --git a/app/models/concerns/optimized_issuable_label_filter.rb b/app/models/concerns/optimized_issuable_label_filter.rb
new file mode 100644
index 00000000000..7be4a26d4fa
--- /dev/null
+++ b/app/models/concerns/optimized_issuable_label_filter.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+module OptimizedIssuableLabelFilter
+ def by_label(items)
+ return items unless params.labels?
+
+ return super if Feature.disabled?(:optimized_issuable_label_filter)
+
+ target_model = items.model
+
+ if params.filter_by_no_label?
+ items.where('NOT EXISTS (?)', optimized_any_label_query(target_model))
+ elsif params.filter_by_any_label?
+ items.where('EXISTS (?)', optimized_any_label_query(target_model))
+ else
+ issuables_with_selected_labels(items, target_model)
+ end
+ end
+
+ # Taken from IssuableFinder
+ def count_by_state
+ return super if root_namespace.nil?
+ return super if Feature.disabled?(:optimized_issuable_label_filter)
+
+ count_params = params.merge(state: nil, sort: nil, force_cte: true)
+ finder = self.class.new(current_user, count_params)
+
+ state_counts = finder
+ .execute
+ .reorder(nil)
+ .group(:state_id)
+ .count
+
+ counts = state_counts.transform_keys { |key| count_key(key) }
+
+ counts[:all] = counts.values.sum
+ counts.with_indifferent_access
+ end
+
+ private
+
+ def issuables_with_selected_labels(items, target_model)
+ if root_namespace
+ all_label_ids = find_label_ids(root_namespace)
+      # Found fewer labels in the DB than we were searching for. Return nothing.
+ return items.none if all_label_ids.size != params.label_names.size
+
+ all_label_ids.each do |label_ids|
+ items = items.where('EXISTS (?)', optimized_label_query_by_label_ids(target_model, label_ids))
+ end
+ else
+ params.label_names.each do |label_name|
+ items = items.where('EXISTS (?)', optimized_label_query_by_label_name(target_model, label_name))
+ end
+ end
+
+ items
+ end
+
+ def find_label_ids(root_namespace)
+ finder_params = {
+ include_subgroups: true,
+ include_ancestor_groups: true,
+ include_descendant_groups: true,
+ group: root_namespace,
+ title: params.label_names
+ }
+
+ LabelsFinder
+ .new(nil, finder_params)
+ .execute(skip_authorization: true)
+ .pluck(:title, :id)
+ .group_by(&:first)
+ .values
+ .map { |labels| labels.map(&:last) }
+ end
+
+ def root_namespace
+ strong_memoize(:root_namespace) do
+ (params.project || params.group)&.root_ancestor
+ end
+ end
+
+ def optimized_any_label_query(target_model)
+ LabelLink
+ .where(target_type: target_model.name)
+ .where(LabelLink.arel_table['target_id'].eq(target_model.arel_table['id']))
+ .limit(1)
+ end
+
+ def optimized_label_query_by_label_ids(target_model, label_ids)
+ LabelLink
+ .where(target_type: target_model.name)
+ .where(LabelLink.arel_table['target_id'].eq(target_model.arel_table['id']))
+ .where(label_id: label_ids)
+ .limit(1)
+ end
+
+ def optimized_label_query_by_label_name(target_model, label_name)
+ LabelLink
+ .joins(:label)
+ .where(target_type: target_model.name)
+ .where(LabelLink.arel_table['target_id'].eq(target_model.arel_table['id']))
+ .where(labels: { name: label_name })
+ .limit(1)
+ end
+end
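
When the feature flag is on, each selected label becomes one correlated EXISTS subquery against label_links instead of a join per label. A rough sketch of the relation built for a single label name, mirroring optimized_label_query_by_label_name; the target model and label name here are illustrative:

  issues = Issue.all
  issues = issues.where(
    'EXISTS (?)',
    LabelLink.joins(:label)
             .where(target_type: 'Issue')
             .where(LabelLink.arel_table['target_id'].eq(Issue.arel_table['id']))
             .where(labels: { name: 'bug' })
             .limit(1)
  )
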
diff --git a/app/models/concerns/prometheus_adapter.rb b/app/models/concerns/prometheus_adapter.rb
index adb6a59e11c..55c2bf96a94 100644
--- a/app/models/concerns/prometheus_adapter.rb
+++ b/app/models/concerns/prometheus_adapter.rb
@@ -3,6 +3,11 @@
module PrometheusAdapter
extend ActiveSupport::Concern
+ # We should choose more conservative timeouts, but some queries we run are now busting our
+ # default timeouts, which are stricter. We should make those queries faster instead.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/232786
+ DEFAULT_PROMETHEUS_REQUEST_TIMEOUT_SEC = 60.seconds
+
included do
include ReactiveCaching
@@ -15,6 +20,12 @@ module PrometheusAdapter
raise NotImplementedError
end
+ def prometheus_client_default_options
+ {
+ timeout: DEFAULT_PROMETHEUS_REQUEST_TIMEOUT_SEC
+ }
+ end
+
# This is a light-weight check if a prometheus client is properly configured.
def configured?
raise NotImplemented
diff --git a/app/models/concerns/relative_positioning.rb b/app/models/concerns/relative_positioning.rb
index d1f04609693..3cbc174536c 100644
--- a/app/models/concerns/relative_positioning.rb
+++ b/app/models/concerns/relative_positioning.rb
@@ -27,18 +27,7 @@
#
module RelativePositioning
extend ActiveSupport::Concern
-
- STEPS = 10
- IDEAL_DISTANCE = 2**(STEPS - 1) + 1
-
- MIN_POSITION = Gitlab::Database::MIN_INT_VALUE
- START_POSITION = 0
- MAX_POSITION = Gitlab::Database::MAX_INT_VALUE
-
- MAX_GAP = IDEAL_DISTANCE * 2
- MIN_GAP = 2
-
- NoSpaceLeft = Class.new(StandardError)
+ include ::Gitlab::RelativePositioning
class_methods do
def move_nulls_to_end(objects)
@@ -49,56 +38,10 @@ module RelativePositioning
move_nulls(objects, at_end: false)
end
- # This method takes two integer values (positions) and
- # calculates the position between them. The range is huge as
- # the maximum integer value is 2147483647.
- #
- # We avoid open ranges by clamping the range to [MIN_POSITION, MAX_POSITION].
- #
- # Then we handle one of three cases:
- # - If the gap is too small, we raise NoSpaceLeft
- # - If the gap is larger than MAX_GAP, we place the new position at most
- # IDEAL_DISTANCE from the edge of the gap.
- # - otherwise we place the new position at the midpoint.
- #
- # The new position will always satisfy: pos_before <= midpoint <= pos_after
- #
- # As a precondition, the gap between pos_before and pos_after MUST be >= 2.
- # If the gap is too small, NoSpaceLeft is raised.
- #
- # This class method should only be called by instance methods of this module, which
- # include handling for minimum gap size.
- #
- # @raises NoSpaceLeft
- # @api private
- def position_between(pos_before, pos_after)
- pos_before ||= MIN_POSITION
- pos_after ||= MAX_POSITION
-
- pos_before, pos_after = [pos_before, pos_after].sort
-
- gap_width = pos_after - pos_before
- midpoint = [pos_after - 1, pos_before + (gap_width / 2)].min
-
- if gap_width < MIN_GAP
- raise NoSpaceLeft
- elsif gap_width > MAX_GAP
- if pos_before == MIN_POSITION
- pos_after - IDEAL_DISTANCE
- elsif pos_after == MAX_POSITION
- pos_before + IDEAL_DISTANCE
- else
- midpoint
- end
- else
- midpoint
- end
- end
-
private
# @api private
- def gap_size(object, gaps:, at_end:, starting_from:)
+ def gap_size(context, gaps:, at_end:, starting_from:)
total_width = IDEAL_DISTANCE * gaps
size = if at_end && starting_from + total_width >= MAX_POSITION
(MAX_POSITION - starting_from) / gaps
@@ -108,23 +51,17 @@ module RelativePositioning
IDEAL_DISTANCE
end
- # Shift max elements leftwards if there isn't enough space
return [size, starting_from] if size >= MIN_GAP
- order = at_end ? :desc : :asc
- terminus = object
- .send(:relative_siblings) # rubocop:disable GitlabSecurity/PublicSend
- .where('relative_position IS NOT NULL')
- .order(relative_position: order)
- .first
-
if at_end
- terminus.move_sequence_before(true)
- max_relative_position = terminus.reset.relative_position
+ terminus = context.max_sibling
+ terminus.shift_left
+ max_relative_position = terminus.relative_position
[[(MAX_POSITION - max_relative_position) / gaps, IDEAL_DISTANCE].min, max_relative_position]
else
- terminus.move_sequence_after(true)
- min_relative_position = terminus.reset.relative_position
+ terminus = context.min_sibling
+ terminus.shift_right
+ min_relative_position = terminus.relative_position
[[(min_relative_position - MIN_POSITION) / gaps, IDEAL_DISTANCE].min, min_relative_position]
end
end
@@ -142,8 +79,9 @@ module RelativePositioning
objects = objects.reject(&:relative_position)
return 0 if objects.empty?
- representative = objects.first
- number_of_gaps = objects.size + 1 # 1 at left, one between each, and one at right
+ number_of_gaps = objects.size # 1 to the nearest neighbour, and one between each
+ representative = RelativePositioning.mover.context(objects.first)
+
position = if at_end
representative.max_relative_position
else
@@ -152,16 +90,21 @@ module RelativePositioning
position ||= START_POSITION # If there are no positioned siblings, start from START_POSITION
- gap, position = gap_size(representative, gaps: number_of_gaps, at_end: at_end, starting_from: position)
-
- # Raise if we could not make enough space
- raise NoSpaceLeft if gap < MIN_GAP
+ gap = 0
+ attempts = 10 # consolidate up to 10 gaps to find enough space
+ while gap < 1 && attempts > 0
+ gap, position = gap_size(representative, gaps: number_of_gaps, at_end: at_end, starting_from: position)
+ attempts -= 1
+ end
- indexed = objects.each_with_index.to_a
- starting_from = at_end ? position : position - (gap * number_of_gaps)
+ # Allow placing items next to each other, if we have to.
+ gap = 1 if gap < MIN_GAP
+ delta = at_end ? gap : -gap
+ indexed = (at_end ? objects : objects.reverse).each_with_index
# Some classes are polymorphic, and not all siblings are in the same table.
by_model = indexed.group_by { |pair| pair.first.class }
+ lower_bound, upper_bound = at_end ? [position, MAX_POSITION] : [MIN_POSITION, position]
by_model.each do |model, pairs|
model.transaction do
@@ -169,7 +112,8 @@ module RelativePositioning
# These are known to be integers, one from the DB, and the other
# calculated by us, and thus safe to interpolate
values = batch.map do |obj, i|
- pos = starting_from + gap * (i + 1)
+ desired_pos = position + delta * (i + 1)
+ pos = desired_pos.clamp(lower_bound, upper_bound)
obj.relative_position = pos
"(#{obj.id}, #{pos})"
end.join(', ')
@@ -192,306 +136,68 @@ module RelativePositioning
end
end
- def min_relative_position(&block)
- calculate_relative_position('MIN', &block)
- end
-
- def max_relative_position(&block)
- calculate_relative_position('MAX', &block)
- end
-
- def prev_relative_position(ignoring: nil)
- prev_pos = nil
-
- if self.relative_position
- prev_pos = max_relative_position do |relation|
- relation = relation.id_not_in(ignoring.id) if ignoring.present?
- relation.where('relative_position < ?', self.relative_position)
- end
- end
-
- prev_pos
- end
-
- def next_relative_position(ignoring: nil)
- next_pos = nil
-
- if self.relative_position
- next_pos = min_relative_position do |relation|
- relation = relation.id_not_in(ignoring.id) if ignoring.present?
- relation.where('relative_position > ?', self.relative_position)
- end
- end
-
- next_pos
+ def self.mover
+ ::Gitlab::RelativePositioning::Mover.new(START_POSITION, (MIN_POSITION..MAX_POSITION))
end
def move_between(before, after)
- return move_after(before) unless after
- return move_before(after) unless before
-
- before, after = after, before if after.relative_position < before.relative_position
-
- pos_left = before.relative_position
- pos_right = after.relative_position
+ before, after = [before, after].sort_by(&:relative_position) if before && after
- if pos_right - pos_left < MIN_GAP
- # Not enough room! Make space by shifting all previous elements to the left
- # if there is enough space, else to the right
- gap = after.send(:find_next_gap_before) # rubocop:disable GitlabSecurity/PublicSend
-
- if gap.present?
- after.move_sequence_before(next_gap: gap)
- pos_left -= optimum_delta_for_gap(gap)
- else
- before.move_sequence_after
- pos_right = after.reset.relative_position
- end
- end
-
- new_position = self.class.position_between(pos_left, pos_right)
-
- self.relative_position = new_position
+ RelativePositioning.mover.move(self, before, after)
+ rescue ActiveRecord::QueryCanceled, NoSpaceLeft => e
+ could_not_move(e)
+ raise e
end
def move_after(before = self)
- pos_before = before.relative_position
- pos_after = before.next_relative_position(ignoring: self)
-
- if pos_before == MAX_POSITION || gap_too_small?(pos_after, pos_before)
- gap = before.send(:find_next_gap_after) # rubocop:disable GitlabSecurity/PublicSend
-
- if gap.nil?
- before.move_sequence_before(true)
- pos_before = before.reset.relative_position
- else
- before.move_sequence_after(next_gap: gap)
- pos_after += optimum_delta_for_gap(gap)
- end
- end
-
- self.relative_position = self.class.position_between(pos_before, pos_after)
+ RelativePositioning.mover.move(self, before, nil)
+ rescue ActiveRecord::QueryCanceled, NoSpaceLeft => e
+ could_not_move(e)
+ raise e
end
def move_before(after = self)
- pos_after = after.relative_position
- pos_before = after.prev_relative_position(ignoring: self)
-
- if pos_after == MIN_POSITION || gap_too_small?(pos_before, pos_after)
- gap = after.send(:find_next_gap_before) # rubocop:disable GitlabSecurity/PublicSend
-
- if gap.nil?
- after.move_sequence_after(true)
- pos_after = after.reset.relative_position
- else
- after.move_sequence_before(next_gap: gap)
- pos_before -= optimum_delta_for_gap(gap)
- end
- end
-
- self.relative_position = self.class.position_between(pos_before, pos_after)
+ RelativePositioning.mover.move(self, nil, after)
+ rescue ActiveRecord::QueryCanceled, NoSpaceLeft => e
+ could_not_move(e)
+ raise e
end
def move_to_end
- max_pos = max_relative_position
-
- if max_pos.nil?
- self.relative_position = START_POSITION
- elsif gap_too_small?(max_pos, MAX_POSITION)
- max = relative_siblings.order(Gitlab::Database.nulls_last_order('relative_position', 'DESC')).first
- max.move_sequence_before(true)
- max.reset
- self.relative_position = self.class.position_between(max.relative_position, MAX_POSITION)
- else
- self.relative_position = self.class.position_between(max_pos, MAX_POSITION)
- end
+ RelativePositioning.mover.move_to_end(self)
+ rescue NoSpaceLeft => e
+ could_not_move(e)
+ self.relative_position = MAX_POSITION
+ rescue ActiveRecord::QueryCanceled => e
+ could_not_move(e)
+ raise e
end
def move_to_start
- min_pos = min_relative_position
-
- if min_pos.nil?
- self.relative_position = START_POSITION
- elsif gap_too_small?(min_pos, MIN_POSITION)
- min = relative_siblings.order(Gitlab::Database.nulls_last_order('relative_position', 'ASC')).first
- min.move_sequence_after(true)
- min.reset
- self.relative_position = self.class.position_between(MIN_POSITION, min.relative_position)
- else
- self.relative_position = self.class.position_between(MIN_POSITION, min_pos)
- end
- end
-
- # Moves the sequence before the current item to the middle of the next gap
- # For example, we have
- #
- # 5 . . . . . 11 12 13 14 [15] 16 . 17
- # -----------
- #
- # This moves the sequence [11 12 13 14] to [8 9 10 11], so we have:
- #
- # 5 . . 8 9 10 11 . . . [15] 16 . 17
- # ---------
- #
- # Creating a gap to the left of the current item. We can understand this as
- # dividing the 5 spaces between 5 and 11 into two smaller gaps of 2 and 3.
- #
- # If `include_self` is true, the current item will also be moved, creating a
- # gap to the right of the current item:
- #
- # 5 . . 8 9 10 11 [14] . . . 16 . 17
- # --------------
- #
- # As an optimization, the gap can be precalculated and passed to this method.
- #
- # @api private
- # @raises NoSpaceLeft if the sequence cannot be moved
- def move_sequence_before(include_self = false, next_gap: find_next_gap_before)
- raise NoSpaceLeft unless next_gap.present?
-
- delta = optimum_delta_for_gap(next_gap)
-
- move_sequence(next_gap[:start], relative_position, -delta, include_self)
- end
-
- # Moves the sequence after the current item to the middle of the next gap
- # For example, we have:
- #
- # 8 . 10 [11] 12 13 14 15 . . . . . 21
- # -----------
- #
- # This moves the sequence [12 13 14 15] to [15 16 17 18], so we have:
- #
- # 8 . 10 [11] . . . 15 16 17 18 . . 21
- # -----------
- #
- # Creating a gap to the right of the current item. We can understand this as
- # dividing the 5 spaces between 15 and 21 into two smaller gaps of 3 and 2.
- #
- # If `include_self` is true, the current item will also be moved, creating a
- # gap to the left of the current item:
- #
- # 8 . 10 . . . [14] 15 16 17 18 . . 21
- # ----------------
- #
- # As an optimization, the gap can be precalculated and passed to this method.
- #
- # @api private
- # @raises NoSpaceLeft if the sequence cannot be moved
- def move_sequence_after(include_self = false, next_gap: find_next_gap_after)
- raise NoSpaceLeft unless next_gap.present?
-
- delta = optimum_delta_for_gap(next_gap)
-
- move_sequence(relative_position, next_gap[:start], delta, include_self)
- end
-
- private
-
- def gap_too_small?(pos_a, pos_b)
- return false unless pos_a && pos_b
-
- (pos_a - pos_b).abs < MIN_GAP
- end
-
- # Find the first suitable gap to the left of the current position.
- #
- # Satisfies the relations:
- # - gap[:start] <= relative_position
- # - abs(gap[:start] - gap[:end]) >= MIN_GAP
- # - MIN_POSITION <= gap[:start] <= MAX_POSITION
- # - MIN_POSITION <= gap[:end] <= MAX_POSITION
- #
- # Supposing that the current item is 13, and we have a sequence of items:
- #
- # 1 . . . 5 . . . . 11 12 [13] 14 . . 17
- # ^---------^
- #
- # Then we return: `{ start: 11, end: 5 }`
- #
- # Here start refers to the end of the gap closest to the current item.
- def find_next_gap_before
- items_with_next_pos = scoped_items
- .select('relative_position AS pos, LEAD(relative_position) OVER (ORDER BY relative_position DESC) AS next_pos')
- .where('relative_position <= ?', relative_position)
- .order(relative_position: :desc)
-
- find_next_gap(items_with_next_pos, MIN_POSITION)
- end
-
- # Find the first suitable gap to the right of the current position.
- #
- # Satisfies the relations:
- # - gap[:start] >= relative_position
- # - abs(gap[:start] - gap[:end]) >= MIN_GAP
- # - MIN_POSITION <= gap[:start] <= MAX_POSITION
- # - MIN_POSITION <= gap[:end] <= MAX_POSITION
- #
- # Supposing the current item is 13, and that we have a sequence of items:
- #
- # 9 . . . [13] 14 15 . . . . 20 . . . 24
- # ^---------^
- #
- # Then we return: `{ start: 15, end: 20 }`
- #
- # Here start refers to the end of the gap closest to the current item.
- def find_next_gap_after
- items_with_next_pos = scoped_items
- .select('relative_position AS pos, LEAD(relative_position) OVER (ORDER BY relative_position ASC) AS next_pos')
- .where('relative_position >= ?', relative_position)
- .order(:relative_position)
-
- find_next_gap(items_with_next_pos, MAX_POSITION)
- end
-
- def find_next_gap(items_with_next_pos, end_is_nil)
- gap = self.class
- .from(items_with_next_pos, :items)
- .where('next_pos IS NULL OR ABS(pos::bigint - next_pos::bigint) >= ?', MIN_GAP)
- .limit(1)
- .pluck(:pos, :next_pos)
- .first
-
- return if gap.nil? || gap.first == end_is_nil
-
- { start: gap.first, end: gap.second || end_is_nil }
- end
-
- def optimum_delta_for_gap(gap)
- delta = ((gap[:start] - gap[:end]) / 2.0).abs.ceil
-
- [delta, IDEAL_DISTANCE].min
- end
-
- def move_sequence(start_pos, end_pos, delta, include_self = false)
- relation = include_self ? scoped_items : relative_siblings
-
+ RelativePositioning.mover.move_to_start(self)
+ rescue NoSpaceLeft => e
+ could_not_move(e)
+ self.relative_position = MIN_POSITION
+ rescue ActiveRecord::QueryCanceled => e
+ could_not_move(e)
+ raise e
+ end
+
+ # This method is used during rebalancing - override it to customise the update
+ # logic:
+ def update_relative_siblings(relation, range, delta)
relation
- .where('relative_position BETWEEN ? AND ?', start_pos, end_pos)
+ .where(relative_position: range)
.update_all("relative_position = relative_position + #{delta}")
end
- def calculate_relative_position(calculation)
- # When calculating across projects, this is much more efficient than
- # MAX(relative_position) without the GROUP BY, due to index usage:
- # https://gitlab.com/gitlab-org/gitlab-foss/issues/54276#note_119340977
- relation = scoped_items
- .order(Gitlab::Database.nulls_last_order('position', 'DESC'))
- .group(self.class.relative_positioning_parent_column)
- .limit(1)
-
- relation = yield relation if block_given?
-
- relation
- .pluck(self.class.relative_positioning_parent_column, Arel.sql("#{calculation}(relative_position) AS position"))
- .first&.last
- end
-
- def relative_siblings(relation = scoped_items)
- relation.id_not_in(id)
+ # This method is used to exclude the current self (or another object)
+ # from a relation. Customize this if `id <> :id` is not sufficient
+ def exclude_self(relation, excluded: self)
+ relation.id_not_in(excluded.id)
end
- def scoped_items
- self.class.relative_positioning_query_base(self)
+ # Override if you want to be notified of failures to move
+ def could_not_move(exception)
end
end
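
After this refactor the position arithmetic lives in Gitlab::RelativePositioning::Mover; the concern keeps a thin public API plus three override points (update_relative_siblings, exclude_self, could_not_move). A usage sketch, assuming `issue`, `left` and `right` are positioned siblings that include the concern:

  issue.move_between(left, right)  # or move_after(left) / move_before(right)
  issue.save!
  # On NoSpaceLeft or a statement timeout the concern calls could_not_move
  # before re-raising; Issue overrides it below to schedule IssueRebalancingWorker.
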
diff --git a/app/models/concerns/resolvable_discussion.rb b/app/models/concerns/resolvable_discussion.rb
index 5174ae05d15..3e1e5faee54 100644
--- a/app/models/concerns/resolvable_discussion.rb
+++ b/app/models/concerns/resolvable_discussion.rb
@@ -31,7 +31,6 @@ module ResolvableDiscussion
delegate :resolved_at,
:resolved_by,
:resolved_by_push?,
-
to: :last_resolved_note,
allow_nil: true
end
diff --git a/app/models/concerns/storage/legacy_namespace.rb b/app/models/concerns/storage/legacy_namespace.rb
index 250889fdf8b..71b976c6f11 100644
--- a/app/models/concerns/storage/legacy_namespace.rb
+++ b/app/models/concerns/storage/legacy_namespace.rb
@@ -23,10 +23,22 @@ module Storage
former_parent_full_path = parent_was&.full_path
parent_full_path = parent&.full_path
Gitlab::UploadsTransfer.new.move_namespace(path, former_parent_full_path, parent_full_path)
- Gitlab::PagesTransfer.new.move_namespace(path, former_parent_full_path, parent_full_path)
+
+ if any_project_with_pages_deployed?
+ run_after_commit do
+ Gitlab::PagesTransfer.new.async.move_namespace(path, former_parent_full_path, parent_full_path)
+ end
+ end
else
Gitlab::UploadsTransfer.new.rename_namespace(full_path_before_last_save, full_path)
- Gitlab::PagesTransfer.new.rename_namespace(full_path_before_last_save, full_path)
+
+ if any_project_with_pages_deployed?
+ full_path_was = full_path_before_last_save
+
+ run_after_commit do
+ Gitlab::PagesTransfer.new.async.rename_namespace(full_path_was, full_path)
+ end
+ end
end
# If repositories moved successfully we need to
diff --git a/app/models/concerns/timebox.rb b/app/models/concerns/timebox.rb
index 8927e42dd97..3e2cf9031d0 100644
--- a/app/models/concerns/timebox.rb
+++ b/app/models/concerns/timebox.rb
@@ -75,8 +75,8 @@ module Timebox
scope :within_timeframe, -> (start_date, end_date) do
where('start_date is not NULL or due_date is not NULL')
- .where('start_date is NULL or start_date <= ?', end_date)
- .where('due_date is NULL or due_date >= ?', start_date)
+ .where('start_date is NULL or start_date <= ?', end_date)
+ .where('due_date is NULL or due_date >= ?', start_date)
end
strip_attributes :title
@@ -195,6 +195,10 @@ module Timebox
end
end
+ def weight_available?
+ resource_parent&.feature_available?(:issue_weights)
+ end
+
private
def timebox_format_reference(format = :iid)
diff --git a/app/models/cycle_analytics/level_base.rb b/app/models/cycle_analytics/level_base.rb
index 543349ebf8f..967de9a22b4 100644
--- a/app/models/cycle_analytics/level_base.rb
+++ b/app/models/cycle_analytics/level_base.rb
@@ -2,7 +2,7 @@
module CycleAnalytics
module LevelBase
- STAGES = %i[issue plan code test review staging production].freeze
+ STAGES = %i[issue plan code test review staging].freeze
def all_medians_by_stage
STAGES.each_with_object({}) do |stage_name, medians_per_stage|
diff --git a/app/models/data_list.rb b/app/models/data_list.rb
index 12011cb17f7..2cee3447886 100644
--- a/app/models/data_list.rb
+++ b/app/models/data_list.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
class DataList
- def initialize(batch, data_fields_hash, klass)
- @batch = batch
+ def initialize(batch_ids, data_fields_hash, klass)
+ @batch_ids = batch_ids
@data_fields_hash = data_fields_hash
@klass = klass
end
@@ -13,13 +13,15 @@ class DataList
private
- attr_reader :batch, :data_fields_hash, :klass
+ attr_reader :batch_ids, :data_fields_hash, :klass
def columns
data_fields_hash.keys << 'service_id'
end
def values
- batch.map { |row| data_fields_hash.values << row['id'] }
+ batch_ids.map do |row|
+ data_fields_hash.values << row['id']
+ end
end
end
diff --git a/app/models/deployment.rb b/app/models/deployment.rb
index d6508ffceba..3978620c74d 100644
--- a/app/models/deployment.rb
+++ b/app/models/deployment.rb
@@ -148,7 +148,7 @@ class Deployment < ApplicationRecord
def execute_hooks
deployment_data = Gitlab::DataBuilder::Deployment.build(self)
- project.execute_hooks(deployment_data, :deployment_hooks) if Feature.enabled?(:deployment_webhooks, project, default_enabled: true)
+ project.execute_hooks(deployment_data, :deployment_hooks)
project.execute_services(deployment_data, :deployment_hooks)
end
diff --git a/app/models/design_management/design.rb b/app/models/design_management/design.rb
index deda814d689..57bb250829d 100644
--- a/app/models/design_management/design.rb
+++ b/app/models/design_management/design.rb
@@ -79,16 +79,10 @@ module DesignManagement
joins(join.join_sources).where(actions[:event].not_eq(deletion))
end
- scope :ordered, -> (project) do
- # TODO: Always order by relative position after the feature flag is removed
- # https://gitlab.com/gitlab-org/gitlab/-/issues/34382
- if Feature.enabled?(:reorder_designs, project, default_enabled: true)
- # We need to additionally sort by `id` to support keyset pagination.
- # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/17788/diffs#note_230875678
- order(:relative_position, :id)
- else
- in_creation_order
- end
+ scope :ordered, -> do
+ # We need to additionally sort by `id` to support keyset pagination.
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/17788/diffs#note_230875678
+ order(:relative_position, :id)
end
scope :in_creation_order, -> { reorder(:id) }
diff --git a/app/models/design_management/design_collection.rb b/app/models/design_management/design_collection.rb
index 96d5f4c2419..c48b36588c9 100644
--- a/app/models/design_management/design_collection.rb
+++ b/app/models/design_management/design_collection.rb
@@ -6,8 +6,34 @@ module DesignManagement
delegate :designs, :project, to: :issue
+ state_machine :copy_state, initial: :ready, namespace: :copy do
+ after_transition any => any, do: :update_stored_copy_state!
+
+ event :start do
+ transition ready: :in_progress
+ end
+
+ event :end do
+ transition in_progress: :ready
+ end
+
+ event :error do
+ transition in_progress: :error
+ end
+
+ event :reset do
+ transition any => :ready
+ end
+ end
+
def initialize(issue)
+ super() # Necessary to initialize state_machine
+
@issue = issue
+
+ if stored_copy_state = get_stored_copy_state
+ @copy_state = stored_copy_state
+ end
end
def ==(other)
@@ -30,5 +56,39 @@ module DesignManagement
def designs_by_filename(filenames)
designs.current.where(filename: filenames)
end
+
+ private
+
+ def update_stored_copy_state!
+ # As "ready" is the initial copy state we can clear the cached value
+ # rather than persist it.
+ if copy_ready?
+ unset_store_copy_state!
+ else
+ set_stored_copy_state!
+ end
+ end
+
+ def copy_state_cache_key
+ "DesignCollection/copy_state/issue=#{issue.id}"
+ end
+
+ def get_stored_copy_state
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.get(copy_state_cache_key)
+ end
+ end
+
+ def set_stored_copy_state!
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(copy_state_cache_key, copy_state)
+ end
+ end
+
+ def unset_store_copy_state!
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.del(copy_state_cache_key)
+ end
+ end
end
end
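
Because the state machine is declared with `namespace: :copy`, the generated helpers carry the copy suffix, and every transition persists the state to Redis via update_stored_copy_state!. A usage sketch, assuming `issue` is an Issue with a design collection:

  collection = DesignManagement::DesignCollection.new(issue)
  collection.copy_ready?   # => true, the initial state
  collection.start_copy!   # ready -> in_progress, stored in Redis::SharedState
  collection.end_copy!     # in_progress -> ready, cached value cleared
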
diff --git a/app/models/dev_ops_score/card.rb b/app/models/dev_ops_report/card.rb
index b1894cf4138..4060cb1e5b6 100644
--- a/app/models/dev_ops_score/card.rb
+++ b/app/models/dev_ops_report/card.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-module DevOpsScore
+module DevOpsReport
class Card
attr_accessor :metric, :title, :description, :feature, :blog, :docs
diff --git a/app/models/dev_ops_score/idea_to_production_step.rb b/app/models/dev_ops_report/idea_to_production_step.rb
index d892793cf97..2503d5949e5 100644
--- a/app/models/dev_ops_score/idea_to_production_step.rb
+++ b/app/models/dev_ops_report/idea_to_production_step.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-module DevOpsScore
+module DevOpsReport
class IdeaToProductionStep
attr_accessor :metric, :title, :features
diff --git a/app/models/dev_ops_score/metric.rb b/app/models/dev_ops_report/metric.rb
index a9133128ce9..14eff725433 100644
--- a/app/models/dev_ops_score/metric.rb
+++ b/app/models/dev_ops_report/metric.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-module DevOpsScore
+module DevOpsReport
class Metric < ApplicationRecord
include Presentable
diff --git a/app/models/diff_discussion.rb b/app/models/diff_discussion.rb
index f9e2f00b9f3..6806008d676 100644
--- a/app/models/diff_discussion.rb
+++ b/app/models/diff_discussion.rb
@@ -16,7 +16,6 @@ class DiffDiscussion < Discussion
:diff_note_positions,
:on_text?,
:on_image?,
-
to: :first_note
def legacy_diff_discussion?
diff --git a/app/models/discussion.rb b/app/models/discussion.rb
index adcb2217d85..793cdb5dece 100644
--- a/app/models/discussion.rb
+++ b/app/models/discussion.rb
@@ -24,7 +24,6 @@ class Discussion
:system_note_with_references_visible_for?,
:resource_parent,
:save,
-
to: :first_note
def declarative_policy_delegate
diff --git a/app/models/environment.rb b/app/models/environment.rb
index c6a08c996da..cfdcb0499e6 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -371,7 +371,7 @@ class Environment < ApplicationRecord
end
def elastic_stack_available?
- !!deployment_platform&.cluster&.application_elastic_stack&.available?
+ !!deployment_platform&.cluster&.application_elastic_stack_available?
end
private
diff --git a/app/models/group.rb b/app/models/group.rb
index f8cbaa2495c..c0f145997cc 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -20,8 +20,10 @@ class Group < Namespace
UpdateSharedRunnersError = Class.new(StandardError)
- has_many :group_members, -> { where(requested_at: nil) }, dependent: :destroy, as: :source # rubocop:disable Cop/ActiveRecordDependent
+ has_many :all_group_members, -> { where(requested_at: nil) }, dependent: :destroy, as: :source, class_name: 'GroupMember' # rubocop:disable Cop/ActiveRecordDependent
+ has_many :group_members, -> { where(requested_at: nil).where.not(members: { access_level: Gitlab::Access::MINIMAL_ACCESS }) }, dependent: :destroy, as: :source # rubocop:disable Cop/ActiveRecordDependent
alias_method :members, :group_members
+
has_many :users, through: :group_members
has_many :owners,
-> { where(members: { access_level: Gitlab::Access::OWNER }) },
@@ -33,6 +35,7 @@ class Group < Namespace
has_many :milestones
has_many :iterations
+ has_many :services
has_many :shared_group_links, foreign_key: :shared_with_group_id, class_name: 'GroupGroupLink'
has_many :shared_with_group_links, foreign_key: :shared_group_id, class_name: 'GroupGroupLink'
has_many :shared_groups, through: :shared_group_links, source: :shared_group
@@ -395,6 +398,10 @@ class Group < Namespace
])
end
+ def users_count
+ members.count
+ end
+
# Returns all users that are members of projects
# belonging to the current group or sub-groups
def project_users_with_descendants
@@ -403,10 +410,17 @@ class Group < Namespace
.where(namespaces: { id: self_and_descendants.select(:id) })
end
- def max_member_access_for_user(user)
+ # Return the highest access level for a user
+ #
+  # A special case is handled here for GitLab admins: they implicitly have
+  # "OWNER" access everywhere, but should not officially appear as members
+  # of a group unless specifically added to it
+ #
+ # @param user [User]
+  # @param only_concrete_membership [Bool] whether to require an explicit (concrete) membership for admins
+ def max_member_access_for_user(user, only_concrete_membership: false)
return GroupMember::NO_ACCESS unless user
-
- return GroupMember::OWNER if user.admin?
+ return GroupMember::OWNER if user.admin? && !only_concrete_membership
max_member_access = members_with_parents.where(user_id: user)
.reorder(access_level: :desc)
@@ -630,6 +644,7 @@ class Group < Namespace
.where(group_member_table[:requested_at].eq(nil))
.where(group_member_table[:source_id].eq(group_group_link_table[:shared_with_group_id]))
.where(group_member_table[:source_type].eq('Namespace'))
+ .non_minimal_access
end
def smallest_value_arel(args, column_alias)
diff --git a/app/models/group_deploy_key.rb b/app/models/group_deploy_key.rb
index 160ac28b33b..c65b00a6de0 100644
--- a/app/models/group_deploy_key.rb
+++ b/app/models/group_deploy_key.rb
@@ -8,6 +8,10 @@ class GroupDeployKey < Key
validates :user, presence: true
+ scope :for_groups, ->(group_ids) do
+ joins(:group_deploy_keys_groups).where(group_deploy_keys_groups: { group_id: group_ids }).uniq
+ end
+
def type
'DeployKey'
end
diff --git a/app/models/internal_id.rb b/app/models/internal_id.rb
index 21cf6bfa414..4c0469d849a 100644
--- a/app/models/internal_id.rb
+++ b/app/models/internal_id.rb
@@ -21,7 +21,7 @@ class InternalId < ApplicationRecord
belongs_to :project
belongs_to :namespace
- enum usage: ::InternalIdEnums.usage_resources
+ enum usage: Enums::InternalId.usage_resources
validates :usage, presence: true
diff --git a/app/models/issuable_severity.rb b/app/models/issuable_severity.rb
new file mode 100644
index 00000000000..d68b3dc48ee
--- /dev/null
+++ b/app/models/issuable_severity.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class IssuableSeverity < ApplicationRecord
+ DEFAULT = 'unknown'
+
+ belongs_to :issue
+
+ validates :issue, presence: true, uniqueness: true
+ validates :severity, presence: true
+
+ enum severity: {
+ unknown: 0,
+ low: 1,
+ medium: 2,
+ high: 3,
+ critical: 4
+ }
+end
diff --git a/app/models/issue.rb b/app/models/issue.rb
index a0003df87e1..5a5de371301 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -18,6 +18,7 @@ class Issue < ApplicationRecord
include MilestoneEventable
include WhereComposite
include StateEventable
+ include IdInOrdered
DueDateStruct = Struct.new(:title, :name).freeze
NoDueDate = DueDateStruct.new('No Due Date', '0').freeze
@@ -29,6 +30,11 @@ class Issue < ApplicationRecord
SORTING_PREFERENCE_FIELD = :issues_sort
+  # Types of issues that should be displayed in lists across the app,
+  # for example the project issues list, group issues list, and issue boards.
+ # Some issue types, like test cases, should be hidden by default.
+ TYPES_FOR_LIST = %w(issue incident).freeze
+
belongs_to :project
has_one :namespace, through: :project
@@ -59,6 +65,7 @@ class Issue < ApplicationRecord
end
end
+ has_one :issuable_severity
has_one :sentry_issue
has_one :alert_management_alert, class_name: 'AlertManagement::Alert'
has_and_belongs_to_many :self_managed_prometheus_alert_events, join_table: :issues_self_managed_prometheus_alert_events # rubocop: disable Rails/HasAndBelongsToMany
@@ -72,7 +79,8 @@ class Issue < ApplicationRecord
enum issue_type: {
issue: 0,
- incident: 1
+ incident: 1,
+ test_case: 2 ## EE-only
}
alias_attribute :parent_ids, :project_id
@@ -305,6 +313,24 @@ class Issue < ApplicationRecord
end
end
+ def related_issues(current_user, preload: nil)
+ related_issues = ::Issue
+ .select(['issues.*', 'issue_links.id AS issue_link_id',
+ 'issue_links.link_type as issue_link_type_value',
+ 'issue_links.target_id as issue_link_source_id'])
+ .joins("INNER JOIN issue_links ON
+ (issue_links.source_id = issues.id AND issue_links.target_id = #{id})
+ OR
+ (issue_links.target_id = issues.id AND issue_links.source_id = #{id})")
+ .preload(preload)
+ .reorder('issue_link_id')
+
+ cross_project_filter = -> (issues) { issues.where(project: project) }
+ Ability.issues_readable_by_user(related_issues,
+ current_user,
+ filters: { read_cross_project: cross_project_filter })
+ end
+
def can_be_worked_on?
!self.closed? && !self.project.forked?
end
@@ -378,6 +404,15 @@ class Issue < ApplicationRecord
author.id == User.support_bot.id
end
+ def issue_link_type
+ return unless respond_to?(:issue_link_type_value) && respond_to?(:issue_link_source_id)
+
+ type = IssueLink.link_types.key(issue_link_type_value) || IssueLink::TYPE_RELATES_TO
+ return type if issue_link_source_id == id
+
+ IssueLink.inverse_link_type(type)
+ end
+
private
def ensure_metrics
@@ -413,6 +448,11 @@ class Issue < ApplicationRecord
key = Gitlab::Routing.url_helpers.realtime_changes_project_issue_path(project, self)
Gitlab::EtagCaching::Store.new.touch(key)
end
+
+ def could_not_move(exception)
+ # Symptom of running out of space - schedule rebalancing
+ IssueRebalancingWorker.perform_async(nil, project_id)
+ end
end
Issue.prepend_if_ee('EE::Issue')
diff --git a/app/models/issue_link.rb b/app/models/issue_link.rb
new file mode 100644
index 00000000000..9740b009396
--- /dev/null
+++ b/app/models/issue_link.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+class IssueLink < ApplicationRecord
+ include FromUnion
+
+ belongs_to :source, class_name: 'Issue'
+ belongs_to :target, class_name: 'Issue'
+
+ validates :source, presence: true
+ validates :target, presence: true
+ validates :source, uniqueness: { scope: :target_id, message: 'is already related' }
+ validate :check_self_relation
+
+ scope :for_source_issue, ->(issue) { where(source_id: issue.id) }
+ scope :for_target_issue, ->(issue) { where(target_id: issue.id) }
+
+ TYPE_RELATES_TO = 'relates_to'
+ TYPE_BLOCKS = 'blocks'
+ TYPE_IS_BLOCKED_BY = 'is_blocked_by'
+
+ enum link_type: { TYPE_RELATES_TO => 0, TYPE_BLOCKS => 1, TYPE_IS_BLOCKED_BY => 2 }
+
+ def self.inverse_link_type(type)
+ type
+ end
+
+ private
+
+ def check_self_relation
+ return unless source && target
+
+ if source == target
+ errors.add(:source, 'cannot be related to itself')
+ end
+ end
+end
+
+IssueLink.prepend_if_ee('EE::IssueLink')
diff --git a/app/models/iteration.rb b/app/models/iteration.rb
index 3495f099064..d223c80fca0 100644
--- a/app/models/iteration.rb
+++ b/app/models/iteration.rb
@@ -29,6 +29,7 @@ class Iteration < ApplicationRecord
scope :upcoming, -> { with_state(:upcoming) }
scope :started, -> { with_state(:started) }
+ scope :closed, -> { with_state(:closed) }
scope :within_timeframe, -> (start_date, end_date) do
where('start_date is not NULL or due_date is not NULL')
@@ -36,8 +37,8 @@ class Iteration < ApplicationRecord
.where('due_date is NULL or due_date >= ?', start_date)
end
- scope :start_date_passed, -> { where('start_date <= ?', Date.current).where('due_date > ?', Date.current) }
- scope :due_date_passed, -> { where('due_date <= ?', Date.current) }
+ scope :start_date_passed, -> { where('start_date <= ?', Date.current).where('due_date >= ?', Date.current) }
+ scope :due_date_passed, -> { where('due_date < ?', Date.current) }
state_machine :state_enum, initial: :upcoming do
event :start do
@@ -63,9 +64,10 @@ class Iteration < ApplicationRecord
case state
when 'closed' then iterations.closed
when 'started' then iterations.started
+ when 'upcoming' then iterations.upcoming
when 'opened' then iterations.started.or(iterations.upcoming)
when 'all' then iterations
- else iterations.upcoming
+ else raise ArgumentError, "Unknown state filter: #{state}"
end
end
diff --git a/app/models/jira_connect_installation.rb b/app/models/jira_connect_installation.rb
new file mode 100644
index 00000000000..7480800abc3
--- /dev/null
+++ b/app/models/jira_connect_installation.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class JiraConnectInstallation < ApplicationRecord
+ attr_encrypted :shared_secret,
+ mode: :per_attribute_iv,
+ algorithm: 'aes-256-gcm',
+ key: Settings.attr_encrypted_db_key_base_32
+
+ has_many :subscriptions, class_name: 'JiraConnectSubscription'
+
+ validates :client_key, presence: true, uniqueness: true
+ validates :shared_secret, presence: true
+ validates :base_url, presence: true, public_url: true
+
+ scope :for_project, -> (project) {
+ distinct
+ .joins(:subscriptions)
+ .where(jira_connect_subscriptions: {
+ id: JiraConnectSubscription.for_project(project)
+ })
+ }
+end
diff --git a/app/models/jira_connect_subscription.rb b/app/models/jira_connect_subscription.rb
new file mode 100644
index 00000000000..c74f75b2d8e
--- /dev/null
+++ b/app/models/jira_connect_subscription.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+class JiraConnectSubscription < ApplicationRecord
+ belongs_to :installation, class_name: 'JiraConnectInstallation', foreign_key: 'jira_connect_installation_id'
+ belongs_to :namespace
+
+ validates :installation, presence: true
+ validates :namespace, presence: true, uniqueness: { scope: :jira_connect_installation_id, message: 'has already been added' }
+
+ scope :preload_namespace_route, -> { preload(namespace: :route) }
+ scope :for_project, -> (project) { where(namespace_id: project.namespace.self_and_ancestors) }
+end
diff --git a/app/models/jira_import_state.rb b/app/models/jira_import_state.rb
index 2d952c552a8..76b5f1def6a 100644
--- a/app/models/jira_import_state.rb
+++ b/app/models/jira_import_state.rb
@@ -110,10 +110,6 @@ class JiraImportState < ApplicationRecord
)
end
- def self.finished_imports_count
- finished.sum(:imported_issues_count)
- end
-
def mark_as_failed(error_message)
sanitized_message = Gitlab::UrlSanitizer.sanitize(error_message)
diff --git a/app/models/lfs_objects_project.rb b/app/models/lfs_objects_project.rb
index 674294f0916..e5632ff2842 100644
--- a/app/models/lfs_objects_project.rb
+++ b/app/models/lfs_objects_project.rb
@@ -19,6 +19,7 @@ class LfsObjectsProject < ApplicationRecord
}
scope :project_id_in, ->(ids) { where(project_id: ids) }
+ scope :lfs_object_in, -> (lfs_objects) { where(lfs_object: lfs_objects) }
private
diff --git a/app/models/member.rb b/app/models/member.rb
index bbc5d638637..5a084a3a2e6 100644
--- a/app/models/member.rb
+++ b/app/models/member.rb
@@ -25,7 +25,6 @@ class Member < ApplicationRecord
validates :user_id, uniqueness: { scope: [:source_type, :source_id],
message: "already exists in source",
allow_nil: true }
- validates :access_level, inclusion: { in: Gitlab::Access.all_values }, presence: true
validate :higher_access_level_than_group, unless: :importing?
validates :invite_email,
presence: {
@@ -60,6 +59,7 @@ class Member < ApplicationRecord
left_join_users
.where(user_ok)
.where(requested_at: nil)
+ .non_minimal_access
.reorder(nil)
end
@@ -68,6 +68,8 @@ class Member < ApplicationRecord
left_join_users
.where(users: { state: 'active' })
.non_request
+ .non_invite
+ .non_minimal_access
.reorder(nil)
end
@@ -85,6 +87,7 @@ class Member < ApplicationRecord
scope :developers, -> { active.where(access_level: DEVELOPER) }
scope :maintainers, -> { active.where(access_level: MAINTAINER) }
scope :non_guests, -> { where('members.access_level > ?', GUEST) }
+ scope :non_minimal_access, -> { where('members.access_level > ?', MINIMAL_ACCESS) }
scope :owners, -> { active.where(access_level: OWNER) }
scope :owners_and_maintainers, -> { active.where(access_level: [OWNER, MAINTAINER]) }
scope :with_user, -> (user) { where(user: user) }
@@ -161,8 +164,8 @@ class Member < ApplicationRecord
where(user_id: user_ids).has_access.pluck(:user_id, :access_level).to_h
end
- def find_by_invite_token(invite_token)
- invite_token = Devise.token_generator.digest(self, :invite_token, invite_token)
+ def find_by_invite_token(raw_invite_token)
+ invite_token = Devise.token_generator.digest(self, :invite_token, raw_invite_token)
find_by(invite_token: invite_token)
end
@@ -397,6 +400,10 @@ class Member < ApplicationRecord
end
end
+ def invite_to_unknown_user?
+ invite? && user_id.nil?
+ end
+
private
def send_invite
diff --git a/app/models/members/group_member.rb b/app/models/members/group_member.rb
index 8c224dea88f..34958936c9f 100644
--- a/app/models/members/group_member.rb
+++ b/app/models/members/group_member.rb
@@ -13,6 +13,9 @@ class GroupMember < Member
# Make sure a group member points only to a group as its source
default_value_for :source_type, SOURCE_TYPE
validates :source_type, format: { with: /\ANamespace\z/ }
+ validates :access_level, presence: true
+ validate :access_level_inclusion
+
default_scope { where(source_type: SOURCE_TYPE) } # rubocop:disable Cop/DefaultScope
scope :of_groups, ->(groups) { where(source_id: groups.select(:id)) }
@@ -45,6 +48,12 @@ class GroupMember < Member
private
+ def access_level_inclusion
+ return if access_level.in?(Gitlab::Access.all_values)
+
+ errors.add(:access_level, "is not included in the list")
+ end
+
def send_invite
run_after_commit_or_now { notification_service.invite_group_member(self, @raw_invite_token) }
diff --git a/app/models/members_preloader.rb b/app/models/members_preloader.rb
index 6da8d5f3161..88db7f63bd9 100644
--- a/app/models/members_preloader.rb
+++ b/app/models/members_preloader.rb
@@ -12,6 +12,7 @@ class MembersPreloader
ActiveRecord::Associations::Preloader.new.preload(members, :source)
ActiveRecord::Associations::Preloader.new.preload(members.map(&:user), :status)
ActiveRecord::Associations::Preloader.new.preload(members.map(&:user), :u2f_registrations)
+ ActiveRecord::Associations::Preloader.new.preload(members.map(&:user), :webauthn_registrations)
end
end
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index f4c2d568b4d..3fdc501644d 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -21,10 +21,12 @@ class MergeRequest < ApplicationRecord
include MilestoneEventable
include StateEventable
include ApprovableBase
+ include IdInOrdered
extend ::Gitlab::Utils::Override
sha_attribute :squash_commit_sha
+ sha_attribute :merge_ref_sha
self.reactive_cache_key = ->(model) { [model.project.id, model.iid] }
self.reactive_cache_refresh_interval = 10.minutes
@@ -80,6 +82,8 @@ class MergeRequest < ApplicationRecord
has_many :merge_request_assignees
has_many :assignees, class_name: "User", through: :merge_request_assignees
+ has_many :merge_request_reviewers
+ has_many :reviewers, class_name: "User", through: :merge_request_reviewers
has_many :user_mentions, class_name: "MergeRequestUserMention", dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
has_many :deployment_merge_requests
@@ -105,7 +109,6 @@ class MergeRequest < ApplicationRecord
after_create :ensure_merge_request_diff
after_update :clear_memoized_shas
- after_update :clear_memoized_source_branch_exists
after_update :reload_diff_if_branch_changed
after_commit :ensure_metrics, on: [:create, :update], unless: :importing?
after_commit :expire_etag_cache, unless: :importing?
@@ -250,6 +253,15 @@ class MergeRequest < ApplicationRecord
joins(:notes).where(notes: { commit_id: sha })
end
scope :join_project, -> { joins(:target_project) }
+ scope :join_metrics, -> do
+ query = joins(:metrics)
+
+ if Feature.enabled?(:improved_mr_merged_at_queries, default_enabled: true)
+ query = query.where(MergeRequest.arel_table[:target_project_id].eq(MergeRequest::Metrics.arel_table[:target_project_id]))
+ end
+
+ query
+ end
scope :references_project, -> { references(:target_project) }
scope :with_api_entity_associations, -> {
preload_routables
@@ -263,6 +275,14 @@ class MergeRequest < ApplicationRecord
where("target_branch LIKE ?", ApplicationRecord.sanitize_sql_like(wildcard_branch_name).tr('*', '%'))
end
scope :by_target_branch, ->(branch_name) { where(target_branch: branch_name) }
+ scope :order_merged_at, ->(direction) do
+ query = join_metrics.order(Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', direction))
+
+ # Add `merge_request_metrics.merged_at` to the `SELECT` in order to make the keyset pagination work.
+ query.select(*query.arel.projections, MergeRequest::Metrics.arel_table[:merged_at].as('"merge_request_metrics.merged_at"'))
+ end
+ scope :order_merged_at_asc, -> { order_merged_at('ASC') }
+ scope :order_merged_at_desc, -> { order_merged_at('DESC') }
scope :preload_source_project, -> { preload(:source_project) }
scope :preload_target_project, -> { preload(:target_project) }
scope :preload_routables, -> do
@@ -294,7 +314,7 @@ class MergeRequest < ApplicationRecord
alias_attribute :auto_merge_enabled, :merge_when_pipeline_succeeds
alias_method :issuing_parent, :target_project
- delegate :active?, to: :head_pipeline, prefix: true, allow_nil: true
+ delegate :active?, :builds_with_coverage, to: :head_pipeline, prefix: true, allow_nil: true
delegate :success?, :active?, to: :actual_head_pipeline, prefix: true, allow_nil: true
RebaseLockTimeout = Class.new(StandardError)
@@ -319,6 +339,15 @@ class MergeRequest < ApplicationRecord
.pluck(:target_branch)
end
+ def self.sort_by_attribute(method, excluded_labels: [])
+ case method.to_s
+ when 'merged_at', 'merged_at_asc' then order_merged_at_asc.with_order_id_desc
+ when 'merged_at_desc' then order_merged_at_desc.with_order_id_desc
+ else
+ super
+ end
+ end
+
def rebase_in_progress?
rebase_jid.present? && Gitlab::SidekiqStatus.running?(rebase_jid)
end
@@ -333,7 +362,11 @@ class MergeRequest < ApplicationRecord
def merge_pipeline
return unless merged?
- target_project.pipeline_for(target_branch, merge_commit_sha)
+ # When the merge_method is :merge there will be a merge_commit_sha, however
+ # when it is fast-forward there is no merge commit, so we must fall back to
+ # either the squash commit (if the MR was squashed) or the diff head commit.
+ sha = merge_commit_sha || squash_commit_sha || diff_head_sha
+ target_project.latest_pipeline(target_branch, sha)
end
# Pattern used to extract `!123` merge request references from text
@@ -867,10 +900,6 @@ class MergeRequest < ApplicationRecord
clear_memoization(:target_branch_head)
end
- def clear_memoized_source_branch_exists
- clear_memoization(:source_branch_exists)
- end
-
def reload_diff_if_branch_changed
if (saved_change_to_source_branch? || saved_change_to_target_branch?) &&
(source_branch_head && target_branch_head)
@@ -928,8 +957,9 @@ class MergeRequest < ApplicationRecord
self.class.wip_title(self.title)
end
- def mergeable?(skip_ci_check: false)
- return false unless mergeable_state?(skip_ci_check: skip_ci_check)
+ def mergeable?(skip_ci_check: false, skip_discussions_check: false)
+ return false unless mergeable_state?(skip_ci_check: skip_ci_check,
+ skip_discussions_check: skip_discussions_check)
check_mergeability
@@ -1122,11 +1152,9 @@ class MergeRequest < ApplicationRecord
end
def source_branch_exists?
- strong_memoize(:source_branch_exists) do
- next false unless self.source_project
+ return false unless self.source_project
- self.source_project.repository.branch_exists?(self.source_branch)
- end
+ self.source_project.repository.branch_exists?(self.source_branch)
end
def target_branch_exists?
@@ -1232,6 +1260,8 @@ class MergeRequest < ApplicationRecord
# Returns the current merge-ref HEAD commit.
#
def merge_ref_head
+ return project.repository.commit(merge_ref_sha) if merge_ref_sha
+
project.repository.commit(merge_ref_path)
end
@@ -1345,9 +1375,9 @@ class MergeRequest < ApplicationRecord
end
def has_coverage_reports?
- return false unless Feature.enabled?(:coverage_report_view, project)
+ return false unless Feature.enabled?(:coverage_report_view, project, default_enabled: true)
- actual_head_pipeline&.has_reports?(Ci::JobArtifact.coverage_reports)
+ actual_head_pipeline&.has_coverage_reports?
end
def has_terraform_reports?
@@ -1447,6 +1477,19 @@ class MergeRequest < ApplicationRecord
Commit.truncate_sha(merge_commit_sha) if merge_commit_sha
end
+ def merged_commit_sha
+ return unless merged?
+
+ sha = merge_commit_sha || squash_commit_sha || diff_head_sha
+ sha.presence
+ end
+
+ def short_merged_commit_sha
+ if sha = merged_commit_sha
+ Commit.truncate_sha(sha)
+ end
+ end
+
def can_be_reverted?(current_user)
return false unless merge_commit
return false unless merged_at
@@ -1561,7 +1604,7 @@ class MergeRequest < ApplicationRecord
def first_contribution?
return false if project.team.max_member_access(author_id) > Gitlab::Access::GUEST
- project.merge_requests.merged.where(author_id: author_id).empty?
+ !project.merge_requests.merged.exists?(author_id: author_id)
end
# TODO: remove once production database rename completes
@@ -1633,6 +1676,10 @@ class MergeRequest < ApplicationRecord
end
end
+ def allows_reviewers?
+ Feature.enabled?(:merge_request_reviewers, project)
+ end
+
private
def with_rebase_lock
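
The merged_at sort above joins merge_request_metrics, orders with NULLs last, and adds merge_request_metrics.merged_at to the SELECT so keyset pagination has the column available; sort_by_attribute wires the sort keys to those scopes. A usage sketch, assuming a project's merge request relation:

    project.merge_requests.sort_by_attribute('merged_at_asc')
    project.merge_requests.sort_by_attribute('merged_at_desc')
    # roughly: join_metrics
    #            .order(Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', 'DESC'))
    #            .with_order_id_desc
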
diff --git a/app/models/merge_request_assignee.rb b/app/models/merge_request_assignee.rb
index 2ac1de4321a..73f8fe77b04 100644
--- a/app/models/merge_request_assignee.rb
+++ b/app/models/merge_request_assignee.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class MergeRequestAssignee < ApplicationRecord
- belongs_to :merge_request
+ belongs_to :merge_request, touch: true
belongs_to :assignee, class_name: "User", foreign_key: :user_id, inverse_of: :merge_request_assignees
validates :assignee, uniqueness: { scope: :merge_request_id }
diff --git a/app/models/merge_request_diff.rb b/app/models/merge_request_diff.rb
index b70340a98cd..880e3cc1ba5 100644
--- a/app/models/merge_request_diff.rb
+++ b/app/models/merge_request_diff.rb
@@ -17,6 +17,10 @@ class MergeRequestDiff < ApplicationRecord
# diffs to external storage
EXTERNAL_DIFF_CUTOFF = 7.days.freeze
+ # The files_count column is a 2-byte signed integer. Look up the true value
+ # from the database if this sentinel is seen
+ FILES_COUNT_SENTINEL = 2**15 - 1
+
belongs_to :merge_request
manual_inverse_association :merge_request, :merge_request_diff
@@ -150,10 +154,10 @@ class MergeRequestDiff < ApplicationRecord
# All diff information is collected from repository after object is created.
# It allows you to override variables like head_commit_sha before getting diff.
after_create :save_git_content, unless: :importing?
- after_create :set_count_columns
after_create_commit :set_as_latest_diff, unless: :importing?
after_save :update_external_diff_store
+ after_save :set_count_columns
def self.find_by_diff_refs(diff_refs)
find_by(start_commit_sha: diff_refs.start_sha, head_commit_sha: diff_refs.head_sha, base_commit_sha: diff_refs.base_sha)
@@ -202,6 +206,17 @@ class MergeRequestDiff < ApplicationRecord
end
end
+ def files_count
+ db_value = read_attribute(:files_count)
+
+ case db_value
+ when nil, FILES_COUNT_SENTINEL
+ merge_request_diff_files.count
+ else
+ db_value
+ end
+ end
+
# This method will rely on repository branch sha
# in case start_commit_sha is nil. It's necessary for old merge request diffs
# created before version 8.4 to work
@@ -423,7 +438,7 @@ class MergeRequestDiff < ApplicationRecord
# external storage. If external storage isn't an option for this diff, the
# method is a no-op.
def migrate_files_to_external_storage!
- return if stored_externally? || !use_external_diff? || merge_request_diff_files.count == 0
+ return if stored_externally? || !use_external_diff? || files_count == 0
rows = build_merge_request_diff_files(merge_request_diff_files)
rows = build_external_merge_request_diff_files(rows)
@@ -449,7 +464,7 @@ class MergeRequestDiff < ApplicationRecord
# If this diff isn't in external storage, the method is a no-op.
def migrate_files_to_database!
return unless stored_externally?
- return if merge_request_diff_files.count == 0
+ return if files_count == 0
rows = convert_external_diffs_to_database
@@ -666,7 +681,7 @@ class MergeRequestDiff < ApplicationRecord
def set_count_columns
update_columns(
commits_count: merge_request_diff_commits.size,
- files_count: merge_request_diff_files.size
+ files_count: [FILES_COUNT_SENTINEL, merge_request_diff_files.size].min
)
end
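
Because files_count is stored in a 2-byte signed integer column, diffs with more files than the column can hold store the sentinel on write and fall back to counting rows on read. A runnable sketch of the round trip:

    FILES_COUNT_SENTINEL = 2**15 - 1  # 32_767, the smallint maximum

    # On write, the stored value is clamped to the column's range:
    [FILES_COUNT_SENTINEL, 12].min      # => 12
    [FILES_COUNT_SENTINEL, 40_000].min  # => 32_767

    # On read, nil or the sentinel means "count merge_request_diff_files instead":
    db_value = 32_767
    [nil, FILES_COUNT_SENTINEL].include?(db_value) ? :count_the_rows : db_value
    # => :count_the_rows
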
diff --git a/app/models/merge_request_diff_file.rb b/app/models/merge_request_diff_file.rb
index 23319445a38..55ff4250c2d 100644
--- a/app/models/merge_request_diff_file.rb
+++ b/app/models/merge_request_diff_file.rb
@@ -25,6 +25,16 @@ class MergeRequestDiffFile < ApplicationRecord
super
end
- binary? ? content.unpack1('m0') : content
+ return content unless binary?
+
+ # If the data isn't valid base64, return it as-is, since it's almost certain
+ # to be a valid diff. Parsing it as a diff will fail if it's something else.
+ #
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/240921
+ begin
+ content.unpack1('m0')
+ rescue ArgumentError
+ content
+ end
end
end
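
unpack1('m0') is strict base64 decoding and raises ArgumentError on any non-base64 input, so the rescue above keeps pre-existing plain-text diff content readable instead of failing. A runnable sketch of the behaviour:

    require 'base64'

    encoded = Base64.strict_encode64("binary diff payload")
    encoded.unpack1('m0')               # => "binary diff payload"

    begin
      "diff --git a/file b/file".unpack1('m0')
    rescue ArgumentError => e
      e.message                         # => "invalid base64"
    end
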
diff --git a/app/models/merge_request_reviewer.rb b/app/models/merge_request_reviewer.rb
new file mode 100644
index 00000000000..1cb49c0cd76
--- /dev/null
+++ b/app/models/merge_request_reviewer.rb
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+
+class MergeRequestReviewer < ApplicationRecord
+ belongs_to :merge_request
+ belongs_to :reviewer, class_name: "User", foreign_key: :user_id, inverse_of: :merge_request_assignees
+end
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index e529ba6b486..527fa9d52d0 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -135,6 +135,10 @@ class Namespace < ApplicationRecord
uniquify.string(path) { |s| Namespace.find_by_path_or_name(s) }
end
+ def clean_name(value)
+ value.scan(Gitlab::Regex.group_name_regex_chars).join(' ')
+ end
+
def find_by_pages_host(host)
gitlab_host = "." + Settings.pages.host.downcase
host = host.downcase
@@ -349,6 +353,10 @@ class Namespace < ApplicationRecord
)
end
+ def any_project_with_pages_deployed?
+ all_projects.with_pages_deployed.any?
+ end
+
def closest_setting(name)
self_and_ancestors(hierarchy_order: :asc)
.find { |n| !n.read_attribute(name).nil? }
diff --git a/app/models/namespace/root_storage_statistics.rb b/app/models/namespace/root_storage_statistics.rb
index 2ad6ea59588..5723a823e98 100644
--- a/app/models/namespace/root_storage_statistics.rb
+++ b/app/models/namespace/root_storage_statistics.rb
@@ -2,7 +2,16 @@
class Namespace::RootStorageStatistics < ApplicationRecord
SNIPPETS_SIZE_STAT_NAME = 'snippets_size'.freeze
- STATISTICS_ATTRIBUTES = %W(storage_size repository_size wiki_size lfs_objects_size build_artifacts_size packages_size #{SNIPPETS_SIZE_STAT_NAME}).freeze
+ STATISTICS_ATTRIBUTES = %W(
+ storage_size
+ repository_size
+ wiki_size
+ lfs_objects_size
+ build_artifacts_size
+ packages_size
+ #{SNIPPETS_SIZE_STAT_NAME}
+ pipeline_artifacts_size
+ ).freeze
self.primary_key = :namespace_id
@@ -40,7 +49,8 @@ class Namespace::RootStorageStatistics < ApplicationRecord
'COALESCE(SUM(ps.lfs_objects_size), 0) AS lfs_objects_size',
'COALESCE(SUM(ps.build_artifacts_size), 0) AS build_artifacts_size',
'COALESCE(SUM(ps.packages_size), 0) AS packages_size',
- "COALESCE(SUM(ps.snippets_size), 0) AS #{SNIPPETS_SIZE_STAT_NAME}"
+ "COALESCE(SUM(ps.snippets_size), 0) AS #{SNIPPETS_SIZE_STAT_NAME}",
+ 'COALESCE(SUM(ps.pipeline_artifacts_size), 0) AS pipeline_artifacts_size'
)
end
diff --git a/app/models/note.rb b/app/models/note.rb
index e1fc16818b3..812d77d5f86 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -20,20 +20,6 @@ class Note < ApplicationRecord
include ThrottledTouch
include FromUnion
- module SpecialRole
- FIRST_TIME_CONTRIBUTOR = :first_time_contributor
-
- class << self
- def values
- constants.map {|const| self.const_get(const, false)}
- end
-
- def value?(val)
- values.include?(val)
- end
- end
- end
-
cache_markdown_field :note, pipeline: :note, issuable_state_filter_enabled: true
redact_field :note
@@ -60,9 +46,6 @@ class Note < ApplicationRecord
# Attribute used to store the attributes that have been changed by quick actions.
attr_accessor :commands_changes
- # A special role that may be displayed on issuable's discussions
- attr_reader :special_role
-
default_value_for :system, false
attr_mentionable :note, pipeline: :note
@@ -220,10 +203,6 @@ class Note < ApplicationRecord
.where(noteable_type: type, noteable_id: ids)
end
- def has_special_role?(role, note)
- note.special_role == role
- end
-
def search(query)
fuzzy_search(query, [:note])
end
@@ -342,20 +321,20 @@ class Note < ApplicationRecord
noteable.author_id == user.id
end
- def special_role=(role)
- raise "Role is undefined, #{role} not found in #{SpecialRole.values}" unless SpecialRole.value?(role)
+ def contributor?
+ return false unless ::Feature.enabled?(:show_contributor_on_note, project)
- @special_role = role
+ project&.team&.contributor?(self.author_id)
end
- def has_special_role?(role)
- self.class.has_special_role?(role, self)
- end
+ def noteable_author?(noteable)
+ return false unless ::Feature.enabled?(:show_author_on_note, project)
- def specialize_for_first_contribution!(noteable)
- return unless noteable.author_id == self.author_id
+ noteable.author == self.author
+ end
- self.special_role = Note::SpecialRole::FIRST_TIME_CONTRIBUTOR
+ def project_name
+ project&.name
end
def confidential?(include_noteable: false)
@@ -556,6 +535,8 @@ class Note < ApplicationRecord
end
def system_note_with_references_visible_for?(user)
+ return true unless system?
+
(!system_note_with_references? || all_referenced_mentionables_allowed?(user)) && system_note_viewable_by?(user)
end
@@ -563,6 +544,10 @@ class Note < ApplicationRecord
noteable.author if for_personal_snippet?
end
+ def skip_notification?
+ review.present?
+ end
+
private
# Using this method followed by a call to `save` may result in ActiveRecord::RecordNotUnique exception
diff --git a/app/models/operations/feature_flag.rb b/app/models/operations/feature_flag.rb
new file mode 100644
index 00000000000..586e9d689a1
--- /dev/null
+++ b/app/models/operations/feature_flag.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+module Operations
+ class FeatureFlag < ApplicationRecord
+ include AtomicInternalId
+ include IidRoutes
+
+ self.table_name = 'operations_feature_flags'
+
+ belongs_to :project
+
+ has_internal_id :iid, scope: :project, init: ->(s) { s&.project&.operations_feature_flags&.maximum(:iid) }
+
+ default_value_for :active, true
+
+ # scopes exist only for the first version
+ has_many :scopes, class_name: 'Operations::FeatureFlagScope'
+ # strategies exist only for the second version
+ has_many :strategies, class_name: 'Operations::FeatureFlags::Strategy'
+ has_many :feature_flag_issues
+ has_many :issues, through: :feature_flag_issues
+ has_one :default_scope, -> { where(environment_scope: '*') }, class_name: 'Operations::FeatureFlagScope'
+
+ validates :project, presence: true
+ validates :name,
+ presence: true,
+ length: 2..63,
+ format: {
+ with: Gitlab::Regex.feature_flag_regex,
+ message: Gitlab::Regex.feature_flag_regex_message
+ }
+ validates :name, uniqueness: { scope: :project_id }
+ validates :description, allow_blank: true, length: 0..255
+ validate :first_default_scope, on: :create, if: :has_scopes?
+ validate :version_associations
+
+ before_create :build_default_scope, if: -> { legacy_flag? && scopes.none? }
+
+ accepts_nested_attributes_for :scopes, allow_destroy: true
+ accepts_nested_attributes_for :strategies, allow_destroy: true
+
+ scope :ordered, -> { order(:name) }
+
+ scope :enabled, -> { where(active: true) }
+ scope :disabled, -> { where(active: false) }
+
+ enum version: {
+ legacy_flag: 1,
+ new_version_flag: 2
+ }
+
+ class << self
+ def preload_relations
+ preload(:scopes, strategies: :scopes)
+ end
+
+ def for_unleash_client(project, environment)
+ includes(strategies: [:scopes, :user_list])
+ .where(project: project)
+ .merge(Operations::FeatureFlags::Scope.on_environment(environment))
+ .reorder(:id)
+ .references(:operations_scopes)
+ end
+ end
+
+ def related_issues(current_user, preload:)
+ issues = ::Issue
+ .select('issues.*, operations_feature_flags_issues.id AS link_id')
+ .joins(:feature_flag_issues)
+ .where('operations_feature_flags_issues.feature_flag_id = ?', id)
+ .order('operations_feature_flags_issues.id ASC')
+ .includes(preload)
+
+ Ability.issues_readable_by_user(issues, current_user)
+ end
+
+ private
+
+ def version_associations
+ if new_version_flag? && scopes.any?
+ errors.add(:version_associations, 'version 2 feature flags may not have scopes')
+ elsif legacy_flag? && strategies.any?
+ errors.add(:version_associations, 'version 1 feature flags may not have strategies')
+ end
+ end
+
+ def first_default_scope
+ unless scopes.first.environment_scope == '*'
+ errors.add(:default_scope, 'has to be the first element')
+ end
+ end
+
+ def build_default_scope
+ scopes.build(environment_scope: '*', active: self.active)
+ end
+
+ def has_scopes?
+ scopes.any?
+ end
+ end
+end
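
The version enum keeps the two flag formats mutually exclusive: legacy (v1) flags carry scopes, new (v2) flags carry strategies, and version_associations rejects mixtures. A rough sketch of the guard (record is built in memory only, attributes are hypothetical):

    flag = Operations::FeatureFlag.new(name: 'dark_mode', version: :new_version_flag)
    flag.new_version_flag? # => true
    flag.legacy_flag?      # => false

    # Mixing formats is rejected by the custom validation:
    flag.scopes.build(environment_scope: 'production')
    flag.valid?
    flag.errors[:version_associations]
    # => ["version 2 feature flags may not have scopes"]
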
diff --git a/app/models/operations/feature_flag_scope.rb b/app/models/operations/feature_flag_scope.rb
new file mode 100644
index 00000000000..78be29f2531
--- /dev/null
+++ b/app/models/operations/feature_flag_scope.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Operations
+ class FeatureFlagScope < ApplicationRecord
+ prepend HasEnvironmentScope
+ include Gitlab::Utils::StrongMemoize
+
+ self.table_name = 'operations_feature_flag_scopes'
+
+ belongs_to :feature_flag
+
+ validates :environment_scope, uniqueness: {
+ scope: :feature_flag,
+ message: "(%{value}) has already been taken"
+ }
+
+ validates :environment_scope,
+ if: :default_scope?, on: :update,
+ inclusion: { in: %w(*), message: 'cannot be changed from default scope' }
+
+ validates :strategies, feature_flag_strategies: true
+
+ before_destroy :prevent_destroy_default_scope, if: :default_scope?
+
+ scope :ordered, -> { order(:id) }
+ scope :enabled, -> { where(active: true) }
+ scope :disabled, -> { where(active: false) }
+
+ def self.with_name_and_description
+ joins(:feature_flag)
+ .select(FeatureFlag.arel_table[:name], FeatureFlag.arel_table[:description])
+ end
+
+ def self.for_unleash_client(project, environment)
+ select_columns = [
+ 'DISTINCT ON (operations_feature_flag_scopes.feature_flag_id) operations_feature_flag_scopes.id',
+ '(operations_feature_flags.active AND operations_feature_flag_scopes.active) AS active',
+ 'operations_feature_flag_scopes.strategies',
+ 'operations_feature_flag_scopes.environment_scope',
+ 'operations_feature_flag_scopes.created_at',
+ 'operations_feature_flag_scopes.updated_at'
+ ]
+
+ select(select_columns)
+ .with_name_and_description
+ .where(feature_flag_id: project.operations_feature_flags.select(:id))
+ .order(:feature_flag_id)
+ .on_environment(environment)
+ .reverse_order
+ end
+
+ private
+
+ def default_scope?
+ environment_scope_was == '*'
+ end
+
+ def prevent_destroy_default_scope
+ raise ActiveRecord::ReadOnlyRecord, "default scope cannot be destroyed"
+ end
+ end
+end
diff --git a/app/models/operations/feature_flags/scope.rb b/app/models/operations/feature_flags/scope.rb
new file mode 100644
index 00000000000..d70101b5e0d
--- /dev/null
+++ b/app/models/operations/feature_flags/scope.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Operations
+ module FeatureFlags
+ class Scope < ApplicationRecord
+ prepend HasEnvironmentScope
+
+ self.table_name = 'operations_scopes'
+
+ belongs_to :strategy, class_name: 'Operations::FeatureFlags::Strategy'
+ end
+ end
+end
diff --git a/app/models/operations/feature_flags/strategy.rb b/app/models/operations/feature_flags/strategy.rb
new file mode 100644
index 00000000000..ff68af9741e
--- /dev/null
+++ b/app/models/operations/feature_flags/strategy.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+module Operations
+ module FeatureFlags
+ class Strategy < ApplicationRecord
+ STRATEGY_DEFAULT = 'default'
+ STRATEGY_GITLABUSERLIST = 'gitlabUserList'
+ STRATEGY_GRADUALROLLOUTUSERID = 'gradualRolloutUserId'
+ STRATEGY_USERWITHID = 'userWithId'
+ STRATEGIES = {
+ STRATEGY_DEFAULT => [].freeze,
+ STRATEGY_GITLABUSERLIST => [].freeze,
+ STRATEGY_GRADUALROLLOUTUSERID => %w[groupId percentage].freeze,
+ STRATEGY_USERWITHID => ['userIds'].freeze
+ }.freeze
+ USERID_MAX_LENGTH = 256
+
+ self.table_name = 'operations_strategies'
+
+ belongs_to :feature_flag
+ has_many :scopes, class_name: 'Operations::FeatureFlags::Scope'
+ has_one :strategy_user_list
+ has_one :user_list, through: :strategy_user_list
+
+ validates :name,
+ inclusion: {
+ in: STRATEGIES.keys,
+ message: 'strategy name is invalid'
+ }
+
+ validate :parameters_validations, if: -> { errors[:name].blank? }
+ validates :user_list, presence: true, if: -> { name == STRATEGY_GITLABUSERLIST }
+ validates :user_list, absence: true, if: -> { name != STRATEGY_GITLABUSERLIST }
+ validate :same_project_validation, if: -> { user_list.present? }
+
+ accepts_nested_attributes_for :scopes, allow_destroy: true
+
+ def user_list_id=(user_list_id)
+ self.user_list = ::Operations::FeatureFlags::UserList.find(user_list_id)
+ end
+
+ private
+
+ def same_project_validation
+ unless user_list.project_id == feature_flag.project_id
+ errors.add(:user_list, 'must belong to the same project')
+ end
+ end
+
+ def parameters_validations
+ validate_parameters_type &&
+ validate_parameters_keys &&
+ validate_parameters_values
+ end
+
+ def validate_parameters_type
+ parameters.is_a?(Hash) || parameters_error('parameters are invalid')
+ end
+
+ def validate_parameters_keys
+ actual_keys = parameters.keys.sort
+ expected_keys = STRATEGIES[name].sort
+ expected_keys == actual_keys || parameters_error('parameters are invalid')
+ end
+
+ def validate_parameters_values
+ case name
+ when STRATEGY_GRADUALROLLOUTUSERID
+ gradual_rollout_user_id_parameters_validation
+ when STRATEGY_USERWITHID
+ FeatureFlagUserXidsValidator.validate_user_xids(self, :parameters, parameters['userIds'], 'userIds')
+ end
+ end
+
+ def gradual_rollout_user_id_parameters_validation
+ percentage = parameters['percentage']
+ group_id = parameters['groupId']
+
+ unless percentage.is_a?(String) && percentage.match(/\A[1-9]?[0-9]\z|\A100\z/)
+ parameters_error('percentage must be a string between 0 and 100 inclusive')
+ end
+
+ unless group_id.is_a?(String) && group_id.match(/\A[a-z]{1,32}\z/)
+ parameters_error('groupId parameter is invalid')
+ end
+ end
+
+ def parameters_error(message)
+ errors.add(:parameters, message)
+ false
+ end
+ end
+ end
+end
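
The gradualRolloutUserId strategy accepts exactly the groupId and percentage parameters, where percentage must be a string integer from 0 to 100 and groupId a short lowercase token. A runnable sketch of the two regexes used above:

    percentage_re = /\A[1-9]?[0-9]\z|\A100\z/
    group_id_re   = /\A[a-z]{1,32}\z/

    %w[0 5 99 100].all?  { |v| v.match?(percentage_re) } # => true
    %w[101 -1 5.5 007].any? { |v| v.match?(percentage_re) } # => false

    'default'.match?(group_id_re)    # => true
    'Default-1'.match?(group_id_re)  # => false
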
diff --git a/app/models/operations/feature_flags/strategy_user_list.rb b/app/models/operations/feature_flags/strategy_user_list.rb
new file mode 100644
index 00000000000..813b632dd67
--- /dev/null
+++ b/app/models/operations/feature_flags/strategy_user_list.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module Operations
+ module FeatureFlags
+ class StrategyUserList < ApplicationRecord
+ self.table_name = 'operations_strategies_user_lists'
+
+ belongs_to :strategy
+ belongs_to :user_list
+ end
+ end
+end
diff --git a/app/models/operations/feature_flags/user_list.rb b/app/models/operations/feature_flags/user_list.rb
new file mode 100644
index 00000000000..b9bdcb59d5f
--- /dev/null
+++ b/app/models/operations/feature_flags/user_list.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Operations
+ module FeatureFlags
+ class UserList < ApplicationRecord
+ include AtomicInternalId
+ include IidRoutes
+
+ self.table_name = 'operations_user_lists'
+
+ belongs_to :project
+ has_many :strategy_user_lists
+ has_many :strategies, through: :strategy_user_lists
+
+ has_internal_id :iid, scope: :project, init: ->(s) { s&.project&.operations_feature_flags_user_lists&.maximum(:iid) }, presence: true
+
+ validates :project, presence: true
+ validates :name,
+ presence: true,
+ uniqueness: { scope: :project_id },
+ length: 1..255
+ validates :user_xids, feature_flag_user_xids: true
+
+ before_destroy :ensure_no_associated_strategies
+
+ private
+
+ def ensure_no_associated_strategies
+ if strategies.present?
+ errors.add(:base, 'User list is associated with a strategy')
+ throw :abort # rubocop: disable Cop/BanCatchThrow
+ end
+ end
+ end
+ end
+end
diff --git a/app/models/operations/feature_flags_client.rb b/app/models/operations/feature_flags_client.rb
new file mode 100644
index 00000000000..1c65c3f096e
--- /dev/null
+++ b/app/models/operations/feature_flags_client.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Operations
+ class FeatureFlagsClient < ApplicationRecord
+ include TokenAuthenticatable
+
+ self.table_name = 'operations_feature_flags_clients'
+
+ belongs_to :project
+
+ validates :project, presence: true
+ validates :token, presence: true
+
+ add_authentication_token_field :token, encrypted: :required
+
+ before_validation :ensure_token!
+
+ def self.find_for_project_and_token(project, token)
+ return unless project
+ return unless token
+
+ where(project_id: project).find_by_token(token)
+ end
+ end
+end
diff --git a/app/models/packages/conan/file_metadatum.rb b/app/models/packages/conan/file_metadatum.rb
index e1ef62b3959..de54580e948 100644
--- a/app/models/packages/conan/file_metadatum.rb
+++ b/app/models/packages/conan/file_metadatum.rb
@@ -3,6 +3,9 @@
class Packages::Conan::FileMetadatum < ApplicationRecord
belongs_to :package_file, inverse_of: :conan_file_metadatum
+ DEFAULT_PACKAGE_REVISION = '0'.freeze
+ DEFAULT_RECIPE_REVISION = '0'.freeze
+
validates :package_file, presence: true
validates :recipe_revision,
diff --git a/app/models/packages/package.rb b/app/models/packages/package.rb
index d6633456de4..bda11160957 100644
--- a/app/models/packages/package.rb
+++ b/app/models/packages/package.rb
@@ -5,6 +5,8 @@ class Packages::Package < ApplicationRecord
include UsageStatistics
belongs_to :project
+ belongs_to :creator, class_name: 'User'
+
# package_files must be destroyed by ruby code in order to properly remove carrierwave uploads and update project statistics
has_many :package_files, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :dependency_links, inverse_of: :package, class_name: 'Packages::DependencyLink'
@@ -37,8 +39,13 @@ class Packages::Package < ApplicationRecord
validates :name, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
validates :version, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
validates :version, format: { with: Gitlab::Regex.maven_version_regex }, if: -> { version? && maven? }
+ validates :version, format: { with: Gitlab::Regex.pypi_version_regex }, if: :pypi?
+ validates :version,
+ presence: true,
+ format: { with: Gitlab::Regex.generic_package_version_regex },
+ if: :generic?
- enum package_type: { maven: 1, npm: 2, conan: 3, nuget: 4, pypi: 5, composer: 6 }
+ enum package_type: { maven: 1, npm: 2, conan: 3, nuget: 4, pypi: 5, composer: 6, generic: 7 }
scope :with_name, ->(name) { where(name: name) }
scope :with_name_like, ->(name) { where(arel_table[:name].matches(name)) }
@@ -46,6 +53,9 @@ class Packages::Package < ApplicationRecord
scope :with_version, ->(version) { where(version: version) }
scope :without_version_like, -> (version) { where.not(arel_table[:version].matches(version)) }
scope :with_package_type, ->(package_type) { where(package_type: package_type) }
+ scope :including_build_info, -> { includes(build_info: { pipeline: :user }) }
+ scope :including_project_route, -> { includes(project: { namespace: :route }) }
+ scope :including_tags, -> { includes(:tags) }
scope :with_conan_channel, ->(package_channel) do
joins(:conan_metadatum).where(packages_conan_metadata: { package_channel: package_channel })
@@ -138,6 +148,8 @@ class Packages::Package < ApplicationRecord
def versions
project.packages
+ .including_build_info
+ .including_tags
.with_name(name)
.where.not(version: version)
.with_package_type(package_type)
diff --git a/app/models/packages/pypi/metadatum.rb b/app/models/packages/pypi/metadatum.rb
index 7e6456ad964..2e4d61eaf53 100644
--- a/app/models/packages/pypi/metadatum.rb
+++ b/app/models/packages/pypi/metadatum.rb
@@ -6,6 +6,7 @@ class Packages::Pypi::Metadatum < ApplicationRecord
belongs_to :package, -> { where(package_type: :pypi) }, inverse_of: :pypi_metadatum
validates :package, presence: true
+ validates :required_python, length: { maximum: 255 }, allow_blank: true
validate :pypi_package_type
diff --git a/app/models/pages/lookup_path.rb b/app/models/pages/lookup_path.rb
index 51c496c77d3..84d820e539c 100644
--- a/app/models/pages/lookup_path.rb
+++ b/app/models/pages/lookup_path.rb
@@ -22,10 +22,11 @@ module Pages
end
def source
- {
- type: 'file',
- path: File.join(project.full_path, 'public/')
- }
+ if artifacts_archive && !artifacts_archive.file_storage?
+ zip_source
+ else
+ file_source
+ end
end
def prefix
@@ -39,5 +40,28 @@ module Pages
private
attr_reader :project, :trim_prefix, :domain
+
+ def artifacts_archive
+ return unless Feature.enabled?(:pages_artifacts_archive, project)
+
+ # Using build artifacts is a temporary solution for quick testing
+ # in the production environment; we'll replace this with proper
+ # `pages_deployments` later
+ project.pages_metadatum.artifacts_archive&.file
+ end
+
+ def zip_source
+ {
+ type: 'zip',
+ path: artifacts_archive.url(expire_at: 1.day.from_now)
+ }
+ end
+
+ def file_source
+ {
+ type: 'file',
+ path: File.join(project.full_path, 'public/')
+ }
+ end
end
end
diff --git a/app/models/pages_deployment.rb b/app/models/pages_deployment.rb
new file mode 100644
index 00000000000..78e0f185a11
--- /dev/null
+++ b/app/models/pages_deployment.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+# PagesDeployment stores a zip archive containing a GitLab Pages website
+class PagesDeployment < ApplicationRecord
+ belongs_to :project, optional: false
+ belongs_to :ci_build, class_name: 'Ci::Build', optional: true
+
+ validates :file, presence: true
+ validates :file_store, presence: true, inclusion: { in: ObjectStorage::SUPPORTED_STORES }
+ validates :size, presence: true, numericality: { greater_than: 0, only_integer: true }
+end
diff --git a/app/models/pages_domain.rb b/app/models/pages_domain.rb
index d071d2d3c89..98db47deaa3 100644
--- a/app/models/pages_domain.rb
+++ b/app/models/pages_domain.rb
@@ -249,11 +249,7 @@ class PagesDomain < ApplicationRecord
return if usage_serverless?
return unless pages_deployed?
- if Feature.enabled?(:async_update_pages_config, project)
- run_after_commit { PagesUpdateConfigurationWorker.perform_async(project_id) }
- else
- Projects::UpdatePagesConfigurationService.new(project).execute
- end
+ run_after_commit { PagesUpdateConfigurationWorker.perform_async(project_id) }
end
# rubocop: enable CodeReuse/ServiceClass
diff --git a/app/models/performance_monitoring/prometheus_dashboard.rb b/app/models/performance_monitoring/prometheus_dashboard.rb
index bf87d2c3916..40d14aaa1de 100644
--- a/app/models/performance_monitoring/prometheus_dashboard.rb
+++ b/app/models/performance_monitoring/prometheus_dashboard.rb
@@ -53,14 +53,23 @@ module PerformanceMonitoring
# This method is planned to be refactored as a part of https://gitlab.com/gitlab-org/gitlab/-/issues/219398
# implementation. For now, existing logic is reused to deliver the MVC faster
def schema_validation_warnings
+ return run_custom_validation.map(&:message) if Feature.enabled?(:metrics_dashboard_exhaustive_validations, environment&.project)
+
self.class.from_json(reload_schema)
- nil
+ []
+ rescue Gitlab::Metrics::Dashboard::Errors::LayoutError => error
+ [error.message]
rescue ActiveModel::ValidationError => exception
exception.model.errors.map { |attr, error| "#{attr}: #{error}" }
end
private
+ def run_custom_validation
+ Gitlab::Metrics::Dashboard::Validator
+ .errors(reload_schema, dashboard_path: path, project: environment&.project)
+ end
+
# Dashboard finder methods are somewhat limited: #find checks whether the
# user is authorised to view the selected dashboard, but it modifies the schema,
# which in some cases may cause false positives from validation, while #find_raw
# does not authorise users
diff --git a/app/models/product_analytics_event.rb b/app/models/product_analytics_event.rb
index 579ea88c272..d2026d3b333 100644
--- a/app/models/product_analytics_event.rb
+++ b/app/models/product_analytics_event.rb
@@ -20,10 +20,19 @@ class ProductAnalyticsEvent < ApplicationRecord
where('collector_tstamp BETWEEN ? AND ? ', today - duration + 1, today + 1)
}
+ scope :by_category_and_action, ->(category, action) { where(se_category: category, se_action: action) }
+
def self.count_by_graph(graph, days)
group(graph).timerange(days).count
end
+ def self.count_collector_tstamp_by_day(days)
+ group("DATE_TRUNC('day', collector_tstamp)")
+ .reorder('date_trunc_day_collector_tstamp')
+ .timerange(days)
+ .count
+ end
+
def as_json_wo_empty
as_json.compact
end
diff --git a/app/models/project.rb b/app/models/project.rb
index e1b6a9c41dd..4db0eaa0442 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -3,7 +3,6 @@
require 'carrierwave/orm/activerecord'
class Project < ApplicationRecord
- extend ::Gitlab::Utils::Override
include Gitlab::ConfigHelper
include Gitlab::VisibilityLevel
include AccessRequestable
@@ -147,6 +146,7 @@ class Project < ApplicationRecord
has_one :discord_service
has_one :drone_ci_service
has_one :emails_on_push_service
+ has_one :ewm_service
has_one :pipelines_email_service
has_one :irker_service
has_one :pivotaltracker_service
@@ -245,7 +245,6 @@ class Project < ApplicationRecord
has_many :lfs_file_locks
has_many :project_group_links
has_many :invited_groups, through: :project_group_links, source: :group
- has_many :pages_domains
has_many :todos
has_many :notification_settings, as: :source, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
@@ -254,6 +253,7 @@ class Project < ApplicationRecord
has_one :import_data, class_name: 'ProjectImportData', inverse_of: :project, autosave: true
has_one :project_feature, inverse_of: :project
has_one :statistics, class_name: 'ProjectStatistics'
+ has_one :feature_usage, class_name: 'ProjectFeatureUsage'
has_one :cluster_project, class_name: 'Clusters::Project'
has_many :clusters, through: :cluster_project, class_name: 'Clusters::Cluster'
@@ -279,10 +279,9 @@ class Project < ApplicationRecord
# The relation :all_pipelines is intended to be used when we want to get the
# whole list of pipelines associated to the project
has_many :all_pipelines, class_name: 'Ci::Pipeline', inverse_of: :project
- # The relation :ci_pipelines is intended to be used when we want to get only
- # those pipeline which are directly related to CI. There are
- # other pipelines, like webide ones, that we won't retrieve
- # if we use this relation.
+ # The relation :ci_pipelines includes all those that directly contribute to the
+ # latest status of a ref. This does not include dangling pipelines such as those
+ # from webide, child pipelines, etc.
has_many :ci_pipelines,
-> { ci_sources },
class_name: 'Ci::Pipeline',
@@ -327,8 +326,6 @@ class Project < ApplicationRecord
has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_project_id
has_many :source_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :project_id
- has_one :pages_metadatum, class_name: 'ProjectPagesMetadatum', inverse_of: :project
-
has_many :import_failures, inverse_of: :project
has_many :jira_imports, -> { order 'jira_imports.created_at' }, class_name: 'JiraImportState', inverse_of: :project
@@ -339,10 +336,19 @@ class Project < ApplicationRecord
has_many :webide_pipelines, -> { webide_source }, class_name: 'Ci::Pipeline', inverse_of: :project
has_many :reviews, inverse_of: :project
+ # GitLab Pages
+ has_many :pages_domains
+ has_one :pages_metadatum, class_name: 'ProjectPagesMetadatum', inverse_of: :project
+ has_many :pages_deployments
+
# Can be too many records. We need to implement delete_all in batches.
# Issue https://gitlab.com/gitlab-org/gitlab/-/issues/228637
has_many :product_analytics_events, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :operations_feature_flags, class_name: 'Operations::FeatureFlag'
+ has_one :operations_feature_flags_client, class_name: 'Operations::FeatureFlagsClient'
+ has_many :operations_feature_flags_user_lists, class_name: 'Operations::FeatureFlags::UserList'
+
accepts_nested_attributes_for :variables, allow_destroy: true
accepts_nested_attributes_for :project_feature, update_only: true
accepts_nested_attributes_for :project_setting, update_only: true
@@ -393,6 +399,8 @@ class Project < ApplicationRecord
to: :project_setting
delegate :active?, to: :prometheus_service, allow_nil: true, prefix: true
+ delegate :log_jira_dvcs_integration_usage, :jira_dvcs_server_last_sync_at, :jira_dvcs_cloud_last_sync_at, to: :feature_usage
+
# Validations
validates :creator, presence: true, on: :create
validates :description, length: { maximum: 2000 }, allow_blank: true
@@ -454,14 +462,17 @@ class Project < ApplicationRecord
# Sometimes queries (e.g. using CTEs) require explicit disambiguation with table name
scope :projects_order_id_desc, -> { reorder(self.arel_table['id'].desc) }
- scope :sorted_by_similarity_desc, -> (search) do
+ scope :sorted_by_similarity_desc, -> (search, include_in_select: false) do
order_expression = Gitlab::Database::SimilarityScore.build_expression(search: search, rules: [
{ column: arel_table["path"], multiplier: 1 },
{ column: arel_table["name"], multiplier: 0.7 },
{ column: arel_table["description"], multiplier: 0.2 }
])
- reorder(order_expression.desc, arel_table['id'].desc)
+ query = reorder(order_expression.desc, arel_table['id'].desc)
+
+ query = query.select(*query.arel.projections, order_expression.as('similarity')) if include_in_select
+ query
end
scope :with_packages, -> { joins(:packages) }
@@ -476,6 +487,9 @@ class Project < ApplicationRecord
scope :for_milestones, ->(ids) { joins(:milestones).where('milestones.id' => ids).distinct }
scope :with_push, -> { joins(:events).merge(Event.pushed_action) }
scope :with_project_feature, -> { joins('LEFT JOIN project_features ON projects.id = project_features.project_id') }
+ scope :with_active_jira_services, -> { joins(:services).merge(::JiraService.active) } # rubocop:disable CodeReuse/ServiceClass
+ scope :with_jira_dvcs_cloud, -> { joins(:feature_usage).merge(ProjectFeatureUsage.with_jira_dvcs_integration_enabled(cloud: true)) }
+ scope :with_jira_dvcs_server, -> { joins(:feature_usage).merge(ProjectFeatureUsage.with_jira_dvcs_integration_enabled(cloud: false)) }
scope :inc_routes, -> { includes(:route, namespace: :route) }
scope :with_statistics, -> { includes(:statistics) }
scope :with_namespace, -> { includes(:namespace) }
@@ -545,6 +559,8 @@ class Project < ApplicationRecord
preload(:project_feature, :route, namespace: [:route, :owner])
}
+ scope :imported_from, -> (type) { where(import_type: type) }
+
enum auto_cancel_pending_pipelines: { disabled: 0, enabled: 1 }
chronic_duration_attr :build_timeout_human_readable, :build_timeout,
@@ -882,12 +898,12 @@ class Project < ApplicationRecord
end
def repository
- @repository ||= Repository.new(full_path, self, shard: repository_storage, disk_path: disk_path)
+ @repository ||= Gitlab::GlRepository::PROJECT.repository_for(self)
end
def design_repository
strong_memoize(:design_repository) do
- DesignManagement::Repository.new(self)
+ Gitlab::GlRepository::DESIGN.repository_for(self)
end
end
@@ -942,13 +958,12 @@ class Project < ApplicationRecord
latest_successful_build_for_ref(job_name, ref) || raise(ActiveRecord::RecordNotFound.new("Couldn't find job #{job_name}"))
end
- def latest_pipeline_for_ref(ref = default_branch)
+ def latest_pipeline(ref = default_branch, sha = nil)
ref = ref.presence || default_branch
- sha = commit(ref)&.sha
-
+ sha ||= commit(ref)&.sha
return unless sha
- ci_pipelines.newest_first(ref: ref, sha: sha).first
+ ci_pipelines.newest_first(ref: ref, sha: sha).take
end
def merge_base_commit(first_commit_id, second_commit_id)
@@ -1442,6 +1457,10 @@ class Project < ApplicationRecord
http_url_to_repo
end
+ def feature_usage
+ super.presence || build_feature_usage
+ end
+
def forked?
fork_network && fork_network.root_project != self
end
@@ -1452,44 +1471,10 @@ class Project < ApplicationRecord
forked_from_project || fork_network&.root_project
end
- # TODO: Remove this method once all LfsObjectsProject records are backfilled
- # for forks.
- #
- # See https://gitlab.com/gitlab-org/gitlab/issues/122002 for more info.
- def lfs_storage_project
- @lfs_storage_project ||= begin
- result = self
-
- # TODO: Make this go to the fork_network root immediately
- # dependant on the discussion in: https://gitlab.com/gitlab-org/gitlab-foss/issues/39769
- result = result.fork_source while result&.forked?
-
- result || self
- end
- end
-
- # This will return all `lfs_objects` that are accessible to the project and
- # the fork source. This is needed since older forks won't have access to some
- # LFS objects directly and have to get it from the fork source.
- #
- # TODO: Remove this method once all LfsObjectsProject records are backfilled
- # for forks. At that point, projects can look at their own `lfs_objects`.
- #
- # See https://gitlab.com/gitlab-org/gitlab/issues/122002 for more info.
- def all_lfs_objects
+ def lfs_objects_for_repository_types(*types)
LfsObject
- .distinct
.joins(:lfs_objects_projects)
- .where(lfs_objects_projects: { project_id: [self, lfs_storage_project] })
- end
-
- # TODO: Remove this method once all LfsObjectsProject records are backfilled
- # for forks. At that point, projects can look at their own `lfs_objects` so
- # `lfs_objects_oids` can be used instead.
- #
- # See https://gitlab.com/gitlab-org/gitlab/issues/122002 for more info.
- def all_lfs_objects_oids(oids: [])
- oids(all_lfs_objects, oids: oids)
+ .where(lfs_objects_projects: { project: self, repository_type: types })
end
def lfs_objects_oids(oids: [])
@@ -1653,21 +1638,6 @@ class Project < ApplicationRecord
!namespace.share_with_group_lock
end
- def pipeline_for(ref, sha = nil, id = nil)
- sha ||= commit(ref).try(:sha)
- return unless sha
-
- if id.present?
- pipelines_for(ref, sha).find_by(id: id)
- else
- pipelines_for(ref, sha).take
- end
- end
-
- def pipelines_for(ref, sha)
- ci_pipelines.order(id: :desc).where(sha: sha, ref: ref)
- end
-
def latest_successful_pipeline_for_default_branch
if defined?(@latest_successful_pipeline_for_default_branch)
return @latest_successful_pipeline_for_default_branch
@@ -1826,12 +1796,12 @@ class Project < ApplicationRecord
end
# rubocop: enable CodeReuse/ServiceClass
- def mark_pages_as_deployed
- ensure_pages_metadatum.update!(deployed: true)
+ def mark_pages_as_deployed(artifacts_archive: nil)
+ ensure_pages_metadatum.update!(deployed: true, artifacts_archive: artifacts_archive)
end
def mark_pages_as_not_deployed
- ensure_pages_metadatum.update!(deployed: false)
+ ensure_pages_metadatum.update!(deployed: false, artifacts_archive: nil)
end
def write_repository_config(gl_full_path: full_path)
@@ -2140,8 +2110,8 @@ class Project < ApplicationRecord
data = repository.route_map_for(sha)
Gitlab::RouteMap.new(data) if data
- rescue Gitlab::RouteMap::FormatError
- nil
+ rescue Gitlab::RouteMap::FormatError
+ nil
end
end
@@ -2424,6 +2394,10 @@ class Project < ApplicationRecord
false
end
+ def jira_subscription_exists?
+ JiraConnectSubscription.for_project(self).exists?
+ end
+
def uses_default_ci_config?
ci_config_path.blank? || ci_config_path == Gitlab::FileDetector::PATTERNS[:gitlab_ci]
end
@@ -2464,11 +2438,6 @@ class Project < ApplicationRecord
jira_imports.last
end
- override :after_wiki_activity
- def after_wiki_activity
- touch(:last_activity_at, :last_repository_updated_at)
- end
-
def metrics_setting
super || build_metrics_setting
end
@@ -2518,6 +2487,20 @@ class Project < ApplicationRecord
.exists?
end
+ def default_branch_or_master
+ default_branch || 'master'
+ end
+
+ def ci_config_path_or_default
+ ci_config_path.presence || Ci::Pipeline::DEFAULT_CONFIG_PATH
+ end
+
+ def enabled_group_deploy_keys
+ return GroupDeployKey.none unless group
+
+ GroupDeployKey.for_groups(group.self_and_ancestors_ids)
+ end
+
private
def find_service(services, name)
@@ -2533,11 +2516,11 @@ class Project < ApplicationRecord
end
def services_templates
- @services_templates ||= Service.templates
+ @services_templates ||= Service.for_template
end
def services_instances
- @services_instances ||= Service.instances
+ @services_instances ||= Service.for_instance
end
def closest_namespace_setting(name)
@@ -2678,9 +2661,11 @@ class Project < ApplicationRecord
end
def oids(objects, oids: [])
- collection = oids.any? ? objects.where(oid: oids) : objects
+ objects = objects.where(oid: oids) if oids.any?
- collection.pluck(:oid)
+ [].tap do |out|
+ objects.each_batch { |relation| out.concat(relation.pluck(:oid)) }
+ end
end
end
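
latest_pipeline replaces both latest_pipeline_for_ref and pipeline_for, taking an optional SHA so callers such as MergeRequest#merge_pipeline can pin the lookup to a specific commit. A usage sketch, assuming a project record (some_sha is hypothetical):

    project.latest_pipeline                      # newest CI pipeline for the default branch HEAD
    project.latest_pipeline('master')            # newest pipeline for a ref, SHA resolved from the ref
    project.latest_pipeline('master', some_sha)  # newest pipeline for that exact ref/SHA pair
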
diff --git a/app/models/project_feature_usage.rb b/app/models/project_feature_usage.rb
new file mode 100644
index 00000000000..b167c2e371b
--- /dev/null
+++ b/app/models/project_feature_usage.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class ProjectFeatureUsage < ApplicationRecord
+ self.primary_key = :project_id
+
+ JIRA_DVCS_CLOUD_FIELD = 'jira_dvcs_cloud_last_sync_at'.freeze
+ JIRA_DVCS_SERVER_FIELD = 'jira_dvcs_server_last_sync_at'.freeze
+
+ belongs_to :project
+ validates :project, presence: true
+
+ scope :with_jira_dvcs_integration_enabled, -> (cloud: true) do
+ where.not(jira_dvcs_integration_field(cloud: cloud) => nil)
+ end
+
+ class << self
+ def jira_dvcs_integration_field(cloud: true)
+ cloud ? JIRA_DVCS_CLOUD_FIELD : JIRA_DVCS_SERVER_FIELD
+ end
+ end
+
+ def log_jira_dvcs_integration_usage(cloud: true)
+ transaction(requires_new: true) do
+ save unless persisted?
+ touch(self.class.jira_dvcs_integration_field(cloud: cloud))
+ end
+ rescue ActiveRecord::RecordNotUnique
+ reset
+ retry
+ end
+end
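
log_jira_dvcs_integration_usage tolerates two requests racing to create the same row: the nested transaction saves the record if needed, touches the relevant timestamp column, and on a unique-constraint violation resets and retries against the now-existing row. A usage sketch via the Project delegation added above:

    project.log_jira_dvcs_integration_usage(cloud: true)
    # touches feature_usage.jira_dvcs_cloud_last_sync_at

    Project.with_jira_dvcs_cloud
    # => projects whose jira_dvcs_cloud_last_sync_at IS NOT NULL
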
diff --git a/app/models/project_pages_metadatum.rb b/app/models/project_pages_metadatum.rb
index 1fda388b1ae..8a1db4a9acf 100644
--- a/app/models/project_pages_metadatum.rb
+++ b/app/models/project_pages_metadatum.rb
@@ -4,6 +4,7 @@ class ProjectPagesMetadatum < ApplicationRecord
self.primary_key = :project_id
belongs_to :project, inverse_of: :pages_metadatum
+ belongs_to :artifacts_archive, class_name: 'Ci::JobArtifact'
scope :deployed, -> { where(deployed: true) }
end
diff --git a/app/models/project_services/chat_message/merge_message.rb b/app/models/project_services/chat_message/merge_message.rb
index c4fcdff8386..b9916a54d75 100644
--- a/app/models/project_services/chat_message/merge_message.rb
+++ b/app/models/project_services/chat_message/merge_message.rb
@@ -5,6 +5,7 @@ module ChatMessage
attr_reader :merge_request_iid
attr_reader :source_branch
attr_reader :target_branch
+ attr_reader :action
attr_reader :state
attr_reader :title
@@ -16,6 +17,7 @@ module ChatMessage
@merge_request_iid = obj_attr[:iid]
@source_branch = obj_attr[:source_branch]
@target_branch = obj_attr[:target_branch]
+ @action = obj_attr[:action]
@state = obj_attr[:state]
@title = format_title(obj_attr[:title])
end
@@ -63,11 +65,17 @@ module ChatMessage
"#{project_url}/-/merge_requests/#{merge_request_iid}"
end
- # overridden in EE
def state_or_action_text
- state
+ case action
+ when 'approved', 'unapproved'
+ action
+ when 'approval'
+ 'added their approval to'
+ when 'unapproval'
+ 'removed their approval from'
+ else
+ state
+ end
end
end
end
-
-ChatMessage::MergeMessage.prepend_if_ee('::EE::ChatMessage::MergeMessage')
diff --git a/app/models/project_services/ewm_service.rb b/app/models/project_services/ewm_service.rb
new file mode 100644
index 00000000000..af402e50292
--- /dev/null
+++ b/app/models/project_services/ewm_service.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+class EwmService < IssueTrackerService
+ validates :project_url, :issues_url, :new_issue_url, presence: true, public_url: true, if: :activated?
+
+ def self.reference_pattern(only_long: true)
+ @reference_pattern ||= %r{(?<issue>\b(bug|task|work item|workitem|rtcwi|defect)\b\s+\d+)}i
+ end
+
+ def title
+ 'EWM'
+ end
+
+ def description
+ s_('IssueTracker|EWM work items tracker')
+ end
+
+ def self.to_param
+ 'ewm'
+ end
+
+ def can_test?
+ false
+ end
+
+ def issue_url(iid)
+ issues_url.gsub(':id', iid.to_s.split(' ')[-1])
+ end
+end
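
issue_url keeps only the trailing numeric ID from references such as "bug 123" or "defect 4217" before substituting it into the tracker URL template. A runnable sketch (the issues URL is hypothetical):

    issues_url = 'https://ewm.example.com/ccm/web/projects#action=viewWorkItem&id=:id'
    iid = 'defect 4217'

    issues_url.gsub(':id', iid.to_s.split(' ')[-1])
    # => "https://ewm.example.com/ccm/web/projects#action=viewWorkItem&id=4217"
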
diff --git a/app/models/project_services/jira_service.rb b/app/models/project_services/jira_service.rb
index 36d7026de30..732da62863f 100644
--- a/app/models/project_services/jira_service.rb
+++ b/app/models/project_services/jira_service.rb
@@ -5,6 +5,7 @@ class JiraService < IssueTrackerService
include Gitlab::Routing
include ApplicationHelper
include ActionView::Helpers::AssetUrlHelper
+ include Gitlab::Utils::StrongMemoize
PROJECTS_PER_PAGE = 50
@@ -32,6 +33,7 @@ class JiraService < IssueTrackerService
data_field :username, :password, :url, :api_url, :jira_issue_transition_id, :project_key, :issues_enabled
before_update :reset_password
+ after_commit :update_deployment_type, on: [:create, :update], if: :update_deployment_type?
enum comment_detail: {
standard: 1,
@@ -212,7 +214,7 @@ class JiraService < IssueTrackerService
end
def test(_)
- result = test_settings
+ result = server_info
success = result.present?
result = @error&.message unless success
@@ -231,10 +233,10 @@ class JiraService < IssueTrackerService
private
- def test_settings
- return unless client_url.present?
-
- jira_request { client.ServerInfo.all.attrs }
+ def server_info
+ strong_memoize(:server_info) do
+ client_url.present? ? jira_request { client.ServerInfo.all.attrs } : nil
+ end
end
def can_cross_reference?(noteable)
@@ -436,6 +438,26 @@ class JiraService < IssueTrackerService
url_changed?
end
+ def update_deployment_type?
+ (api_url_changed? || url_changed? || username_changed? || password_changed?) &&
+ can_test?
+ end
+
+ def update_deployment_type
+ clear_memoization(:server_info) # ensure we run the request when we try to update deployment type
+ results = server_info
+ return data_fields.deployment_unknown! unless results.present?
+
+ case results['deploymentType']
+ when 'Server'
+ data_fields.deployment_server!
+ when 'Cloud'
+ data_fields.deployment_cloud!
+ else
+ data_fields.deployment_unknown!
+ end
+ end
+
def self.event_description(event)
case event
when "merge_request", "merge_request_events"
diff --git a/app/models/project_services/prometheus_service.rb b/app/models/project_services/prometheus_service.rb
index 950cd4f6859..d0e62a1afba 100644
--- a/app/models/project_services/prometheus_service.rb
+++ b/app/models/project_services/prometheus_service.rb
@@ -97,13 +97,9 @@ class PrometheusService < MonitoringService
def prometheus_client
return unless should_return_client?
- options = {
- allow_local_requests: allow_local_api_url?,
- # We should choose more conservative timeouts, but some queries we run are now busting our
- # default timeouts, which are stricter. We should make those queries faster instead.
- # See https://gitlab.com/gitlab-org/gitlab/-/issues/233109
- timeout: 60
- }
+ options = prometheus_client_default_options.merge(
+ allow_local_requests: allow_local_api_url?
+ )
if behind_iap?
# Adds the Authorization header
diff --git a/app/models/project_statistics.rb b/app/models/project_statistics.rb
index f153bfe3f5b..67ab2c0ce8a 100644
--- a/app/models/project_statistics.rb
+++ b/app/models/project_statistics.rb
@@ -12,13 +12,17 @@ class ProjectStatistics < ApplicationRecord
before_save :update_storage_size
COLUMNS_TO_REFRESH = [:repository_size, :wiki_size, :lfs_objects_size, :commit_count, :snippets_size].freeze
- INCREMENTABLE_COLUMNS = { build_artifacts_size: %i[storage_size], packages_size: %i[storage_size], snippets_size: %i[storage_size] }.freeze
+ INCREMENTABLE_COLUMNS = {
+ build_artifacts_size: %i[storage_size],
+ packages_size: %i[storage_size],
+ pipeline_artifacts_size: %i[storage_size],
+ snippets_size: %i[storage_size]
+ }.freeze
NAMESPACE_RELATABLE_COLUMNS = [:repository_size, :wiki_size, :lfs_objects_size].freeze
scope :for_project_ids, ->(project_ids) { where(project_id: project_ids) }
scope :for_namespaces, -> (namespaces) { where(namespace: namespaces) }
- scope :with_any_ci_minutes_used, -> { where.not(shared_runners_seconds: 0) }
def total_repository_size
repository_size + lfs_objects_size
@@ -80,6 +84,10 @@ class ProjectStatistics < ApplicationRecord
# might try to update project statistics before the `snippets_size` column has been created.
storage_size += snippets_size if self.class.column_names.include?('snippets_size')
+ # The `pipeline_artifacts_size` column was added on 20200817142800 but db/post_migrate/20190527194900_schedule_calculate_wiki_sizes.rb
+ # might try to update project statistics before the `pipeline_artifacts_size` column has been created.
+ storage_size += pipeline_artifacts_size if self.class.column_names.include?('pipeline_artifacts_size')
+
self.storage_size = storage_size
end
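# Standalone sketch of what INCREMENTABLE_COLUMNS means: bumping one of the
# listed columns also bumps every column it maps to (storage_size here), so
# pipeline_artifacts_size now rolls up into storage_size just like build
# artifacts and packages already did. The stats hash below is a hypothetical
# stand-in for the ActiveRecord columns on the real model.
INCREMENTABLE = {
  build_artifacts_size:    [:storage_size],
  packages_size:           [:storage_size],
  pipeline_artifacts_size: [:storage_size],
  snippets_size:           [:storage_size]
}.freeze

def increment(stats, column, amount)
  stats[column] += amount
  INCREMENTABLE.fetch(column, []).each { |extra| stats[extra] += amount }
  stats
end

stats = { pipeline_artifacts_size: 0, storage_size: 100 }
increment(stats, :pipeline_artifacts_size, 25)
# => { :pipeline_artifacts_size => 25, :storage_size => 125 }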
diff --git a/app/models/project_team.rb b/app/models/project_team.rb
index 072d281e5f8..5b7eded00cd 100644
--- a/app/models/project_team.rb
+++ b/app/models/project_team.rb
@@ -178,6 +178,40 @@ class ProjectTeam
max_member_access_for_user_ids([user_id])[user_id]
end
+ def contribution_check_for_user_ids(user_ids)
+ user_ids = user_ids.uniq
+ key = "contribution_check_for_users:#{project.id}"
+
+ Gitlab::SafeRequestStore[key] ||= {}
+ contributors = Gitlab::SafeRequestStore[key] || {}
+
+ user_ids -= contributors.keys
+
+ return contributors if user_ids.empty?
+
+ resource_contributors = project.merge_requests
+ .merged
+ .where(author_id: user_ids, target_branch: project.default_branch.to_s)
+ .pluck(:author_id)
+ .product([true]).to_h
+
+ contributors.merge!(resource_contributors)
+
+ missing_resource_ids = user_ids - resource_contributors.keys
+
+ missing_resource_ids.each do |resource_id|
+ contributors[resource_id] = false
+ end
+
+ contributors
+ end
+
+ def contributor?(user_id)
+ return false if max_member_access(user_id) >= Gitlab::Access::GUEST
+
+ contribution_check_for_user_ids([user_id])[user_id]
+ end
+
private
def fetch_members(level = nil)
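# Standalone sketch of the per-request memoization pattern used by the new
# contribution_check_for_user_ids above: already-answered ids are skipped, and
# ids with no merged merge request are cached as false so they are not queried
# again. In the real code the cache lives in Gitlab::SafeRequestStore keyed by
# project, the lookup is the merge_requests.merged query against the default
# branch, and contributor? additionally returns false for project members
# (access at or above GUEST). The lookup lambda below is a hypothetical stand-in.
def contribution_check(cache, user_ids, lookup)
  pending = user_ids.uniq - cache.keys
  return cache if pending.empty?

  merged_authors = lookup.call(pending)               # ids with a merged MR
  pending.each { |id| cache[id] = merged_authors.include?(id) }
  cache
end

cache  = {}
lookup = ->(ids) { ids & [42] }                       # pretend only user 42 has a merged MR
contribution_check(cache, [7, 42], lookup)            # => { 7 => false, 42 => true }
contribution_check(cache, [42], lookup)               # cache hit, lookup not called again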
diff --git a/app/models/project_wiki.rb b/app/models/project_wiki.rb
index 5df0a33dc9a..bd570cf7ead 100644
--- a/app/models/project_wiki.rb
+++ b/app/models/project_wiki.rb
@@ -10,6 +10,23 @@ class ProjectWiki < Wiki
def disk_path(*args, &block)
container.disk_path + '.wiki'
end
+
+ override :after_wiki_activity
+ def after_wiki_activity
+ # Update activity columns, this is done synchronously to avoid
+ # replication delays in Geo.
+ project.touch(:last_activity_at, :last_repository_updated_at)
+ end
+
+ override :after_post_receive
+ def after_post_receive
+ # Update storage statistics
+ ProjectCacheWorker.perform_async(project.id, [], [:wiki_size])
+
+ # This call is repeated for post-receive, to make sure we're updating
+ # the activity columns for Git pushes as well.
+ after_wiki_activity
+ end
end
# TODO: Remove this once we implement ES support for group wikis.
diff --git a/app/models/prometheus_metric.rb b/app/models/prometheus_metric.rb
index bfd23d2a334..9ddf66cd388 100644
--- a/app/models/prometheus_metric.rb
+++ b/app/models/prometheus_metric.rb
@@ -4,7 +4,7 @@ class PrometheusMetric < ApplicationRecord
belongs_to :project, validate: true, inverse_of: :prometheus_metrics
has_many :prometheus_alerts, inverse_of: :prometheus_metric
- enum group: PrometheusMetricEnums.groups
+ enum group: Enums::PrometheusMetric.groups
validates :title, presence: true
validates :query, presence: true
@@ -16,11 +16,13 @@ class PrometheusMetric < ApplicationRecord
validates :project, presence: true, unless: :common?
validates :project, absence: true, if: :common?
+ scope :for_dashboard_path, -> (dashboard_path) { where(dashboard_path: dashboard_path) }
scope :for_project, -> (project) { where(project: project) }
scope :for_group, -> (group) { where(group: group) }
scope :for_title, -> (title) { where(title: title) }
scope :for_y_label, -> (y_label) { where(y_label: y_label) }
scope :for_identifier, -> (identifier) { where(identifier: identifier) }
+ scope :not_identifier, -> (identifier) { where.not(identifier: identifier) }
scope :common, -> { where(common: true) }
scope :ordered, -> { reorder(created_at: :asc) }
@@ -72,6 +74,6 @@ class PrometheusMetric < ApplicationRecord
private
def group_details(group)
- PrometheusMetricEnums.group_details.fetch(group.to_sym)
+ Enums::PrometheusMetric.group_details.fetch(group.to_sym)
end
end
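# Usage sketch for the two scopes added above (inside the Rails app; the
# project variable, dashboard path, and identifier are hypothetical): select
# the metrics defined for one custom dashboard file while excluding a metric
# that is being superseded by identifier.
PrometheusMetric
  .for_project(project)
  .for_dashboard_path('.gitlab/dashboards/custom.yml')
  .not_identifier('cpu_usage_old')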
diff --git a/app/models/prometheus_metric_enums.rb b/app/models/prometheus_metric_enums.rb
deleted file mode 100644
index 75a34618e2c..00000000000
--- a/app/models/prometheus_metric_enums.rb
+++ /dev/null
@@ -1,84 +0,0 @@
-# frozen_string_literal: true
-
-module PrometheusMetricEnums
- def self.groups
- {
- # built-in groups
- nginx_ingress_vts: -1,
- ha_proxy: -2,
- aws_elb: -3,
- nginx: -4,
- kubernetes: -5,
- nginx_ingress: -6,
- cluster_health: -100
- }.merge(custom_groups).freeze
- end
-
- # custom/user groups
- def self.custom_groups
- {
- business: 0,
- response: 1,
- system: 2
- }.freeze
- end
-
- def self.group_details
- {
- # built-in groups
- nginx_ingress_vts: {
- group_title: _('Response metrics (NGINX Ingress VTS)'),
- required_metrics: %w(nginx_upstream_responses_total nginx_upstream_response_msecs_avg),
- priority: 10
- }.freeze,
- nginx_ingress: {
- group_title: _('Response metrics (NGINX Ingress)'),
- required_metrics: %w(nginx_ingress_controller_requests nginx_ingress_controller_ingress_upstream_latency_seconds_sum),
- priority: 10
- }.freeze,
- ha_proxy: {
- group_title: _('Response metrics (HA Proxy)'),
- required_metrics: %w(haproxy_frontend_http_requests_total haproxy_frontend_http_responses_total),
- priority: 10
- }.freeze,
- aws_elb: {
- group_title: _('Response metrics (AWS ELB)'),
- required_metrics: %w(aws_elb_request_count_sum aws_elb_latency_average aws_elb_httpcode_backend_5_xx_sum),
- priority: 10
- }.freeze,
- nginx: {
- group_title: _('Response metrics (NGINX)'),
- required_metrics: %w(nginx_server_requests nginx_server_requestMsec),
- priority: 10
- }.freeze,
- kubernetes: {
- group_title: _('System metrics (Kubernetes)'),
- required_metrics: %w(container_memory_usage_bytes container_cpu_usage_seconds_total),
- priority: 5
- }.freeze,
- cluster_health: {
- group_title: _('Cluster Health'),
- required_metrics: %w(container_memory_usage_bytes container_cpu_usage_seconds_total),
- priority: 10
- }.freeze
- }.merge(custom_group_details).freeze
- end
-
- # custom/user groups
- def self.custom_group_details
- {
- business: {
- group_title: _('Business metrics (Custom)'),
- priority: 0
- }.freeze,
- response: {
- group_title: _('Response metrics (Custom)'),
- priority: -5
- }.freeze,
- system: {
- group_title: _('System metrics (Custom)'),
- priority: -10
- }.freeze
- }.freeze
- end
-end
diff --git a/app/models/protected_branch.rb b/app/models/protected_branch.rb
index 594c822c18f..599c174ddd7 100644
--- a/app/models/protected_branch.rb
+++ b/app/models/protected_branch.rb
@@ -38,9 +38,9 @@ class ProtectedBranch < ApplicationRecord
project.protected_branches
end
+ # overridden in EE
def self.branch_requires_code_owner_approval?(project, branch_name)
- # NOOP
- #
+ false
end
def self.by_name(query)
diff --git a/app/models/remote_mirror.rb b/app/models/remote_mirror.rb
index 8b15d481c1b..6b8b34ce4d2 100644
--- a/app/models/remote_mirror.rb
+++ b/app/models/remote_mirror.rb
@@ -210,6 +210,10 @@ class RemoteMirror < ApplicationRecord
super(usernames_whitelist: %w[git])
end
+ def bare_url
+ Gitlab::UrlSanitizer.new(read_attribute(:url)).full_url
+ end
+
def ensure_remote!
return unless project
return unless remote_name && remote_url
diff --git a/app/models/repository.rb b/app/models/repository.rb
index 07122db36b3..ef17e010ba8 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -214,7 +214,7 @@ class Repository
return false if with_slash.empty?
prefixes = no_slash.map { |ref| Regexp.escape(ref) }.join('|')
- prefix_regex = %r{^#{prefixes}/}
+ prefix_regex = %r{^(#{prefixes})/}
with_slash.any? do |ref|
prefix_regex.match?(ref)
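# Worked example for the one-character regex fix above. Alternation binds
# loosely, so without the group the ^ anchor and the trailing slash each apply
# to only one branch of the pattern, producing both false positives and false
# negatives. The ref names below are hypothetical.
prefixes = %w[release hotfix].map { |ref| Regexp.escape(ref) }.join('|')

old_regex = %r{^#{prefixes}/}    # => /^release|hotfix\//
new_regex = %r{^(#{prefixes})/}  # => /^(release|hotfix)\//

old_regex.match?('release-notes')    # => true  (bare "^release" matched)
new_regex.match?('release-notes')    # => false (a "release/" prefix is required)
old_regex.match?('my-hotfix/page')   # => true  ("hotfix/" matched anywhere)
new_regex.match?('my-hotfix/page')   # => false
new_regex.match?('hotfix/1-2-3')     # => true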
diff --git a/app/models/resource_iteration_event.rb b/app/models/resource_iteration_event.rb
deleted file mode 100644
index 78d85ea8b95..00000000000
--- a/app/models/resource_iteration_event.rb
+++ /dev/null
@@ -1,5 +0,0 @@
-# frozen_string_literal: true
-
-class ResourceIterationEvent < ResourceTimeboxEvent
- belongs_to :iteration
-end
diff --git a/app/models/resource_state_event.rb b/app/models/resource_state_event.rb
index 766b4d7a865..1ce4e14d289 100644
--- a/app/models/resource_state_event.rb
+++ b/app/models/resource_state_event.rb
@@ -19,3 +19,5 @@ class ResourceStateEvent < ResourceEvent
issue || merge_request
end
end
+
+ResourceStateEvent.prepend_if_ee('EE::ResourceStateEvent')
diff --git a/app/models/security_event.rb b/app/models/security_event.rb
deleted file mode 100644
index 3fe4cc99c9b..00000000000
--- a/app/models/security_event.rb
+++ /dev/null
@@ -1,4 +0,0 @@
-# frozen_string_literal: true
-
-class SecurityEvent < AuditEvent
-end
diff --git a/app/models/service.rb b/app/models/service.rb
index 40e7e5552d1..e63e06bf46f 100644
--- a/app/models/service.rb
+++ b/app/models/service.rb
@@ -9,12 +9,11 @@ class Service < ApplicationRecord
include DataFields
include IgnorableColumns
- ignore_columns %i[title description], remove_with: '13.4', remove_after: '2020-09-22'
ignore_columns %i[default], remove_with: '13.5', remove_after: '2020-10-22'
SERVICE_NAMES = %w[
alerts asana assembla bamboo bugzilla buildkite campfire confluence custom_issue_tracker discord
- drone_ci emails_on_push external_wiki flowdock hangouts_chat hipchat irker jira
+ drone_ci emails_on_push ewm external_wiki flowdock hangouts_chat hipchat irker jira
mattermost mattermost_slash_commands microsoft_teams packagist pipelines_email
pivotaltracker prometheus pushover redmine slack slack_slash_commands teamcity unify_circuit webex_teams youtrack
].freeze
@@ -27,16 +26,17 @@ class Service < ApplicationRecord
default_value_for :active, false
default_value_for :alert_events, true
- default_value_for :push_events, true
- default_value_for :issues_events, true
- default_value_for :confidential_issues_events, true
+ default_value_for :category, 'common'
default_value_for :commit_events, true
- default_value_for :merge_requests_events, true
- default_value_for :tag_push_events, true
- default_value_for :note_events, true
+ default_value_for :confidential_issues_events, true
default_value_for :confidential_note_events, true
+ default_value_for :issues_events, true
default_value_for :job_events, true
+ default_value_for :merge_requests_events, true
+ default_value_for :note_events, true
default_value_for :pipeline_events, true
+ default_value_for :push_events, true
+ default_value_for :tag_push_events, true
default_value_for :wiki_page_events, true
after_initialize :initialize_properties
@@ -46,6 +46,7 @@ class Service < ApplicationRecord
after_commit :cache_project_has_external_wiki
belongs_to :project, inverse_of: :services
+ belongs_to :group, inverse_of: :services
has_one :service_hook
validates :project_id, presence: true, unless: -> { template? || instance? || group_id }
@@ -64,8 +65,9 @@ class Service < ApplicationRecord
scope :active, -> { where(active: true) }
scope :by_type, -> (type) { where(type: type) }
scope :by_active_flag, -> (flag) { where(active: flag) }
- scope :templates, -> { where(template: true, type: available_services_types) }
- scope :instances, -> { where(instance: true, type: available_services_types) }
+ scope :for_group, -> (group) { where(group_id: group, type: available_services_types) }
+ scope :for_template, -> { where(template: true, type: available_services_types) }
+ scope :for_instance, -> { where(instance: true, type: available_services_types) }
scope :push_hooks, -> { where(push_events: true, active: true) }
scope :tag_push_hooks, -> { where(tag_push_events: true, active: true) }
@@ -81,7 +83,178 @@ class Service < ApplicationRecord
scope :alert_hooks, -> { where(alert_events: true, active: true) }
scope :deployment, -> { where(category: 'deployment') }
- default_value_for :category, 'common'
+ # Provide convenient accessor methods for each serialized property.
+ # Also keep track of updated properties in a similar way as ActiveModel::Dirty
+ def self.prop_accessor(*args)
+ args.each do |arg|
+ class_eval <<~RUBY, __FILE__, __LINE__ + 1
+ unless method_defined?(arg)
+ def #{arg}
+ properties['#{arg}']
+ end
+ end
+
+ def #{arg}=(value)
+ self.properties ||= {}
+ updated_properties['#{arg}'] = #{arg} unless #{arg}_changed?
+ self.properties['#{arg}'] = value
+ end
+
+ def #{arg}_changed?
+ #{arg}_touched? && #{arg} != #{arg}_was
+ end
+
+ def #{arg}_touched?
+ updated_properties.include?('#{arg}')
+ end
+
+ def #{arg}_was
+ updated_properties['#{arg}']
+ end
+ RUBY
+ end
+ end
+
+ # Provide convenient boolean accessor methods for each serialized property.
+ # Also keep track of updated properties in a similar way as ActiveModel::Dirty
+ def self.boolean_accessor(*args)
+ self.prop_accessor(*args)
+
+ args.each do |arg|
+ class_eval <<~RUBY, __FILE__, __LINE__ + 1
+ def #{arg}?
+ # '!!' is used because a nil or empty string is cast to nil, and we want a real boolean
+ !!ActiveRecord::Type::Boolean.new.cast(#{arg})
+ end
+ RUBY
+ end
+ end
+
+ def self.to_param
+ raise NotImplementedError
+ end
+
+ def self.event_names
+ self.supported_events.map { |event| ServicesHelper.service_event_field_name(event) }
+ end
+
+ def self.supported_event_actions
+ %w[]
+ end
+
+ def self.supported_events
+ %w[commit push tag_push issue confidential_issue merge_request wiki_page]
+ end
+
+ def self.event_description(event)
+ ServicesHelper.service_event_description(event)
+ end
+
+ def self.find_or_create_templates
+ create_nonexistent_templates
+ for_template
+ end
+
+ def self.create_nonexistent_templates
+ nonexistent_services = list_nonexistent_services_for(for_template)
+ return if nonexistent_services.empty?
+
+ # Create within a transaction to minimize the number of SQL queries.
+ transaction do
+ nonexistent_services.each do |service_type|
+ service_type.constantize.create(template: true)
+ end
+ end
+ end
+ private_class_method :create_nonexistent_templates
+
+ def self.find_or_initialize_integration(name, instance: false, group_id: nil)
+ if name.in?(available_services_names)
+ "#{name}_service".camelize.constantize.find_or_initialize_by(instance: instance, group_id: group_id)
+ end
+ end
+
+ def self.find_or_initialize_all(scope)
+ scope + build_nonexistent_services_for(scope)
+ end
+
+ def self.build_nonexistent_services_for(scope)
+ list_nonexistent_services_for(scope).map do |service_type|
+ service_type.constantize.new
+ end
+ end
+ private_class_method :build_nonexistent_services_for
+
+ def self.list_nonexistent_services_for(scope)
+ # Use #map instead of #pluck to avoid an extra query: the records in the
+ # given scope are already loaded, so there is no need to hit the database again.
+ available_services_types - scope.map(&:type)
+ end
+ private_class_method :list_nonexistent_services_for
+
+ def self.available_services_names
+ service_names = services_names
+ service_names += dev_services_names
+
+ service_names.sort_by(&:downcase)
+ end
+
+ def self.services_names
+ SERVICE_NAMES
+ end
+
+ def self.dev_services_names
+ return [] unless Rails.env.development?
+
+ DEV_SERVICE_NAMES
+ end
+
+ def self.available_services_types
+ available_services_names.map { |service_name| "#{service_name}_service".camelize }
+ end
+
+ def self.services_types
+ services_names.map { |service_name| "#{service_name}_service".camelize }
+ end
+
+ def self.build_from_integration(project_id, integration)
+ service = integration.dup
+
+ if integration.supports_data_fields?
+ data_fields = integration.data_fields.dup
+ data_fields.service = service
+ end
+
+ service.template = false
+ service.instance = false
+ service.inherit_from_id = integration.id if integration.instance?
+ service.project_id = project_id
+ service.active = false if service.invalid?
+ service
+ end
+
+ def self.instance_exists_for?(type)
+ exists?(instance: true, type: type)
+ end
+
+ def self.default_integration(type, scope)
+ closest_group_integration(type, scope) || instance_level_integration(type)
+ end
+
+ def self.closest_group_integration(type, scope)
+ group_ids = scope.ancestors.select(:id)
+ array = group_ids.to_sql.present? ? "array(#{group_ids.to_sql})" : 'ARRAY[]'
+
+ where(type: type, group_id: group_ids)
+ .order(Arel.sql("array_position(#{array}::bigint[], services.group_id)"))
+ .first
+ end
+ private_class_method :closest_group_integration
+
+ def self.instance_level_integration(type)
+ find_by(type: type, instance: true)
+ end
+ private_class_method :instance_level_integration
def activated?
active
@@ -124,10 +297,6 @@ class Service < ApplicationRecord
self.class.to_param
end
- def self.to_param
- raise NotImplementedError
- end
-
def fields
# implement inside child
[]
@@ -137,11 +306,11 @@ class Service < ApplicationRecord
#
# This list is used in `Service#as_json(only: json_fields)`.
def json_fields
- %w(active)
+ %w[active]
end
def to_service_hash
- as_json(methods: :type, except: %w[id template instance project_id])
+ as_json(methods: :type, except: %w[id template instance project_id group_id])
end
def to_data_fields_hash
@@ -156,10 +325,6 @@ class Service < ApplicationRecord
self.class.event_names
end
- def self.event_names
- self.supported_events.map { |event| ServicesHelper.service_event_field_name(event) }
- end
-
def event_field(event)
nil
end
@@ -188,18 +353,10 @@ class Service < ApplicationRecord
self.class.supported_event_actions
end
- def self.supported_event_actions
- %w()
- end
-
def supported_events
self.class.supported_events
end
- def self.supported_events
- %w(commit push tag_push issue confidential_issue merge_request wiki_page)
- end
-
def execute(data)
# implement inside child
end
@@ -210,59 +367,10 @@ class Service < ApplicationRecord
{ success: result.present?, result: result }
end
- # Disable test for instance-level services.
+ # Disable test for instance-level and group-level services.
# https://gitlab.com/gitlab-org/gitlab/-/issues/213138
def can_test?
- !instance?
- end
-
- # Provide convenient accessor methods
- # for each serialized property.
- # Also keep track of updated properties in a similar way as ActiveModel::Dirty
- def self.prop_accessor(*args)
- args.each do |arg|
- class_eval <<~RUBY, __FILE__, __LINE__ + 1
- unless method_defined?(arg)
- def #{arg}
- properties['#{arg}']
- end
- end
-
- def #{arg}=(value)
- self.properties ||= {}
- updated_properties['#{arg}'] = #{arg} unless #{arg}_changed?
- self.properties['#{arg}'] = value
- end
-
- def #{arg}_changed?
- #{arg}_touched? && #{arg} != #{arg}_was
- end
-
- def #{arg}_touched?
- updated_properties.include?('#{arg}')
- end
-
- def #{arg}_was
- updated_properties['#{arg}']
- end
- RUBY
- end
- end
-
- # Provide convenient boolean accessor methods
- # for each serialized property.
- # Also keep track of updated properties in a similar way as ActiveModel::Dirty
- def self.boolean_accessor(*args)
- self.prop_accessor(*args)
-
- args.each do |arg|
- class_eval <<~RUBY, __FILE__, __LINE__ + 1
- def #{arg}?
- # '!!' is used because nil or empty string is converted to nil
- !!ActiveRecord::Type::Boolean.new.cast(#{arg})
- end
- RUBY
- end
+ !instance? && !group_id
end
# Returns a hash of the properties that have been assigned a new value since last save,
@@ -289,86 +397,6 @@ class Service < ApplicationRecord
self.category == :issue_tracker
end
- def self.find_or_create_templates
- create_nonexistent_templates
- templates
- end
-
- private_class_method def self.create_nonexistent_templates
- nonexistent_services = list_nonexistent_services_for(templates)
- return if nonexistent_services.empty?
-
- # Create within a transaction to perform the lowest possible SQL queries.
- transaction do
- nonexistent_services.each do |service_type|
- service_type.constantize.create(template: true)
- end
- end
- end
-
- def self.find_or_initialize_instances
- instances + build_nonexistent_instances
- end
-
- private_class_method def self.build_nonexistent_instances
- list_nonexistent_services_for(instances).map do |service_type|
- service_type.constantize.new
- end
- end
-
- private_class_method def self.list_nonexistent_services_for(scope)
- available_services_types - scope.map(&:type)
- end
-
- def self.available_services_names
- service_names = services_names
- service_names += dev_services_names
-
- service_names.sort_by(&:downcase)
- end
-
- def self.services_names
- SERVICE_NAMES
- end
-
- def self.dev_services_names
- return [] unless Rails.env.development?
-
- DEV_SERVICE_NAMES
- end
-
- def self.available_services_types
- available_services_names.map { |service_name| "#{service_name}_service".camelize }
- end
-
- def self.services_types
- services_names.map { |service_name| "#{service_name}_service".camelize }
- end
-
- def self.build_from_integration(project_id, integration)
- service = integration.dup
-
- if integration.supports_data_fields?
- data_fields = integration.data_fields.dup
- data_fields.service = service
- end
-
- service.template = false
- service.instance = false
- service.inherit_from_id = integration.id if integration.instance?
- service.project_id = project_id
- service.active = false if service.invalid?
- service
- end
-
- def self.instance_exists_for?(type)
- exists?(instance: true, type: type)
- end
-
- def self.instance_for(type)
- find_by(instance: true, type: type)
- end
-
# override if needed
def supports_data_fields?
false
@@ -396,10 +424,6 @@ class Service < ApplicationRecord
end
end
- def self.event_description(event)
- ServicesHelper.service_event_description(event)
- end
-
def valid_recipients?
activated? && !importing?
end
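# A plain-Ruby sketch of the lookup order behind the new default_integration
# above: the array_position(...) ORDER BY keeps candidate integrations in the
# same order as the ancestor-group id list, so .first returns the integration
# of the earliest group in that list, and the instance-level integration is
# used only when no ancestor group has one. The ids below are hypothetical
# (10 = direct parent group, 1 = root group).
ancestor_group_ids   = [10, 1]
group_integrations   = [{ group_id: 1 }, { group_id: 10 }]
instance_integration = { instance: true }

closest = group_integrations.min_by { |i| ancestor_group_ids.index(i[:group_id]) }
closest || instance_integration
# => { :group_id => 10 } – the nearer group's integration wins over the root's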
diff --git a/app/models/service_list.rb b/app/models/service_list.rb
index fa3760f0c56..9cbc5e68059 100644
--- a/app/models/service_list.rb
+++ b/app/models/service_list.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
class ServiceList
- def initialize(batch, service_hash, extra_hash = {})
- @batch = batch
+ def initialize(batch_ids, service_hash, association)
+ @batch_ids = batch_ids
@service_hash = service_hash
- @extra_hash = extra_hash
+ @association = association
end
def to_array
@@ -13,15 +13,15 @@ class ServiceList
private
- attr_reader :batch, :service_hash, :extra_hash
+ attr_reader :batch_ids, :service_hash, :association
def columns
- (service_hash.keys << 'project_id') + extra_hash.keys
+ (service_hash.keys << "#{association}_id")
end
def values
- batch.map do |project_id|
- (service_hash.values << project_id) + extra_hash.values
+ batch_ids.map do |id|
+ (service_hash.values << id)
end
end
end
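# Standalone mirror of the reworked ServiceList above: the association name now
# determines the foreign-key column (previously hard-wired to project_id plus
# an extra_hash), so the same class can build bulk-insert rows for project or
# group integrations. The service_hash below is a hypothetical template row.
def bulk_insert_rows(batch_ids, service_hash, association)
  columns = service_hash.keys << "#{association}_id"
  values  = batch_ids.map { |id| service_hash.values << id }
  [columns, values]
end

bulk_insert_rows([1, 2], { 'type' => 'JiraService' }, 'project')
# => [["type", "project_id"], [["JiraService", 1], ["JiraService", 2]]]
bulk_insert_rows([5], { 'type' => 'JiraService' }, 'group')
# => [["type", "group_id"], [["JiraService", 5]]]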
diff --git a/app/models/snippet.rb b/app/models/snippet.rb
index eb3960ff12b..1cf3097861c 100644
--- a/app/models/snippet.rb
+++ b/app/models/snippet.rb
@@ -214,7 +214,7 @@ class Snippet < ApplicationRecord
def blobs
return [] unless repository_exists?
- repository.ls_files(repository.root_ref).map { |file| Blob.lazy(repository, repository.root_ref, file) }
+ repository.ls_files(default_branch).map { |file| Blob.lazy(repository, default_branch, file) }
end
def hook_attrs
@@ -275,7 +275,7 @@ class Snippet < ApplicationRecord
override :repository
def repository
- @repository ||= Repository.new(full_path, self, shard: repository_storage, disk_path: disk_path, repo_type: Gitlab::GlRepository::SNIPPET)
+ @repository ||= Gitlab::GlRepository::SNIPPET.repository_for(self)
end
override :repository_size_checker
@@ -309,6 +309,11 @@ class Snippet < ApplicationRecord
end
end
+ override :default_branch
+ def default_branch
+ super || 'master'
+ end
+
def repository_storage
snippet_repository&.shard_name || self.class.pick_repository_storage
end
@@ -336,13 +341,17 @@ class Snippet < ApplicationRecord
def file_name_on_repo
return if repository.empty?
- list_files(repository.root_ref).first
+ list_files(default_branch).first
end
def list_files(ref = nil)
return [] if repository.empty?
- repository.ls_files(ref)
+ repository.ls_files(ref || default_branch)
+ end
+
+ def multiple_files?
+ list_files.size > 1
end
class << self
diff --git a/app/models/snippet_input_action.rb b/app/models/snippet_input_action.rb
index cc6373264cc..b5362b5c14e 100644
--- a/app/models/snippet_input_action.rb
+++ b/app/models/snippet_input_action.rb
@@ -15,7 +15,7 @@ class SnippetInputAction
validates :action, inclusion: { in: ACTIONS, message: "%{value} is not a valid action" }
validates :previous_path, presence: true, if: :move_action?
- validates :file_path, presence: true, unless: :create_action?
+ validates :file_path, presence: true, if: -> (action) { action.update_action? || action.delete_action? }
validates :content, presence: true, if: -> (action) { action.create_action? || action.update_action? }
validate :ensure_same_file_path_and_previous_path, if: :update_action?
validate :ensure_different_file_path_and_previous_path, if: :move_action?
diff --git a/app/models/snippet_repository.rb b/app/models/snippet_repository.rb
index 8151308125a..2cfb201191d 100644
--- a/app/models/snippet_repository.rb
+++ b/app/models/snippet_repository.rb
@@ -93,7 +93,7 @@ class SnippetRepository < ApplicationRecord
end
def get_last_empty_file_index
- repository.ls_files(nil).inject(0) do |max, file|
+ repository.ls_files(snippet.default_branch).inject(0) do |max, file|
idx = file[EMPTY_FILE_PATTERN, 1].to_i
[idx, max].max
end
@@ -131,3 +131,5 @@ class SnippetRepository < ApplicationRecord
action[:action] == :update && action[:content].nil?
end
end
+
+SnippetRepository.prepend_if_ee('EE::SnippetRepository')
diff --git a/app/models/snippet_statistics.rb b/app/models/snippet_statistics.rb
index 7439f98d114..8545296d076 100644
--- a/app/models/snippet_statistics.rb
+++ b/app/models/snippet_statistics.rb
@@ -25,7 +25,7 @@ class SnippetStatistics < ApplicationRecord
def update_file_count
count = if snippet.repository_exists?
- repository.ls_files(repository.root_ref).size
+ repository.ls_files(snippet.default_branch).size
else
0
end
diff --git a/app/models/system_note_metadata.rb b/app/models/system_note_metadata.rb
index b6ba96c768e..961212d0295 100644
--- a/app/models/system_note_metadata.rb
+++ b/app/models/system_note_metadata.rb
@@ -11,6 +11,7 @@ class SystemNoteMetadata < ApplicationRecord
close duplicate
moved merge
label milestone
+ relate unrelate
].freeze
ICON_TYPES = %w[
@@ -19,7 +20,7 @@ class SystemNoteMetadata < ApplicationRecord
title time_tracking branch milestone discussion task moved
opened closed merged duplicate locked unlocked outdated
tag due_date pinned_embed cherry_pick health_status approved unapproved
- status alert_issue_added
+ status alert_issue_added relate unrelate new_alert_added
].freeze
validates :note, presence: true
diff --git a/app/models/terraform/state.rb b/app/models/terraform/state.rb
index c50b9da1310..419fffcb666 100644
--- a/app/models/terraform/state.rb
+++ b/app/models/terraform/state.rb
@@ -5,27 +5,34 @@ module Terraform
include UsageStatistics
include FileStoreMounter
- DEFAULT = '{"version":1}'.freeze
HEX_REGEXP = %r{\A\h+\z}.freeze
UUID_LENGTH = 32
belongs_to :project
belongs_to :locked_by_user, class_name: 'User'
+ has_many :versions, class_name: 'Terraform::StateVersion', foreign_key: :terraform_state_id
+ has_one :latest_version, -> { ordered_by_version_desc }, class_name: 'Terraform::StateVersion', foreign_key: :terraform_state_id
+
+ scope :versioning_not_enabled, -> { where(versioning_enabled: false) }
+
validates :project_id, presence: true
validates :uuid, presence: true, uniqueness: true, length: { is: UUID_LENGTH },
format: { with: HEX_REGEXP, message: 'only allows hex characters' }
default_value_for(:uuid, allows_nil: false) { SecureRandom.hex(UUID_LENGTH / 2) }
+ default_value_for(:versioning_enabled, true)
mount_file_store_uploader StateUploader
- default_value_for(:file) { CarrierWaveStringFile.new(DEFAULT) }
-
def file_store
super || StateUploader.default_store
end
+ def latest_file
+ versioning_enabled ? latest_version&.file : file
+ end
+
def local?
file_store == ObjectStorage::Store::LOCAL
end
@@ -33,6 +40,17 @@ module Terraform
def locked?
self.lock_xid.present?
end
+
+ def update_file!(data, version:)
+ if versioning_enabled?
+ new_version = versions.build(version: version)
+ new_version.assign_attributes(created_by_user: locked_by_user, file: data)
+ new_version.save!
+ else
+ self.file = data
+ save!
+ end
+ end
end
end
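# Hedged usage sketch for the new update_file! above (inside the Rails app;
# `state` and `data` are hypothetical – a Terraform::State record and a
# CarrierWave-compatible file object from the state API): with versioning
# enabled each write builds and saves a Terraform::StateVersion, otherwise the
# state file is overwritten in place as before, and latest_file picks the
# right source either way.
state.update_file!(data, version: 3)
state.latest_file
# versioning enabled   => latest_version&.file (newest Terraform::StateVersion)
# legacy, unversioned  => the state's own mounted file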
diff --git a/app/models/terraform/state_version.rb b/app/models/terraform/state_version.rb
new file mode 100644
index 00000000000..d5e315d18a1
--- /dev/null
+++ b/app/models/terraform/state_version.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module Terraform
+ class StateVersion < ApplicationRecord
+ include FileStoreMounter
+
+ belongs_to :terraform_state, class_name: 'Terraform::State', optional: false
+ belongs_to :created_by_user, class_name: 'User', optional: true
+
+ scope :ordered_by_version_desc, -> { order(version: :desc) }
+
+ default_value_for(:file_store) { VersionedStateUploader.default_store }
+
+ mount_file_store_uploader VersionedStateUploader
+
+ delegate :project_id, :uuid, to: :terraform_state, allow_nil: true
+ end
+end
diff --git a/app/models/timelog.rb b/app/models/timelog.rb
index c0aac6f27aa..60aaaaef831 100644
--- a/app/models/timelog.rb
+++ b/app/models/timelog.rb
@@ -7,6 +7,7 @@ class Timelog < ApplicationRecord
belongs_to :issue, touch: true
belongs_to :merge_request, touch: true
belongs_to :user
+ belongs_to :note
scope :for_issues_in_group, -> (group) do
joins(:issue).where(
diff --git a/app/models/todo.rb b/app/models/todo.rb
index f973c1ff1d4..6c8e085762d 100644
--- a/app/models/todo.rb
+++ b/app/models/todo.rb
@@ -17,9 +17,11 @@ class Todo < ApplicationRecord
UNMERGEABLE = 6
DIRECTLY_ADDRESSED = 7
MERGE_TRAIN_REMOVED = 8 # This is an EE-only feature
+ REVIEW_REQUESTED = 9
ACTION_NAMES = {
ASSIGNED => :assigned,
+ REVIEW_REQUESTED => :review_requested,
MENTIONED => :mentioned,
BUILD_FAILED => :build_failed,
MARKED => :marked,
@@ -167,6 +169,10 @@ class Todo < ApplicationRecord
action == ASSIGNED
end
+ def review_requested?
+ action == REVIEW_REQUESTED
+ end
+
def merge_train_removed?
action == MERGE_TRAIN_REMOVED
end
diff --git a/app/models/user.rb b/app/models/user.rb
index f31a6823657..0a784b30d8f 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -58,6 +58,8 @@ class User < ApplicationRecord
devise :lockable, :recoverable, :rememberable, :trackable,
:validatable, :omniauthable, :confirmable, :registerable
+ include AdminChangedPasswordNotifier
+
# This module adds async behaviour to Devise emails
# and should be added after Devise modules are initialized.
include AsyncDeviseEmail
@@ -107,17 +109,18 @@ class User < ApplicationRecord
has_many :group_deploy_keys
has_many :gpg_keys
- has_many :emails, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :emails
has_many :personal_access_tokens, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :identities, dependent: :destroy, autosave: true # rubocop:disable Cop/ActiveRecordDependent
has_many :u2f_registrations, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :webauthn_registrations
has_many :chat_names, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :user_synced_attributes_metadata, autosave: true
has_one :aws_role, class_name: 'Aws::Role'
# Groups
has_many :members
- has_many :group_members, -> { where(requested_at: nil) }, source: 'GroupMember'
+ has_many :group_members, -> { where(requested_at: nil).where("access_level >= ?", Gitlab::Access::GUEST) }, source: 'GroupMember'
has_many :groups, through: :group_members
has_many :owned_groups, -> { where(members: { access_level: Gitlab::Access::OWNER }) }, through: :group_members, source: :group
has_many :maintainers_groups, -> { where(members: { access_level: Gitlab::Access::MAINTAINER }) }, through: :group_members, source: :group
@@ -181,6 +184,7 @@ class User < ApplicationRecord
has_one :user_detail
has_one :user_highest_role
has_one :user_canonical_email
+ has_one :atlassian_identity, class_name: 'Atlassian::Identity'
has_many :reviews, foreign_key: :author_id, inverse_of: :author
@@ -275,6 +279,7 @@ class User < ApplicationRecord
:view_diffs_file_by_file, :view_diffs_file_by_file=,
:tab_width, :tab_width=,
:sourcegraph_enabled, :sourcegraph_enabled=,
+ :gitpod_enabled, :gitpod_enabled=,
:setup_for_company, :setup_for_company=,
:render_whitespace_in_code, :render_whitespace_in_code=,
:experience_level, :experience_level=,
@@ -283,6 +288,7 @@ class User < ApplicationRecord
delegate :path, to: :namespace, allow_nil: true, prefix: true
delegate :job_title, :job_title=, to: :user_detail, allow_nil: true
delegate :bio, :bio=, :bio_html, to: :user_detail, allow_nil: true
+ delegate :webauthn_xid, :webauthn_xid=, to: :user_detail, allow_nil: true
accepts_nested_attributes_for :user_preference, update_only: true
accepts_nested_attributes_for :user_detail, update_only: true
@@ -431,14 +437,21 @@ class User < ApplicationRecord
FROM u2f_registrations AS u2f
WHERE u2f.user_id = users.id
) OR users.otp_required_for_login = ?
+ OR
+ EXISTS (
+ SELECT *
+ FROM webauthn_registrations AS webauthn
+ WHERE webauthn.user_id = users.id
+ )
SQL
where(with_u2f_registrations, true)
end
def self.without_two_factor
- joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id")
- .where("u2f.id IS NULL AND users.otp_required_for_login = ?", false)
+ joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id
+ LEFT OUTER JOIN webauthn_registrations AS webauthn ON webauthn.user_id = users.id")
+ .where("u2f.id IS NULL AND webauthn.id IS NULL AND users.otp_required_for_login = ?", false)
end
#
@@ -751,11 +764,12 @@ class User < ApplicationRecord
otp_backup_codes: nil
)
self.u2f_registrations.destroy_all # rubocop: disable Cop/DestroyAll
+ self.webauthn_registrations.destroy_all # rubocop: disable Cop/DestroyAll
end
end
def two_factor_enabled?
- two_factor_otp_enabled? || two_factor_u2f_enabled?
+ two_factor_otp_enabled? || two_factor_webauthn_u2f_enabled?
end
def two_factor_otp_enabled?
@@ -770,6 +784,16 @@ class User < ApplicationRecord
end
end
+ def two_factor_webauthn_u2f_enabled?
+ two_factor_u2f_enabled? || two_factor_webauthn_enabled?
+ end
+
+ def two_factor_webauthn_enabled?
+ return false unless Feature.enabled?(:webauthn)
+
+ (webauthn_registrations.loaded? && webauthn_registrations.any?) || (!webauthn_registrations.loaded? && webauthn_registrations.exists?)
+ end
+
def namespace_move_dir_allowed
if namespace&.any_project_has_container_registry_tags?
errors.add(:username, _('cannot be changed if a personal project has container registry tags.'))
@@ -1460,6 +1484,11 @@ class User < ApplicationRecord
end
end
+ def notification_settings_for_groups(groups)
+ ids = groups.is_a?(ActiveRecord::Relation) ? groups.select(:id) : groups.map(&:id)
+ notification_settings.for_groups.where(source_id: ids)
+ end
+
# Lazy load global notification setting
# Initializes User setting with Participating level if setting not persisted
def global_notification_setting
@@ -1687,9 +1716,6 @@ class User < ApplicationRecord
[last_activity, last_sign_in].compact.max
end
- # Below is used for the signup_flow experiment. Should be removed
- # when experiment finishes.
- # See https://gitlab.com/gitlab-org/growth/engineering/issues/64
REQUIRES_ROLE_VALUE = 99
def role_required?
@@ -1699,7 +1725,6 @@ class User < ApplicationRecord
def set_role_required!
update_column(:role, REQUIRES_ROLE_VALUE)
end
- # End of signup_flow experiment methods
def dismissed_callout?(feature_name:, ignore_dismissal_earlier_than: nil)
callouts = self.callouts.with_feature_name(feature_name)
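# Standalone mirror of the widened two-factor check above: OTP still counts on
# its own, and the old U2F-only check now also accepts WebAuthn registrations,
# gated behind the :webauthn feature flag. The boolean parameters stand in for
# otp_required_for_login and the two registration associations.
def two_factor_enabled?(otp:, u2f:, webauthn:, webauthn_flag: true)
  otp || u2f || (webauthn_flag && webauthn)
end

two_factor_enabled?(otp: false, u2f: false, webauthn: true)                        # => true
two_factor_enabled?(otp: false, u2f: false, webauthn: true, webauthn_flag: false)  # => false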
diff --git a/app/models/user_callout.rb b/app/models/user_callout.rb
index 82f82356cb4..0ba319aa444 100644
--- a/app/models/user_callout.rb
+++ b/app/models/user_callout.rb
@@ -3,9 +3,30 @@
class UserCallout < ApplicationRecord
belongs_to :user
- # We use `UserCalloutEnums.feature_names` here so that EE can more easily
- # extend this `Hash` with new values.
- enum feature_name: ::UserCalloutEnums.feature_names
+ enum feature_name: {
+ gke_cluster_integration: 1,
+ gcp_signup_offer: 2,
+ cluster_security_warning: 3,
+ gold_trial: 4, # EE-only
+ geo_enable_hashed_storage: 5, # EE-only
+ geo_migrate_hashed_storage: 6, # EE-only
+ canary_deployment: 7, # EE-only
+ gold_trial_billings: 8, # EE-only
+ suggest_popover_dismissed: 9,
+ tabs_position_highlight: 10,
+ threat_monitoring_info: 11, # EE-only
+ account_recovery_regular_check: 12, # EE-only
+ webhooks_moved: 13,
+ service_templates_deprecated: 14,
+ admin_integrations_moved: 15,
+ web_ide_alert_dismissed: 16,
+ active_user_count_threshold: 18, # EE-only
+ buy_pipeline_minutes_notification_dot: 19, # EE-only
+ personal_access_token_expiry: 21, # EE-only
+ suggest_pipeline: 22,
+ customize_homepage: 23,
+ feature_flags_new_version: 24
+ }
validates :user, presence: true
validates :feature_name,
diff --git a/app/models/user_callout_enums.rb b/app/models/user_callout_enums.rb
deleted file mode 100644
index 5b64befd284..00000000000
--- a/app/models/user_callout_enums.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-module UserCalloutEnums
- # Returns the `Hash` to use for the `feature_name` enum in the `UserCallout`
- # model.
- #
- # This method is separate from the `UserCallout` model so that it can be
- # extended by EE.
- #
- # If you are going to add new items to this hash, check that you're not going
- # to conflict with EE-only values: https://gitlab.com/gitlab-org/gitlab/blob/master/ee/app/models/ee/user_callout_enums.rb
- def self.feature_names
- {
- gke_cluster_integration: 1,
- gcp_signup_offer: 2,
- cluster_security_warning: 3,
- suggest_popover_dismissed: 9,
- tabs_position_highlight: 10,
- webhooks_moved: 13,
- admin_integrations_moved: 15,
- personal_access_token_expiry: 21, # EE-only
- suggest_pipeline: 22,
- customize_homepage: 23
- }
- end
-end
-
-UserCalloutEnums.prepend_if_ee('EE::UserCalloutEnums')
diff --git a/app/models/vulnerability.rb b/app/models/vulnerability.rb
new file mode 100644
index 00000000000..71d0b1db410
--- /dev/null
+++ b/app/models/vulnerability.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+# Placeholder class for model that is implemented in EE
+# It reserves '+' as a reference prefix, but the table does not exist in FOSS
+class Vulnerability < ApplicationRecord
+ include IgnorableColumns
+
+ def self.reference_prefix
+ '+'
+ end
+
+ def self.reference_prefix_escaped
+ '&plus;'
+ end
+end
+
+Vulnerability.prepend_if_ee('EE::Vulnerability')
diff --git a/app/models/wiki.rb b/app/models/wiki.rb
index 30273d646cf..9462f7401c4 100644
--- a/app/models/wiki.rb
+++ b/app/models/wiki.rb
@@ -133,8 +133,9 @@ class Wiki
commit = commit_details(:created, message, title)
wiki.write_page(title, format.to_sym, content, commit)
+ after_wiki_activity
- update_container_activity
+ true
rescue Gitlab::Git::Wiki::DuplicatePageError => e
@error_message = "Duplicate page: #{e.message}"
false
@@ -144,16 +145,18 @@ class Wiki
commit = commit_details(:updated, message, page.title)
wiki.update_page(page.path, title || page.name, format.to_sym, content, commit)
+ after_wiki_activity
- update_container_activity
+ true
end
def delete_page(page, message = nil)
return unless page
wiki.delete_page(page.path, commit_details(:deleted, message, page.title))
+ after_wiki_activity
- update_container_activity
+ true
end
def page_title_and_dir(title)
@@ -180,7 +183,7 @@ class Wiki
override :repository
def repository
- @repository ||= Repository.new(full_path, container, shard: repository_storage, disk_path: disk_path, repo_type: Gitlab::GlRepository::WIKI)
+ @repository ||= Gitlab::GlRepository::WIKI.repository_for(container)
end
def repository_storage
@@ -209,6 +212,17 @@ class Wiki
web_url(only_path: true).sub(%r{/#{Wiki::HOMEPAGE}\z}, '')
end
+ # Callbacks for synchronous processing after wiki changes.
+ # These will be executed after any change made through GitLab itself (web UI and API),
+ # but not for Git pushes.
+ def after_wiki_activity
+ end
+
+ # Callbacks for background processing after wiki changes.
+ # These will be executed after any change to the wiki repository.
+ def after_post_receive
+ end
+
private
def commit_details(action, message = nil, title = nil)
@@ -225,10 +239,6 @@ class Wiki
def default_message(action, title)
"#{user.username} #{action} page: #{title}"
end
-
- def update_container_activity
- container.after_wiki_activity
- end
end
Wiki.prepend_if_ee('EE::Wiki')
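# Standalone sketch of the hook pattern introduced above: the base Wiki class
# calls no-op callbacks after every page change, and container-specific
# subclasses (ProjectWiki here, group wikis in EE) override them. The classes
# below are simplified stand-ins, not the real models.
class BaseWiki
  def create_page(title)
    # ... write the page to the wiki repository ...
    after_wiki_activity
    true
  end

  def after_wiki_activity; end   # no-op hook, overridden by subclasses
end

class ProjectWikiSketch < BaseWiki
  def after_wiki_activity
    puts 'touch project.last_activity_at / last_repository_updated_at'
  end
end

ProjectWikiSketch.new.create_page('home')  # => true, after touching the activity columns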