author    GitLab Bot <gitlab-bot@gitlab.com>  2020-12-17 11:59:07 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-12-17 11:59:07 +0000
commit    8b573c94895dc0ac0e1d9d59cf3e8745e8b539ca (patch)
tree      544930fb309b30317ae9797a9683768705d664c4 /app/models
parent    4b1de649d0168371549608993deac953eb692019 (diff)
download  gitlab-ce-8b573c94895dc0ac0e1d9d59cf3e8745e8b539ca.tar.gz
Add latest changes from gitlab-org/gitlab@13-7-stable-ee (tag: v13.7.0-rc42)
Diffstat (limited to 'app/models')
-rw-r--r--  app/models/alert_management/alert.rb | 9
-rw-r--r--  app/models/alert_management/http_integration.rb | 1
-rw-r--r--  app/models/analytics/devops_adoption.rb | 6
-rw-r--r--  app/models/analytics/devops_adoption/segment.rb | 24
-rw-r--r--  app/models/analytics/devops_adoption/segment_selection.rb | 36
-rw-r--r--  app/models/application_setting.rb | 57
-rw-r--r--  app/models/application_setting_implementation.rb | 6
-rw-r--r--  app/models/approval.rb | 2
-rw-r--r--  app/models/audit_event.rb | 2
-rw-r--r--  app/models/bulk_imports/entity.rb | 25
-rw-r--r--  app/models/bulk_imports/failure.rb | 13
-rw-r--r--  app/models/ci/bridge.rb | 6
-rw-r--r--  app/models/ci/build.rb | 55
-rw-r--r--  app/models/ci/build_dependencies.rb | 82
-rw-r--r--  app/models/ci/build_trace_chunks/fog.rb | 29
-rw-r--r--  app/models/ci/job_artifact.rb | 8
-rw-r--r--  app/models/ci/pipeline.rb | 130
-rw-r--r--  app/models/clusters/agent.rb | 4
-rw-r--r--  app/models/clusters/applications/helm.rb | 35
-rw-r--r--  app/models/clusters/applications/runner.rb | 2
-rw-r--r--  app/models/clusters/cluster.rb | 4
-rw-r--r--  app/models/clusters/concerns/application_core.rb | 8
-rw-r--r--  app/models/clusters/concerns/application_data.rb | 8
-rw-r--r--  app/models/clusters/platforms/kubernetes.rb | 66
-rw-r--r--  app/models/commit_collection.rb | 6
-rw-r--r--  app/models/concerns/cache_markdown_field.rb | 62
-rw-r--r--  app/models/concerns/can_move_repository_storage.rb | 46
-rw-r--r--  app/models/concerns/case_sensitivity.rb | 12
-rw-r--r--  app/models/concerns/enums/ci/pipeline.rb | 9
-rw-r--r--  app/models/concerns/enums/data_visualization_palette.rb | 33
-rw-r--r--  app/models/concerns/enums/internal_id.rb | 3
-rw-r--r--  app/models/concerns/has_repository.rb | 2
-rw-r--r--  app/models/concerns/has_wiki_page_meta_attributes.rb | 164
-rw-r--r--  app/models/concerns/has_wiki_page_slug_attributes.rb | 25
-rw-r--r--  app/models/concerns/ignorable_columns.rb | 12
-rw-r--r--  app/models/concerns/issuable.rb | 9
-rw-r--r--  app/models/concerns/mentionable.rb | 41
-rw-r--r--  app/models/concerns/optimized_issuable_label_filter.rb | 37
-rw-r--r--  app/models/concerns/project_features_compatibility.rb | 8
-rw-r--r--  app/models/concerns/protected_ref.rb | 6
-rw-r--r--  app/models/concerns/protected_ref_access.rb | 1
-rw-r--r--  app/models/concerns/reactive_caching.rb | 3
-rw-r--r--  app/models/concerns/repository_storage_movable.rb | 121
-rw-r--r--  app/models/concerns/routable.rb | 47
-rw-r--r--  app/models/concerns/shardable.rb | 1
-rw-r--r--  app/models/concerns/timebox.rb | 6
-rw-r--r--  app/models/concerns/token_authenticatable.rb | 7
-rw-r--r--  app/models/concerns/token_authenticatable_strategies/base.rb | 10
-rw-r--r--  app/models/concerns/triggerable_hooks.rb | 5
-rw-r--r--  app/models/container_repository.rb | 3
-rw-r--r--  app/models/custom_emoji.rb | 2
-rw-r--r--  app/models/cycle_analytics/level_base.rb | 57
-rw-r--r--  app/models/cycle_analytics/project_level.rb | 9
-rw-r--r--  app/models/dependency_proxy.rb | 2
-rw-r--r--  app/models/dependency_proxy/manifest.rb | 16
-rw-r--r--  app/models/dependency_proxy/registry.rb | 9
-rw-r--r--  app/models/deployment.rb | 24
-rw-r--r--  app/models/diff_note.rb | 4
-rw-r--r--  app/models/environment.rb | 41
-rw-r--r--  app/models/experiment.rb | 17
-rw-r--r--  app/models/experiment_subject.rb | 28
-rw-r--r--  app/models/exported_protected_branch.rb | 5
-rw-r--r--  app/models/group.rb | 22
-rw-r--r--  app/models/group_import_state.rb | 4
-rw-r--r--  app/models/identity.rb | 3
-rw-r--r--  app/models/issue.rb | 8
-rw-r--r--  app/models/iteration.rb | 6
-rw-r--r--  app/models/label.rb | 2
-rw-r--r--  app/models/label_priority.rb | 5
-rw-r--r--  app/models/list.rb | 2
-rw-r--r--  app/models/merge_request.rb | 68
-rw-r--r--  app/models/merge_request/metrics.rb | 5
-rw-r--r--  app/models/merge_request_diff.rb | 34
-rw-r--r--  app/models/merge_request_reviewer.rb | 2
-rw-r--r--  app/models/milestone.rb | 4
-rw-r--r--  app/models/namespace.rb | 1
-rw-r--r--  app/models/namespace_onboarding_action.rb | 27
-rw-r--r--  app/models/note.rb | 5
-rw-r--r--  app/models/notification_setting.rb | 2
-rw-r--r--  app/models/packages/event.rb | 5
-rw-r--r--  app/models/packages/package.rb | 26
-rw-r--r--  app/models/packages/package_file.rb | 11
-rw-r--r--  app/models/pages/lookup_path.rb | 33
-rw-r--r--  app/models/pages_domain.rb | 8
-rw-r--r--  app/models/personal_access_token.rb | 13
-rw-r--r--  app/models/project.rb | 89
-rw-r--r--  app/models/project_feature.rb | 4
-rw-r--r--  app/models/project_repository.rb | 1
-rw-r--r--  app/models/project_repository_storage_move.rb | 105
-rw-r--r--  app/models/project_services/datadog_service.rb | 124
-rw-r--r--  app/models/project_services/jenkins_service.rb | 91
-rw-r--r--  app/models/project_services/jira_service.rb | 5
-rw-r--r--  app/models/project_services/mock_deployment_service.rb | 6
-rw-r--r--  app/models/project_services/pipelines_email_service.rb | 4
-rw-r--r--  app/models/project_statistics.rb | 2
-rw-r--r--  app/models/protected_branch.rb | 4
-rw-r--r--  app/models/protected_branch/push_access_level.rb | 12
-rw-r--r--  app/models/raw_usage_data.rb | 2
-rw-r--r--  app/models/redirect_route.rb | 2
-rw-r--r--  app/models/release.rb | 2
-rw-r--r--  app/models/release_highlight.rb | 102
-rw-r--r--  app/models/repository.rb | 5
-rw-r--r--  app/models/resource_event.rb | 8
-rw-r--r--  app/models/resource_label_event.rb | 13
-rw-r--r--  app/models/resource_state_event.rb | 4
-rw-r--r--  app/models/resource_timebox_event.rb | 10
-rw-r--r--  app/models/route.rb | 2
-rw-r--r--  app/models/sentry_issue.rb | 5
-rw-r--r--  app/models/service.rb | 26
-rw-r--r--  app/models/snippet.rb | 6
-rw-r--r--  app/models/snippet_blob.rb | 4
-rw-r--r--  app/models/snippet_repository_storage_move.rb | 24
-rw-r--r--  app/models/suggestion.rb | 3
-rw-r--r--  app/models/system_note_metadata.rb | 7
-rw-r--r--  app/models/terraform/state.rb | 29
-rw-r--r--  app/models/terraform/state_version.rb | 4
-rw-r--r--  app/models/timelog.rb | 4
-rw-r--r--  app/models/todo.rb | 6
-rw-r--r--  app/models/user.rb | 59
-rw-r--r--  app/models/user_callout.rb | 3
-rw-r--r--  app/models/user_detail.rb | 2
-rw-r--r--  app/models/wiki_page/meta.rb | 135
-rw-r--r--  app/models/wiki_page/slug.rb | 23
-rw-r--r--  app/models/zoom_meeting.rb | 10
124 files changed, 2033 insertions, 721 deletions
diff --git a/app/models/alert_management/alert.rb b/app/models/alert_management/alert.rb
index 7ce7f40b6a8..7090d9f4ea1 100644
--- a/app/models/alert_management/alert.rb
+++ b/app/models/alert_management/alert.rb
@@ -69,6 +69,11 @@ module AlertManagement
unknown: 5
}
+ enum domain: {
+ operations: 0,
+ threat_monitoring: 1
+ }
+
state_machine :status, initial: :triggered do
state :triggered, value: STATUSES[:triggered]
@@ -122,6 +127,8 @@ module AlertManagement
scope :open, -> { with_status(open_statuses) }
scope :not_resolved, -> { without_status(:resolved) }
scope :with_prometheus_alert, -> { includes(:prometheus_alert) }
+ scope :with_threat_monitoring_alerts, -> { where(domain: :threat_monitoring) }
+ scope :with_operations_alerts, -> { where(domain: :operations) }
scope :order_start_time, -> (sort_order) { order(started_at: sort_order) }
scope :order_end_time, -> (sort_order) { order(ended_at: sort_order) }
@@ -263,3 +270,5 @@ module AlertManagement
end
end
end
+
+AlertManagement::Alert.prepend_if_ee('EE::AlertManagement::Alert')
diff --git a/app/models/alert_management/http_integration.rb b/app/models/alert_management/http_integration.rb
index ae5170867c3..0c916c576cb 100644
--- a/app/models/alert_management/http_integration.rb
+++ b/app/models/alert_management/http_integration.rb
@@ -22,6 +22,7 @@ module AlertManagement
validates :name, presence: true, length: { maximum: 255 }
validates :endpoint_identifier, presence: true, length: { maximum: 255 }, format: { with: /\A[A-Za-z0-9]+\z/ }
validates :endpoint_identifier, uniqueness: { scope: [:project_id, :active] }, if: :active?
+ validates :payload_attribute_mapping, json_schema: { filename: 'http_integration_payload_attribute_mapping' }
before_validation :prevent_token_assignment
before_validation :prevent_endpoint_identifier_assignment
diff --git a/app/models/analytics/devops_adoption.rb b/app/models/analytics/devops_adoption.rb
deleted file mode 100644
index ed5a5b16a6e..00000000000
--- a/app/models/analytics/devops_adoption.rb
+++ /dev/null
@@ -1,6 +0,0 @@
-# frozen_string_literal: true
-module Analytics::DevopsAdoption
- def self.table_name_prefix
- 'analytics_devops_adoption_'
- end
-end
diff --git a/app/models/analytics/devops_adoption/segment.rb b/app/models/analytics/devops_adoption/segment.rb
deleted file mode 100644
index 71d4a312627..00000000000
--- a/app/models/analytics/devops_adoption/segment.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-class Analytics::DevopsAdoption::Segment < ApplicationRecord
- ALLOWED_SEGMENT_COUNT = 20
-
- has_many :segment_selections
- has_many :groups, through: :segment_selections
-
- validates :name, presence: true, uniqueness: true, length: { maximum: 255 }
- validate :validate_segment_count
-
- accepts_nested_attributes_for :segment_selections, allow_destroy: true
-
- scope :ordered_by_name, -> { order(:name) }
- scope :with_groups, -> { preload(:groups) }
-
- private
-
- def validate_segment_count
- if self.class.count >= ALLOWED_SEGMENT_COUNT
- errors.add(:name, s_('DevopsAdoptionSegment|The maximum number of segments has been reached'))
- end
- end
-end
diff --git a/app/models/analytics/devops_adoption/segment_selection.rb b/app/models/analytics/devops_adoption/segment_selection.rb
deleted file mode 100644
index 6b70c13a773..00000000000
--- a/app/models/analytics/devops_adoption/segment_selection.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-class Analytics::DevopsAdoption::SegmentSelection < ApplicationRecord
- ALLOWED_SELECTIONS_PER_SEGMENT = 20
-
- belongs_to :segment
- belongs_to :project
- belongs_to :group
-
- validates :segment, presence: true
- validates :project, presence: { unless: :group }
- validates :project_id, uniqueness: { scope: :segment_id, if: :project }
- validates :group, presence: { unless: :project }
- validates :group_id, uniqueness: { scope: :segment_id, if: :group }
-
- validate :exclusive_project_or_group
- validate :validate_selection_count
-
- private
-
- def exclusive_project_or_group
- if project.present? && group.present?
- errors.add(:group, s_('DevopsAdoptionSegmentSelection|The selection cannot be configured for a project and for a group at the same time'))
- end
- end
-
- def validate_selection_count
- return unless segment
-
- selection_count_for_segment = self.class.where(segment: segment).count
-
- if selection_count_for_segment >= ALLOWED_SELECTIONS_PER_SEGMENT
- errors.add(:segment, s_('DevopsAdoptionSegmentSelection|The maximum number of selections has been reached'))
- end
- end
-end
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index 7bfa5fb4cb8..9b9db7f93fd 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -5,14 +5,14 @@ class ApplicationSetting < ApplicationRecord
include CacheMarkdownField
include TokenAuthenticatable
include ChronicDurationAttribute
- include IgnorableColumns
-
- ignore_column :namespace_storage_size_limit, remove_with: '13.5', remove_after: '2020-09-22'
INSTANCE_REVIEW_MIN_USERS = 50
GRAFANA_URL_ERROR_MESSAGE = 'Please check your Grafana URL setting in ' \
'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
+ KROKI_URL_ERROR_MESSAGE = 'Please check your Kroki URL setting in ' \
+ 'Admin Area > Settings > General > Kroki'
+
add_authentication_token_field :runners_registration_token, encrypted: -> { Feature.enabled?(:application_settings_tokens_optional_encryption) ? :optional : :required }
add_authentication_token_field :health_check_access_token
add_authentication_token_field :static_objects_external_storage_auth_token
@@ -128,6 +128,11 @@ class ApplicationSetting < ApplicationRecord
presence: true,
if: :unique_ips_limit_enabled
+ validates :kroki_url,
+ presence: { if: :kroki_enabled }
+
+ validate :validate_kroki_url, if: :kroki_enabled
+
validates :plantuml_url,
presence: true,
if: :plantuml_enabled
@@ -244,6 +249,12 @@ class ApplicationSetting < ApplicationRecord
validates :user_default_internal_regex, js_regex: true, allow_nil: true
+ validates :personal_access_token_prefix,
+ format: { with: /\A[a-zA-Z0-9_+=\/@:.-]+\z/,
+ message: _("can contain only letters of the Base64 alphabet (RFC4648) with the addition of '@', ':' and '.'") },
+ length: { maximum: 20, message: _('is too long (maximum is %{count} characters)') },
+ allow_blank: true
+
validates :commit_email_hostname, format: { with: /\A[^@]+\z/ }
validates :archive_builds_in_seconds,
@@ -362,11 +373,11 @@ class ApplicationSetting < ApplicationRecord
validates :eks_access_key_id,
length: { in: 16..128 },
- if: :eks_integration_enabled?
+ if: -> (setting) { setting.eks_integration_enabled? && setting.eks_access_key_id.present? }
validates :eks_secret_access_key,
presence: true,
- if: :eks_integration_enabled?
+ if: -> (setting) { setting.eks_integration_enabled? && setting.eks_access_key_id.present? }
validates_with X509CertificateCredentialsValidator,
certificate: :external_auth_client_cert,
@@ -418,6 +429,9 @@ class ApplicationSetting < ApplicationRecord
attr_encrypted :secret_detection_token_revocation_token, encryption_options_base_truncated_aes_256_gcm
attr_encrypted :cloud_license_auth_token, encryption_options_base_truncated_aes_256_gcm
+ validates :disable_feed_token,
+ inclusion: { in: [true, false], message: 'must be a boolean value' }
+
before_validation :ensure_uuid!
before_save :ensure_runners_registration_token
@@ -429,18 +443,21 @@ class ApplicationSetting < ApplicationRecord
after_commit :expire_performance_bar_allowed_user_ids_cache, if: -> { previous_changes.key?('performance_bar_allowed_group_id') }
def validate_grafana_url
- unless parsed_grafana_url
- self.errors.add(
- :grafana_url,
- "must be a valid relative or absolute URL. #{GRAFANA_URL_ERROR_MESSAGE}"
- )
- end
+ validate_url(parsed_grafana_url, :grafana_url, GRAFANA_URL_ERROR_MESSAGE)
end
def grafana_url_absolute?
parsed_grafana_url&.absolute?
end
+ def validate_kroki_url
+ validate_url(parsed_kroki_url, :kroki_url, KROKI_URL_ERROR_MESSAGE)
+ end
+
+ def kroki_url_absolute?
+ parsed_kroki_url&.absolute?
+ end
+
def sourcegraph_url_is_com?
!!(sourcegraph_url =~ /\Ahttps:\/\/(www\.)?sourcegraph\.com/)
end
@@ -503,6 +520,24 @@ class ApplicationSetting < ApplicationRecord
def parsed_grafana_url
@parsed_grafana_url ||= Gitlab::Utils.parse_url(grafana_url)
end
+
+ def parsed_kroki_url
+ @parsed_kroki_url ||= Gitlab::UrlBlocker.validate!(kroki_url, schemes: %w(http https), enforce_sanitization: true)[0]
+ rescue Gitlab::UrlBlocker::BlockedUrlError => error
+ self.errors.add(
+ :kroki_url,
+ "is not valid. #{error}"
+ )
+ end
+
+ def validate_url(parsed_url, name, error_message)
+ unless parsed_url
+ self.errors.add(
+ name,
+ "must be a valid relative or absolute URL. #{error_message}"
+ )
+ end
+ end
end
ApplicationSetting.prepend_if_ee('EE::ApplicationSetting')
diff --git a/app/models/application_setting_implementation.rb b/app/models/application_setting_implementation.rb
index 5c7abbccd63..105889a364a 100644
--- a/app/models/application_setting_implementation.rb
+++ b/app/models/application_setting_implementation.rb
@@ -58,6 +58,7 @@ module ApplicationSettingImplementation
default_projects_limit: Settings.gitlab['default_projects_limit'],
default_snippet_visibility: Settings.gitlab.default_projects_features['visibility_level'],
diff_max_patch_bytes: Gitlab::Git::Diff::DEFAULT_MAX_PATCH_BYTES,
+ disable_feed_token: false,
disabled_oauth_sign_in_sources: [],
dns_rebinding_protection_enabled: true,
domain_allowlist: Settings.gitlab['domain_allowlist'],
@@ -103,6 +104,7 @@ module ApplicationSettingImplementation
password_authentication_enabled_for_git: true,
password_authentication_enabled_for_web: Settings.gitlab['signin_enabled'],
performance_bar_allowed_group_id: nil,
+ personal_access_token_prefix: nil,
plantuml_enabled: false,
plantuml_url: nil,
polling_interval_multiplier: 1,
@@ -168,7 +170,9 @@ module ApplicationSettingImplementation
user_show_add_ssh_key_message: true,
wiki_page_max_content_bytes: 50.megabytes,
container_registry_delete_tags_service_timeout: 250,
- container_registry_expiration_policies_worker_capacity: 0
+ container_registry_expiration_policies_worker_capacity: 0,
+ kroki_enabled: false,
+ kroki_url: nil
}
end
diff --git a/app/models/approval.rb b/app/models/approval.rb
index bc123de0b20..899ea466315 100644
--- a/app/models/approval.rb
+++ b/app/models/approval.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class Approval < ApplicationRecord
+ include CreatedAtFilterable
+
belongs_to :user
belongs_to :merge_request
diff --git a/app/models/audit_event.rb b/app/models/audit_event.rb
index 55e8a5d4535..a4d991b040c 100644
--- a/app/models/audit_event.rb
+++ b/app/models/audit_event.rb
@@ -13,6 +13,8 @@ class AuditEvent < ApplicationRecord
:target_id
].freeze
+ self.primary_key = :id
+
serialize :details, Hash # rubocop:disable Cop/ActiveRecordSerialize
belongs_to :user, foreign_key: :author_id
diff --git a/app/models/bulk_imports/entity.rb b/app/models/bulk_imports/entity.rb
index 34030e079c7..a4d0b7485ba 100644
--- a/app/models/bulk_imports/entity.rb
+++ b/app/models/bulk_imports/entity.rb
@@ -30,6 +30,11 @@ class BulkImports::Entity < ApplicationRecord
class_name: 'BulkImports::Tracker',
foreign_key: :bulk_import_entity_id
+ has_many :failures,
+ class_name: 'BulkImports::Failure',
+ inverse_of: :entity,
+ foreign_key: :bulk_import_entity_id
+
validates :project, absence: true, if: :group
validates :group, absence: true, if: :project
validates :source_type, :source_full_path, :destination_name,
@@ -52,6 +57,7 @@ class BulkImports::Entity < ApplicationRecord
event :finish do
transition started: :finished
+ transition failed: :failed
end
event :fail_op do
@@ -59,6 +65,25 @@ class BulkImports::Entity < ApplicationRecord
end
end
+ def update_tracker_for(relation:, has_next_page:, next_page: nil)
+ attributes = {
+ relation: relation,
+ has_next_page: has_next_page,
+ next_page: next_page,
+ bulk_import_entity_id: id
+ }
+
+ trackers.upsert(attributes, unique_by: %i[bulk_import_entity_id relation])
+ end
+
+ def has_next_page?(relation)
+ trackers.find_by(relation: relation)&.has_next_page
+ end
+
+ def next_page_for(relation)
+ trackers.find_by(relation: relation)&.next_page
+ end
+
private
def validate_parent_is_a_group
diff --git a/app/models/bulk_imports/failure.rb b/app/models/bulk_imports/failure.rb
new file mode 100644
index 00000000000..a6f7582c3b0
--- /dev/null
+++ b/app/models/bulk_imports/failure.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class BulkImports::Failure < ApplicationRecord
+ self.table_name = 'bulk_import_failures'
+
+ belongs_to :entity,
+ class_name: 'BulkImports::Entity',
+ foreign_key: :bulk_import_entity_id,
+ inverse_of: :failures,
+ optional: false
+
+ validates :entity, presence: true
+end
diff --git a/app/models/ci/bridge.rb b/app/models/ci/bridge.rb
index 5b23cf46fdb..19a0d424e33 100644
--- a/app/models/ci/bridge.rb
+++ b/app/models/ci/bridge.rb
@@ -132,14 +132,10 @@ module Ci
end
def playable?
- return false unless ::Gitlab::Ci::Features.manual_bridges_enabled?(project)
-
action? && !archived? && manual?
end
def action?
- return false unless ::Gitlab::Ci::Features.manual_bridges_enabled?(project)
-
%w[manual].include?(self.when)
end
@@ -206,7 +202,7 @@ module Ci
override :dependency_variables
def dependency_variables
- return [] unless ::Feature.enabled?(:ci_bridge_dependency_variables, project)
+ return [] unless ::Feature.enabled?(:ci_bridge_dependency_variables, project, default_enabled: true)
super
end
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 84abd01786d..71939f070cb 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -190,6 +190,8 @@ module Ci
scope :with_coverage, -> { where.not(coverage: nil) }
+ scope :for_project, -> (project_id) { where(project_id: project_id) }
+
acts_as_taggable
add_authentication_token_field :token, encrypted: :optional
@@ -379,8 +381,16 @@ module Ci
Ci::BuildRunnerSession.where(build: build).delete_all
end
- after_transition any => [:skipped, :canceled] do |build|
- build.deployment&.cancel
+ after_transition any => [:skipped, :canceled] do |build, transition|
+ if Feature.enabled?(:cd_skipped_deployment_status, build.project)
+ if transition.to_name == :skipped
+ build.deployment&.skip
+ else
+ build.deployment&.cancel
+ end
+ else
+ build.deployment&.cancel
+ end
end
end
@@ -527,6 +537,7 @@ module Ci
strong_memoize(:variables) do
Gitlab::Ci::Variables::Collection.new
.concat(persisted_variables)
+ .concat(dependency_proxy_variables)
.concat(job_jwt_variables)
.concat(scoped_variables)
.concat(job_variables)
@@ -575,6 +586,15 @@ module Ci
end
end
+ def dependency_proxy_variables
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ break variables unless Gitlab.config.dependency_proxy.enabled
+
+ variables.append(key: 'CI_DEPENDENCY_PROXY_USER', value: ::Gitlab::Auth::CI_JOB_USER)
+ variables.append(key: 'CI_DEPENDENCY_PROXY_PASSWORD', value: token.to_s, public: false, masked: true)
+ end
+ end
+
def features
{ trace_sections: true }
end
@@ -908,13 +928,33 @@ module Ci
end
def collect_coverage_reports!(coverage_report)
+ project_path, worktree_paths = if Feature.enabled?(:smart_cobertura_parser, project)
+ # If the flag is disabled, we intentionally pass nil
+ # for both project_path and worktree_paths to fallback
+ # to the non-smart behavior of the parser
+ [project.full_path, pipeline.all_worktree_paths]
+ end
+
each_report(Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES) do |file_type, blob|
- Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, coverage_report)
+ Gitlab::Ci::Parsers.fabricate!(file_type).parse!(
+ blob,
+ coverage_report,
+ project_path: project_path,
+ worktree_paths: worktree_paths
+ )
end
coverage_report
end
+ def collect_codequality_reports!(codequality_report)
+ each_report(Ci::JobArtifact::CODEQUALITY_REPORT_FILE_TYPES) do |file_type, blob|
+ Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, codequality_report)
+ end
+
+ codequality_report
+ end
+
def collect_terraform_reports!(terraform_reports)
each_report(::Ci::JobArtifact::TERRAFORM_REPORT_FILE_TYPES) do |file_type, blob, report_artifact|
::Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, terraform_reports, artifact: report_artifact)
@@ -966,6 +1006,15 @@ module Ci
::Gitlab.com? ? 500_000 : 0
end
+ def debug_mode?
+ return false unless Feature.enabled?(:restrict_access_to_build_debug_mode, default_enabled: true)
+
+ # TODO: Have `debug_mode?` check against data on sent back from runner
+ # to capture all the ways that variables can be set.
+ # See (https://gitlab.com/gitlab-org/gitlab/-/issues/290955)
+ variables.any? { |variable| variable[:key] == 'CI_DEBUG_TRACE' && variable[:value].casecmp('true') == 0 }
+ end
+
protected
def run_status_commit_hooks!
diff --git a/app/models/ci/build_dependencies.rb b/app/models/ci/build_dependencies.rb
index 2fcd1708cf4..a6abeb517c1 100644
--- a/app/models/ci/build_dependencies.rb
+++ b/app/models/ci/build_dependencies.rb
@@ -2,6 +2,8 @@
module Ci
class BuildDependencies
+ include ::Gitlab::Utils::StrongMemoize
+
attr_reader :processable
def initialize(processable)
@@ -9,7 +11,7 @@ module Ci
end
def all
- (local + cross_pipeline).uniq
+ (local + cross_pipeline + cross_project).uniq
end
# Dependencies local to the given pipeline
@@ -23,8 +25,16 @@ module Ci
deps
end
- # Dependencies that are defined in other pipelines
+ # Dependencies from the same parent-pipeline hierarchy excluding
+ # the current job's pipeline
def cross_pipeline
+ strong_memoize(:cross_pipeline) do
+ fetch_dependencies_in_hierarchy
+ end
+ end
+
+ # Dependencies that are defined by project and ref
+ def cross_project
[]
end
@@ -33,7 +43,7 @@ module Ci
end
def valid?
- valid_local? && valid_cross_pipeline?
+ valid_local? && valid_cross_pipeline? && valid_cross_project?
end
private
@@ -44,13 +54,61 @@ module Ci
::Ci::Build
end
+ def fetch_dependencies_in_hierarchy
+ deps_specifications = specified_cross_pipeline_dependencies
+ return [] if deps_specifications.empty?
+
+ deps_specifications = expand_variables_and_validate(deps_specifications)
+ jobs_in_pipeline_hierarchy(deps_specifications)
+ end
+
+ def jobs_in_pipeline_hierarchy(deps_specifications)
+ all_pipeline_ids = []
+ all_job_names = []
+
+ deps_specifications.each do |spec|
+ all_pipeline_ids << spec[:pipeline]
+ all_job_names << spec[:job]
+ end
+
+ model_class.latest.success
+ .in_pipelines(processable.pipeline.same_family_pipeline_ids)
+ .in_pipelines(all_pipeline_ids.uniq)
+ .by_name(all_job_names.uniq)
+ .select do |dependency|
+ # the query may not return exact matches pipeline-job, so we filter
+ # them separately.
+ deps_specifications.find do |spec|
+ spec[:pipeline] == dependency.pipeline_id &&
+ spec[:job] == dependency.name
+ end
+ end
+ end
+
+ def expand_variables_and_validate(specifications)
+ specifications.map do |spec|
+ pipeline = ExpandVariables.expand(spec[:pipeline].to_s, processable_variables).to_i
+ # current pipeline is not allowed because local dependencies
+ # should be used instead.
+ next if pipeline == processable.pipeline_id
+
+ job = ExpandVariables.expand(spec[:job], processable_variables)
+
+ { job: job, pipeline: pipeline }
+ end.compact
+ end
+
+ def valid_cross_pipeline?
+ cross_pipeline.size == specified_cross_pipeline_dependencies.size
+ end
+
def valid_local?
return true if Feature.enabled?(:ci_disable_validates_dependencies)
local.all?(&:valid_dependency?)
end
- def valid_cross_pipeline?
+ def valid_cross_project?
true
end
@@ -78,6 +136,22 @@ module Ci
scope.where(name: processable.options[:dependencies])
end
+
+ def processable_variables
+ -> { processable.simple_variables_without_dependencies }
+ end
+
+ def specified_cross_pipeline_dependencies
+ strong_memoize(:specified_cross_pipeline_dependencies) do
+ next [] unless Feature.enabled?(:ci_cross_pipeline_artifacts_download, processable.project, default_enabled: true)
+
+ specified_cross_dependencies.select { |dep| dep[:pipeline] && dep[:artifacts] }
+ end
+ end
+
+ def specified_cross_dependencies
+ Array(processable.options[:cross_dependencies])
+ end
end
end
diff --git a/app/models/ci/build_trace_chunks/fog.rb b/app/models/ci/build_trace_chunks/fog.rb
index d3051e3dadc..27b579bf428 100644
--- a/app/models/ci/build_trace_chunks/fog.rb
+++ b/app/models/ci/build_trace_chunks/fog.rb
@@ -14,11 +14,15 @@ module Ci
end
def set_data(model, new_data)
- # TODO: Support AWS S3 server side encryption
- files.create({
- key: key(model),
- body: new_data
- })
+ if Feature.enabled?(:ci_live_trace_use_fog_attributes, default_enabled: true)
+ files.create(create_attributes(model, new_data))
+ else
+ # TODO: Support AWS S3 server side encryption
+ files.create({
+ key: key(model),
+ body: new_data
+ })
+ end
end
def append_data(model, new_data, offset)
@@ -57,6 +61,13 @@ module Ci
key_raw(model.build_id, model.chunk_index)
end
+ def create_attributes(model, new_data)
+ {
+ key: key(model),
+ body: new_data
+ }.merge(object_store_config.fog_attributes)
+ end
+
def key_raw(build_id, chunk_index)
"tmp/builds/#{build_id.to_i}/chunks/#{chunk_index.to_i}.log"
end
@@ -84,6 +95,14 @@ module Ci
def object_store
Gitlab.config.artifacts.object_store
end
+
+ def object_store_raw_config
+ object_store
+ end
+
+ def object_store_config
+ @object_store_config ||= ::ObjectStorage::Config.new(object_store_raw_config)
+ end
end
end
end
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 7cedd13b407..c80d50ea131 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -7,15 +7,13 @@ module Ci
include UpdateProjectStatistics
include UsageStatistics
include Sortable
- include IgnorableColumns
include Artifactable
include FileStoreMounter
extend Gitlab::Ci::Model
- ignore_columns :locked, remove_after: '2020-07-22', remove_with: '13.4'
-
TEST_REPORT_FILE_TYPES = %w[junit].freeze
COVERAGE_REPORT_FILE_TYPES = %w[cobertura].freeze
+ CODEQUALITY_REPORT_FILE_TYPES = %w[codequality].freeze
ACCESSIBILITY_REPORT_FILE_TYPES = %w[accessibility].freeze
NON_ERASABLE_FILE_TYPES = %w[trace].freeze
TERRAFORM_REPORT_FILE_TYPES = %w[terraform].freeze
@@ -157,6 +155,10 @@ module Ci
with_file_types(COVERAGE_REPORT_FILE_TYPES)
end
+ scope :codequality_reports, -> do
+ with_file_types(CODEQUALITY_REPORT_FILE_TYPES)
+ end
+
scope :terraform_reports, -> do
with_file_types(TERRAFORM_REPORT_FILE_TYPES)
end
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 8707d635e03..5e5f51d776f 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -7,6 +7,7 @@ module Ci
include Importable
include AfterCommitQueue
include Presentable
+ include Gitlab::Allowable
include Gitlab::OptimisticLocking
include Gitlab::Utils::StrongMemoize
include AtomicInternalId
@@ -16,6 +17,8 @@ module Ci
include FromUnion
include UpdatedAtFilterable
+ MAX_OPEN_MERGE_REQUESTS_REFS = 4
+
PROJECT_ROUTE_AND_NAMESPACE_ROUTE = {
project: [:project_feature, :route, { namespace: :route }]
}.freeze
@@ -104,7 +107,6 @@ module Ci
accepts_nested_attributes_for :variables, reject_if: :persisted?
- delegate :id, to: :project, prefix: true
delegate :full_path, to: :project, prefix: true
validates :sha, presence: { unless: :importing? }
@@ -259,6 +261,22 @@ module Ci
end
end
+ after_transition any => any do |pipeline|
+ next unless Feature.enabled?(:jira_sync_builds, pipeline.project)
+
+ pipeline.run_after_commit do
+ # Passing the seq-id ensures this is idempotent
+ seq_id = ::Atlassian::JiraConnect::Client.generate_update_sequence_id
+ ::JiraConnect::SyncBuildsWorker.perform_async(pipeline.id, seq_id)
+ end
+ end
+
+ after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
+ pipeline.run_after_commit do
+ ::Ci::TestFailureHistoryService.new(pipeline).async.perform_if_needed # rubocop: disable CodeReuse/ServiceClass
+ end
+ end
+
after_transition any => [:success, :failed] do |pipeline|
ref_status = pipeline.ci_ref&.update_status_by!(pipeline)
@@ -277,15 +295,17 @@ module Ci
scope :internal, -> { where(source: internal_sources) }
scope :no_child, -> { where.not(source: :parent_pipeline) }
scope :ci_sources, -> { where(source: Enums::Ci::Pipeline.ci_sources.values) }
+ scope :ci_branch_sources, -> { where(source: Enums::Ci::Pipeline.ci_branch_sources.values) }
scope :ci_and_parent_sources, -> { where(source: Enums::Ci::Pipeline.ci_and_parent_sources.values) }
scope :for_user, -> (user) { where(user: user) }
scope :for_sha, -> (sha) { where(sha: sha) }
scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
scope :for_sha_or_source_sha, -> (sha) { for_sha(sha).or(for_source_sha(sha)) }
scope :for_ref, -> (ref) { where(ref: ref) }
+ scope :for_branch, -> (branch) { for_ref(branch).where(tag: false) }
scope :for_id, -> (id) { where(id: id) }
scope :for_iid, -> (iid) { where(iid: iid) }
- scope :for_project, -> (project) { where(project: project) }
+ scope :for_project, -> (project_id) { where(project_id: project_id) }
scope :created_after, -> (time) { where('ci_pipelines.created_at > ?', time) }
scope :created_before_id, -> (id) { where('ci_pipelines.id < ?', id) }
scope :before_pipeline, -> (pipeline) { created_before_id(pipeline.id).outside_pipeline_family(pipeline) }
@@ -310,9 +330,9 @@ module Ci
# In general, please use `Ci::PipelinesForMergeRequestFinder` instead,
# for checking permission of the actor.
scope :triggered_by_merge_request, -> (merge_request) do
- ci_sources.where(source: :merge_request_event,
- merge_request: merge_request,
- project: [merge_request.source_project, merge_request.target_project])
+ where(source: :merge_request_event,
+ merge_request: merge_request,
+ project: [merge_request.source_project, merge_request.target_project])
end
# Returns the pipelines in descending order (= newest first), optionally
@@ -774,9 +794,20 @@ module Ci
variables.append(key: 'CI_MERGE_REQUEST_EVENT_TYPE', value: merge_request_event_type.to_s)
variables.append(key: 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA', value: source_sha.to_s)
variables.append(key: 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA', value: target_sha.to_s)
+
+ diff = self.merge_request_diff
+ if diff.present?
+ variables.append(key: 'CI_MERGE_REQUEST_DIFF_ID', value: diff.id.to_s)
+ variables.append(key: 'CI_MERGE_REQUEST_DIFF_BASE_SHA', value: diff.base_commit_sha)
+ end
+
variables.concat(merge_request.predefined_variables)
end
+ if Gitlab::Ci::Features.pipeline_open_merge_requests?(project) && open_merge_requests_refs.any?
+ variables.append(key: 'CI_OPEN_MERGE_REQUESTS', value: open_merge_requests_refs.join(','))
+ end
+
variables.append(key: 'CI_KUBERNETES_ACTIVE', value: 'true') if has_kubernetes_active?
variables.append(key: 'CI_DEPLOY_FREEZE', value: 'true') if freeze_period?
@@ -824,9 +855,8 @@ module Ci
end
def execute_hooks
- data = pipeline_data
- project.execute_hooks(data, :pipeline_hooks)
- project.execute_services(data, :pipeline_hooks)
+ project.execute_hooks(pipeline_data, :pipeline_hooks) if project.has_active_hooks?(:pipeline_hooks)
+ project.execute_services(pipeline_data, :pipeline_hooks) if project.has_active_services?(:pipeline_hooks)
end
# All the merge requests for which the current pipeline runs/ran against
@@ -844,9 +874,39 @@ module Ci
all_merge_requests.order(id: :desc)
end
+ # This returns a list of MRs that point
+ # to the same source project/branch
+ def related_merge_requests
+ if merge_request?
+ # We look for all other MRs that this branch might be pointing to
+ MergeRequest.where(
+ source_project_id: merge_request.source_project_id,
+ source_branch: merge_request.source_branch)
+ else
+ MergeRequest.where(
+ source_project_id: project_id,
+ source_branch: ref)
+ end
+ end
+
+ # We cannot use `all_merge_requests`, due to race condition
+ # This returns a list of at most 4 open MRs
+ def open_merge_requests_refs
+ strong_memoize(:open_merge_requests_refs) do
+ # We ensure that triggering user can actually read the pipeline
+ related_merge_requests
+ .opened
+ .limit(MAX_OPEN_MERGE_REQUESTS_REFS)
+ .order(id: :desc)
+ .preload(:target_project)
+ .select { |mr| can?(user, :read_merge_request, mr) }
+ .map { |mr| mr.to_reference(project, full: true) }
+ end
+ end
+
def same_family_pipeline_ids
::Gitlab::Ci::PipelineObjectHierarchy.new(
- base_and_ancestors(same_project: true), options: { same_project: true }
+ self.class.where(id: root_ancestor), options: { same_project: true }
).base_and_descendants.select(:id)
end
@@ -869,6 +929,15 @@ module Ci
.base_and_descendants
end
+ def root_ancestor
+ return self unless child?
+
+ Gitlab::Ci::PipelineObjectHierarchy
+ .new(self.class.unscoped.where(id: id), options: { same_project: true })
+ .base_and_ancestors(hierarchy_order: :desc)
+ .first
+ end
+
def bridge_triggered?
source_bridge.present?
end
@@ -878,7 +947,8 @@ module Ci
end
def child?
- parent_pipeline.present?
+ parent_pipeline? && # child pipelines have `parent_pipeline` source
+ parent_pipeline.present?
end
def parent?
@@ -910,10 +980,18 @@ module Ci
builds.latest.with_reports(reports_scope)
end
+ def latest_test_report_builds
+ latest_report_builds(Ci::JobArtifact.test_reports).preload(:project)
+ end
+
def builds_with_coverage
builds.latest.with_coverage
end
+ def builds_with_failed_tests(limit: nil)
+ latest_test_report_builds.failed.limit(limit)
+ end
+
def has_reports?(reports_scope)
complete? && latest_report_builds(reports_scope).exists?
end
@@ -934,7 +1012,7 @@ module Ci
def test_reports
Gitlab::Ci::Reports::TestReports.new.tap do |test_reports|
- latest_report_builds(Ci::JobArtifact.test_reports).preload(:project).find_each do |build|
+ latest_test_report_builds.find_each do |build|
build.collect_test_reports!(test_reports)
end
end
@@ -950,12 +1028,20 @@ module Ci
def coverage_reports
Gitlab::Ci::Reports::CoverageReports.new.tap do |coverage_reports|
- latest_report_builds(Ci::JobArtifact.coverage_reports).each do |build|
+ latest_report_builds(Ci::JobArtifact.coverage_reports).includes(:project).find_each do |build|
build.collect_coverage_reports!(coverage_reports)
end
end
end
+ def codequality_reports
+ Gitlab::Ci::Reports::CodequalityReports.new.tap do |codequality_reports|
+ latest_report_builds(Ci::JobArtifact.codequality_reports).each do |build|
+ build.collect_codequality_reports!(codequality_reports)
+ end
+ end
+ end
+
def terraform_reports
::Gitlab::Ci::Reports::TerraformReports.new.tap do |terraform_reports|
latest_report_builds(::Ci::JobArtifact.terraform_reports).each do |build|
@@ -1128,7 +1214,25 @@ module Ci
end
def pipeline_data
- Gitlab::DataBuilder::Pipeline.build(self)
+ strong_memoize(:pipeline_data) do
+ Gitlab::DataBuilder::Pipeline.build(self)
+ end
+ end
+
+ def merge_request_diff_sha
+ return unless merge_request?
+
+ if merge_request_pipeline?
+ source_sha
+ else
+ sha
+ end
+ end
+
+ def merge_request_diff
+ return unless merge_request?
+
+ merge_request.merge_request_diff_for(merge_request_diff_sha)
end
def push_details
diff --git a/app/models/clusters/agent.rb b/app/models/clusters/agent.rb
index 5feb3b0a1e6..c58a3bab1a9 100644
--- a/app/models/clusters/agent.rb
+++ b/app/models/clusters/agent.rb
@@ -19,5 +19,9 @@ module Clusters
with: Gitlab::Regex.cluster_agent_name_regex,
message: Gitlab::Regex.cluster_agent_name_regex_message
}
+
+ def has_access_to?(requested_project)
+ requested_project == project
+ end
end
end
diff --git a/app/models/clusters/applications/helm.rb b/app/models/clusters/applications/helm.rb
index d1d6defb713..6f4b273a2c8 100644
--- a/app/models/clusters/applications/helm.rb
+++ b/app/models/clusters/applications/helm.rb
@@ -4,8 +4,8 @@ require 'openssl'
module Clusters
module Applications
- # DEPRECATED: This model represents the Helm 2 Tiller server, and is no longer being actively used.
- # It is being kept around for a potential cleanup of the unused Tiller server.
+ # DEPRECATED: This model represents the Helm 2 Tiller server.
+ # It is being kept around to enable the cleanup of the unused Tiller server.
class Helm < ApplicationRecord
self.table_name = 'clusters_applications_helm'
@@ -27,29 +27,11 @@ module Clusters
end
def set_initial_status
- return unless not_installable?
-
- self.status = status_states[:installable] if cluster&.platform_kubernetes_active?
- end
-
- # It can only be uninstalled if there are no other applications installed
- # or with intermitent installation statuses in the database.
- def allowed_to_uninstall?
- strong_memoize(:allowed_to_uninstall) do
- applications = nil
-
- Clusters::Cluster::APPLICATIONS.each do |application_name, klass|
- next if application_name == 'helm'
-
- extra_apps = Clusters::Applications::Helm.where('EXISTS (?)', klass.select(1).where(cluster_id: cluster_id))
-
- applications = applications ? applications.or(extra_apps) : extra_apps
- end
-
- !applications.exists?
- end
+ # The legacy Tiller server is not installable, which is the initial status of every app
end
+ # DEPRECATED: This command is only for development and testing purposes, to simulate
+ # a Helm 2 cluster with an existing Tiller server.
def install_command
Gitlab::Kubernetes::Helm::V2::InitCommand.new(
name: name,
@@ -70,13 +52,6 @@ module Clusters
ca_key.present? && ca_cert.present?
end
- def post_uninstall
- cluster.kubeclient.delete_namespace(Gitlab::Kubernetes::Helm::NAMESPACE)
- rescue Kubeclient::ResourceNotFoundError
- # we actually don't care if the namespace is not present
- # since we want to delete it anyway.
- end
-
private
def files
diff --git a/app/models/clusters/applications/runner.rb b/app/models/clusters/applications/runner.rb
index 03f4caccccd..1e41b6f4f31 100644
--- a/app/models/clusters/applications/runner.rb
+++ b/app/models/clusters/applications/runner.rb
@@ -3,7 +3,7 @@
module Clusters
module Applications
class Runner < ApplicationRecord
- VERSION = '0.22.0'
+ VERSION = '0.23.0'
self.table_name = 'clusters_applications_runners'
diff --git a/app/models/clusters/cluster.rb b/app/models/clusters/cluster.rb
index 3cf5542ae76..a34d8a6b98d 100644
--- a/app/models/clusters/cluster.rb
+++ b/app/models/clusters/cluster.rb
@@ -149,8 +149,8 @@ module Clusters
scope :for_project_namespace, -> (namespace_id) { joins(:projects).where(projects: { namespace_id: namespace_id }) }
scope :with_application_prometheus, -> { includes(:application_prometheus).joins(:application_prometheus) }
- scope :with_project_alert_service_data, -> (project_ids) do
- conditions = { projects: { alerts_service: [:data] } }
+ scope :with_project_http_integrations, -> (project_ids) do
+ conditions = { projects: :alert_management_http_integrations }
includes(conditions).joins(conditions).where(projects: { id: project_ids })
end
diff --git a/app/models/clusters/concerns/application_core.rb b/app/models/clusters/concerns/application_core.rb
index b82b1887308..ad6699daa78 100644
--- a/app/models/clusters/concerns/application_core.rb
+++ b/app/models/clusters/concerns/application_core.rb
@@ -62,6 +62,14 @@ module Clusters
end
end
+ def uninstall_command
+ helm_command_module::DeleteCommand.new(
+ name: name,
+ rbac: cluster.platform_kubernetes_rbac?,
+ files: files
+ )
+ end
+
def prepare_uninstall
# Override if your application needs any action before
# being uninstalled by Helm
diff --git a/app/models/clusters/concerns/application_data.rb b/app/models/clusters/concerns/application_data.rb
index 00aeb7669ad..a022f174faf 100644
--- a/app/models/clusters/concerns/application_data.rb
+++ b/app/models/clusters/concerns/application_data.rb
@@ -3,14 +3,6 @@
module Clusters
module Concerns
module ApplicationData
- def uninstall_command
- helm_command_module::DeleteCommand.new(
- name: name,
- rbac: cluster.platform_kubernetes_rbac?,
- files: files
- )
- end
-
def repository
nil
end
diff --git a/app/models/clusters/platforms/kubernetes.rb b/app/models/clusters/platforms/kubernetes.rb
index b85a902d58b..84de5828491 100644
--- a/app/models/clusters/platforms/kubernetes.rb
+++ b/app/models/clusters/platforms/kubernetes.rb
@@ -94,9 +94,20 @@ module Clusters
return unless enabled?
pods = read_pods(environment.deployment_namespace)
+ deployments = read_deployments(environment.deployment_namespace)
- # extract_relevant_pod_data avoids uploading all the pod info into ReactiveCaching
- { pods: extract_relevant_pod_data(pods) }
+ ingresses = if ::Feature.enabled?(:canary_ingress_weight_control, environment.project, default_enabled: true)
+ read_ingresses(environment.deployment_namespace)
+ else
+ []
+ end
+
+ # extract only the data required for display to avoid unnecessary caching
+ {
+ pods: extract_relevant_pod_data(pods),
+ deployments: extract_relevant_deployment_data(deployments),
+ ingresses: extract_relevant_ingress_data(ingresses)
+ }
end
def terminals(environment, data)
@@ -109,6 +120,25 @@ module Clusters
@kubeclient ||= build_kube_client!
end
+ def rollout_status(environment, data)
+ project = environment.project
+
+ deployments = filter_by_project_environment(data[:deployments], project.full_path_slug, environment.slug)
+ pods = filter_by_project_environment(data[:pods], project.full_path_slug, environment.slug)
+ ingresses = data[:ingresses].presence || []
+
+ ::Gitlab::Kubernetes::RolloutStatus.from_deployments(*deployments, pods_attrs: pods, ingresses: ingresses)
+ end
+
+ def ingresses(namespace)
+ ingresses = read_ingresses(namespace)
+ ingresses.map { |ingress| ::Gitlab::Kubernetes::Ingress.new(ingress) }
+ end
+
+ def patch_ingress(namespace, ingress, data)
+ kubeclient.patch_ingress(ingress.name, data, namespace)
+ end
+
private
def default_namespace(project, environment_name:)
@@ -140,6 +170,18 @@ module Clusters
[]
end
+ def read_deployments(namespace)
+ kubeclient.get_deployments(namespace: namespace).as_json
+ rescue Kubeclient::ResourceNotFoundError
+ []
+ end
+
+ def read_ingresses(namespace)
+ kubeclient.get_ingresses(namespace: namespace).as_json
+ rescue Kubeclient::ResourceNotFoundError
+ []
+ end
+
def build_kube_client!
raise "Incomplete settings" unless api_url
@@ -231,8 +273,24 @@ module Clusters
}
end
end
+
+ def extract_relevant_deployment_data(deployments)
+ deployments.map do |deployment|
+ {
+ 'metadata' => deployment.fetch('metadata', {}).slice('name', 'generation', 'labels', 'annotations'),
+ 'spec' => deployment.fetch('spec', {}).slice('replicas'),
+ 'status' => deployment.fetch('status', {}).slice('observedGeneration')
+ }
+ end
+ end
+
+ def extract_relevant_ingress_data(ingresses)
+ ingresses.map do |ingress|
+ {
+ 'metadata' => ingress.fetch('metadata', {}).slice('name', 'labels', 'annotations')
+ }
+ end
+ end
end
end
end
-
-Clusters::Platforms::Kubernetes.prepend_if_ee('EE::Clusters::Platforms::Kubernetes')
diff --git a/app/models/commit_collection.rb b/app/models/commit_collection.rb
index 07c49ed48e6..a3ee8e4f364 100644
--- a/app/models/commit_collection.rb
+++ b/app/models/commit_collection.rb
@@ -86,9 +86,9 @@ class CommitCollection
# Batch load full Commits from the repository
# and map to a Hash of id => Commit
- replacements = Hash[unenriched.map do |c|
- [c.id, Commit.lazy(container, c.id)]
- end.compact]
+ replacements = unenriched.each_with_object({}) do |c, result|
+ result[c.id] = Commit.lazy(container, c.id)
+ end.compact
# Replace the commits, keeping the same order
@commits = @commits.map do |original_commit|
diff --git a/app/models/concerns/cache_markdown_field.rb b/app/models/concerns/cache_markdown_field.rb
index 49fc780f372..45944401c2d 100644
--- a/app/models/concerns/cache_markdown_field.rb
+++ b/app/models/concerns/cache_markdown_field.rb
@@ -70,8 +70,12 @@ module CacheMarkdownField
def refresh_markdown_cache!
updates = refresh_markdown_cache
-
- save_markdown(updates)
+ if updates.present? && save_markdown(updates)
+ # save_markdown updates DB columns directly, so compute and save mentions
+ # by calling store_mentions! or we end-up with missing mentions although those
+ # would appear in the notes, descriptions, etc in the UI
+ store_mentions! if mentionable_attributes_changed?(updates)
+ end
end
def cached_html_up_to_date?(markdown_field)
@@ -106,7 +110,19 @@ module CacheMarkdownField
def updated_cached_html_for(markdown_field)
return unless cached_markdown_fields.markdown_fields.include?(markdown_field)
- refresh_markdown_cache! if attribute_invalidated?(cached_markdown_fields.html_field(markdown_field))
+ if attribute_invalidated?(cached_markdown_fields.html_field(markdown_field))
+ # Invalidated due to Markdown content change
+ # We should not persist the updated HTML here since this will depend on whether the
+ # Markdown content change will be persisted. Both will be persisted together when the model is saved.
+ if changed_attributes.key?(markdown_field)
+ refresh_markdown_cache
+ else
+ # Invalidated due to stale HTML cache
+ # This could happen when the Markdown cache version is bumped or when a model is imported and the HTML is empty.
+ # We persist the updated HTML here so that subsequent calls to this method do not have to regenerate the HTML again.
+ refresh_markdown_cache!
+ end
+ end
cached_html_for(markdown_field)
end
@@ -140,6 +156,46 @@ module CacheMarkdownField
nil
end
+ def store_mentions!
+ refs = all_references(self.author)
+
+ references = {}
+ references[:mentioned_users_ids] = refs.mentioned_users&.pluck(:id).presence
+ references[:mentioned_groups_ids] = refs.mentioned_groups&.pluck(:id).presence
+ references[:mentioned_projects_ids] = refs.mentioned_projects&.pluck(:id).presence
+
+ # One retry is enough as next time `model_user_mention` should return the existing mention record,
+ # that threw the `ActiveRecord::RecordNotUnique` exception in first place.
+ self.class.safe_ensure_unique(retries: 1) do
+ user_mention = model_user_mention
+
+ # this may happen due to notes polymorphism, so noteable_id may point to a record
+ # that no longer exists as we cannot have FK on noteable_id
+ break if user_mention.blank?
+
+ user_mention.mentioned_users_ids = references[:mentioned_users_ids]
+ user_mention.mentioned_groups_ids = references[:mentioned_groups_ids]
+ user_mention.mentioned_projects_ids = references[:mentioned_projects_ids]
+
+ if user_mention.has_mentions?
+ user_mention.save!
+ else
+ user_mention.destroy!
+ end
+ end
+
+ true
+ end
+
+ def mentionable_attributes_changed?(changes = saved_changes)
+ return false unless is_a?(Mentionable)
+
+ self.class.mentionable_attrs.any? do |attr|
+ changes.key?(cached_markdown_fields.html_field(attr.first)) &&
+ changes.fetch(cached_markdown_fields.html_field(attr.first)).last.present?
+ end
+ end
+
included do
cattr_reader :cached_markdown_fields do
Gitlab::MarkdownCache::FieldData.new
diff --git a/app/models/concerns/can_move_repository_storage.rb b/app/models/concerns/can_move_repository_storage.rb
new file mode 100644
index 00000000000..52c3a4106e3
--- /dev/null
+++ b/app/models/concerns/can_move_repository_storage.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module CanMoveRepositoryStorage
+ extend ActiveSupport::Concern
+
+ RepositoryReadOnlyError = Class.new(StandardError)
+
+ # Tries to set repository as read_only, checking for existing Git transfers in
+ # progress beforehand. Setting a repository read-only will fail if it is
+ # already in that state.
+ #
+ # @return nil. Failures will raise an exception
+ def set_repository_read_only!(skip_git_transfer_check: false)
+ with_lock do
+ raise RepositoryReadOnlyError, _('Git transfer in progress') if
+ !skip_git_transfer_check && git_transfer_in_progress?
+
+ raise RepositoryReadOnlyError, _('Repository already read-only') if
+ self.class.where(id: id).pick(:repository_read_only)
+
+ raise ActiveRecord::RecordNotSaved, _('Database update failed') unless
+ update_column(:repository_read_only, true)
+
+ nil
+ end
+ end
+
+ # Set repository as writable again. Unlike setting it read-only, this will
+ # succeed if the repository is already writable.
+ def set_repository_writable!
+ with_lock do
+ raise ActiveRecord::RecordNotSaved, _('Database update failed') unless
+ update_column(:repository_read_only, false)
+
+ nil
+ end
+ end
+
+ def git_transfer_in_progress?
+ reference_counter(type: repository.repo_type).value > 0
+ end
+
+ def reference_counter(type:)
+ Gitlab::ReferenceCounter.new(type.identifier_for_container(self))
+ end
+end
diff --git a/app/models/concerns/case_sensitivity.rb b/app/models/concerns/case_sensitivity.rb
index abddbf1c7e3..31b5afd604d 100644
--- a/app/models/concerns/case_sensitivity.rb
+++ b/app/models/concerns/case_sensitivity.rb
@@ -11,12 +11,14 @@ module CaseSensitivity
def iwhere(params)
criteria = self
- params.each do |key, value|
+ params.each do |column, value|
+ column = arel_table[column] unless column.is_a?(Arel::Attribute)
+
criteria = case value
when Array
- criteria.where(value_in(key, value))
+ criteria.where(value_in(column, value))
else
- criteria.where(value_equal(key, value))
+ criteria.where(value_equal(column, value))
end
end
@@ -28,7 +30,7 @@ module CaseSensitivity
def value_equal(column, value)
lower_value = lower_value(value)
- lower_column(arel_table[column]).eq(lower_value).to_sql
+ lower_column(column).eq(lower_value).to_sql
end
def value_in(column, values)
@@ -36,7 +38,7 @@ module CaseSensitivity
lower_value(value)
end
- lower_column(arel_table[column]).in(lower_values).to_sql
+ lower_column(column).in(lower_values).to_sql
end
def lower_value(value)
diff --git a/app/models/concerns/enums/ci/pipeline.rb b/app/models/concerns/enums/ci/pipeline.rb
index bb8df37f649..e1f07fa162c 100644
--- a/app/models/concerns/enums/ci/pipeline.rb
+++ b/app/models/concerns/enums/ci/pipeline.rb
@@ -9,7 +9,8 @@ module Enums
{
unknown_failure: 0,
config_error: 1,
- external_validation_failure: 2
+ external_validation_failure: 2,
+ deployments_limit_exceeded: 23
}
end
@@ -24,8 +25,6 @@ module Enums
schedule: 4,
api: 5,
external: 6,
- # TODO: Rename `pipeline` to `cross_project_pipeline` in 13.0
- # https://gitlab.com/gitlab-org/gitlab/issues/195991
pipeline: 7,
chat: 8,
webide: 9,
@@ -53,6 +52,10 @@ module Enums
sources.except(*dangling_sources.keys)
end
+ def self.ci_branch_sources
+ ci_sources.except(:merge_request_event)
+ end
+
def self.ci_and_parent_sources
ci_sources.merge(sources.slice(:parent_pipeline))
end
diff --git a/app/models/concerns/enums/data_visualization_palette.rb b/app/models/concerns/enums/data_visualization_palette.rb
new file mode 100644
index 00000000000..25002e64ba6
--- /dev/null
+++ b/app/models/concerns/enums/data_visualization_palette.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Enums
+ # These color palettes are part of the Pajamas Design System.
+ # See https://design.gitlab.com/data-visualization/color/#categorical-data
+ module DataVisualizationPalette
+ def self.colors
+ {
+ blue: 0,
+ orange: 1,
+ aqua: 2,
+ green: 3,
+ magenta: 4
+ }
+ end
+
+ def self.weights
+ {
+ '50' => 0,
+ '100' => 1,
+ '200' => 2,
+ '300' => 3,
+ '400' => 4,
+ '500' => 5,
+ '600' => 6,
+ '700' => 7,
+ '800' => 8,
+ '900' => 9,
+ '950' => 10
+ }
+ end
+ end
+end
diff --git a/app/models/concerns/enums/internal_id.rb b/app/models/concerns/enums/internal_id.rb
index f01bd60ef16..b08c05b1934 100644
--- a/app/models/concerns/enums/internal_id.rb
+++ b/app/models/concerns/enums/internal_id.rb
@@ -15,7 +15,8 @@ module Enums
operations_user_lists: 7,
alert_management_alerts: 8,
sprints: 9, # iterations
- design_management_designs: 10
+ design_management_designs: 10,
+ incident_management_oncall_schedules: 11
}
end
end
diff --git a/app/models/concerns/has_repository.rb b/app/models/concerns/has_repository.rb
index 3dea4a9f5fb..9692941d8b2 100644
--- a/app/models/concerns/has_repository.rb
+++ b/app/models/concerns/has_repository.rb
@@ -88,7 +88,7 @@ module HasRepository
group_branch_default_name = group&.default_branch_name if respond_to?(:group)
- group_branch_default_name || Gitlab::CurrentSettings.default_branch_name
+ (group_branch_default_name || Gitlab::CurrentSettings.default_branch_name).presence
end
def reload_default_branch
diff --git a/app/models/concerns/has_wiki_page_meta_attributes.rb b/app/models/concerns/has_wiki_page_meta_attributes.rb
new file mode 100644
index 00000000000..136f2d00ce3
--- /dev/null
+++ b/app/models/concerns/has_wiki_page_meta_attributes.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+module HasWikiPageMetaAttributes
+ extend ActiveSupport::Concern
+ include Gitlab::Utils::StrongMemoize
+
+ CanonicalSlugConflictError = Class.new(ActiveRecord::RecordInvalid)
+ WikiPageInvalid = Class.new(ArgumentError)
+
+ included do
+ has_many :events, as: :target, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
+
+ validates :title, length: { maximum: 255 }, allow_nil: false
+ validate :no_two_metarecords_in_same_container_can_have_same_canonical_slug
+
+ scope :with_canonical_slug, ->(slug) do
+ slug_table_name = klass.reflect_on_association(:slugs).table_name
+
+ joins(:slugs).where(slug_table_name => { canonical: true, slug: slug })
+ end
+ end
+
+ class_methods do
+ # Return the (updated) WikiPage::Meta record for a given wiki page
+ #
+ # If none is found, then a new record is created, and its fields are set
+ # to reflect the wiki_page passed.
+ #
+ # @param [String] last_known_slug
+ # @param [WikiPage] wiki_page
+ #
+ # This method raises errors on validation issues.
+ def find_or_create(last_known_slug, wiki_page)
+ raise WikiPageInvalid unless wiki_page.valid?
+
+ container = wiki_page.wiki.container
+ known_slugs = [last_known_slug, wiki_page.slug].compact.uniq
+ raise 'No slugs found! This should not be possible.' if known_slugs.empty?
+
+ transaction do
+ updates = wiki_page_updates(wiki_page)
+ found = find_by_canonical_slug(known_slugs, container)
+ meta = found || create!(updates.merge(container_attrs(container)))
+
+ meta.update_state(found.nil?, known_slugs, wiki_page, updates)
+
+ # We don't need to run validations here, since find_by_canonical_slug
+ # guarantees that there is no conflict in canonical_slug, and DB
+ # constraints on title and project_id/group_id enforce our other invariants
+ # This saves us a query.
+ meta
+ end
+ end
+
+ def find_by_canonical_slug(canonical_slug, container)
+ meta, conflict = with_canonical_slug(canonical_slug)
+ .where(container_attrs(container))
+ .limit(2)
+
+ if conflict.present?
+ meta.errors.add(:canonical_slug, 'Duplicate value found')
+ raise CanonicalSlugConflictError.new(meta)
+ end
+
+ meta
+ end
+
+ private
+
+ def wiki_page_updates(wiki_page)
+ last_commit_date = wiki_page.version_commit_timestamp || Time.now.utc
+
+ {
+ title: wiki_page.title,
+ created_at: last_commit_date,
+ updated_at: last_commit_date
+ }
+ end
+
+ def container_key
+ raise NotImplementedError
+ end
+
+ def container_attrs(container)
+ { container_key => container.id }
+ end
+ end
+
+ def canonical_slug
+ strong_memoize(:canonical_slug) { slugs.canonical.take&.slug }
+ end
+
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def canonical_slug=(slug)
+ return if @canonical_slug == slug
+
+ if persisted?
+ transaction do
+ slugs.canonical.update_all(canonical: false)
+ page_slug = slugs.create_with(canonical: true).find_or_create_by(slug: slug)
+ page_slug.update_columns(canonical: true) unless page_slug.canonical?
+ end
+ else
+ slugs.new(slug: slug, canonical: true)
+ end
+
+ @canonical_slug = slug
+ end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
+
+ def update_state(created, known_slugs, wiki_page, updates)
+ update_wiki_page_attributes(updates)
+ insert_slugs(known_slugs, created, wiki_page.slug)
+ self.canonical_slug = wiki_page.slug
+ end
+
+ private
+
+ def update_wiki_page_attributes(updates)
+ # Remove all unnecessary updates:
+ updates.delete(:updated_at) if updated_at == updates[:updated_at]
+ updates.delete(:created_at) if created_at <= updates[:created_at]
+ updates.delete(:title) if title == updates[:title]
+
+ update_columns(updates) unless updates.empty?
+ end
+
+ def insert_slugs(strings, is_new, canonical_slug)
+ creation = Time.current.utc
+
+ slug_attrs = strings.map do |slug|
+ slug_attributes(slug, canonical_slug, is_new, creation)
+ end
+ slugs.insert_all(slug_attrs) unless !is_new && slug_attrs.size == 1
+
+ @canonical_slug = canonical_slug if is_new || strings.size == 1 # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ def slug_attributes(slug, canonical_slug, is_new, creation)
+ {
+ slug: slug,
+ canonical: (is_new && slug == canonical_slug),
+ created_at: creation,
+ updated_at: creation
+ }.merge(slug_meta_attributes)
+ end
+
+ def slug_meta_attributes
+ { self.association(:slugs).reflection.foreign_key => id }
+ end
+
+ def no_two_metarecords_in_same_container_can_have_same_canonical_slug
+ container_id = attributes[self.class.container_key.to_s]
+
+ return unless container_id.present? && canonical_slug.present?
+
+ offending = self.class.with_canonical_slug(canonical_slug).where(self.class.container_key => container_id)
+ offending = offending.where.not(id: id) if persisted?
+
+ if offending.exists?
+ errors.add(:canonical_slug, 'each page in a wiki must have a distinct canonical slug')
+ end
+ end
+end
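
A minimal usage sketch for the concern above, assuming a class such as WikiPage::Meta includes it, defines container_key (for example :project_id), and exposes the slugs association; the project and page title below are illustrative.

# Hypothetical Rails console usage.
wiki_page = project.wiki.find_page('home')
meta = WikiPage::Meta.find_or_create('old-home-slug', wiki_page)
meta.title            # kept in sync with the wiki page title
meta.canonical_slug   # => "home", once the slug rows have been inserted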
diff --git a/app/models/concerns/has_wiki_page_slug_attributes.rb b/app/models/concerns/has_wiki_page_slug_attributes.rb
new file mode 100644
index 00000000000..3335eccbaf6
--- /dev/null
+++ b/app/models/concerns/has_wiki_page_slug_attributes.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module HasWikiPageSlugAttributes
+ extend ActiveSupport::Concern
+
+ included do
+ validates :slug, uniqueness: { scope: meta_foreign_key }
+ validates :slug, length: { maximum: 2048 }, allow_nil: false
+ validates :canonical, uniqueness: {
+ scope: meta_foreign_key,
+ if: :canonical?,
+ message: 'Only one slug can be canonical per wiki metadata record'
+ }
+
+ scope :canonical, -> { where(canonical: true) }
+
+ def update_columns(attrs = {})
+ super(attrs.reverse_merge(updated_at: Time.current.utc))
+ end
+ end
+
+ def self.update_all(attrs = {})
+ super(attrs.reverse_merge(updated_at: Time.current.utc))
+ end
+end
diff --git a/app/models/concerns/ignorable_columns.rb b/app/models/concerns/ignorable_columns.rb
index 744a1f0b5f3..4cbcb25406d 100644
--- a/app/models/concerns/ignorable_columns.rb
+++ b/app/models/concerns/ignorable_columns.rb
@@ -31,15 +31,13 @@ module IgnorableColumns
alias_method :ignore_column, :ignore_columns
def ignored_columns_details
- unless defined?(@ignored_columns_details)
- IGNORE_COLUMN_MUTEX.synchronize do
- @ignored_columns_details ||= superclass.try(:ignored_columns_details)&.dup || {}
- end
- end
+ return @ignored_columns_details if defined?(@ignored_columns_details)
- @ignored_columns_details
+ IGNORE_COLUMN_MONITOR.synchronize do
+ @ignored_columns_details ||= superclass.try(:ignored_columns_details)&.dup || {}
+ end
end
- IGNORE_COLUMN_MUTEX = Mutex.new
+ IGNORE_COLUMN_MONITOR = Monitor.new
end
end
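
Switching from Mutex to Monitor presumably addresses reentrancy: ignored_columns_details memoizes by calling the same method on the superclass, so a single thread can hit the shared lock twice. Monitor#synchronize is reentrant, while Mutex#synchronize raises ThreadError on recursive locking. A standalone illustration:

require 'monitor'

LOCK = Monitor.new

LOCK.synchronize do
  # Re-entering from the same thread is allowed with a Monitor;
  # a Mutex would raise ThreadError ("deadlock; recursive locking") here.
  LOCK.synchronize { puts 'acquired the lock twice on one thread' }
end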
diff --git a/app/models/concerns/issuable.rb b/app/models/concerns/issuable.rb
index 7624a1a4e80..c3a394c1ca5 100644
--- a/app/models/concerns/issuable.rb
+++ b/app/models/concerns/issuable.rb
@@ -84,7 +84,6 @@ module Issuable
validate :description_max_length_for_new_records_is_valid, on: :update
before_validation :truncate_description_on_import!
- after_save :store_mentions!, if: :any_mentionable_attributes_changed?
scope :authored, ->(user) { where(author_id: user) }
scope :recent, -> { reorder(id: :desc) }
@@ -198,7 +197,7 @@ module Issuable
end
def severity
- return IssuableSeverity::DEFAULT unless incident?
+ return IssuableSeverity::DEFAULT unless supports_severity?
issuable_severity&.severity || IssuableSeverity::DEFAULT
end
@@ -305,14 +304,12 @@ module Issuable
end
def order_labels_priority(direction = 'ASC', excluded_labels: [], extra_select_columns: [], with_cte: false)
- params = {
+ highest_priority = highest_label_priority(
target_type: name,
target_column: "#{table_name}.id",
project_column: "#{table_name}.#{project_foreign_key}",
excluded_labels: excluded_labels
- }
-
- highest_priority = highest_label_priority(params).to_sql
+ ).to_sql
# When using CTE make sure to select the same columns that are on the group_by clause.
# This prevents errors when ignored columns are present in the database.
diff --git a/app/models/concerns/mentionable.rb b/app/models/concerns/mentionable.rb
index b10e8547e86..5db077c178d 100644
--- a/app/models/concerns/mentionable.rb
+++ b/app/models/concerns/mentionable.rb
@@ -80,37 +80,6 @@ module Mentionable
all_references(current_user).users
end
- def store_mentions!
- refs = all_references(self.author)
-
- references = {}
- references[:mentioned_users_ids] = refs.mentioned_users&.pluck(:id).presence
- references[:mentioned_groups_ids] = refs.mentioned_groups&.pluck(:id).presence
- references[:mentioned_projects_ids] = refs.mentioned_projects&.pluck(:id).presence
-
- # One retry should be enough as next time `model_user_mention` should return the existing mention record, that
- # threw the `ActiveRecord::RecordNotUnique` exception in first place.
- self.class.safe_ensure_unique(retries: 1) do
- user_mention = model_user_mention
-
- # this may happen due to notes polymorphism, so noteable_id may point to a record that no longer exists
- # as we cannot have FK on noteable_id
- break if user_mention.blank?
-
- user_mention.mentioned_users_ids = references[:mentioned_users_ids]
- user_mention.mentioned_groups_ids = references[:mentioned_groups_ids]
- user_mention.mentioned_projects_ids = references[:mentioned_projects_ids]
-
- if user_mention.has_mentions?
- user_mention.save!
- else
- user_mention.destroy!
- end
- end
-
- true
- end
-
def referenced_users
User.where(id: user_mentions.select("unnest(mentioned_users_ids)"))
end
@@ -216,12 +185,6 @@ module Mentionable
source.select { |key, val| mentionable.include?(key) }
end
- def any_mentionable_attributes_changed?
- self.class.mentionable_attrs.any? do |attr|
- saved_changes.key?(attr.first)
- end
- end
-
# Determine whether or not a cross-reference Note has already been created between this Mentionable and
# the specified target.
def cross_reference_exists?(target)
@@ -237,12 +200,12 @@ module Mentionable
end
# User mention that is parsed from model description rather than its related notes.
- # Models that have a descriprion attribute like Issue, MergeRequest, Epic, Snippet may have such a user mention.
+ # Models that have a description attribute like Issue, MergeRequest, Epic, Snippet may have such a user mention.
# Other mentionable models like Commit, DesignManagement::Design, will never have such record as those do not have
# a description attribute.
#
# Using this method followed by a call to *save* may result in *ActiveRecord::RecordNotUnique* exception
- # in a multithreaded environment. Make sure to use it within a *safe_ensure_unique* block.
+ # in a multi-threaded environment. Make sure to use it within a *safe_ensure_unique* block.
def model_user_mention
user_mentions.where(note_id: nil).first_or_initialize
end
diff --git a/app/models/concerns/optimized_issuable_label_filter.rb b/app/models/concerns/optimized_issuable_label_filter.rb
index 7be4a26d4fa..82055822cfb 100644
--- a/app/models/concerns/optimized_issuable_label_filter.rb
+++ b/app/models/concerns/optimized_issuable_label_filter.rb
@@ -1,6 +1,15 @@
# frozen_string_literal: true
module OptimizedIssuableLabelFilter
+ extend ActiveSupport::Concern
+
+ prepended do
+ extend Gitlab::Cache::RequestCache
+
+ # Avoid repeating the label queries when the finder is instantiated multiple times during the request.
+ request_cache(:find_label_ids) { [root_namespace.id, params.label_names] }
+ end
+
def by_label(items)
return items unless params.labels?
@@ -41,7 +50,7 @@ module OptimizedIssuableLabelFilter
def issuables_with_selected_labels(items, target_model)
if root_namespace
- all_label_ids = find_label_ids(root_namespace)
+ all_label_ids = find_label_ids
# Found fewer labels in the DB than we were searching for. Return nothing.
return items.none if all_label_ids.size != params.label_names.size
@@ -57,18 +66,20 @@ module OptimizedIssuableLabelFilter
items
end
- def find_label_ids(root_namespace)
- finder_params = {
- include_subgroups: true,
- include_ancestor_groups: true,
- include_descendant_groups: true,
- group: root_namespace,
- title: params.label_names
- }
-
- LabelsFinder
- .new(nil, finder_params)
- .execute(skip_authorization: true)
+ def find_label_ids
+ group_labels = Label
+ .where(project_id: nil)
+ .where(title: params.label_names)
+ .where(group_id: root_namespace.self_and_descendants.select(:id))
+
+ project_labels = Label
+ .where(group_id: nil)
+ .where(title: params.label_names)
+ .where(project_id: Project.select(:id).where(namespace_id: root_namespace.self_and_descendants.select(:id)))
+
+ Label
+ .from_union([group_labels, project_labels], remove_duplicates: false)
+ .reorder(nil)
.pluck(:title, :id)
.group_by(&:first)
.values
diff --git a/app/models/concerns/project_features_compatibility.rb b/app/models/concerns/project_features_compatibility.rb
index b69fb2931c3..07bec07e556 100644
--- a/app/models/concerns/project_features_compatibility.rb
+++ b/app/models/concerns/project_features_compatibility.rb
@@ -70,6 +70,14 @@ module ProjectFeaturesCompatibility
write_feature_attribute_string(:metrics_dashboard_access_level, value)
end
+ def analytics_access_level=(value)
+ write_feature_attribute_string(:analytics_access_level, value)
+ end
+
+ def operations_access_level=(value)
+ write_feature_attribute_string(:operations_access_level, value)
+ end
+
private
def write_feature_attribute_boolean(field, value)
diff --git a/app/models/concerns/protected_ref.rb b/app/models/concerns/protected_ref.rb
index cddca72f91f..65195a8d5aa 100644
--- a/app/models/concerns/protected_ref.rb
+++ b/app/models/concerns/protected_ref.rb
@@ -12,6 +12,10 @@ module ProtectedRef
delegate :matching, :matches?, :wildcard?, to: :ref_matcher
scope :for_project, ->(project) { where(project: project) }
+
+ def allow_multiple?(type)
+ false
+ end
end
def commit
@@ -29,7 +33,7 @@ module ProtectedRef
# to fail.
has_many :"#{type}_access_levels", inverse_of: self.model_name.singular
- validates :"#{type}_access_levels", length: { is: 1, message: "are restricted to a single instance per #{self.model_name.human}." }
+ validates :"#{type}_access_levels", length: { is: 1, message: "are restricted to a single instance per #{self.model_name.human}." }, unless: -> { allow_multiple?(type) }
accepts_nested_attributes_for :"#{type}_access_levels", allow_destroy: true
end
diff --git a/app/models/concerns/protected_ref_access.rb b/app/models/concerns/protected_ref_access.rb
index 28dc3366e51..5e38ce7cad8 100644
--- a/app/models/concerns/protected_ref_access.rb
+++ b/app/models/concerns/protected_ref_access.rb
@@ -45,6 +45,7 @@ module ProtectedRefAccess
end
def check_access(user)
+ return false unless user
return true if user.admin?
user.can?(:push_code, project) &&
diff --git a/app/models/concerns/reactive_caching.rb b/app/models/concerns/reactive_caching.rb
index 3470bdab5fb..dbc70ac2218 100644
--- a/app/models/concerns/reactive_caching.rb
+++ b/app/models/concerns/reactive_caching.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
# The usage of the ReactiveCaching module is documented here:
-# https://docs.gitlab.com/ee/development/reactive_caching.md
+# https://docs.gitlab.com/ee/development/reactive_caching.html
+#
module ReactiveCaching
extend ActiveSupport::Concern
diff --git a/app/models/concerns/repository_storage_movable.rb b/app/models/concerns/repository_storage_movable.rb
new file mode 100644
index 00000000000..a45b4626628
--- /dev/null
+++ b/app/models/concerns/repository_storage_movable.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+module RepositoryStorageMovable
+ extend ActiveSupport::Concern
+ include AfterCommitQueue
+
+ included do
+ scope :order_created_at_desc, -> { order(created_at: :desc) }
+
+ validates :container, presence: true
+ validates :state, presence: true
+ validates :source_storage_name,
+ on: :create,
+ presence: true,
+ inclusion: { in: ->(_) { Gitlab.config.repositories.storages.keys } }
+ validates :destination_storage_name,
+ on: :create,
+ presence: true,
+ inclusion: { in: ->(_) { Gitlab.config.repositories.storages.keys } }
+ validate :container_repository_writable, on: :create
+
+ default_value_for(:destination_storage_name, allows_nil: false) do
+ pick_repository_storage
+ end
+
+ state_machine initial: :initial do
+ event :schedule do
+ transition initial: :scheduled
+ end
+
+ event :start do
+ transition scheduled: :started
+ end
+
+ event :finish_replication do
+ transition started: :replicated
+ end
+
+ event :finish_cleanup do
+ transition replicated: :finished
+ end
+
+ event :do_fail do
+ transition [:initial, :scheduled, :started] => :failed
+ transition replicated: :cleanup_failed
+ end
+
+ around_transition initial: :scheduled do |storage_move, block|
+ block.call
+
+ begin
+ storage_move.container.set_repository_read_only!(skip_git_transfer_check: true)
+ rescue => err
+ storage_move.add_error(err.message)
+ next false
+ end
+
+ storage_move.run_after_commit do
+ storage_move.schedule_repository_storage_update_worker
+ end
+
+ true
+ end
+
+ before_transition started: :replicated do |storage_move|
+ storage_move.container.set_repository_writable!
+
+ storage_move.update_repository_storage(storage_move.destination_storage_name)
+ end
+
+ before_transition started: :failed do |storage_move|
+ storage_move.container.set_repository_writable!
+ end
+
+ state :initial, value: 1
+ state :scheduled, value: 2
+ state :started, value: 3
+ state :finished, value: 4
+ state :failed, value: 5
+ state :replicated, value: 6
+ state :cleanup_failed, value: 7
+ end
+ end
+
+ class_methods do
+ private
+
+ def pick_repository_storage
+ container_klass = reflect_on_association(:container).class_name.constantize
+
+ container_klass.pick_repository_storage
+ end
+ end
+
+ # Projects, snippets, and group wikis have different DB structures. For projects,
+ # we need to update some columns in this step, but we don't for the other resources.
+ #
+ # Therefore, we define this no-op method for snippets and wikis and let Project
+ # override it in its implementation.
+ def update_repository_storage(new_storage)
+ # No-op
+ end
+
+ def schedule_repository_storage_update_worker
+ raise NotImplementedError
+ end
+
+ def add_error(message)
+ errors.add(error_key, message)
+ end
+
+ private
+
+ def container_repository_writable
+ add_error(_('is read only')) if container&.repository_read_only?
+ end
+
+ def error_key
+ raise NotImplementedError
+ end
+end
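
The concern leaves two hooks to the including model, schedule_repository_storage_update_worker and error_key, plus an optional override of update_repository_storage. A minimal sketch of an including class; the class, association, and worker names are illustrative, not necessarily the ones GitLab ships.

# Hypothetical storage-move model for snippets.
class SnippetRepositoryStorageMove < ApplicationRecord
  include RepositoryStorageMovable

  belongs_to :container, class_name: 'Snippet', foreign_key: :snippet_id

  # Called after the initial -> scheduled transition commits.
  def schedule_repository_storage_update_worker
    SnippetUpdateRepositoryStorageWorker.perform_async(id)   # illustrative worker name
  end

  private

  # Attribute name used when add_error records a failure.
  def error_key
    :snippet
  end
end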
diff --git a/app/models/concerns/routable.rb b/app/models/concerns/routable.rb
index c70ce9bebcc..71d8e06de76 100644
--- a/app/models/concerns/routable.rb
+++ b/app/models/concerns/routable.rb
@@ -4,6 +4,36 @@
# Object must have name and path db fields and respond to parent and parent_changed? methods.
module Routable
extend ActiveSupport::Concern
+ include CaseSensitivity
+
+ # Finds a Routable object by its full path, without knowing the class.
+ #
+ # Usage:
+ #
+ # Routable.find_by_full_path('groupname') # -> Group
+ # Routable.find_by_full_path('groupname/projectname') # -> Project
+ #
+ # Returns a single object, or nil.
+ def self.find_by_full_path(path, follow_redirects: false, route_scope: Route, redirect_route_scope: RedirectRoute)
+ return unless path.present?
+
+ # Case sensitive match first (it's cheaper and the usual case)
+ # If we didn't have an exact match, we perform a case insensitive search
+ #
+ # We need to qualify the columns with the table name, to support both direct lookups on
+ # Route/RedirectRoute, and scoped lookups through the Routable classes.
+ route =
+ route_scope.find_by(routes: { path: path }) ||
+ route_scope.iwhere(Route.arel_table[:path] => path).take
+
+ if follow_redirects
+ route ||= redirect_route_scope.iwhere(RedirectRoute.arel_table[:path] => path).take
+ end
+
+ return unless route
+
+ route.is_a?(Routable) ? route : route.source
+ end
included do
# Remove `inverse_of: source` when upgraded to rails 5.2
@@ -30,15 +60,14 @@ module Routable
#
# Returns a single object, or nil.
def find_by_full_path(path, follow_redirects: false)
- # Case sensitive match first (it's cheaper and the usual case)
- # If we didn't have an exact match, we perform a case insensitive search
- found = includes(:route).find_by(routes: { path: path }) || where_full_path_in([path]).take
-
- return found if found
-
- if follow_redirects
- joins(:redirect_routes).find_by("LOWER(redirect_routes.path) = LOWER(?)", path)
- end
+ # TODO: Optimize these queries by avoiding joins
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/292252
+ Routable.find_by_full_path(
+ path,
+ follow_redirects: follow_redirects,
+ route_scope: includes(:route).references(:routes),
+ redirect_route_scope: joins(:redirect_routes)
+ )
end
# Builds a relation to find multiple objects by their full paths.
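
With the module-level lookup in place, callers that do not know whether a path points at a group or a project can resolve it generically, while the class-level method keeps its scoped behaviour. A console sketch; the paths are illustrative.

Routable.find_by_full_path('gitlab-org')                                 # => a Group, or nil
Routable.find_by_full_path('gitlab-org/gitlab', follow_redirects: true)  # also consults redirect routes
Project.find_by_full_path('gitlab-org/gitlab')                           # scoped lookup through the including class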
diff --git a/app/models/concerns/shardable.rb b/app/models/concerns/shardable.rb
index c0883c08289..4bebb99d195 100644
--- a/app/models/concerns/shardable.rb
+++ b/app/models/concerns/shardable.rb
@@ -8,6 +8,7 @@ module Shardable
scope :for_repository_storage, -> (repository_storage) { joins(:shard).where(shards: { name: repository_storage }) }
scope :excluding_repository_storage, -> (repository_storage) { joins(:shard).where.not(shards: { name: repository_storage }) }
+ scope :for_shard, -> (shard) { where(shard_id: shard) }
validates :shard, presence: true
end
diff --git a/app/models/concerns/timebox.rb b/app/models/concerns/timebox.rb
index 23fd73f2904..8273059b30c 100644
--- a/app/models/concerns/timebox.rb
+++ b/app/models/concerns/timebox.rb
@@ -12,10 +12,16 @@ module Timebox
include FromUnion
TimeboxStruct = Struct.new(:title, :name, :id) do
+ include GlobalID::Identification
+
# Ensure these models match the interface required for exporting
def serializable_hash(_opts = {})
{ title: title, name: name, id: id }
end
+
+ def self.declarative_policy_class
+ "TimeboxPolicy"
+ end
end
# Represents a "No Timebox" state used for filtering Issues and Merge
diff --git a/app/models/concerns/token_authenticatable.rb b/app/models/concerns/token_authenticatable.rb
index a1f83884f02..535cf25eb9d 100644
--- a/app/models/concerns/token_authenticatable.rb
+++ b/app/models/concerns/token_authenticatable.rb
@@ -57,6 +57,13 @@ module TokenAuthenticatable
token = read_attribute(token_field)
token.present? && ActiveSupport::SecurityUtils.secure_compare(other_token, token)
end
+
+ # Base strategy delegates to this method for formatting a token before
+ # calling set_token. Can be overridden in models to e.g. add a prefix
+ # to the tokens
+ mod.define_method("format_#{token_field}") do |token|
+ token
+ end
end
def token_authenticatable_module
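
The generated format_#{token_field} method is an identity function by default; a model can override it to, for example, prefix its tokens before they are stored. A minimal sketch; the model, field name, and prefix are assumptions.

# Hypothetical model using the concern.
class ApiWidget < ApplicationRecord
  include TokenAuthenticatable

  add_authentication_token_field :access_token

  # Called by the strategy between token generation and set_token.
  def format_access_token(token)
    "widget-#{token}"
  end
end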
diff --git a/app/models/concerns/token_authenticatable_strategies/base.rb b/app/models/concerns/token_authenticatable_strategies/base.rb
index aafd0b538a3..f72a41f06b1 100644
--- a/app/models/concerns/token_authenticatable_strategies/base.rb
+++ b/app/models/concerns/token_authenticatable_strategies/base.rb
@@ -18,10 +18,15 @@ module TokenAuthenticatableStrategies
raise NotImplementedError
end
- def set_token(instance)
+ def set_token(instance, token)
raise NotImplementedError
end
+ # Default implementation returns the token as-is
+ def format_token(instance, token)
+ instance.send("format_#{@token_field}", token) # rubocop:disable GitlabSecurity/PublicSend
+ end
+
def ensure_token(instance)
write_new_token(instance) unless token_set?(instance)
get_token(instance)
@@ -57,7 +62,8 @@ module TokenAuthenticatableStrategies
def write_new_token(instance)
new_token = generate_available_token
- set_token(instance, new_token)
+ formatted_token = format_token(instance, new_token)
+ set_token(instance, formatted_token)
end
def unique
diff --git a/app/models/concerns/triggerable_hooks.rb b/app/models/concerns/triggerable_hooks.rb
index 325a5531926..473b430bb04 100644
--- a/app/models/concerns/triggerable_hooks.rb
+++ b/app/models/concerns/triggerable_hooks.rb
@@ -15,14 +15,13 @@ module TriggerableHooks
wiki_page_hooks: :wiki_page_events,
deployment_hooks: :deployment_events,
feature_flag_hooks: :feature_flag_events,
- release_hooks: :releases_events
+ release_hooks: :releases_events,
+ member_hooks: :member_events
}.freeze
extend ActiveSupport::Concern
class_methods do
- attr_reader :triggerable_hooks
-
attr_reader :triggers
def hooks_for(trigger)
diff --git a/app/models/container_repository.rb b/app/models/container_repository.rb
index 4adbd37608f..0d7ce966537 100644
--- a/app/models/container_repository.rb
+++ b/app/models/container_repository.rb
@@ -25,8 +25,7 @@ class ContainerRepository < ApplicationRecord
.with_container_registry
.select(:id)
- ContainerRepository
- .joins("INNER JOIN (#{project_scope.to_sql}) projects on projects.id=container_repositories.project_id")
+ joins("INNER JOIN (#{project_scope.to_sql}) projects on projects.id=container_repositories.project_id")
end
scope :for_project_id, ->(project_id) { where(project_id: project_id) }
scope :search_by_name, ->(query) { fuzzy_search(query, [:name], use_minimum_char_limit: false) }
diff --git a/app/models/custom_emoji.rb b/app/models/custom_emoji.rb
index ed22d4ba231..4f8f86965d7 100644
--- a/app/models/custom_emoji.rb
+++ b/app/models/custom_emoji.rb
@@ -17,7 +17,7 @@ class CustomEmoji < ApplicationRecord
uniqueness: { scope: [:namespace_id, :name] },
presence: true,
length: { maximum: 36 },
- format: { with: /\A([a-z0-9]+[-_]?)+[a-z0-9]+\z/ }
+ format: { with: /\A[a-z0-9][a-z0-9\-_]*[a-z0-9]\z/ }
private
diff --git a/app/models/cycle_analytics/level_base.rb b/app/models/cycle_analytics/level_base.rb
index 967de9a22b4..901636a7263 100644
--- a/app/models/cycle_analytics/level_base.rb
+++ b/app/models/cycle_analytics/level_base.rb
@@ -4,9 +4,52 @@ module CycleAnalytics
module LevelBase
STAGES = %i[issue plan code test review staging].freeze
+ # This is a temporary adapter class which makes the new value stream (cycle analytics)
+ # backend compatible with the old implementation.
+ class StageAdapter
+ def initialize(stage, options)
+ @stage = stage
+ @options = options
+ end
+
+ # rubocop: disable CodeReuse/Presenter
+ def as_json(serializer: AnalyticsStageSerializer)
+ presenter = Analytics::CycleAnalytics::StagePresenter.new(stage)
+
+ serializer.new.represent(OpenStruct.new(
+ title: presenter.title,
+ description: presenter.description,
+ legend: presenter.legend,
+ name: stage.name,
+ project_median: median,
+ group_median: median
+ ))
+ end
+ # rubocop: enable CodeReuse/Presenter
+
+ def events
+ data_collector.records_fetcher.serialized_records
+ end
+
+ def median
+ data_collector.median.seconds
+ end
+
+ alias_method :project_median, :median
+ alias_method :group_median, :median
+
+ private
+
+ attr_reader :stage, :options
+
+ def data_collector
+ @data_collector ||= Gitlab::Analytics::CycleAnalytics::DataCollector.new(stage: stage, params: options)
+ end
+ end
+
def all_medians_by_stage
STAGES.each_with_object({}) do |stage_name, medians_per_stage|
- medians_per_stage[stage_name] = self[stage_name].project_median
+ medians_per_stage[stage_name] = self[stage_name].median
end
end
@@ -16,12 +59,16 @@ module CycleAnalytics
end
end
- def no_stats?
- stats.all? { |hash| hash[:value].nil? }
+ def [](stage_name)
+ if Feature.enabled?(:new_project_level_vsa_backend, resource_parent, default_enabled: true)
+ StageAdapter.new(build_stage(stage_name), options)
+ else
+ Gitlab::CycleAnalytics::Stage[stage_name].new(options: options)
+ end
end
- def [](stage_name)
- Gitlab::CycleAnalytics::Stage[stage_name].new(options: options)
+ def stage_params_by_name(name)
+ Gitlab::Analytics::CycleAnalytics::DefaultStages.find_by_name!(name)
end
end
end
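
Through the adapter, the old level[:stage_name] access keeps its shape while the data comes from the new DataCollector when the feature flag is enabled. A hedged sketch of project-level usage; the constructor arguments are assumptions based on the ProjectLevel class below.

level = CycleAnalytics::ProjectLevel.new(project, options: { current_user: current_user, from: 30.days.ago })

level[:issue].median         # median duration in seconds for the issue stage
level.all_medians_by_stage   # => { issue: ..., plan: ..., code: ..., test: ..., review: ..., staging: ... }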
diff --git a/app/models/cycle_analytics/project_level.rb b/app/models/cycle_analytics/project_level.rb
index 591435baf34..26cdcc0db4b 100644
--- a/app/models/cycle_analytics/project_level.rb
+++ b/app/models/cycle_analytics/project_level.rb
@@ -20,5 +20,14 @@ module CycleAnalytics
def permissions(user:)
Gitlab::CycleAnalytics::Permissions.get(user: user, project: project)
end
+
+ def build_stage(stage_name)
+ stage_params = stage_params_by_name(stage_name).merge(project: project)
+ Analytics::CycleAnalytics::ProjectStage.new(stage_params)
+ end
+
+ def resource_parent
+ project
+ end
end
end
diff --git a/app/models/dependency_proxy.rb b/app/models/dependency_proxy.rb
index 510a304ff17..9cbaf7e9884 100644
--- a/app/models/dependency_proxy.rb
+++ b/app/models/dependency_proxy.rb
@@ -1,5 +1,7 @@
# frozen_string_literal: true
module DependencyProxy
+ URL_SUFFIX = '/dependency_proxy/containers'
+
def self.table_name_prefix
'dependency_proxy_'
end
diff --git a/app/models/dependency_proxy/manifest.rb b/app/models/dependency_proxy/manifest.rb
new file mode 100644
index 00000000000..f3c7f34e0d7
--- /dev/null
+++ b/app/models/dependency_proxy/manifest.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class DependencyProxy::Manifest < ApplicationRecord
+ include FileStoreMounter
+
+ belongs_to :group
+
+ validates :group, presence: true
+ validates :file, presence: true
+ validates :file_name, presence: true
+ validates :digest, presence: true
+
+ mount_file_store_uploader DependencyProxy::FileUploader
+
+ scope :find_or_initialize_by_file_name, ->(file_name) { find_or_initialize_by(file_name: file_name) }
+end
diff --git a/app/models/dependency_proxy/registry.rb b/app/models/dependency_proxy/registry.rb
index 471d5be2600..6492acf325a 100644
--- a/app/models/dependency_proxy/registry.rb
+++ b/app/models/dependency_proxy/registry.rb
@@ -1,8 +1,9 @@
# frozen_string_literal: true
class DependencyProxy::Registry
- AUTH_URL = 'https://auth.docker.io'.freeze
- LIBRARY_URL = 'https://registry-1.docker.io/v2'.freeze
+ AUTH_URL = 'https://auth.docker.io'
+ LIBRARY_URL = 'https://registry-1.docker.io/v2'
+ PROXY_AUTH_URL = Gitlab::Utils.append_path(Gitlab.config.gitlab.url, "jwt/auth")
class << self
def auth_url(image)
@@ -17,6 +18,10 @@ class DependencyProxy::Registry
"#{LIBRARY_URL}/#{image_path(image)}/blobs/#{blob_sha}"
end
+ def authenticate_header
+ "Bearer realm=\"#{PROXY_AUTH_URL}\",service=\"#{::Auth::DependencyProxyAuthenticationService::AUDIENCE}\""
+ end
+
private
def image_path(image)
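
authenticate_header builds the WWW-Authenticate challenge the dependency proxy can return with a 401, pointing clients at the instance's own JWT endpoint. Roughly, for an instance at https://gitlab.example.com (the service value depends on ::Auth::DependencyProxyAuthenticationService::AUDIENCE and is an assumption here):

DependencyProxy::Registry.authenticate_header
# => "Bearer realm=\"https://gitlab.example.com/jwt/auth\",service=\"dependency_proxy\""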
diff --git a/app/models/deployment.rb b/app/models/deployment.rb
index 36ac1bdb236..b93b714ec8b 100644
--- a/app/models/deployment.rb
+++ b/app/models/deployment.rb
@@ -37,12 +37,19 @@ class Deployment < ApplicationRecord
end
scope :for_status, -> (status) { where(status: status) }
+ scope :for_project, -> (project_id) { where(project_id: project_id) }
scope :visible, -> { where(status: %i[running success failed canceled]) }
scope :stoppable, -> { where.not(on_stop: nil).where.not(deployable_id: nil).success }
scope :active, -> { where(status: %i[created running]) }
- scope :older_than, -> (deployment) { where('id < ?', deployment.id) }
- scope :with_deployable, -> { includes(:deployable).where('deployable_id IS NOT NULL') }
+ scope :older_than, -> (deployment) { where('deployments.id < ?', deployment.id) }
+ scope :with_deployable, -> { joins('INNER JOIN ci_builds ON ci_builds.id = deployments.deployable_id').preload(:deployable) }
+
+ scope :finished_between, -> (start_date, end_date = nil) do
+ selected = where('deployments.finished_at >= ?', start_date)
+ selected = selected.where('deployments.finished_at < ?', end_date) if end_date
+ selected
+ end
FINISHED_STATUSES = %i[success failed canceled].freeze
@@ -63,6 +70,10 @@ class Deployment < ApplicationRecord
transition any - [:canceled] => :canceled
end
+ event :skip do
+ transition any - [:skipped] => :skipped
+ end
+
before_transition any => FINISHED_STATUSES do |deployment|
deployment.finished_at = Time.current
end
@@ -105,7 +116,8 @@ class Deployment < ApplicationRecord
running: 1,
success: 2,
failed: 3,
- canceled: 4
+ canceled: 4,
+ skipped: 5
}
def self.last_for_environment(environment)
@@ -144,6 +156,10 @@ class Deployment < ApplicationRecord
project.repository.delete_refs(*ref_paths.flatten)
end
end
+
+ def latest_for_sha(sha)
+ where(sha: sha).order(id: :desc).take
+ end
end
def commit
@@ -297,6 +313,8 @@ class Deployment < ApplicationRecord
drop
when 'canceled'
cancel
+ when 'skipped'
+ skip
else
raise ArgumentError, "The status #{status.inspect} is invalid"
end
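
The new scopes and the skipped status compose with the existing ones. A short sketch; the project and time window are illustrative.

# Deployments of one project that finished in the last week (the end date is optional).
Deployment.for_project(project.id).finished_between(1.week.ago, Time.current)

# The state machine now accepts a skip event from any non-skipped status.
deployment.skip! if deployment.created? || deployment.running?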
diff --git a/app/models/diff_note.rb b/app/models/diff_note.rb
index 4b2e62bf761..944a64f5419 100644
--- a/app/models/diff_note.rb
+++ b/app/models/diff_note.rb
@@ -19,7 +19,7 @@ class DiffNote < Note
# EE might have added a type when the module was prepended
validates :noteable_type, inclusion: { in: -> (_note) { noteable_types } }
validate :positions_complete
- validate :verify_supported
+ validate :verify_supported, unless: :importing?
before_validation :set_line_code, if: :on_text?, unless: :importing?
after_save :keep_around_commits, unless: :importing?
@@ -149,7 +149,7 @@ class DiffNote < Note
end
def supported?
- for_commit? || for_design? || self.noteable.has_complete_diff_refs?
+ for_commit? || for_design? || self.noteable&.has_complete_diff_refs?
end
def set_line_code
diff --git a/app/models/environment.rb b/app/models/environment.rb
index deded3eeae0..31a95bb1b5d 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -32,6 +32,7 @@ class Environment < ApplicationRecord
has_one :last_visible_deployment, -> { visible.distinct_on_environment }, inverse_of: :environment, class_name: 'Deployment'
has_one :last_visible_deployable, through: :last_visible_deployment, source: 'deployable', source_type: 'CommitStatus'
has_one :last_visible_pipeline, through: :last_visible_deployable, source: 'pipeline'
+ has_one :upcoming_deployment, -> { running.order('deployments.id DESC') }, class_name: 'Deployment'
has_one :latest_opened_most_severe_alert, -> { order_severity_with_open_prometheus_alert }, class_name: 'AlertManagement::Alert', inverse_of: :environment
before_validation :nullify_external_url
@@ -60,6 +61,7 @@ class Environment < ApplicationRecord
addressable_url: true
delegate :stop_action, :manual_actions, to: :last_deployment, allow_nil: true
+ delegate :auto_rollback_enabled?, to: :project
scope :available, -> { with_state(:available) }
scope :stopped, -> { with_state(:stopped) }
@@ -240,10 +242,6 @@ class Environment < ApplicationRecord
def cancel_deployment_jobs!
jobs = active_deployments.with_deployable
jobs.each do |deployment|
- # guard against data integrity issues,
- # for example https://gitlab.com/gitlab-org/gitlab/-/issues/218659#note_348823660
- next unless deployment.deployable
-
Gitlab::OptimisticLocking.retry_lock(deployment.deployable) do |deployable|
deployable.cancel! if deployable&.cancelable?
end
@@ -387,8 +385,38 @@ class Environment < ApplicationRecord
!!deployment_platform&.cluster&.application_elastic_stack_available?
end
+ def rollout_status
+ return unless rollout_status_available?
+
+ result = rollout_status_with_reactive_cache
+
+ result || ::Gitlab::Kubernetes::RolloutStatus.loading
+ end
+
+ def ingresses
+ return unless rollout_status_available?
+
+ deployment_platform.ingresses(deployment_namespace)
+ end
+
+ def patch_ingress(ingress, data)
+ return unless rollout_status_available?
+
+ deployment_platform.patch_ingress(deployment_namespace, ingress, data)
+ end
+
private
+ def rollout_status_available?
+ has_terminals?
+ end
+
+ def rollout_status_with_reactive_cache
+ with_reactive_cache do |data|
+ deployment_platform.rollout_status(self, data)
+ end
+ end
+
def has_metrics_and_can_query?
has_metrics? && prometheus_adapter.can_query?
end
@@ -396,11 +424,6 @@ class Environment < ApplicationRecord
def generate_slug
self.slug = Gitlab::Slug::Environment.new(name).generate
end
-
- # Overrides ReactiveCaching default to activate limit checking behind a FF
- def reactive_cache_limit_enabled?
- Feature.enabled?(:reactive_caching_limit_environment, project, default_enabled: true)
- end
end
Environment.prepend_if_ee('EE::Environment')
diff --git a/app/models/experiment.rb b/app/models/experiment.rb
index f179a1fc6ce..a4cacab25ee 100644
--- a/app/models/experiment.rb
+++ b/app/models/experiment.rb
@@ -2,17 +2,24 @@
class Experiment < ApplicationRecord
has_many :experiment_users
+ has_many :experiment_subjects, inverse_of: :experiment
validates :name, presence: true, uniqueness: true, length: { maximum: 255 }
- def self.add_user(name, group_type, user)
- return unless experiment = find_or_create_by(name: name)
+ def self.add_user(name, group_type, user, context = {})
+ find_or_create_by!(name: name).record_user_and_group(user, group_type, context)
+ end
- experiment.record_user_and_group(user, group_type)
+ def self.record_conversion_event(name, user)
+ find_or_create_by!(name: name).record_conversion_event_for_user(user)
end
# Create or update the recorded experiment_user row for the user in this experiment.
- def record_user_and_group(user, group_type)
- experiment_users.find_or_initialize_by(user: user).update!(group_type: group_type)
+ def record_user_and_group(user, group_type, context = {})
+ experiment_users.find_or_initialize_by(user: user).update!(group_type: group_type, context: context)
+ end
+
+ def record_conversion_event_for_user(user)
+ experiment_users.find_by(user: user, converted_at: nil)&.touch(:converted_at)
end
end
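
A short sketch of the extended class-level API; the experiment name, group type, and context are illustrative.

# Put a user into a group for the experiment, recording extra context on the row.
Experiment.add_user(:onboarding_issues, :experimental, current_user, onboarding: true)

# Later, record the first conversion for that user (touches converted_at once).
Experiment.record_conversion_event(:onboarding_issues, current_user)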
diff --git a/app/models/experiment_subject.rb b/app/models/experiment_subject.rb
new file mode 100644
index 00000000000..51ffc0b304e
--- /dev/null
+++ b/app/models/experiment_subject.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+class ExperimentSubject < ApplicationRecord
+ include ::Gitlab::Experimentation::GroupTypes
+
+ belongs_to :experiment, inverse_of: :experiment_subjects
+ belongs_to :user
+ belongs_to :group
+ belongs_to :project
+
+ validates :experiment, presence: true
+ validates :variant, presence: true
+ validate :must_have_one_subject_present
+
+ enum variant: { GROUP_CONTROL => 0, GROUP_EXPERIMENTAL => 1 }
+
+ private
+
+ def must_have_one_subject_present
+ if non_nil_subjects.length != 1
+ errors.add(:base, s_("ExperimentSubject|Must have exactly one of User, Group, or Project."))
+ end
+ end
+
+ def non_nil_subjects
+ @non_nil_subjects ||= [user, group, project].reject(&:blank?)
+ end
+end
diff --git a/app/models/exported_protected_branch.rb b/app/models/exported_protected_branch.rb
new file mode 100644
index 00000000000..6e8abbc2389
--- /dev/null
+++ b/app/models/exported_protected_branch.rb
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+class ExportedProtectedBranch < ProtectedBranch
+ has_many :push_access_levels, -> { where(deploy_key_id: nil) }, class_name: "ProtectedBranch::PushAccessLevel", foreign_key: :protected_branch_id
+end
diff --git a/app/models/group.rb b/app/models/group.rb
index 3509299a579..739135e82dd 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -73,6 +73,7 @@ class Group < Namespace
has_one :dependency_proxy_setting, class_name: 'DependencyProxy::GroupSetting'
has_many :dependency_proxy_blobs, class_name: 'DependencyProxy::Blob'
+ has_many :dependency_proxy_manifests, class_name: 'DependencyProxy::Manifest'
accepts_nested_attributes_for :variables, allow_destroy: true
@@ -402,6 +403,13 @@ class Group < Namespace
.where(source_id: self_and_hierarchy.reorder(nil).select(:id))
end
+ def direct_and_indirect_members_with_inactive
+ GroupMember
+ .non_request
+ .non_invite
+ .where(source_id: self_and_hierarchy.reorder(nil).select(:id))
+ end
+
def users_with_parents
User
.where(id: members_with_parents.select(:user_id))
@@ -428,6 +436,20 @@ class Group < Namespace
])
end
+ # Returns all users (including inactive ones) that are members of the group because:
+ # 1. They belong to the group
+ # 2. They belong to a project that belongs to the group
+ # 3. They belong to a sub-group or project in such sub-group
+ # 4. They belong to an ancestor group
+ def direct_and_indirect_users_with_inactive
+ User.from_union([
+ User
+ .where(id: direct_and_indirect_members_with_inactive.select(:user_id))
+ .reorder(nil),
+ project_users_with_descendants
+ ])
+ end
+
def users_count
members.count
end
diff --git a/app/models/group_import_state.rb b/app/models/group_import_state.rb
index 89602e40357..c47ae3a80ba 100644
--- a/app/models/group_import_state.rb
+++ b/app/models/group_import_state.rb
@@ -3,6 +3,8 @@
class GroupImportState < ApplicationRecord
self.primary_key = :group_id
+ MAX_ERROR_LENGTH = 255
+
belongs_to :group, inverse_of: :import_state
belongs_to :user, optional: false
@@ -30,7 +32,7 @@ class GroupImportState < ApplicationRecord
after_transition any => :failed do |state, transition|
last_error = transition.args.first
- state.update_column(:last_error, last_error) if last_error
+ state.update_column(:last_error, last_error.truncate(MAX_ERROR_LENGTH)) if last_error
end
end
diff --git a/app/models/identity.rb b/app/models/identity.rb
index 40d9f856abf..fc97c68b756 100644
--- a/app/models/identity.rb
+++ b/app/models/identity.rb
@@ -18,6 +18,9 @@ class Identity < ApplicationRecord
scope :with_extern_uid, ->(provider, extern_uid) do
iwhere(extern_uid: normalize_uid(provider, extern_uid)).with_provider(provider)
end
+ scope :with_any_extern_uid, ->(provider) do
+ where.not(extern_uid: nil).with_provider(provider)
+ end
def ldap?
Gitlab::Auth::OAuth::Provider.ldap_provider?(provider)
diff --git a/app/models/issue.rb b/app/models/issue.rb
index 7dc18cacd7c..253f4465cd9 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -22,6 +22,7 @@ class Issue < ApplicationRecord
include Presentable
include IssueAvailableFeatures
include Todoable
+ include FromUnion
DueDateStruct = Struct.new(:title, :name).freeze
NoDueDate = DueDateStruct.new('No Due Date', '0').freeze
@@ -90,6 +91,8 @@ class Issue < ApplicationRecord
alias_attribute :parent_ids, :project_id
alias_method :issuing_parent, :project
+ alias_attribute :external_author, :service_desk_reply_to
+
scope :in_projects, ->(project_ids) { where(project_id: project_ids) }
scope :not_in_projects, ->(project_ids) { where.not(project_id: project_ids) }
@@ -306,6 +309,7 @@ class Issue < ApplicationRecord
!moved? && persisted? &&
user.can?(:admin_issue, self.project)
end
+ alias_method :can_clone?, :can_move?
def to_branch_name
if self.confidential?
@@ -328,7 +332,9 @@ class Issue < ApplicationRecord
related_issues = ::Issue
.select(['issues.*', 'issue_links.id AS issue_link_id',
'issue_links.link_type as issue_link_type_value',
- 'issue_links.target_id as issue_link_source_id'])
+ 'issue_links.target_id as issue_link_source_id',
+ 'issue_links.created_at as issue_link_created_at',
+ 'issue_links.updated_at as issue_link_updated_at'])
.joins("INNER JOIN issue_links ON
(issue_links.source_id = issues.id AND issue_links.target_id = #{id})
OR
diff --git a/app/models/iteration.rb b/app/models/iteration.rb
index ba7cd973e9d..7a35bb1cd1f 100644
--- a/app/models/iteration.rb
+++ b/app/models/iteration.rb
@@ -32,9 +32,9 @@ class Iteration < ApplicationRecord
scope :closed, -> { with_state(:closed) }
scope :within_timeframe, -> (start_date, end_date) do
- where('start_date is not NULL or due_date is not NULL')
- .where('start_date is NULL or start_date <= ?', end_date)
- .where('due_date is NULL or due_date >= ?', start_date)
+ where('start_date IS NOT NULL OR due_date IS NOT NULL')
+ .where('start_date IS NULL OR start_date <= ?', end_date)
+ .where('due_date IS NULL OR due_date >= ?', start_date)
end
scope :start_date_passed, -> { where('start_date <= ?', Date.current).where('due_date >= ?', Date.current) }
diff --git a/app/models/label.rb b/app/models/label.rb
index 3c70eef9bd5..54129c7c7f3 100644
--- a/app/models/label.rb
+++ b/app/models/label.rb
@@ -257,7 +257,7 @@ class Label < ApplicationRecord
end
def present(attributes)
- super(attributes.merge(presenter_class: ::LabelPresenter))
+ super(**attributes.merge(presenter_class: ::LabelPresenter))
end
private
diff --git a/app/models/label_priority.rb b/app/models/label_priority.rb
index 8f8f36efbfe..11854404a71 100644
--- a/app/models/label_priority.rb
+++ b/app/models/label_priority.rb
@@ -1,10 +1,13 @@
# frozen_string_literal: true
class LabelPriority < ApplicationRecord
+ include Importable
+
belongs_to :project
belongs_to :label
- validates :project, :label, :priority, presence: true
+ validates :label, presence: true, unless: :importing?
+ validates :project, :priority, presence: true
validates :label_id, uniqueness: { scope: :project_id }
validates :priority, numericality: { only_integer: true, greater_than_or_equal_to: 0 }
end
diff --git a/app/models/list.rb b/app/models/list.rb
index ec211dfd497..1df565c83e6 100644
--- a/app/models/list.rb
+++ b/app/models/list.rb
@@ -7,7 +7,7 @@ class List < ApplicationRecord
belongs_to :label
has_many :list_user_preferences
- enum list_type: { backlog: 0, label: 1, closed: 2, assignee: 3, milestone: 4 }
+ enum list_type: { backlog: 0, label: 1, closed: 2, assignee: 3, milestone: 4, iteration: 5 }
validates :board, :list_type, presence: true, unless: :importing?
validates :label, :position, presence: true, if: :label?
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index d379f85bc15..043f07cf9f3 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -233,13 +233,13 @@ class MergeRequest < ApplicationRecord
cannot_be_merged_rechecking? ? 'checking' : merge_status
end
- validates :source_project, presence: true, unless: [:allow_broken, :importing?, :closed_without_fork?]
+ validates :source_project, presence: true, unless: [:allow_broken, :importing?, :closed_or_merged_without_fork?]
validates :source_branch, presence: true
validates :target_project, presence: true
validates :target_branch, presence: true
validates :merge_user, presence: true, if: :auto_merge_enabled?, unless: :importing?
- validate :validate_branches, unless: [:allow_broken, :importing?, :closed_without_fork?]
- validate :validate_fork, unless: :closed_without_fork?
+ validate :validate_branches, unless: [:allow_broken, :importing?, :closed_or_merged_without_fork?]
+ validate :validate_fork, unless: :closed_or_merged_without_fork?
validate :validate_target_project, on: :create
scope :by_source_or_target_branch, ->(branch_name) do
@@ -274,7 +274,7 @@ class MergeRequest < ApplicationRecord
scope :with_api_entity_associations, -> {
preload_routables
.preload(:assignees, :author, :unresolved_notes, :labels, :milestone,
- :timelogs, :latest_merge_request_diff,
+ :timelogs, :latest_merge_request_diff, :reviewers,
target_project: :project_feature,
metrics: [:latest_closed_by, :merged_by])
}
@@ -314,6 +314,38 @@ class MergeRequest < ApplicationRecord
scope :with_jira_issue_keys, -> { where('title ~ :regex OR merge_requests.description ~ :regex', regex: Gitlab::Regex.jira_issue_key_regex.source) }
+ scope :review_requested, -> do
+ where(reviewers_subquery.exists)
+ end
+
+ scope :no_review_requested, -> do
+ where(reviewers_subquery.exists.not)
+ end
+
+ scope :review_requested_to, ->(user) do
+ where(
+ reviewers_subquery
+ .where(Arel::Table.new("#{to_ability_name}_reviewers")[:user_id].eq(user))
+ .exists
+ )
+ end
+
+ scope :no_review_requested_to, ->(user) do
+ where(
+ reviewers_subquery
+ .where(Arel::Table.new("#{to_ability_name}_reviewers")[:user_id].eq(user))
+ .exists
+ .not
+ )
+ end
+
+ def self.total_time_to_merge
+ join_metrics
+ .merge(MergeRequest::Metrics.with_valid_time_to_merge)
+ .pluck(MergeRequest::Metrics.time_to_merge_expression)
+ .first
+ end
+
after_save :keep_around_commit, unless: :importing?
alias_attribute :project, :target_project
@@ -361,6 +393,12 @@ class MergeRequest < ApplicationRecord
end
end
+ def self.reviewers_subquery
+ MergeRequestReviewer.arel_table
+ .project('true')
+ .where(Arel::Nodes::SqlLiteral.new("#{to_ability_name}_id = #{to_ability_name}s.id"))
+ end
+
def rebase_in_progress?
rebase_jid.present? && Gitlab::SidekiqStatus.running?(rebase_jid)
end
@@ -845,8 +883,8 @@ class MergeRequest < ApplicationRecord
!!merge_jid && !merged? && Gitlab::SidekiqStatus.running?(merge_jid)
end
- def closed_without_fork?
- closed? && source_project_missing?
+ def closed_or_merged_without_fork?
+ (closed? || merged?) && source_project_missing?
end
def source_project_missing?
@@ -941,7 +979,7 @@ class MergeRequest < ApplicationRecord
# rubocop: enable CodeReuse/ServiceClass
def diffable_merge_ref?
- merge_ref_head.present? && (Feature.enabled?(:display_merge_conflicts_in_diff, project) || can_be_merged?)
+ open? && merge_ref_head.present? && (Feature.enabled?(:display_merge_conflicts_in_diff, project) || can_be_merged?)
end
# Returns boolean indicating the merge_status should be rechecked in order to
@@ -1423,6 +1461,20 @@ class MergeRequest < ApplicationRecord
compare_reports(Ci::GenerateCoverageReportsService)
end
+ def has_codequality_reports?
+ return false unless Feature.enabled?(:codequality_mr_diff, project)
+
+ actual_head_pipeline&.has_reports?(Ci::JobArtifact.codequality_reports)
+ end
+
+ def compare_codequality_reports
+ unless has_codequality_reports?
+ return { status: :error, status_reason: _('This merge request does not have codequality reports') }
+ end
+
+ compare_reports(Ci::CompareCodequalityReportsService)
+ end
+
def find_terraform_reports
unless has_terraform_reports?
return { status: :error, status_reason: 'This merge request does not have terraform reports' }
@@ -1703,7 +1755,7 @@ class MergeRequest < ApplicationRecord
end
def allows_reviewers?
- Feature.enabled?(:merge_request_reviewers, project)
+ Feature.enabled?(:merge_request_reviewers, project, default_enabled: true)
end
def allows_multiple_reviewers?
diff --git a/app/models/merge_request/metrics.rb b/app/models/merge_request/metrics.rb
index 66bff3f5982..d3fe256fb1b 100644
--- a/app/models/merge_request/metrics.rb
+++ b/app/models/merge_request/metrics.rb
@@ -10,6 +10,11 @@ class MergeRequest::Metrics < ApplicationRecord
scope :merged_after, ->(date) { where(arel_table[:merged_at].gteq(date)) }
scope :merged_before, ->(date) { where(arel_table[:merged_at].lteq(date)) }
+ scope :with_valid_time_to_merge, -> { where(arel_table[:merged_at].gt(arel_table[:created_at])) }
+
+ def self.time_to_merge_expression
+ Arel.sql('EXTRACT(epoch FROM SUM(AGE(merge_request_metrics.merged_at, merge_request_metrics.created_at)))')
+ end
private
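
total_time_to_merge sums merged_at minus created_at across qualifying metrics rows in SQL and returns the total in seconds, or nil when nothing qualifies. A usage sketch; the hours conversion is only for display.

seconds = project.merge_requests.total_time_to_merge
hours = (seconds / 3600.0).round(1) if seconds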
diff --git a/app/models/merge_request_diff.rb b/app/models/merge_request_diff.rb
index 24809141570..d23e66b9697 100644
--- a/app/models/merge_request_diff.rb
+++ b/app/models/merge_request_diff.rb
@@ -358,6 +358,7 @@ class MergeRequestDiff < ApplicationRecord
if comparison
comparison.diffs_in_batch(batch_page, batch_size, diff_options: diff_options)
else
+ reorder_diff_files!
diffs_in_batch_collection(batch_page, batch_size, diff_options: diff_options)
end
end
@@ -371,6 +372,7 @@ class MergeRequestDiff < ApplicationRecord
if comparison
comparison.diffs(diff_options)
else
+ reorder_diff_files!
diffs_collection(diff_options)
end
end
@@ -565,7 +567,7 @@ class MergeRequestDiff < ApplicationRecord
end
def build_merge_request_diff_files(diffs)
- diffs.map.with_index do |diff, index|
+ sort_diffs(diffs).map.with_index do |diff, index|
diff_hash = diff.to_hash.merge(
binary: false,
merge_request_diff_id: self.id,
@@ -678,6 +680,7 @@ class MergeRequestDiff < ApplicationRecord
rows = build_merge_request_diff_files(diff_collection)
create_merge_request_diff_files(rows)
+ new_attributes[:sorted] = true
self.class.uncached { merge_request_diff_files.reset }
end
@@ -719,6 +722,35 @@ class MergeRequestDiff < ApplicationRecord
repo.keep_around(start_commit_sha, head_commit_sha, base_commit_sha)
end
end
+
+ def reorder_diff_files!
+ return unless sort_diffs?
+ return if sorted? || merge_request_diff_files.empty?
+
+ diff_files = sort_diffs(merge_request_diff_files)
+
+ diff_files.each_with_index do |diff_file, index|
+ diff_file.relative_order = index
+ end
+
+ transaction do
+ # The `merge_request_diff_files` table doesn't have an `id` column so
+ # we cannot use `Gitlab::Database::BulkUpdate`.
+ MergeRequestDiffFile.where(merge_request_diff_id: id).delete_all
+ MergeRequestDiffFile.bulk_insert!(diff_files)
+ update_column(:sorted, true)
+ end
+ end
+
+ def sort_diffs(diffs)
+ return diffs unless sort_diffs?
+
+ Gitlab::Diff::FileCollectionSorter.new(diffs).sort
+ end
+
+ def sort_diffs?
+ Feature.enabled?(:sort_diffs, project, default_enabled: false)
+ end
end
MergeRequestDiff.prepend_if_ee('EE::MergeRequestDiff')
diff --git a/app/models/merge_request_reviewer.rb b/app/models/merge_request_reviewer.rb
index 1cb49c0cd76..c4e5274f832 100644
--- a/app/models/merge_request_reviewer.rb
+++ b/app/models/merge_request_reviewer.rb
@@ -2,5 +2,5 @@
class MergeRequestReviewer < ApplicationRecord
belongs_to :merge_request
- belongs_to :reviewer, class_name: "User", foreign_key: :user_id, inverse_of: :merge_request_assignees
+ belongs_to :reviewer, class_name: 'User', foreign_key: :user_id, inverse_of: :merge_request_reviewers
end
diff --git a/app/models/milestone.rb b/app/models/milestone.rb
index c8776be5e4a..c244150e7a3 100644
--- a/app/models/milestone.rb
+++ b/app/models/milestone.rb
@@ -9,6 +9,10 @@ class Milestone < ApplicationRecord
prepend_if_ee('::EE::Milestone') # rubocop: disable Cop/InjectEnterpriseEditionModule
+ class Predefined
+ ALL = [::Timebox::None, ::Timebox::Any, ::Timebox::Started, ::Timebox::Upcoming].freeze
+ end
+
has_many :milestone_releases
has_many :releases, through: :milestone_releases
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index 232d0a6b05d..238e8f70778 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -28,6 +28,7 @@ class Namespace < ApplicationRecord
has_many :runner_namespaces, inverse_of: :namespace, class_name: 'Ci::RunnerNamespace'
has_many :runners, through: :runner_namespaces, source: :runner, class_name: 'Ci::Runner'
+ has_many :namespace_onboarding_actions
# This should _not_ be `inverse_of: :namespace`, because that would also set
# `user.namespace` when this user creates a group with themselves as `owner`.
diff --git a/app/models/namespace_onboarding_action.rb b/app/models/namespace_onboarding_action.rb
new file mode 100644
index 00000000000..43dd872673c
--- /dev/null
+++ b/app/models/namespace_onboarding_action.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class NamespaceOnboardingAction < ApplicationRecord
+ belongs_to :namespace, optional: false
+
+ validates :action, presence: true
+
+ ACTIONS = {
+ subscription_created: 1,
+ git_write: 2,
+ merge_request_created: 3,
+ git_read: 4,
+ user_added: 6
+ }.freeze
+
+ enum action: ACTIONS
+
+ class << self
+ def completed?(namespace, action)
+ where(namespace: namespace, action: action).exists?
+ end
+
+ def create_action(namespace, action)
+ NamespaceOnboardingAction.safe_find_or_create_by(namespace: namespace, action: action)
+ end
+ end
+end
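
The model is a small idempotent record of per-namespace onboarding milestones. A minimal sketch; the namespace and action are illustrative.

# Record the action; safe_find_or_create_by makes repeated calls a no-op.
NamespaceOnboardingAction.create_action(namespace, :git_write)

# Check whether the namespace already performed it.
NamespaceOnboardingAction.completed?(namespace, :git_write)   # => true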
diff --git a/app/models/note.rb b/app/models/note.rb
index cfdac6c432f..77f7726079c 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -145,7 +145,6 @@ class Note < ApplicationRecord
after_save :expire_etag_cache, unless: :importing?
after_save :touch_noteable, unless: :importing?
after_destroy :expire_etag_cache
- after_save :store_mentions!, if: :any_mentionable_attributes_changed?
after_commit :notify_after_create, on: :create
after_commit :notify_after_destroy, on: :destroy
@@ -548,8 +547,8 @@ class Note < ApplicationRecord
private
- # Using this method followed by a call to `save` may result in ActiveRecord::RecordNotUnique exception
- # in a multithreaded environment. Make sure to use it within a `safe_ensure_unique` block.
+ # Using this method followed by a call to *save* may result in *ActiveRecord::RecordNotUnique* exception
+ # in a multi-threaded environment. Make sure to use it within a *safe_ensure_unique* block.
def model_user_mention
return if user_mentions.is_a?(ActiveRecord::NullRelation)
diff --git a/app/models/notification_setting.rb b/app/models/notification_setting.rb
index 6066046a722..82e39e4f207 100644
--- a/app/models/notification_setting.rb
+++ b/app/models/notification_setting.rb
@@ -30,6 +30,7 @@ class NotificationSetting < ApplicationRecord
scope :preload_source_route, -> { preload(source: [:route]) }
+ # NOTE: unfound_translations.rb also needs to be updated when the events below are changed.
EMAIL_EVENTS = [
:new_release,
:new_note,
@@ -51,7 +52,6 @@ class NotificationSetting < ApplicationRecord
:moved_project
].freeze
- # Update unfound_translations.rb when events are changed
def self.email_events(source = nil)
EMAIL_EVENTS
end
diff --git a/app/models/packages/event.rb b/app/models/packages/event.rb
index 959c94931ec..13da82d16d3 100644
--- a/app/models/packages/event.rb
+++ b/app/models/packages/event.rb
@@ -25,7 +25,7 @@ class Packages::Event < ApplicationRecord
enum originator_type: { user: 0, deploy_token: 1, guest: 2 }
def self.allowed_event_name(event_scope, event_type, originator)
- return unless event_allowed?(event_scope, event_type, originator)
+ return unless event_allowed?(event_type)
# remove `package` from the event name to avoid issues with HLLRedisCounter class parsing
"i_package_#{event_scope}_#{originator}_#{event_type.gsub(/_packages?/, "")}"
@@ -33,8 +33,7 @@ class Packages::Event < ApplicationRecord
# Remove some of the events, for now, so we don't hammer Redis too hard.
# See: https://gitlab.com/gitlab-org/gitlab/-/issues/280770
- def self.event_allowed?(event_scope, event_type, originator)
- return false if originator.to_sym == :guest
+ def self.event_allowed?(event_type)
return true if UNIQUE_EVENTS_ALLOWED.include?(event_type.to_sym)
false
diff --git a/app/models/packages/package.rb b/app/models/packages/package.rb
index 60aab0a7222..10c98f03804 100644
--- a/app/models/packages/package.rb
+++ b/app/models/packages/package.rb
@@ -28,7 +28,7 @@ class Packages::Package < ApplicationRecord
validates :project, presence: true
validates :name, presence: true
- validates :name, format: { with: Gitlab::Regex.package_name_regex }, unless: -> { conan? || generic? }
+ validates :name, format: { with: Gitlab::Regex.package_name_regex }, unless: -> { conan? || generic? || debian? }
validates :name,
uniqueness: { scope: %i[project_id version package_type] }, unless: :conan?
@@ -40,6 +40,8 @@ class Packages::Package < ApplicationRecord
validates :name, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
validates :name, format: { with: Gitlab::Regex.generic_package_name_regex }, if: :generic?
validates :name, format: { with: Gitlab::Regex.nuget_package_name_regex }, if: :nuget?
+ validates :name, format: { with: Gitlab::Regex.debian_package_name_regex }, if: :debian_package?
+ validates :name, inclusion: { in: %w[incoming] }, if: :debian_incoming?
validates :version, format: { with: Gitlab::Regex.nuget_version_regex }, if: :nuget?
validates :version, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
validates :version, format: { with: Gitlab::Regex.maven_version_regex }, if: -> { version? && maven? }
@@ -51,6 +53,11 @@ class Packages::Package < ApplicationRecord
presence: true,
format: { with: Gitlab::Regex.generic_package_version_regex },
if: :generic?
+ validates :version,
+ presence: true,
+ format: { with: Gitlab::Regex.debian_version_regex },
+ if: :debian_package?
+ validate :forbidden_debian_changes, if: :debian?
enum package_type: { maven: 1, npm: 2, conan: 3, nuget: 4, pypi: 5, composer: 6, generic: 7, golang: 8, debian: 9 }
@@ -184,6 +191,14 @@ class Packages::Package < ApplicationRecord
tags.pluck(:name)
end
+ def debian_incoming?
+ debian? && version.nil?
+ end
+
+ def debian_package?
+ debian? && !version.nil?
+ end
+
private
def composer_tag_version?
@@ -228,4 +243,13 @@ class Packages::Package < ApplicationRecord
errors.add(:base, _('Package already exists'))
end
end
+
+ def forbidden_debian_changes
+ return unless persisted?
+
+ # Debian incoming
+ if version_was.nil? || version.nil?
+ errors.add(:version, _('cannot be changed')) if version_changed?
+ end
+ end
end
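
The new predicates split Debian records on version presence: an incoming upload has no version yet, a finished Debian package always has one, and forbidden_debian_changes stops a persisted record from crossing that line. A small plain-Ruby sketch of the split (a Struct stands in for the ActiveRecord model):

DebianPackage = Struct.new(:package_type, :version, keyword_init: true) do
  def debian?
    package_type == :debian
  end

  def debian_incoming?
    debian? && version.nil?
  end

  def debian_package?
    debian? && !version.nil?
  end
end

incoming = DebianPackage.new(package_type: :debian, version: nil)
package  = DebianPackage.new(package_type: :debian, version: '1.2.3-1')

p incoming.debian_incoming? # => true
p package.debian_package?   # => true
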
diff --git a/app/models/packages/package_file.rb b/app/models/packages/package_file.rb
index d68f75140ac..e8d1dd1e8c4 100644
--- a/app/models/packages/package_file.rb
+++ b/app/models/packages/package_file.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
class Packages::PackageFile < ApplicationRecord
include UpdateProjectStatistics
+ include FileStoreMounter
delegate :project, :project_id, to: :package
delegate :conan_file_type, to: :conan_file_metadatum
@@ -35,20 +36,12 @@ class Packages::PackageFile < ApplicationRecord
.where(packages_conan_file_metadata: { conan_package_reference: conan_package_reference })
end
- mount_uploader :file, Packages::PackageFileUploader
-
- after_save :update_file_metadata, if: :saved_change_to_file?
+ mount_file_store_uploader Packages::PackageFileUploader
update_project_statistics project_statistics_name: :packages_size
before_save :update_size_from_file
- def update_file_metadata
- # The file.object_store is set during `uploader.store!`
- # which happens after object is inserted/updated
- self.update_column(:file_store, file.object_store)
- end
-
def download_path
Gitlab::Routing.url_helpers.download_project_package_file_path(project, self)
end
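
mount_file_store_uploader replaces the hand-rolled after_save callback that copied file.object_store into the file_store column. A plausible shape of such a concern, reconstructed from the removed callback; this is a sketch only (it assumes ActiveSupport::Concern and CarrierWave's mount_uploader, and the real FileStoreMounter may differ in detail):

module FileStoreMounterSketch
  extend ActiveSupport::Concern

  class_methods do
    def mount_file_store_uploader(uploader)
      mount_uploader(:file, uploader)
      # The uploader picks the object store during `store!`, which runs after
      # the row is written, so the column is synced afterwards.
      after_save :update_file_store, if: :saved_change_to_file?
    end
  end

  def update_file_store
    update_column(:file_store, file.object_store)
  end
end
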
diff --git a/app/models/pages/lookup_path.rb b/app/models/pages/lookup_path.rb
index 9855731778f..84928468ad1 100644
--- a/app/models/pages/lookup_path.rb
+++ b/app/models/pages/lookup_path.rb
@@ -2,6 +2,8 @@
module Pages
class LookupPath
+ include Gitlab::Utils::StrongMemoize
+
def initialize(project, trim_prefix: nil, domain: nil)
@project = project
@domain = domain
@@ -37,37 +39,28 @@ module Pages
attr_reader :project, :trim_prefix, :domain
- def artifacts_archive
- return unless Feature.enabled?(:pages_serve_from_artifacts_archive, project)
-
- project.pages_metadatum.artifacts_archive
- end
-
def deployment
- return unless Feature.enabled?(:pages_serve_from_deployments, project)
+ strong_memoize(:deployment) do
+ next unless Feature.enabled?(:pages_serve_from_deployments, project, default_enabled: true)
- project.pages_metadatum.pages_deployment
+ project.pages_metadatum.pages_deployment
+ end
end
def zip_source
- source = deployment || artifacts_archive
-
- return unless source&.file
-
- return if source.file.file_storage? && !Feature.enabled?(:pages_serve_with_zip_file_protocol, project)
+ return unless deployment&.file
- # artifacts archive doesn't support this
- file_count = source.file_count if source.respond_to?(:file_count)
+ return if deployment.file.file_storage? && !Feature.enabled?(:pages_serve_with_zip_file_protocol, project)
- global_id = ::Gitlab::GlobalId.build(source, id: source.id).to_s
+ global_id = ::Gitlab::GlobalId.build(deployment, id: deployment.id).to_s
{
type: 'zip',
- path: source.file.url_or_file_path(expire_at: 1.day.from_now),
+ path: deployment.file.url_or_file_path(expire_at: 1.day.from_now),
global_id: global_id,
- sha256: source.file_sha256,
- file_size: source.size,
- file_count: file_count
+ sha256: deployment.file_sha256,
+ file_size: deployment.size,
+ file_count: deployment.file_count
}
end
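
deployment is now wrapped in strong_memoize, which, unlike the plain @deployment ||= idiom, also caches a nil or false result, so a disabled feature flag is evaluated only once per lookup path. A standalone illustration of the pattern with a hand-rolled helper (GitLab's own lives in Gitlab::Utils::StrongMemoize):

module StrongMemoizeSketch
  def strong_memoize(name)
    ivar = :"@#{name}"
    return instance_variable_get(ivar) if instance_variable_defined?(ivar)

    instance_variable_set(ivar, yield)
  end
end

class LookupPathSketch
  include StrongMemoizeSketch

  def deployment
    strong_memoize(:deployment) do
      puts 'expensive lookup runs once'
      nil # even a nil result is memoized
    end
  end
end

path = LookupPathSketch.new
path.deployment
path.deployment # prints nothing the second time
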
diff --git a/app/models/pages_domain.rb b/app/models/pages_domain.rb
index 8192310ddfb..4004ea9a662 100644
--- a/app/models/pages_domain.rb
+++ b/app/models/pages_domain.rb
@@ -34,10 +34,10 @@ class PagesDomain < ApplicationRecord
validate :validate_matching_key, if: ->(domain) { domain.certificate.present? || domain.key.present? }
validate :validate_intermediates, if: ->(domain) { domain.certificate.present? && domain.certificate_changed? }
- default_value_for(:auto_ssl_enabled, allow_nil: false) { ::Gitlab::LetsEncrypt.enabled? }
- default_value_for :scope, allow_nil: false, value: :project
- default_value_for :wildcard, allow_nil: false, value: false
- default_value_for :usage, allow_nil: false, value: :pages
+ default_value_for(:auto_ssl_enabled, allows_nil: false) { ::Gitlab::LetsEncrypt.enabled? }
+ default_value_for :scope, allows_nil: false, value: :project
+ default_value_for :wildcard, allows_nil: false, value: false
+ default_value_for :usage, allows_nil: false, value: :pages
attr_encrypted :key,
mode: :per_attribute_iv_and_salt,
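
The rename from allow_nil: to allows_nil: matters because allows_nil is the option name the default_value_for gem recognizes; with the misspelled key the option is silently ignored, so an explicitly assigned nil is kept instead of being replaced by the default. A pure-Ruby emulation of the intended semantics, illustrative only (not the gem's implementation):

def resolve(value, default:, allows_nil:)
  return value unless value.nil?

  allows_nil ? nil : default
end

p resolve(nil,  default: :pages, allows_nil: false) # => :pages
p resolve(nil,  default: :pages, allows_nil: true)  # => nil
p resolve(:foo, default: :pages, allows_nil: false) # => :foo
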
diff --git a/app/models/personal_access_token.rb b/app/models/personal_access_token.rb
index 5aa5f2c842b..3b07551fe05 100644
--- a/app/models/personal_access_token.rb
+++ b/app/models/personal_access_token.rb
@@ -9,7 +9,9 @@ class PersonalAccessToken < ApplicationRecord
add_authentication_token_field :token, digest: true
REDIS_EXPIRY_TIME = 3.minutes
- TOKEN_LENGTH = 20
+
+ # PATs are 20 characters, plus an optional prefix (0..20 characters) configured in application settings
+ TOKEN_LENGTH_RANGE = (20..40).freeze
serialize :scopes, Array # rubocop:disable Cop/ActiveRecordSerialize
@@ -77,6 +79,15 @@ class PersonalAccessToken < ApplicationRecord
)
end
+ def self.token_prefix
+ Gitlab::CurrentSettings.current_application_settings.personal_access_token_prefix
+ end
+
+ override :format_token
+ def format_token(token)
+ "#{self.class.token_prefix}#{token}"
+ end
+
protected
def validate_scopes
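
With a configurable prefix, the fixed TOKEN_LENGTH gives way to a 20..40 range: 20 random characters plus up to 20 characters of instance-configured prefix, prepended by format_token. A standalone sketch of that arithmetic; 'glpat-' is only an example prefix, not a value taken from this diff:

require 'securerandom'

TOKEN_LENGTH_RANGE = (20..40).freeze

def format_token(token, prefix: '')
  "#{prefix}#{token}"
end

raw = SecureRandom.alphanumeric(20)
pat = format_token(raw, prefix: 'glpat-')

puts pat
puts TOKEN_LENGTH_RANGE.cover?(pat.length) # => true
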
diff --git a/app/models/project.rb b/app/models/project.rb
index ebd8e56246d..daa5605c2e0 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -19,6 +19,7 @@ class Project < ApplicationRecord
include Presentable
include HasRepository
include HasWiki
+ include CanMoveRepositoryStorage
include Routable
include GroupDescendant
include Gitlab::SQL::Pattern
@@ -64,6 +65,8 @@ class Project < ApplicationRecord
SORTING_PREFERENCE_FIELD = :projects_sort
MAX_BUILD_TIMEOUT = 1.month
+ GL_REPOSITORY_TYPES = [Gitlab::GlRepository::PROJECT, Gitlab::GlRepository::WIKI, Gitlab::GlRepository::DESIGN].freeze
+
cache_markdown_field :description, pipeline: :description
default_value_for :packages_enabled, true
@@ -145,6 +148,7 @@ class Project < ApplicationRecord
# Project services
has_one :alerts_service
has_one :campfire_service
+ has_one :datadog_service
has_one :discord_service
has_one :drone_ci_service
has_one :emails_on_push_service
@@ -164,6 +168,7 @@ class Project < ApplicationRecord
has_one :bamboo_service
has_one :teamcity_service
has_one :pushover_service
+ has_one :jenkins_service
has_one :jira_service
has_one :redmine_service
has_one :youtrack_service
@@ -222,6 +227,7 @@ class Project < ApplicationRecord
has_many :snippets, class_name: 'ProjectSnippet'
has_many :hooks, class_name: 'ProjectHook'
has_many :protected_branches
+ has_many :exported_protected_branches
has_many :protected_tags
has_many :repository_languages, -> { order "share DESC" }
has_many :designs, inverse_of: :project, class_name: 'DesignManagement::Design'
@@ -336,7 +342,7 @@ class Project < ApplicationRecord
has_many :daily_build_group_report_results, class_name: 'Ci::DailyBuildGroupReportResult'
- has_many :repository_storage_moves, class_name: 'ProjectRepositoryStorageMove'
+ has_many :repository_storage_moves, class_name: 'ProjectRepositoryStorageMove', inverse_of: :container
has_many :webide_pipelines, -> { webide_source }, class_name: 'Ci::Pipeline', inverse_of: :project
has_many :reviews, inverse_of: :project
@@ -379,11 +385,11 @@ class Project < ApplicationRecord
delegate :feature_available?, :builds_enabled?, :wiki_enabled?,
:merge_requests_enabled?, :forking_enabled?, :issues_enabled?,
- :pages_enabled?, :snippets_enabled?, :public_pages?, :private_pages?,
+ :pages_enabled?, :analytics_enabled?, :snippets_enabled?, :public_pages?, :private_pages?,
:merge_requests_access_level, :forking_access_level, :issues_access_level,
:wiki_access_level, :snippets_access_level, :builds_access_level,
- :repository_access_level, :pages_access_level, :metrics_dashboard_access_level,
- to: :project_feature, allow_nil: true
+ :repository_access_level, :pages_access_level, :metrics_dashboard_access_level, :analytics_access_level,
+ :operations_enabled?, :operations_access_level, to: :project_feature, allow_nil: true
delegate :show_default_award_emojis, :show_default_award_emojis=,
:show_default_award_emojis?,
to: :project_setting, allow_nil: true
@@ -404,7 +410,7 @@ class Project < ApplicationRecord
delegate :forward_deployment_enabled, :forward_deployment_enabled=, :forward_deployment_enabled?, to: :ci_cd_settings, prefix: :ci
delegate :actual_limits, :actual_plan_name, to: :namespace, allow_nil: true
delegate :allow_merge_on_skipped_pipeline, :allow_merge_on_skipped_pipeline?,
- :allow_merge_on_skipped_pipeline=, :has_confluence?,
+ :allow_merge_on_skipped_pipeline=, :has_confluence?, :allow_editing_commit_messages?,
to: :project_setting
delegate :active?, to: :prometheus_service, allow_nil: true, prefix: true
@@ -1349,6 +1355,8 @@ class Project < ApplicationRecord
end
def disabled_services
+ return ['datadog'] unless Feature.enabled?(:datadog_ci_integration, self)
+
[]
end
@@ -1836,6 +1844,7 @@ class Project < ApplicationRecord
wiki.repository.expire_content_cache
DetectRepositoryLanguagesWorker.perform_async(id)
+ ProjectCacheWorker.perform_async(self.id, [], [:repository_size])
# The import assigns iid values on its own, e.g. by re-using GitHub ids.
# Flush existing InternalId records for this project for consistency reasons.
@@ -1952,6 +1961,7 @@ class Project < ApplicationRecord
.concat(predefined_project_variables)
.concat(pages_variables)
.concat(container_registry_variables)
+ .concat(dependency_proxy_variables)
.concat(auto_devops_variables)
.concat(api_variables)
end
@@ -2003,6 +2013,18 @@ class Project < ApplicationRecord
end
end
+ def dependency_proxy_variables
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ break variables unless Gitlab.config.dependency_proxy.enabled
+
+ variables.append(key: 'CI_DEPENDENCY_PROXY_SERVER', value: "#{Gitlab.config.gitlab.host}:#{Gitlab.config.gitlab.port}")
+ variables.append(
+ key: 'CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX',
+ value: "#{Gitlab.config.gitlab.host}:#{Gitlab.config.gitlab.port}/#{namespace.root_ancestor.path}#{DependencyProxy::URL_SUFFIX}"
+ )
+ end
+ end
+
def container_registry_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
break variables unless Gitlab.config.registry.enabled
@@ -2091,39 +2113,6 @@ class Project < ApplicationRecord
(auto_devops || build_auto_devops)&.predefined_variables
end
- RepositoryReadOnlyError = Class.new(StandardError)
-
- # Tries to set repository as read_only, checking for existing Git transfers in
- # progress beforehand. Setting a repository read-only will fail if it is
- # already in that state.
- #
- # @return nil. Failures will raise an exception
- def set_repository_read_only!
- with_lock do
- raise RepositoryReadOnlyError, _('Git transfer in progress') if
- git_transfer_in_progress?
-
- raise RepositoryReadOnlyError, _('Repository already read-only') if
- self.class.where(id: id).pick(:repository_read_only)
-
- raise ActiveRecord::RecordNotSaved, _('Database update failed') unless
- update_column(:repository_read_only, true)
-
- nil
- end
- end
-
- # Set repository as writable again. Unlike setting it read-only, this will
- # succeed if the repository is already writable.
- def set_repository_writable!
- with_lock do
- raise ActiveRecord::RecordNotSaved, _('Database update failed') unless
- update_column(:repository_read_only, false)
-
- nil
- end
- end
-
def pushes_since_gc
Gitlab::Redis::SharedState.with { |redis| redis.get(pushes_since_gc_redis_shared_state_key).to_i }
end
@@ -2273,8 +2262,11 @@ class Project < ApplicationRecord
end
end
+ override :git_transfer_in_progress?
def git_transfer_in_progress?
- repo_reference_count > 0 || wiki_reference_count > 0
+ GL_REPOSITORY_TYPES.any? do |type|
+ reference_counter(type: type).value > 0
+ end
end
def storage_version=(value)
@@ -2283,10 +2275,6 @@ class Project < ApplicationRecord
@storage = nil if storage_version_changed?
end
- def reference_counter(type: Gitlab::GlRepository::PROJECT)
- Gitlab::ReferenceCounter.new(type.identifier_for_container(self))
- end
-
def badges
return project_badges unless group
@@ -2498,8 +2486,7 @@ class Project < ApplicationRecord
end
def service_desk_custom_address
- return unless ::Gitlab::ServiceDeskEmail.enabled?
- return unless ::Feature.enabled?(:service_desk_custom_address, self)
+ return unless service_desk_custom_address_enabled?
key = service_desk_setting&.project_key
return unless key.present?
@@ -2507,6 +2494,10 @@ class Project < ApplicationRecord
::Gitlab::ServiceDeskEmail.address_for_key("#{full_path_slug}-#{key}")
end
+ def service_desk_custom_address_enabled?
+ ::Gitlab::ServiceDeskEmail.enabled? && ::Feature.enabled?(:service_desk_custom_address, self, default_enabled: true)
+ end
+
def root_namespace
if namespace.has_parent?
namespace.root_ancestor
@@ -2607,14 +2598,6 @@ class Project < ApplicationRecord
end
end
- def repo_reference_count
- reference_counter.value
- end
-
- def wiki_reference_count
- reference_counter(type: Gitlab::GlRepository::WIKI).value
- end
-
def check_repository_absence!
return if skip_disk_validation
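
git_transfer_in_progress? now consults the reference counter for every repository type a project owns (project, wiki, design) instead of only the first two. A plain-Ruby sketch of the check with stubbed counter values:

GL_REPOSITORY_TYPES = %i[project wiki design].freeze

def reference_counts
  { project: 0, wiki: 2, design: 0 } # e.g. a wiki push is still being processed
end

def git_transfer_in_progress?
  GL_REPOSITORY_TYPES.any? { |type| reference_counts.fetch(type, 0) > 0 }
end

p git_transfer_in_progress? # => true
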
diff --git a/app/models/project_feature.rb b/app/models/project_feature.rb
index b3ebcbd4b17..7b204cfb1c0 100644
--- a/app/models/project_feature.rb
+++ b/app/models/project_feature.rb
@@ -3,7 +3,7 @@
class ProjectFeature < ApplicationRecord
include Featurable
- FEATURES = %i(issues forking merge_requests wiki snippets builds repository pages metrics_dashboard).freeze
+ FEATURES = %i(issues forking merge_requests wiki snippets builds repository pages metrics_dashboard analytics operations).freeze
set_available_features(FEATURES)
@@ -44,7 +44,9 @@ class ProjectFeature < ApplicationRecord
default_value_for :snippets_access_level, value: ENABLED, allows_nil: false
default_value_for :wiki_access_level, value: ENABLED, allows_nil: false
default_value_for :repository_access_level, value: ENABLED, allows_nil: false
+ default_value_for :analytics_access_level, value: ENABLED, allows_nil: false
default_value_for :metrics_dashboard_access_level, value: PRIVATE, allows_nil: false
+ default_value_for :operations_access_level, value: ENABLED, allows_nil: false
default_value_for(:pages_access_level, allows_nil: false) do |feature|
if ::Gitlab::Pages.access_control_is_forced?
diff --git a/app/models/project_repository.rb b/app/models/project_repository.rb
index 092efabd73f..a9cef16f3ac 100644
--- a/app/models/project_repository.rb
+++ b/app/models/project_repository.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
class ProjectRepository < ApplicationRecord
+ include EachBatch
include Shardable
belongs_to :project, inverse_of: :project_repository
diff --git a/app/models/project_repository_storage_move.rb b/app/models/project_repository_storage_move.rb
index 3429dbe3a85..1e3782a1fb5 100644
--- a/app/models/project_repository_storage_move.rb
+++ b/app/models/project_repository_storage_move.rb
@@ -4,100 +4,31 @@
# project. For example, moving a project to another gitaly node to help
# balance storage capacity.
class ProjectRepositoryStorageMove < ApplicationRecord
- include AfterCommitQueue
+ extend ::Gitlab::Utils::Override
+ include RepositoryStorageMovable
- belongs_to :project, inverse_of: :repository_storage_moves
+ belongs_to :container, class_name: 'Project', inverse_of: :repository_storage_moves, foreign_key: :project_id
+ alias_attribute :project, :container
+ scope :with_projects, -> { includes(container: :route) }
- validates :project, presence: true
- validates :state, presence: true
- validates :source_storage_name,
- on: :create,
- presence: true,
- inclusion: { in: ->(_) { Gitlab.config.repositories.storages.keys } }
- validates :destination_storage_name,
- on: :create,
- presence: true,
- inclusion: { in: ->(_) { Gitlab.config.repositories.storages.keys } }
- validate :project_repository_writable, on: :create
-
- default_value_for(:destination_storage_name, allows_nil: false) do
- pick_repository_storage
- end
-
- state_machine initial: :initial do
- event :schedule do
- transition initial: :scheduled
- end
-
- event :start do
- transition scheduled: :started
- end
-
- event :finish_replication do
- transition started: :replicated
- end
-
- event :finish_cleanup do
- transition replicated: :finished
- end
-
- event :do_fail do
- transition [:initial, :scheduled, :started] => :failed
- transition replicated: :cleanup_failed
- end
-
- around_transition initial: :scheduled do |storage_move, block|
- block.call
-
- begin
- storage_move.project.set_repository_read_only!
- rescue => err
- errors.add(:project, err.message)
- next false
- end
-
- storage_move.run_after_commit do
- ProjectUpdateRepositoryStorageWorker.perform_async(
- storage_move.project_id,
- storage_move.destination_storage_name,
- storage_move.id
- )
- end
-
- true
- end
-
- before_transition started: :replicated do |storage_move|
- storage_move.project.set_repository_writable!
-
- storage_move.project.update_column(:repository_storage, storage_move.destination_storage_name)
- end
-
- before_transition started: :failed do |storage_move|
- storage_move.project.set_repository_writable!
- end
-
- state :initial, value: 1
- state :scheduled, value: 2
- state :started, value: 3
- state :finished, value: 4
- state :failed, value: 5
- state :replicated, value: 6
- state :cleanup_failed, value: 7
+ override :update_repository_storage
+ def update_repository_storage(new_storage)
+ container.update_column(:repository_storage, new_storage)
end
- scope :order_created_at_desc, -> { order(created_at: :desc) }
- scope :with_projects, -> { includes(project: :route) }
-
- class << self
- def pick_repository_storage
- Project.pick_repository_storage
- end
+ override :schedule_repository_storage_update_worker
+ def schedule_repository_storage_update_worker
+ ProjectUpdateRepositoryStorageWorker.perform_async(
+ project_id,
+ destination_storage_name,
+ id
+ )
end
private
- def project_repository_writable
- errors.add(:project, _('is read only')) if project&.repository_read_only?
+ override :error_key
+ def error_key
+ :project
end
end
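
Most of the state machine moves into the shared RepositoryStorageMovable concern; the model keeps only container-specific hooks (update_repository_storage, schedule_repository_storage_update_worker, error_key). A plain-Ruby sketch of that template-method split; the execute_move driver and its call order are invented for illustration and do not mirror the concern's real state-machine events:

module RepositoryStorageMovableSketch
  # The shared code drives the move and calls back into container-specific hooks.
  def execute_move(new_storage)
    update_repository_storage(new_storage)
    schedule_repository_storage_update_worker
  end
end

class ProjectMoveSketch
  include RepositoryStorageMovableSketch

  def update_repository_storage(new_storage)
    puts "project: set repository_storage = #{new_storage}"
  end

  def schedule_repository_storage_update_worker
    puts 'project: enqueue ProjectUpdateRepositoryStorageWorker'
  end
end

ProjectMoveSketch.new.execute_move('gitaly-2')
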
diff --git a/app/models/project_services/datadog_service.rb b/app/models/project_services/datadog_service.rb
new file mode 100644
index 00000000000..543843ab1b0
--- /dev/null
+++ b/app/models/project_services/datadog_service.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+class DatadogService < Service
+ DEFAULT_SITE = 'datadoghq.com'.freeze
+ URL_TEMPLATE = 'https://webhooks-http-intake.logs.%{datadog_site}/v1/input/'.freeze
+ URL_TEMPLATE_API_KEYS = 'https://app.%{datadog_site}/account/settings#api'.freeze
+ URL_API_KEYS_DOCS = "https://docs.#{DEFAULT_SITE}/account_management/api-app-keys/".freeze
+
+ SUPPORTED_EVENTS = %w[
+ pipeline job
+ ].freeze
+
+ prop_accessor :datadog_site, :api_url, :api_key, :datadog_service, :datadog_env
+
+ with_options presence: true, if: :activated? do
+ validates :api_key, format: { with: /\A\w+\z/ }
+ validates :datadog_site, format: { with: /\A[\w\.]+\z/ }, unless: :api_url
+ validates :api_url, public_url: true, unless: :datadog_site
+ end
+
+ after_save :compose_service_hook, if: :activated?
+
+ def self.supported_events
+ SUPPORTED_EVENTS
+ end
+
+ def self.default_test_event
+ 'pipeline'
+ end
+
+ def configurable_events
+ [] # do not allow opting out of required hooks
+ end
+
+ def title
+ 'Datadog'
+ end
+
+ def description
+ 'Trace your GitLab pipelines with Datadog'
+ end
+
+ def help
+ nil
+ # We may add something here in the future; we could also link to static help pages, e.g.
+ # [More information](#{Gitlab::Routing.url_helpers.help_page_url('integration/datadog')})
+ end
+
+ def self.to_param
+ 'datadog'
+ end
+
+ def fields
+ [
+ {
+ type: 'text', name: 'datadog_site',
+ placeholder: DEFAULT_SITE, default: DEFAULT_SITE,
+ help: 'Choose the Datadog site to send data to. Set to "datadoghq.eu" to send data to the EU site',
+ required: false
+ },
+ {
+ type: 'text', name: 'api_url', title: 'Custom URL',
+ help: '(Advanced) Define the full URL for your Datadog site directly',
+ required: false
+ },
+ {
+ type: 'password', name: 'api_key', title: 'API key',
+ help: "<a href=\"#{api_keys_url}\" target=\"_blank\">API key</a> used for authentication with Datadog",
+ required: true
+ },
+ {
+ type: 'text', name: 'datadog_service', title: 'Service', placeholder: 'gitlab-ci',
+ help: 'Name of this GitLab instance that all data will be tagged with'
+ },
+ {
+ type: 'text', name: 'datadog_env', title: 'Env',
+ help: 'The environment tag that traces will be tagged with'
+ }
+ ]
+ end
+
+ def compose_service_hook
+ hook = service_hook || build_service_hook
+ hook.url = hook_url
+ hook.save
+ end
+
+ def hook_url
+ url = api_url.presence || sprintf(URL_TEMPLATE, datadog_site: datadog_site)
+ url = URI.parse(url)
+ url.path = File.join(url.path || '/', api_key)
+ query = { service: datadog_service, env: datadog_env }.compact
+ url.query = query.to_query unless query.empty?
+ url.to_s
+ end
+
+ def api_keys_url
+ return URL_API_KEYS_DOCS unless datadog_site.presence
+
+ sprintf(URL_TEMPLATE_API_KEYS, datadog_site: datadog_site)
+ end
+
+ def execute(data)
+ return if project.disabled_services.include?(to_param)
+
+ object_kind = data[:object_kind]
+ object_kind = 'job' if object_kind == 'build'
+ return unless supported_events.include?(object_kind)
+
+ service_hook.execute(data, "#{object_kind} hook")
+ end
+
+ def test(data)
+ begin
+ result = execute(data)
+ return { success: false, result: result[:message] } if result[:http_status] != 200
+ rescue StandardError => error
+ return { success: false, result: error }
+ end
+
+ { success: true, result: result[:message] }
+ end
+end
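
The webhook URL is assembled from the site template (or a custom api_url), the API key appended to the path, and optional service/env query parameters. A standalone sketch with made-up values; it swaps Rails' to_query for the stdlib URI.encode_www_form so it runs outside the application:

require 'uri'

URL_TEMPLATE = 'https://webhooks-http-intake.logs.%{datadog_site}/v1/input/'

def datadog_hook_url(datadog_site:, api_key:, datadog_service: nil, datadog_env: nil)
  url = URI.parse(format(URL_TEMPLATE, datadog_site: datadog_site))
  url.path = File.join(url.path, api_key)

  query = { service: datadog_service, env: datadog_env }.compact
  url.query = URI.encode_www_form(query) unless query.empty?

  url.to_s
end

puts datadog_hook_url(datadog_site: 'datadoghq.com', api_key: 'abc123',
                      datadog_service: 'gitlab-ci', datadog_env: 'staging')
# => https://webhooks-http-intake.logs.datadoghq.com/v1/input/abc123?service=gitlab-ci&env=staging
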
diff --git a/app/models/project_services/jenkins_service.rb b/app/models/project_services/jenkins_service.rb
new file mode 100644
index 00000000000..63ecfc66877
--- /dev/null
+++ b/app/models/project_services/jenkins_service.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+class JenkinsService < CiService
+ prop_accessor :jenkins_url, :project_name, :username, :password
+
+ before_update :reset_password
+
+ validates :jenkins_url, presence: true, addressable_url: true, if: :activated?
+ validates :project_name, presence: true, if: :activated?
+ validates :username, presence: true, if: ->(service) { service.activated? && service.password_touched? && service.password.present? }
+
+ default_value_for :push_events, true
+ default_value_for :merge_requests_events, false
+ default_value_for :tag_push_events, false
+
+ after_save :compose_service_hook, if: :activated?
+
+ def reset_password
+ # don't reset the password if a new one is provided
+ if (jenkins_url_changed? || username.blank?) && !password_touched?
+ self.password = nil
+ end
+ end
+
+ def compose_service_hook
+ hook = service_hook || build_service_hook
+ hook.url = hook_url
+ hook.save
+ end
+
+ def execute(data)
+ return if project.disabled_services.include?(to_param)
+ return unless supported_events.include?(data[:object_kind])
+
+ service_hook.execute(data, "#{data[:object_kind]}_hook")
+ end
+
+ def test(data)
+ begin
+ result = execute(data)
+ return { success: false, result: result[:message] } if result[:http_status] != 200
+ rescue StandardError => error
+ return { success: false, result: error }
+ end
+
+ { success: true, result: result[:message] }
+ end
+
+ def hook_url
+ url = URI.parse(jenkins_url)
+ url.path = File.join(url.path || '/', "project/#{project_name}")
+ url.user = ERB::Util.url_encode(username) unless username.blank?
+ url.password = ERB::Util.url_encode(password) unless password.blank?
+ url.to_s
+ end
+
+ def self.supported_events
+ %w(push merge_request tag_push)
+ end
+
+ def title
+ 'Jenkins CI'
+ end
+
+ def description
+ 'An extendable open source continuous integration server'
+ end
+
+ def help
+ "You must have installed the Git Plugin and GitLab Plugin in Jenkins. [More information](#{Gitlab::Routing.url_helpers.help_page_url('integration/jenkins')})"
+ end
+
+ def self.to_param
+ 'jenkins'
+ end
+
+ def fields
+ [
+ {
+ type: 'text', name: 'jenkins_url',
+ placeholder: 'Jenkins URL like http://jenkins.example.com'
+ },
+ {
+ type: 'text', name: 'project_name', placeholder: 'Project Name',
+ help: 'The URL-friendly project name. Example: my_project_name'
+ },
+ { type: 'text', name: 'username' },
+ { type: 'password', name: 'password' }
+ ]
+ end
+end
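
hook_url folds the project name into the Jenkins URL path and URL-encodes optional credentials into the authority. A standalone sketch with invented inputs; .to_s.empty? stands in for Rails' blank? so the example runs on plain Ruby:

require 'uri'
require 'erb'

def jenkins_hook_url(jenkins_url:, project_name:, username: nil, password: nil)
  url = URI.parse(jenkins_url)
  url.path = File.join(url.path || '/', "project/#{project_name}")
  url.user = ERB::Util.url_encode(username) unless username.to_s.empty?
  url.password = ERB::Util.url_encode(password) unless password.to_s.empty?
  url.to_s
end

puts jenkins_hook_url(jenkins_url: 'https://jenkins.example.com',
                      project_name: 'my_project_name',
                      username: 'gitlab', password: 's3cret!')
# => https://gitlab:s3cret%21@jenkins.example.com/project/my_project_name
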
diff --git a/app/models/project_services/jira_service.rb b/app/models/project_services/jira_service.rb
index 7814bdb7106..1f4abfc1aca 100644
--- a/app/models/project_services/jira_service.rb
+++ b/app/models/project_services/jira_service.rb
@@ -122,12 +122,15 @@ class JiraService < IssueTrackerService
end
def fields
+ transition_id_help_path = help_page_path('user/project/integrations/jira', anchor: 'obtaining-a-transition-id')
+ transition_id_help_link_start = '<a href="%{transition_id_help_path}" target="_blank" rel="noopener noreferrer">'.html_safe % { transition_id_help_path: transition_id_help_path }
+
[
{ type: 'text', name: 'url', title: s_('JiraService|Web URL'), placeholder: 'https://jira.example.com', required: true },
{ type: 'text', name: 'api_url', title: s_('JiraService|Jira API URL'), placeholder: s_('JiraService|If different from Web URL') },
{ type: 'text', name: 'username', title: s_('JiraService|Username or Email'), placeholder: s_('JiraService|Use a username for server version and an email for cloud version'), required: true },
{ type: 'password', name: 'password', title: s_('JiraService|Password or API token'), placeholder: s_('JiraService|Use a password for server version and an API token for cloud version'), required: true },
- { type: 'text', name: 'jira_issue_transition_id', title: s_('JiraService|Transition ID(s)'), placeholder: s_('JiraService|Use , or ; to separate multiple transition IDs') }
+ { type: 'text', name: 'jira_issue_transition_id', title: s_('JiraService|Jira workflow transition IDs'), placeholder: s_('JiraService|For example, 12, 24'), help: s_('JiraService|Set transition IDs for Jira workflow transitions. %{link_start}Learn more%{link_end}'.html_safe % { link_start: transition_id_help_link_start, link_end: '</a>'.html_safe }) }
]
end
diff --git a/app/models/project_services/mock_deployment_service.rb b/app/models/project_services/mock_deployment_service.rb
index f80819de9fb..e55335d9aae 100644
--- a/app/models/project_services/mock_deployment_service.rb
+++ b/app/models/project_services/mock_deployment_service.rb
@@ -1,5 +1,9 @@
# frozen_string_literal: true
+# Deprecated, to be deleted in 13.8 (https://gitlab.com/gitlab-org/gitlab/-/issues/293914)
+#
+# This class was only used in the development environment, but it became
+# unusable once DeploymentService was deleted.
class MockDeploymentService < Service
default_value_for :category, 'deployment'
@@ -32,5 +36,3 @@ class MockDeploymentService < Service
false
end
end
-
-MockDeploymentService.prepend_if_ee('EE::MockDeploymentService')
diff --git a/app/models/project_services/pipelines_email_service.rb b/app/models/project_services/pipelines_email_service.rb
index c11b2f7cc65..8af4cd952c9 100644
--- a/app/models/project_services/pipelines_email_service.rb
+++ b/app/models/project_services/pipelines_email_service.rb
@@ -40,6 +40,10 @@ class PipelinesEmailService < Service
%w[pipeline]
end
+ def self.default_test_event
+ 'pipeline'
+ end
+
def execute(data, force: false)
return unless supported_events.include?(data[:object_kind])
return unless force || should_pipeline_be_notified?(data)
diff --git a/app/models/project_statistics.rb b/app/models/project_statistics.rb
index c11a7fea1c6..7605ef54d5b 100644
--- a/app/models/project_statistics.rb
+++ b/app/models/project_statistics.rb
@@ -73,8 +73,6 @@ class ProjectStatistics < ApplicationRecord
end
def update_uploads_size
- return uploads_size unless Feature.enabled?(:count_uploads_size_in_storage_stats, project)
-
self.uploads_size = project.uploads.sum(:size)
end
diff --git a/app/models/protected_branch.rb b/app/models/protected_branch.rb
index 599c174ddd7..ad418a47476 100644
--- a/app/models/protected_branch.rb
+++ b/app/models/protected_branch.rb
@@ -48,6 +48,10 @@ class ProtectedBranch < ApplicationRecord
where(fuzzy_arel_match(:name, query.downcase))
end
+
+ def allow_multiple?(type)
+ type == :push
+ end
end
ProtectedBranch.prepend_if_ee('EE::ProtectedBranch')
diff --git a/app/models/protected_branch/push_access_level.rb b/app/models/protected_branch/push_access_level.rb
index 63d577a4866..f28440f2444 100644
--- a/app/models/protected_branch/push_access_level.rb
+++ b/app/models/protected_branch/push_access_level.rb
@@ -18,6 +18,14 @@ class ProtectedBranch::PushAccessLevel < ApplicationRecord
end
end
+ def check_access(user)
+ if Feature.enabled?(:deploy_keys_on_protected_branches, project) && user && deploy_key.present?
+ return true if user.can?(:read_project, project) && enabled_deploy_key_for_user?(deploy_key, user)
+ end
+
+ super
+ end
+
private
def validate_deploy_key_membership
@@ -27,4 +35,8 @@ class ProtectedBranch::PushAccessLevel < ApplicationRecord
self.errors.add(:deploy_key, 'is not enabled for this project')
end
end
+
+ def enabled_deploy_key_for_user?(deploy_key, user)
+ deploy_key.user_id == user.id && DeployKey.with_write_access_for_project(protected_branch.project, deploy_key: deploy_key).any?
+ end
end
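
The new check_access branch grants push access when the feature flag is enabled and the requesting user can read the project and owns an enabled deploy key with write access; otherwise the regular member check in super decides. A plain-Ruby sketch with every predicate stubbed for illustration:

def check_access(user:, feature_enabled:, deploy_key:, can_read_project:, regular_member_access:)
  if feature_enabled && user && deploy_key
    return true if can_read_project && deploy_key[:owner] == user && deploy_key[:write_access]
  end

  regular_member_access
end

key = { owner: 'alice', write_access: true }
p check_access(user: 'alice', feature_enabled: true, deploy_key: key,
               can_read_project: true, regular_member_access: false) # => true
p check_access(user: 'bob', feature_enabled: true, deploy_key: key,
               can_read_project: true, regular_member_access: false) # => false
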
diff --git a/app/models/raw_usage_data.rb b/app/models/raw_usage_data.rb
index 18cee55d06e..06cd4ad3f6c 100644
--- a/app/models/raw_usage_data.rb
+++ b/app/models/raw_usage_data.rb
@@ -5,6 +5,6 @@ class RawUsageData < ApplicationRecord
validates :recorded_at, presence: true, uniqueness: true
def update_sent_at!
- self.update_column(:sent_at, Time.current) if Feature.enabled?(:save_raw_usage_data)
+ self.update_column(:sent_at, Time.current)
end
end
diff --git a/app/models/redirect_route.rb b/app/models/redirect_route.rb
index 22f60802257..749f4a87818 100644
--- a/app/models/redirect_route.rb
+++ b/app/models/redirect_route.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class RedirectRoute < ApplicationRecord
+ include CaseSensitivity
+
belongs_to :source, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
validates :source, presence: true
diff --git a/app/models/release.rb b/app/models/release.rb
index c56df0a6aa3..bebf91fb247 100644
--- a/app/models/release.rb
+++ b/app/models/release.rb
@@ -29,6 +29,8 @@ class Release < ApplicationRecord
scope :preloaded, -> { includes(:evidences, :milestones, project: [:project_feature, :route, { namespace: :route }]) }
scope :with_project_and_namespace, -> { includes(project: :namespace) }
scope :recent, -> { sorted.limit(MAX_NUMBER_TO_DISPLAY) }
+ scope :without_evidence, -> { left_joins(:evidences).where(::Releases::Evidence.arel_table[:id].eq(nil)) }
+ scope :released_within_2hrs, -> { where(released_at: Time.zone.now - 1.hour..Time.zone.now + 1.hour) }
# Sorting
scope :order_created, -> { reorder('created_at ASC') }
diff --git a/app/models/release_highlight.rb b/app/models/release_highlight.rb
new file mode 100644
index 00000000000..1efba6380e9
--- /dev/null
+++ b/app/models/release_highlight.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+class ReleaseHighlight
+ CACHE_DURATION = 1.hour
+ FILES_PATH = Rails.root.join('data', 'whats_new', '*.yml')
+ RELEASE_VERSIONS_IN_A_YEAR = 12
+
+ def self.for_version(version:)
+ index = self.versions.index(version)
+
+ return if index.nil?
+
+ page = index + 1
+
+ self.paginated(page: page)
+ end
+
+ def self.paginated(page: 1)
+ key = self.cache_key("items:page-#{page}")
+
+ Rails.cache.fetch(key, expires_in: CACHE_DURATION) do
+ items = self.load_items(page: page)
+
+ next if items.nil?
+
+ QueryResult.new(items: items, next_page: next_page(current_page: page))
+ end
+ end
+
+ def self.load_items(page:)
+ index = page - 1
+ file_path = file_paths[index]
+
+ return if file_path.nil?
+
+ file = File.read(file_path)
+ items = YAML.safe_load(file, permitted_classes: [Date])
+
+ platform = Gitlab.com? ? 'gitlab-com' : 'self-managed'
+
+ items&.map! do |item|
+ next unless item[platform]
+
+ begin
+ item.tap {|i| i['body'] = Kramdown::Document.new(i['body']).to_html }
+ rescue => e
+ Gitlab::ErrorTracking.track_exception(e, file_path: file_path)
+
+ next
+ end
+ end
+
+ items&.compact
+ rescue Psych::Exception => e
+ Gitlab::ErrorTracking.track_exception(e, file_path: file_path)
+
+ nil
+ end
+
+ def self.file_paths
+ @file_paths ||= Rails.cache.fetch(self.cache_key('file_paths'), expires_in: CACHE_DURATION) do
+ Dir.glob(FILES_PATH).sort.reverse
+ end
+ end
+
+ def self.cache_key(key)
+ ['release_highlight', key, Gitlab.revision].join(':')
+ end
+
+ def self.next_page(current_page: 1)
+ next_page = current_page + 1
+ next_index = next_page - 1
+
+ next_page if self.file_paths[next_index]
+ end
+
+ def self.most_recent_item_count
+ key = self.cache_key('recent_item_count')
+
+ Gitlab::ProcessMemoryCache.cache_backend.fetch(key, expires_in: CACHE_DURATION) do
+ self.paginated&.items&.count
+ end
+ end
+
+ def self.versions
+ key = self.cache_key('versions')
+
+ Gitlab::ProcessMemoryCache.cache_backend.fetch(key, expires_in: CACHE_DURATION) do
+ versions = self.file_paths.first(RELEASE_VERSIONS_IN_A_YEAR).map do |path|
+ /\d*\_(\d*\_\d*)\.yml$/.match(path).captures[0].gsub(/0(?=\d)/, "").tr("_", ".")
+ end
+
+ versions.uniq
+ end
+ end
+
+ QueryResult = Struct.new(:items, :next_page, keyword_init: true) do
+ include Enumerable
+
+ delegate :each, to: :items
+ end
+end
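
ReleaseHighlight.versions derives human-readable version strings from the what's-new file names via the regex above. A standalone check of that extraction; the file names are invented for the example:

paths = %w[
  data/whats_new/202012180001_13_07.yml
  data/whats_new/202011230001_13_06.yml
]

versions = paths.map do |path|
  /\d*\_(\d*\_\d*)\.yml$/.match(path).captures[0].gsub(/0(?=\d)/, '').tr('_', '.')
end

p versions.uniq # => ["13.7", "13.6"]
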
diff --git a/app/models/repository.rb b/app/models/repository.rb
index d4fd202b966..93f22dbe122 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -513,6 +513,9 @@ class Repository
# Don't attempt to return a special result if there is no blob at all
return unless blob
+ # Don't attempt to return a special result if this can't be a README
+ return blob unless Gitlab::FileDetector.type_of(blob.name) == :readme
+
# Don't attempt to return a special result unless we're looking at HEAD
return blob unless head_commit&.sha == sha
@@ -615,7 +618,7 @@ class Repository
end
def readme_path
- readme&.path
+ head_tree&.readme_path
end
cache_method :readme_path
diff --git a/app/models/resource_event.rb b/app/models/resource_event.rb
index 26dcda2630a..54fa4137f73 100644
--- a/app/models/resource_event.rb
+++ b/app/models/resource_event.rb
@@ -30,14 +30,6 @@ class ResourceEvent < ApplicationRecord
return true if issuable_count == 1
- # if none of issuable IDs is set, check explicitly if nested issuable
- # object is set, this is used during project import
- if issuable_count == 0 && importing?
- issuable_count = self.class.issuable_attrs.count { |attr| self.public_send(attr) } # rubocop:disable GitlabSecurity/PublicSend
-
- return true if issuable_count == 1
- end
-
errors.add(
:base, _("Exactly one of %{attributes} is required") %
{ attributes: self.class.issuable_attrs.join(', ') }
diff --git a/app/models/resource_label_event.rb b/app/models/resource_label_event.rb
index 18e2944a9ca..57a3b568c53 100644
--- a/app/models/resource_label_event.rb
+++ b/app/models/resource_label_event.rb
@@ -12,10 +12,9 @@ class ResourceLabelEvent < ResourceEvent
scope :inc_relations, -> { includes(:label, :user) }
validates :label, presence: { unless: :importing? }, on: :create
- validate :exactly_one_issuable
+ validate :exactly_one_issuable, unless: :importing?
after_save :expire_etag_cache
- after_save :usage_metrics
after_destroy :expire_etag_cache
enum action: {
@@ -114,16 +113,6 @@ class ResourceLabelEvent < ResourceEvent
def discussion_id_key
[self.class.name, created_at, user_id]
end
-
- def for_issue?
- issue_id.present?
- end
-
- def usage_metrics
- return unless for_issue?
-
- Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_label_changed_action(author: user)
- end
end
ResourceLabelEvent.prepend_if_ee('EE::ResourceLabelEvent')
diff --git a/app/models/resource_state_event.rb b/app/models/resource_state_event.rb
index 6475633868a..73eb4987143 100644
--- a/app/models/resource_state_event.rb
+++ b/app/models/resource_state_event.rb
@@ -11,7 +11,7 @@ class ResourceStateEvent < ResourceEvent
# state is used for issue and merge request states.
enum state: Issue.available_states.merge(MergeRequest.available_states).merge(reopened: 5)
- after_save :usage_metrics
+ after_create :issue_usage_metrics
def self.issuable_attrs
%i(issue merge_request).freeze
@@ -27,7 +27,7 @@ class ResourceStateEvent < ResourceEvent
private
- def usage_metrics
+ def issue_usage_metrics
return unless for_issue?
case state
diff --git a/app/models/resource_timebox_event.rb b/app/models/resource_timebox_event.rb
index ac164783945..71077758b69 100644
--- a/app/models/resource_timebox_event.rb
+++ b/app/models/resource_timebox_event.rb
@@ -13,7 +13,7 @@ class ResourceTimeboxEvent < ResourceEvent
remove: 2
}
- after_save :usage_metrics
+ after_create :issue_usage_metrics
def self.issuable_attrs
%i(issue merge_request).freeze
@@ -25,7 +25,13 @@ class ResourceTimeboxEvent < ResourceEvent
private
- def usage_metrics
+ def for_issue?
+ issue_id.present?
+ end
+
+ def issue_usage_metrics
+ return unless for_issue?
+
case self
when ResourceMilestoneEvent
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_milestone_changed_action(author: user)
diff --git a/app/models/route.rb b/app/models/route.rb
index fe4846b3be5..fcc8459d6e5 100644
--- a/app/models/route.rb
+++ b/app/models/route.rb
@@ -4,7 +4,7 @@ class Route < ApplicationRecord
include CaseSensitivity
include Gitlab::SQL::Pattern
- belongs_to :source, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
+ belongs_to :source, polymorphic: true, inverse_of: :route # rubocop:disable Cop/PolymorphicAssociations
validates :source, presence: true
validates :path,
diff --git a/app/models/sentry_issue.rb b/app/models/sentry_issue.rb
index 30f4026e633..fec1a55f17d 100644
--- a/app/models/sentry_issue.rb
+++ b/app/models/sentry_issue.rb
@@ -1,9 +1,12 @@
# frozen_string_literal: true
class SentryIssue < ApplicationRecord
+ include Importable
+
belongs_to :issue
- validates :issue, uniqueness: true, presence: true
+ validates :issue, uniqueness: true
+ validates :issue, presence: true, unless: :importing?
validates :sentry_issue_identifier, presence: true
validate :ensure_sentry_issue_identifier_is_unique_per_project
diff --git a/app/models/service.rb b/app/models/service.rb
index 2b6971954e3..57c099d6f04 100644
--- a/app/models/service.rb
+++ b/app/models/service.rb
@@ -11,15 +11,20 @@ class Service < ApplicationRecord
include EachBatch
SERVICE_NAMES = %w[
- alerts asana assembla bamboo bugzilla buildkite campfire confluence custom_issue_tracker discord
+ asana assembla bamboo bugzilla buildkite campfire confluence custom_issue_tracker datadog discord
drone_ci emails_on_push ewm external_wiki flowdock hangouts_chat hipchat irker jira
mattermost mattermost_slash_commands microsoft_teams packagist pipelines_email
pivotaltracker prometheus pushover redmine slack slack_slash_commands teamcity unify_circuit webex_teams youtrack
].freeze
+ PROJECT_SPECIFIC_SERVICE_NAMES = %w[
+ jenkins
+ alerts
+ ].freeze
+
# Fake services to help with local development.
DEV_SERVICE_NAMES = %w[
- mock_ci mock_deployment mock_monitoring
+ mock_ci mock_monitoring
].freeze
serialize :properties, JSON # rubocop:disable Cop/ActiveRecordSerialize
@@ -66,6 +71,7 @@ class Service < ApplicationRecord
scope :by_type, -> (type) { where(type: type) }
scope :by_active_flag, -> (flag) { where(active: flag) }
scope :inherit_from_id, -> (id) { where(inherit_from_id: id) }
+ scope :inherit, -> { where.not(inherit_from_id: nil) }
scope :for_group, -> (group) { where(group_id: group, type: available_services_types(include_project_specific: false)) }
scope :for_template, -> { where(template: true, type: available_services_types(include_project_specific: false)) }
scope :for_instance, -> { where(instance: true, type: available_services_types(include_project_specific: false)) }
@@ -147,6 +153,10 @@ class Service < ApplicationRecord
%w[commit push tag_push issue confidential_issue merge_request wiki_page]
end
+ def self.default_test_event
+ 'push'
+ end
+
def self.event_description(event)
ServicesHelper.service_event_description(event)
end
@@ -212,7 +222,7 @@ class Service < ApplicationRecord
end
def self.project_specific_services_names
- []
+ PROJECT_SPECIFIC_SERVICE_NAMES
end
def self.available_services_types(include_project_specific: true, include_dev: true)
@@ -270,7 +280,7 @@ class Service < ApplicationRecord
active.where(instance: true),
active.where(group_id: group_ids, inherit_from_id: nil)
]).order(Arel.sql("type ASC, array_position(#{array}::bigint[], services.group_id), instance DESC")).group_by(&:type).each do |type, records|
- build_from_integration(records.first, association => scope.id).save!
+ build_from_integration(records.first, association => scope.id).save
end
end
@@ -386,6 +396,10 @@ class Service < ApplicationRecord
self.class.supported_events
end
+ def default_test_event
+ self.class.default_test_event
+ end
+
def execute(data)
# implement inside child
end
@@ -402,6 +416,10 @@ class Service < ApplicationRecord
!instance? && !group_id
end
+ def parent
+ project || group
+ end
+
# Returns a hash of the properties that have been assigned a new value since last save,
# indicating their original values (attr => original value).
# ActiveRecord does not provide a mechanism to track changes in serialized keys,
diff --git a/app/models/snippet.rb b/app/models/snippet.rb
index dc370b46bda..817f9d014eb 100644
--- a/app/models/snippet.rb
+++ b/app/models/snippet.rb
@@ -15,6 +15,7 @@ class Snippet < ApplicationRecord
include FromUnion
include IgnorableColumns
include HasRepository
+ include CanMoveRepositoryStorage
include AfterCommitQueue
extend ::Gitlab::Utils::Override
@@ -43,6 +44,7 @@ class Snippet < ApplicationRecord
has_many :notes, as: :noteable, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :user_mentions, class_name: "SnippetUserMention", dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
has_one :snippet_repository, inverse_of: :snippet
+ has_many :repository_storage_moves, class_name: 'SnippetRepositoryStorageMove', inverse_of: :container
# We need to add the `dependent` in order to call the after_destroy callback
has_one :statistics, class_name: 'SnippetStatistics', dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
@@ -69,7 +71,6 @@ class Snippet < ApplicationRecord
validates :visibility_level, inclusion: { in: Gitlab::VisibilityLevel.values }
- after_save :store_mentions!, if: :any_mentionable_attributes_changed?
after_create :create_statistics
# Scopes
@@ -213,7 +214,8 @@ class Snippet < ApplicationRecord
def blobs
return [] unless repository_exists?
- repository.ls_files(default_branch).map { |file| Blob.lazy(repository, default_branch, file) }
+ branch = default_branch
+ list_files(branch).map { |file| Blob.lazy(repository, branch, file) }
end
def hook_attrs
diff --git a/app/models/snippet_blob.rb b/app/models/snippet_blob.rb
index cf1ab089829..bad24cc45f6 100644
--- a/app/models/snippet_blob.rb
+++ b/app/models/snippet_blob.rb
@@ -21,6 +21,10 @@ class SnippetBlob
data.bytesize
end
+ def commit_id
+ nil
+ end
+
def data
snippet.content
end
diff --git a/app/models/snippet_repository_storage_move.rb b/app/models/snippet_repository_storage_move.rb
new file mode 100644
index 00000000000..a365569bfa8
--- /dev/null
+++ b/app/models/snippet_repository_storage_move.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+# A SnippetRepositoryStorageMove records the details of a repository storage
+# move for a snippet, for example moving a snippet to another Gitaly node to
+# help balance storage capacity.
+class SnippetRepositoryStorageMove < ApplicationRecord
+ extend ::Gitlab::Utils::Override
+ include RepositoryStorageMovable
+
+ belongs_to :container, class_name: 'Snippet', inverse_of: :repository_storage_moves, foreign_key: :snippet_id
+ alias_attribute :snippet, :container
+
+ override :schedule_repository_storage_update_worker
+ def schedule_repository_storage_update_worker
+ # TODO https://gitlab.com/gitlab-org/gitlab/-/issues/218991
+ end
+
+ private
+
+ override :error_key
+ def error_key
+ :snippet
+ end
+end
diff --git a/app/models/suggestion.rb b/app/models/suggestion.rb
index 8c72bd5ae7e..ff564d87449 100644
--- a/app/models/suggestion.rb
+++ b/app/models/suggestion.rb
@@ -1,10 +1,11 @@
# frozen_string_literal: true
class Suggestion < ApplicationRecord
+ include Importable
include Suggestible
belongs_to :note, inverse_of: :suggestions
- validates :note, presence: true
+ validates :note, presence: true, unless: :importing?
validates :commit_id, presence: true, if: :applied?
delegate :position, :noteable, to: :note
diff --git a/app/models/system_note_metadata.rb b/app/models/system_note_metadata.rb
index 0ddf2c5fbcd..20107147b4f 100644
--- a/app/models/system_note_metadata.rb
+++ b/app/models/system_note_metadata.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class SystemNoteMetadata < ApplicationRecord
+ include Importable
+
# These notes' action text might contain a reference that is external.
# We should always force a deep validation upon references that are found
# in this note type.
@@ -12,18 +14,19 @@ class SystemNoteMetadata < ApplicationRecord
moved merge
label milestone
relate unrelate
+ cloned
].freeze
ICON_TYPES = %w[
commit description merge confidential visible label assignee cross_reference
designs_added designs_modified designs_removed designs_discussion_added
- title time_tracking branch milestone discussion task moved
+ title time_tracking branch milestone discussion task moved cloned
opened closed merged duplicate locked unlocked outdated reviewer
tag due_date pinned_embed cherry_pick health_status approved unapproved
status alert_issue_added relate unrelate new_alert_added severity
].freeze
- validates :note, presence: true
+ validates :note, presence: true, unless: :importing?
validates :action, inclusion: { in: :icon_types }, allow_nil: true
belongs_to :note
diff --git a/app/models/terraform/state.rb b/app/models/terraform/state.rb
index d329b429c9d..1b99f310e1a 100644
--- a/app/models/terraform/state.rb
+++ b/app/models/terraform/state.rb
@@ -3,13 +3,6 @@
module Terraform
class State < ApplicationRecord
include UsageStatistics
- include FileStoreMounter
- include IgnorableColumns
- # These columns are being removed since geo replication falls to the versioned state
- # Tracking in https://gitlab.com/gitlab-org/gitlab/-/issues/258262
- ignore_columns %i[verification_failure verification_retry_at verified_at verification_retry_count verification_checksum],
- remove_with: '13.7',
- remove_after: '2020-12-22'
HEX_REGEXP = %r{\A\h+\z}.freeze
UUID_LENGTH = 32
@@ -35,20 +28,9 @@ module Terraform
format: { with: HEX_REGEXP, message: 'only allows hex characters' }
default_value_for(:uuid, allows_nil: false) { SecureRandom.hex(UUID_LENGTH / 2) }
- default_value_for(:versioning_enabled, true)
-
- mount_file_store_uploader StateUploader
-
- def file_store
- super || StateUploader.default_store
- end
def latest_file
- if versioning_enabled?
- latest_version&.file
- else
- latest_version&.file || file
- end
+ latest_version&.file
end
def locked?
@@ -56,13 +38,14 @@ module Terraform
end
def update_file!(data, version:, build:)
+ # This check is required to maintain backwards compatibility with
+ # states that were created prior to versioning being supported.
+ # This can be removed in 14.0 when support for these states is dropped.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/258960
if versioning_enabled?
create_new_version!(data: data, version: version, build: build)
- elsif latest_version.present?
- migrate_legacy_version!(data: data, version: version, build: build)
else
- self.file = data
- save!
+ migrate_legacy_version!(data: data, version: version, build: build)
end
end
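
With the non-versioned branch removed, update_file! either records a new version or migrates a legacy (pre-versioning) state on write. A plain-Ruby sketch of just that branching, with both operations stubbed:

def update_file!(data, version:, versioning_enabled:)
  if versioning_enabled
    puts "create version #{version} (#{data.bytesize} bytes)"
  else
    puts "migrate legacy state, then store version #{version}"
  end
end

update_file!('{"serial": 1}', version: 4, versioning_enabled: true)
update_file!('{"serial": 1}', version: 1, versioning_enabled: false)
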
diff --git a/app/models/terraform/state_version.rb b/app/models/terraform/state_version.rb
index cc5d94b8e09..19d708616fc 100644
--- a/app/models/terraform/state_version.rb
+++ b/app/models/terraform/state_version.rb
@@ -10,9 +10,9 @@ module Terraform
scope :ordered_by_version_desc, -> { order(version: :desc) }
- default_value_for(:file_store) { VersionedStateUploader.default_store }
+ default_value_for(:file_store) { StateUploader.default_store }
- mount_file_store_uploader VersionedStateUploader
+ mount_file_store_uploader StateUploader
delegate :project_id, :uuid, to: :terraform_state, allow_nil: true
diff --git a/app/models/timelog.rb b/app/models/timelog.rb
index 60aaaaef831..f4debedb656 100644
--- a/app/models/timelog.rb
+++ b/app/models/timelog.rb
@@ -1,8 +1,10 @@
# frozen_string_literal: true
class Timelog < ApplicationRecord
+ include Importable
+
validates :time_spent, :user, presence: true
- validate :issuable_id_is_present
+ validate :issuable_id_is_present, unless: :importing?
belongs_to :issue, touch: true
belongs_to :merge_request, touch: true
diff --git a/app/models/todo.rb b/app/models/todo.rb
index 0d893b25253..12dc9ce0fe6 100644
--- a/app/models/todo.rb
+++ b/app/models/todo.rb
@@ -139,13 +139,11 @@ class Todo < ApplicationRecord
# Todos with the highest priority first, then the oldest todos
# We need to order by created_at last because of differences between MySQL and Postgres when joining by type "Merge_request/Issue"
def order_by_labels_priority
- params = {
+ highest_priority = highest_label_priority(
target_type_column: "todos.target_type",
target_column: "todos.target_id",
project_column: "todos.project_id"
- }
-
- highest_priority = highest_label_priority(params).to_sql
+ ).to_sql
select("#{table_name}.*, (#{highest_priority}) AS highest_priority")
.order(Gitlab::Database.nulls_last_order('highest_priority', 'ASC'))
diff --git a/app/models/user.rb b/app/models/user.rb
index be64e057d59..c735f20b92c 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -25,11 +25,14 @@ class User < ApplicationRecord
include IgnorableColumns
include UpdateHighestRole
include HasUserType
+ include Gitlab::Auth::Otp::Fortinet
DEFAULT_NOTIFICATION_LEVEL = :participating
INSTANCE_ACCESS_REQUEST_APPROVERS_TO_BE_NOTIFIED_LIMIT = 10
+ BLOCKED_PENDING_APPROVAL_STATE = 'blocked_pending_approval'.freeze
+
add_authentication_token_field :incoming_email_token, token_generator: -> { SecureRandom.hex.to_i(16).to_s(36) }
add_authentication_token_field :feed_token
add_authentication_token_field :static_object_token
@@ -166,6 +169,7 @@ class User < ApplicationRecord
has_many :issue_assignees, inverse_of: :assignee
has_many :merge_request_assignees, inverse_of: :assignee
+ has_many :merge_request_reviewers, inverse_of: :reviewer
has_many :assigned_issues, class_name: "Issue", through: :issue_assignees, source: :issue
has_many :assigned_merge_requests, class_name: "MergeRequest", through: :merge_request_assignees, source: :merge_request
@@ -286,6 +290,7 @@ class User < ApplicationRecord
delegate :path, to: :namespace, allow_nil: true, prefix: true
delegate :job_title, :job_title=, to: :user_detail, allow_nil: true
+ delegate :other_role, :other_role=, to: :user_detail, allow_nil: true
delegate :bio, :bio=, :bio_html, to: :user_detail, allow_nil: true
delegate :webauthn_xid, :webauthn_xid=, to: :user_detail, allow_nil: true
@@ -587,11 +592,7 @@ class User < ApplicationRecord
sanitized_order_sql = Arel.sql(sanitize_sql_array([order, query: query]))
- where(
- fuzzy_arel_match(:name, query, lower_exact_match: true)
- .or(fuzzy_arel_match(:username, query, lower_exact_match: true))
- .or(arel_table[:email].eq(query))
- ).reorder(sanitized_order_sql, :name)
+ search_with_secondary_emails(query).reorder(sanitized_order_sql, :name)
end
# Limits the result set to users _not_ in the given query/list of IDs.
@@ -606,6 +607,18 @@ class User < ApplicationRecord
reorder(:name)
end
+ def search_without_secondary_emails(query)
+ return none if query.blank?
+
+ query = query.downcase
+
+ where(
+ fuzzy_arel_match(:name, query, lower_exact_match: true)
+ .or(fuzzy_arel_match(:username, query, lower_exact_match: true))
+ .or(arel_table[:email].eq(query))
+ )
+ end
+
# searches user by given pattern
# it compares name, email, username fields and user's secondary emails with given pattern
# This method uses ILIKE on PostgreSQL.
@@ -616,15 +629,16 @@ class User < ApplicationRecord
query = query.downcase
email_table = Email.arel_table
- matched_by_emails_user_ids = email_table
+ matched_by_email_user_id = email_table
.project(email_table[:user_id])
.where(email_table[:email].eq(query))
+ .take(1) # at most 1 record as there is a unique constraint
where(
fuzzy_arel_match(:name, query)
.or(fuzzy_arel_match(:username, query))
.or(arel_table[:email].eq(query))
- .or(arel_table[:id].in(matched_by_emails_user_ids))
+ .or(arel_table[:id].eq(matched_by_email_user_id))
)
end
@@ -708,6 +722,7 @@ class User < ApplicationRecord
u.name = 'GitLab Security Bot'
u.website_url = Gitlab::Routing.url_helpers.help_page_url('user/application_security/security_bot/index.md')
u.avatar = bot_avatar(image: 'security-bot.png')
+ u.confirmed_at = Time.zone.now
end
end
@@ -797,7 +812,9 @@ class User < ApplicationRecord
end
def two_factor_otp_enabled?
- otp_required_for_login? || Feature.enabled?(:forti_authenticator, self)
+ otp_required_for_login? ||
+ forti_authenticator_enabled?(self) ||
+ forti_token_cloud_enabled?(self)
end
def two_factor_u2f_enabled?
@@ -1032,7 +1049,7 @@ class User < ApplicationRecord
end
def require_personal_access_token_creation_for_git_auth?
- return false if allow_password_authentication_for_git? || ldap_user?
+ return false if allow_password_authentication_for_git? || password_based_omniauth_user?
PersonalAccessTokensFinder.new(user: self, impersonation: false, state: 'active').execute.none?
end
@@ -1050,7 +1067,7 @@ class User < ApplicationRecord
end
def allow_password_authentication_for_git?
- Gitlab::CurrentSettings.password_authentication_enabled_for_git? && !ldap_user?
+ Gitlab::CurrentSettings.password_authentication_enabled_for_git? && !password_based_omniauth_user?
end
def can_change_username?
@@ -1130,6 +1147,18 @@ class User < ApplicationRecord
namespace.find_fork_of(project)
end
+ def password_based_omniauth_user?
+ ldap_user? || crowd_user?
+ end
+
+ def crowd_user?
+ if identities.loaded?
+ identities.find { |identity| identity.provider == 'crowd' && identity.extern_uid.present? }
+ else
+ identities.with_any_extern_uid('crowd').exists?
+ end
+ end
+
def ldap_user?
if identities.loaded?
identities.find { |identity| Gitlab::Auth::OAuth::Provider.ldap_provider?(identity.provider) && !identity.extern_uid.nil? }
@@ -1229,7 +1258,7 @@ class User < ApplicationRecord
end
def solo_owned_groups
- @solo_owned_groups ||= owned_groups.select do |group|
+ @solo_owned_groups ||= owned_groups.includes(:owners).select do |group|
group.owners == [self]
end
end
@@ -1464,6 +1493,10 @@ class User < ApplicationRecord
!solo_owned_groups.present?
end
+ def can_remove_self?
+ true
+ end
+
def ci_owned_runners
@ci_owned_runners ||= begin
project_runners = Ci::RunnerProject
@@ -1636,11 +1669,11 @@ class User < ApplicationRecord
save
end
- # each existing user needs to have an `feed_token`.
+ # each existing user needs to have a `feed_token`.
# we do this on read since migrating all existing users is not a feasible
# solution.
def feed_token
- ensure_feed_token!
+ Gitlab::CurrentSettings.disable_feed_token ? nil : ensure_feed_token!
end
# Each existing user needs to have a `static_object_token`.
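
ldap_user? gains a Crowd sibling, and both feed password_based_omniauth_user?, which controls whether Git over HTTP requires a personal access token. A plain-Ruby sketch with faked identity records; the LDAP check is reduced to a provider-prefix match here, whereas GitLab delegates it to Gitlab::Auth::OAuth::Provider:

Identity = Struct.new(:provider, :extern_uid)

def crowd_user?(identities)
  identities.any? { |i| i.provider == 'crowd' && !i.extern_uid.to_s.empty? }
end

def ldap_user?(identities)
  identities.any? { |i| i.provider.start_with?('ldap') && !i.extern_uid.nil? }
end

def password_based_omniauth_user?(identities)
  ldap_user?(identities) || crowd_user?(identities)
end

identities = [Identity.new('crowd', 'uid-42')]
p password_based_omniauth_user?(identities) # => true
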
diff --git a/app/models/user_callout.rb b/app/models/user_callout.rb
index cfad58fc0db..ad5651f9439 100644
--- a/app/models/user_callout.rb
+++ b/app/models/user_callout.rb
@@ -26,7 +26,8 @@ class UserCallout < ApplicationRecord
suggest_pipeline: 22,
customize_homepage: 23,
feature_flags_new_version: 24,
- registration_enabled_callout: 25
+ registration_enabled_callout: 25,
+ new_user_signups_cap_reached: 26 # EE-only
}
validates :user, presence: true
diff --git a/app/models/user_detail.rb b/app/models/user_detail.rb
index 9674f9a41da..ef799b01452 100644
--- a/app/models/user_detail.rb
+++ b/app/models/user_detail.rb
@@ -31,3 +31,5 @@ class UserDetail < ApplicationRecord
self.bio = '' if bio_changed? && bio.nil?
end
end
+
+UserDetail.prepend_if_ee('EE::UserDetail')
diff --git a/app/models/wiki_page/meta.rb b/app/models/wiki_page/meta.rb
index 215d84dc463..70b5547ffad 100644
--- a/app/models/wiki_page/meta.rb
+++ b/app/models/wiki_page/meta.rb
@@ -2,149 +2,20 @@
class WikiPage
class Meta < ApplicationRecord
- include Gitlab::Utils::StrongMemoize
-
- CanonicalSlugConflictError = Class.new(ActiveRecord::RecordInvalid)
- WikiPageInvalid = Class.new(ArgumentError)
+ include HasWikiPageMetaAttributes
self.table_name = 'wiki_page_meta'
belongs_to :project
has_many :slugs, class_name: 'WikiPage::Slug', foreign_key: 'wiki_page_meta_id', inverse_of: :wiki_page_meta
- has_many :events, as: :target, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
- validates :title, presence: true
validates :project_id, presence: true
- validate :no_two_metarecords_in_same_project_can_have_same_canonical_slug
-
- scope :with_canonical_slug, ->(slug) do
- joins(:slugs).where(wiki_page_slugs: { canonical: true, slug: slug })
- end
alias_method :resource_parent, :project
- class << self
- # Return the (updated) WikiPage::Meta record for a given wiki page
- #
- # If none is found, then a new record is created, and its fields are set
- # to reflect the wiki_page passed.
- #
- # @param [String] last_known_slug
- # @param [WikiPage] wiki_page
- #
- # This method raises errors on validation issues.
- def find_or_create(last_known_slug, wiki_page)
- raise WikiPageInvalid unless wiki_page.valid?
-
- project = wiki_page.wiki.project
- known_slugs = [last_known_slug, wiki_page.slug].compact.uniq
- raise 'No slugs found! This should not be possible.' if known_slugs.empty?
-
- transaction do
- updates = wiki_page_updates(wiki_page)
- found = find_by_canonical_slug(known_slugs, project)
- meta = found || create!(updates.merge(project_id: project.id))
-
- meta.update_state(found.nil?, known_slugs, wiki_page, updates)
-
- # We don't need to run validations here, since find_by_canonical_slug
- # guarantees that there is no conflict in canonical_slug, and DB
- # constraints on title and project_id enforce our other invariants
- # This saves us a query.
- meta
- end
- end
-
- def find_by_canonical_slug(canonical_slug, project)
- meta, conflict = with_canonical_slug(canonical_slug)
- .where(project_id: project.id)
- .limit(2)
-
- if conflict.present?
- meta.errors.add(:canonical_slug, 'Duplicate value found')
- raise CanonicalSlugConflictError.new(meta)
- end
-
- meta
- end
-
- private
-
- def wiki_page_updates(wiki_page)
- last_commit_date = wiki_page.version_commit_timestamp || Time.now.utc
-
- {
- title: wiki_page.title,
- created_at: last_commit_date,
- updated_at: last_commit_date
- }
- end
- end
-
- def canonical_slug
- strong_memoize(:canonical_slug) { slugs.canonical.first&.slug }
- end
-
- def canonical_slug=(slug)
- return if @canonical_slug == slug
-
- if persisted?
- transaction do
- slugs.canonical.update_all(canonical: false)
- page_slug = slugs.create_with(canonical: true).find_or_create_by(slug: slug)
- page_slug.update_columns(canonical: true) unless page_slug.canonical?
- end
- else
- slugs.new(slug: slug, canonical: true)
- end
-
- @canonical_slug = slug
- end
-
- def update_state(created, known_slugs, wiki_page, updates)
- update_wiki_page_attributes(updates)
- insert_slugs(known_slugs, created, wiki_page.slug)
- self.canonical_slug = wiki_page.slug
- end
-
- private
-
- def update_wiki_page_attributes(updates)
- # Remove all unnecessary updates:
- updates.delete(:updated_at) if updated_at == updates[:updated_at]
- updates.delete(:created_at) if created_at <= updates[:created_at]
- updates.delete(:title) if title == updates[:title]
-
- update_columns(updates) unless updates.empty?
- end
-
- def insert_slugs(strings, is_new, canonical_slug)
- creation = Time.current.utc
-
- slug_attrs = strings.map do |slug|
- {
- wiki_page_meta_id: id,
- slug: slug,
- canonical: (is_new && slug == canonical_slug),
- created_at: creation,
- updated_at: creation
- }
- end
- slugs.insert_all(slug_attrs) unless !is_new && slug_attrs.size == 1
-
- @canonical_slug = canonical_slug if is_new || strings.size == 1
- end
-
- def no_two_metarecords_in_same_project_can_have_same_canonical_slug
- return unless project_id.present? && canonical_slug.present?
-
- offending = self.class.with_canonical_slug(canonical_slug).where(project_id: project_id)
- offending = offending.where.not(id: id) if persisted?
-
- if offending.exists?
- errors.add(:canonical_slug, 'each page in a wiki must have a distinct canonical slug')
- end
+ def self.container_key
+ :project_id
end
end
end
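Most of Meta's previous behaviour moves into the shared HasWikiPageMetaAttributes concern, and the class now contributes only its container column via the .container_key hook added above. A hypothetical, simplified sketch of that template-method arrangement follows; the module below is an assumption about the shape of the concern, not its actual code:

# Hypothetical stand-in for HasWikiPageMetaAttributes: the shared code asks
# the including class which column identifies its container.
module HasContainerKey
  def container_key
    raise NotImplementedError, "#{name} must define .container_key"
  end

  # Example of shared behaviour built on top of container_key.
  def for_container(records, container_id)
    records.select { |record| record[container_key] == container_id }
  end
end

class ProjectScopedMeta
  extend HasContainerKey

  # Mirrors the override added in the diff above.
  def self.container_key
    :project_id
  end
end

records = [
  { project_id: 1, title: 'home' },
  { project_id: 2, title: 'faq' }
]
p ProjectScopedMeta.for_container(records, 1) # => [{:project_id=>1, :title=>"home"}]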
diff --git a/app/models/wiki_page/slug.rb b/app/models/wiki_page/slug.rb
index c1725d34921..b82386c0e3c 100644
--- a/app/models/wiki_page/slug.rb
+++ b/app/models/wiki_page/slug.rb
@@ -2,25 +2,14 @@
class WikiPage
class Slug < ApplicationRecord
- self.table_name = 'wiki_page_slugs'
-
- belongs_to :wiki_page_meta, class_name: 'WikiPage::Meta', inverse_of: :slugs
-
- validates :slug, presence: true, uniqueness: { scope: :wiki_page_meta_id }
- validates :canonical, uniqueness: {
- scope: :wiki_page_meta_id,
- if: :canonical?,
- message: 'Only one slug can be canonical per wiki metadata record'
- }
+ def self.meta_foreign_key
+ :wiki_page_meta_id
+ end
- scope :canonical, -> { where(canonical: true) }
+ include HasWikiPageSlugAttributes
- def update_columns(attrs = {})
- super(attrs.reverse_merge(updated_at: Time.current.utc))
- end
+ self.table_name = 'wiki_page_slugs'
- def self.update_all(attrs = {})
- super(attrs.reverse_merge(updated_at: Time.current.utc))
- end
+ belongs_to :wiki_page_meta, class_name: 'WikiPage::Meta', inverse_of: :slugs
end
end
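Slug follows the same refactoring: its validations, the canonical scope, and the update_columns/update_all overrides that force-touch updated_at are removed here, presumably into the new HasWikiPageSlugAttributes concern, while the class keeps its table name, the belongs_to, and the .meta_foreign_key hook. A standalone sketch of the removed timestamp-touching override, using a plain merge in place of ActiveSupport's reverse_merge:

require 'time'

# Standalone sketch (not GitLab code): raw column writes skip Rails
# timestamping, so updated_at is supplied as a default unless the caller
# passes one. `{defaults}.merge(attrs)` is equivalent to ActiveSupport's
# attrs.reverse_merge(defaults).
class TouchOnWrite
  attr_reader :columns

  def initialize
    @columns = {}
  end

  def update_columns(attrs = {})
    @columns.merge!({ updated_at: Time.now.utc }.merge(attrs))
  end
end

record = TouchOnWrite.new
record.update_columns(canonical: true)
p record.columns.key?(:updated_at) # => true, touched automatically

record.update_columns(canonical: false, updated_at: Time.utc(2020, 12, 17))
p record.columns[:updated_at]      # => the caller-provided timestamp wins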
diff --git a/app/models/zoom_meeting.rb b/app/models/zoom_meeting.rb
index f83aa93b69a..c8b510c4779 100644
--- a/app/models/zoom_meeting.rb
+++ b/app/models/zoom_meeting.rb
@@ -1,13 +1,17 @@
# frozen_string_literal: true
class ZoomMeeting < ApplicationRecord
+ include Importable
include UsageStatistics
- belongs_to :project, optional: false
- belongs_to :issue, optional: false
+ belongs_to :project
+ belongs_to :issue
+
+ validates :project, presence: true, unless: :importing?
+ validates :issue, presence: true, unless: :importing?
validates :url, presence: true, length: { maximum: 255 }, zoom_url: true
- validates :issue, same_project_association: true
+ validates :issue, same_project_association: true, unless: :importing?
enum issue_status: {
added: 1,