path: root/lib/gitlab
author     GitLab Bot <gitlab-bot@gitlab.com>  2020-07-20 12:26:25 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-07-20 12:26:25 +0000
commit     a09983ae35713f5a2bbb100981116d31ce99826e (patch)
tree       2ee2af7bd104d57086db360a7e6d8c9d5d43667a  /lib/gitlab
parent     18c5ab32b738c0b6ecb4d0df3994000482f34bd8 (diff)
download   gitlab-ce-a09983ae35713f5a2bbb100981116d31ce99826e.tar.gz

Add latest changes from gitlab-org/gitlab@13-2-stable-ee
Diffstat (limited to 'lib/gitlab')
-rw-r--r--  lib/gitlab/action_cable/config.rb | 17
-rw-r--r--  lib/gitlab/alert_management/alert_params.rb | 6
-rw-r--r--  lib/gitlab/alert_management/fingerprint.rb | 16
-rw-r--r--  lib/gitlab/alerting/notification_payload_parser.rb | 11
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/records_fetcher.rb | 4
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/stage_events.rb | 2
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb | 5
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb | 9
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb | 5
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb | 9
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb | 5
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb | 8
-rw-r--r--  lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb | 23
-rw-r--r--  lib/gitlab/analytics/unique_visits.rb | 60
-rw-r--r--  lib/gitlab/application_rate_limiter.rb | 24
-rw-r--r--  lib/gitlab/auth/auth_finders.rb | 26
-rw-r--r--  lib/gitlab/background_migration.rb | 1
-rw-r--r--  lib/gitlab/background_migration/backfill_namespace_settings.rb | 18
-rw-r--r--  lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb | 8
-rw-r--r--  lib/gitlab/background_migration/digest_column.rb | 25
-rw-r--r--  lib/gitlab/background_migration/encrypt_columns.rb | 104
-rw-r--r--  lib/gitlab/background_migration/encrypt_runners_tokens.rb | 32
-rw-r--r--  lib/gitlab/background_migration/fix_pages_access_level.rb | 2
-rw-r--r--  lib/gitlab/background_migration/mailers/unconfirm_mailer.rb | 24
-rw-r--r--  lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml | 19
-rw-r--r--  lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb | 14
-rw-r--r--  lib/gitlab/background_migration/models/encrypt_columns/namespace.rb | 28
-rw-r--r--  lib/gitlab/background_migration/models/encrypt_columns/project.rb | 28
-rw-r--r--  lib/gitlab/background_migration/models/encrypt_columns/runner.rb | 28
-rw-r--r--  lib/gitlab/background_migration/models/encrypt_columns/settings.rb | 37
-rw-r--r--  lib/gitlab/background_migration/models/encrypt_columns/web_hook.rb | 28
-rw-r--r--  lib/gitlab/background_migration/populate_project_snippet_statistics.rb | 61
-rw-r--r--  lib/gitlab/background_migration/update_vulnerabilities_from_dismissal_feedback.rb | 13
-rw-r--r--  lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb | 97
-rw-r--r--  lib/gitlab/bitbucket_import/importer.rb | 11
-rw-r--r--  lib/gitlab/bitbucket_import/metrics.rb | 41
-rw-r--r--  lib/gitlab/bitbucket_server_import/importer.rb | 11
-rw-r--r--  lib/gitlab/cache/ci/project_pipeline_status.rb | 11
-rw-r--r--  lib/gitlab/ci/build/releaser.rb | 17
-rw-r--r--  lib/gitlab/ci/config.rb | 6
-rw-r--r--  lib/gitlab/ci/config/entry/environment.rb | 2
-rw-r--r--  lib/gitlab/ci/config/entry/job.rb | 4
-rw-r--r--  lib/gitlab/ci/config/entry/processable.rb | 4
-rw-r--r--  lib/gitlab/ci/config/entry/release.rb | 24
-rw-r--r--  lib/gitlab/ci/config/entry/reports.rb | 9
-rw-r--r--  lib/gitlab/ci/features.rb | 54
-rw-r--r--  lib/gitlab/ci/parsers/terraform/tfplan.rb | 34
-rw-r--r--  lib/gitlab/ci/pipeline/chain/build.rb | 6
-rw-r--r--  lib/gitlab/ci/pipeline/chain/command.rb | 8
-rw-r--r--  lib/gitlab/ci/pipeline/chain/config/content.rb | 1
-rw-r--r--  lib/gitlab/ci/pipeline/chain/config/content/parameter.rb | 30
-rw-r--r--  lib/gitlab/ci/pipeline/chain/config/content/source.rb | 2
-rw-r--r--  lib/gitlab/ci/pipeline/chain/config/process.rb | 12
-rw-r--r--  lib/gitlab/ci/pipeline/chain/create.rb | 4
-rw-r--r--  lib/gitlab/ci/pipeline/chain/helpers.rb | 9
-rw-r--r--  lib/gitlab/ci/pipeline/chain/metrics.rb | 35
-rw-r--r--  lib/gitlab/ci/pipeline/chain/validate/abilities.rb | 2
-rw-r--r--  lib/gitlab/ci/pipeline/chain/validate/repository.rb | 2
-rw-r--r--  lib/gitlab/ci/pipeline/metrics.rb | 42
-rw-r--r--  lib/gitlab/ci/pipeline/preloader.rb | 18
-rw-r--r--  lib/gitlab/ci/reports/test_report_summary.rb | 49
-rw-r--r--  lib/gitlab/ci/reports/test_suite.rb | 14
-rw-r--r--  lib/gitlab/ci/reports/test_suite_summary.rb | 49
-rw-r--r--  lib/gitlab/ci/status/composite.rb | 4
-rw-r--r--  lib/gitlab/ci/status/factory.rb | 2
-rw-r--r--  lib/gitlab/ci/status/stage/play_manual.rb | 2
-rw-r--r--  lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Android.gitlab-ci.yml | 63
-rw-r--r--  lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml | 3
-rw-r--r--  lib/gitlab/ci/templates/Composer.gitlab-ci.yml | 19
-rw-r--r--  lib/gitlab/ci/templates/Dart.gitlab-ci.yml | 22
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml | 20
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml | 4
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml | 7
-rw-r--r--  lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml | 3
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml | 13
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml | 42
-rw-r--r--  lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml | 29
-rw-r--r--  lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml | 2
-rw-r--r--  lib/gitlab/ci/templates/Security/Coverage-Fuzzing.gitlab-ci.yml | 34
-rw-r--r--  lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml | 7
-rw-r--r--  lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml | 16
-rw-r--r--  lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml | 4
-rw-r--r--  lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml | 48
-rw-r--r--  lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml | 28
-rw-r--r--  lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml | 11
-rw-r--r--  lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml | 12
-rw-r--r--  lib/gitlab/ci/templates/Verify/Load-Performance-Testing.gitlab-ci.yml | 23
-rw-r--r--  lib/gitlab/ci/templates/index.md | 3
-rw-r--r--  lib/gitlab/ci/templates/npm.gitlab-ci.yml | 59
-rw-r--r--  lib/gitlab/ci/yaml_processor.rb | 65
-rw-r--r--  lib/gitlab/class_attributes.rb | 30
-rw-r--r--  lib/gitlab/code_navigation_path.rb | 12
-rw-r--r--  lib/gitlab/conan_token.rb | 64
-rw-r--r--  lib/gitlab/config/entry/configurable.rb | 4
-rw-r--r--  lib/gitlab/config/entry/node.rb | 11
-rw-r--r--  lib/gitlab/config/loader/yaml.rb | 3
-rw-r--r--  lib/gitlab/config_checker/external_database_checker.rb | 49
-rw-r--r--  lib/gitlab/danger/changelog.rb | 14
-rw-r--r--  lib/gitlab/danger/commit_linter.rb | 9
-rw-r--r--  lib/gitlab/danger/helper.rb | 67
-rw-r--r--  lib/gitlab/danger/roulette.rb | 45
-rw-r--r--  lib/gitlab/danger/sidekiq_queues.rb | 37
-rw-r--r--  lib/gitlab/danger/teammate.rb | 80
-rw-r--r--  lib/gitlab/database.rb | 63
-rw-r--r--  lib/gitlab/database/background_migration_job.rb | 38
-rw-r--r--  lib/gitlab/database/dynamic_model_helpers.rb | 16
-rw-r--r--  lib/gitlab/database/migration_helpers.rb | 158
-rw-r--r--  lib/gitlab/database/migrations/background_migration_helpers.rb | 157
-rw-r--r--  lib/gitlab/database/partitioning/monthly_strategy.rb | 96
-rw-r--r--  lib/gitlab/database/partitioning/partition_creator.rb | 87
-rw-r--r--  lib/gitlab/database/partitioning/time_partition.rb | 84
-rw-r--r--  lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb | 105
-rw-r--r--  lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb | 2
-rw-r--r--  lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb | 165
-rw-r--r--  lib/gitlab/database/schema_helpers.rb | 25
-rw-r--r--  lib/gitlab/diff/file.rb | 8
-rw-r--r--  lib/gitlab/diff/file_collection/base.rb | 27
-rw-r--r--  lib/gitlab/diff/file_collection/merge_request_diff_base.rb | 33
-rw-r--r--  lib/gitlab/diff/file_collection/wiki_page.rb | 24
-rw-r--r--  lib/gitlab/diff/position_tracer.rb | 4
-rw-r--r--  lib/gitlab/diff/position_tracer/base_strategy.rb | 1
-rw-r--r--  lib/gitlab/diff/position_tracer/image_strategy.rb | 10
-rw-r--r--  lib/gitlab/diff/position_tracer/line_strategy.rb | 30
-rw-r--r--  lib/gitlab/diff/stats_cache.rb | 54
-rw-r--r--  lib/gitlab/discussions_diff/file_collection.rb | 4
-rw-r--r--  lib/gitlab/discussions_diff/highlight_cache.rb | 10
-rw-r--r--  lib/gitlab/email/handler.rb | 5
-rw-r--r--  lib/gitlab/email/handler/reply_processing.rb | 15
-rw-r--r--  lib/gitlab/email/handler/service_desk_handler.rb | 152
-rw-r--r--  lib/gitlab/email/service_desk_receiver.rb | 23
-rw-r--r--  lib/gitlab/emoji.rb | 4
-rw-r--r--  lib/gitlab/error_tracking.rb | 15
-rw-r--r--  lib/gitlab/error_tracking/detailed_error.rb | 1
-rw-r--r--  lib/gitlab/file_finder.rb | 2
-rw-r--r--  lib/gitlab/git/commit.rb | 5
-rw-r--r--  lib/gitlab/git/diff.rb | 24
-rw-r--r--  lib/gitlab/git/repository.rb | 18
-rw-r--r--  lib/gitlab/git/wiki.rb | 4
-rw-r--r--  lib/gitlab/git_ref_validator.rb | 4
-rw-r--r--  lib/gitlab/gitaly_client.rb | 32
-rw-r--r--  lib/gitlab/gitaly_client/blob_service.rb | 52
-rw-r--r--  lib/gitlab/gitaly_client/call.rb | 72
-rw-r--r--  lib/gitlab/gitaly_client/cleanup_service.rb | 5
-rw-r--r--  lib/gitlab/gitaly_client/commit_service.rb | 68
-rw-r--r--  lib/gitlab/gitaly_client/conflicts_service.rb | 1
-rw-r--r--  lib/gitlab/gitaly_client/operation_service.rb | 4
-rw-r--r--  lib/gitlab/gitaly_client/ref_service.rb | 31
-rw-r--r--  lib/gitlab/gitaly_client/remote_service.rb | 6
-rw-r--r--  lib/gitlab/gitaly_client/repository_service.rb | 8
-rw-r--r--  lib/gitlab/gl_repository.rb | 6
-rw-r--r--  lib/gitlab/gl_repository/identifier.rb | 94
-rw-r--r--  lib/gitlab/global_id.rb | 17
-rw-r--r--  lib/gitlab/graphql/authorize/authorize_resource.rb | 3
-rw-r--r--  lib/gitlab/graphql/lazy.rb | 19
-rw-r--r--  lib/gitlab/graphql/loaders/issuable_loader.rb | 82
-rw-r--r--  lib/gitlab/graphql/mount_mutation.rb | 8
-rw-r--r--  lib/gitlab/graphql/query_analyzers/logger_analyzer.rb | 3
-rw-r--r--  lib/gitlab/health_checks/probes/collection.rb | 6
-rw-r--r--  lib/gitlab/import/metrics.rb | 87
-rw-r--r--  lib/gitlab/import_export/json/streaming_serializer.rb | 14
-rw-r--r--  lib/gitlab/import_export/project/import_export.yml | 10
-rw-r--r--  lib/gitlab/import_export/project/relation_factory.rb | 11
-rw-r--r--  lib/gitlab/import_export/snippet_repo_restorer.rb | 2
-rw-r--r--  lib/gitlab/incident_management/pager_duty/incident_issue_description.rb | 64
-rw-r--r--  lib/gitlab/instrumentation/elasticsearch_transport.rb | 16
-rw-r--r--  lib/gitlab/instrumentation/redis.rb | 4
-rw-r--r--  lib/gitlab/instrumentation/redis_base.rb | 37
-rw-r--r--  lib/gitlab/instrumentation/redis_cluster_validator.rb | 106
-rw-r--r--  lib/gitlab/instrumentation/redis_interceptor.rb | 23
-rw-r--r--  lib/gitlab/issuable_metadata.rb | 14
-rw-r--r--  lib/gitlab/jira_import/issue_serializer.rb | 4
-rw-r--r--  lib/gitlab/jira_import/user_mapper.rb | 53
-rw-r--r--  lib/gitlab/json.rb | 199
-rw-r--r--  lib/gitlab/json_logger.rb | 2
-rw-r--r--  lib/gitlab/kubernetes/helm.rb | 2
-rw-r--r--  lib/gitlab/kubernetes/node.rb | 78
-rw-r--r--  lib/gitlab/lograge/custom_options.rb | 12
-rw-r--r--  lib/gitlab/marginalia/comment.rb | 4
-rw-r--r--  lib/gitlab/markdown_cache/redis/extension.rb | 22
-rw-r--r--  lib/gitlab/markdown_cache/redis/store.rb | 20
-rw-r--r--  lib/gitlab/metrics/background_transaction.rb | 2
-rw-r--r--  lib/gitlab/metrics/dashboard/errors.rb | 6
-rw-r--r--  lib/gitlab/metrics/dashboard/finder.rb | 45
-rw-r--r--  lib/gitlab/metrics/dashboard/service_selector.rb | 4
-rw-r--r--  lib/gitlab/metrics/dashboard/stages/base_stage.rb | 8
-rw-r--r--  lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb | 83
-rw-r--r--  lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter.rb (renamed from lib/gitlab/metrics/dashboard/stages/endpoint_inserter.rb) | 10
-rw-r--r--  lib/gitlab/metrics/dashboard/stages/sorter.rb | 4
-rw-r--r--  lib/gitlab/metrics/dashboard/stages/url_validator.rb | 43
-rw-r--r--  lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter.rb | 34
-rw-r--r--  lib/gitlab/metrics/dashboard/url.rb | 16
-rw-r--r--  lib/gitlab/metrics/methods.rb | 2
-rw-r--r--  lib/gitlab/metrics/sidekiq_middleware.rb | 4
-rw-r--r--  lib/gitlab/metrics/subscribers/active_record.rb | 22
-rw-r--r--  lib/gitlab/metrics/transaction.rb | 8
-rw-r--r--  lib/gitlab/metrics/web_transaction.rb | 11
-rw-r--r--  lib/gitlab/middleware/go.rb | 4
-rw-r--r--  lib/gitlab/middleware/multipart.rb | 19
-rw-r--r--  lib/gitlab/project_template.rb | 1
-rw-r--r--  lib/gitlab/prometheus_client.rb | 20
-rw-r--r--  lib/gitlab/regex.rb | 12
-rw-r--r--  lib/gitlab/runtime.rb | 26
-rw-r--r--  lib/gitlab/search_results.rb | 3
-rw-r--r--  lib/gitlab/seeder.rb | 3
-rw-r--r--  lib/gitlab/service_desk.rb | 16
-rw-r--r--  lib/gitlab/service_desk_email.rb | 22
-rw-r--r--  lib/gitlab/set_cache.rb | 5
-rw-r--r--  lib/gitlab/sidekiq_logging/deduplication_logger.rb | 7
-rw-r--r--  lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb | 3
-rw-r--r--  lib/gitlab/static_site_editor/config.rb | 8
-rw-r--r--  lib/gitlab/suggestions/file_suggestion.rb | 53
-rw-r--r--  lib/gitlab/suggestions/suggestion_set.rb | 22
-rw-r--r--  lib/gitlab/template/service_desk_template.rb | 21
-rw-r--r--  lib/gitlab/tracking/incident_management.rb | 3
-rw-r--r--  lib/gitlab/tree_summary.rb | 2
-rw-r--r--  lib/gitlab/updated_notes_paginator.rb | 74
-rw-r--r--  lib/gitlab/url_builder.rb | 14
-rw-r--r--  lib/gitlab/usage_data.rb | 363
-rw-r--r--  lib/gitlab/usage_data/topology.rb | 258
-rw-r--r--  lib/gitlab/usage_data_concerns/topology.rb | 137
-rw-r--r--  lib/gitlab/usage_data_counters/track_unique_actions.rb | 86
-rw-r--r--  lib/gitlab/user_access.rb | 6
-rw-r--r--  lib/gitlab/utils.rb | 12
-rw-r--r--  lib/gitlab/utils/markdown.rb | 19
-rw-r--r--  lib/gitlab/utils/usage_data.rb | 14
-rw-r--r--  lib/gitlab/workhorse.rb | 4
227 files changed, 4954 insertions, 1639 deletions
diff --git a/lib/gitlab/action_cable/config.rb b/lib/gitlab/action_cable/config.rb
new file mode 100644
index 00000000000..38e870353eb
--- /dev/null
+++ b/lib/gitlab/action_cable/config.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ActionCable
+ class Config
+ class << self
+ def in_app?
+ Gitlab::Utils.to_boolean(ENV.fetch('ACTION_CABLE_IN_APP', false))
+ end
+
+ def worker_pool_size
+ ENV.fetch('ACTION_CABLE_WORKER_POOL_SIZE', 4).to_i
+ end
+ end
+ end
+ end
+end
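
Usage sketch for the new config class (illustrative only; it simply wraps the two environment variables read above, with Gitlab::Utils.to_boolean handling the string-to-boolean conversion):

    ENV['ACTION_CABLE_IN_APP'] = 'true'
    Gitlab::ActionCable::Config.in_app?          # => true
    Gitlab::ActionCable::Config.worker_pool_size # => 4 when ACTION_CABLE_WORKER_POOL_SIZE is unset
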
diff --git a/lib/gitlab/alert_management/alert_params.rb b/lib/gitlab/alert_management/alert_params.rb
index 789a4fe246a..84a75e62ecf 100644
--- a/lib/gitlab/alert_management/alert_params.rb
+++ b/lib/gitlab/alert_management/alert_params.rb
@@ -8,7 +8,7 @@ module Gitlab
}.freeze
def self.from_generic_alert(project:, payload:)
- parsed_payload = Gitlab::Alerting::NotificationPayloadParser.call(payload).with_indifferent_access
+ parsed_payload = Gitlab::Alerting::NotificationPayloadParser.call(payload, project).with_indifferent_access
annotations = parsed_payload[:annotations]
{
@@ -34,7 +34,9 @@ module Gitlab
payload: parsed_alert.payload,
started_at: parsed_alert.starts_at,
ended_at: parsed_alert.ends_at,
- fingerprint: parsed_alert.gitlab_fingerprint
+ fingerprint: parsed_alert.gitlab_fingerprint,
+ environment: parsed_alert.environment,
+ prometheus_alert: parsed_alert.gitlab_alert
}
end
end
diff --git a/lib/gitlab/alert_management/fingerprint.rb b/lib/gitlab/alert_management/fingerprint.rb
index 6ab47c88ca1..d7842d3b37d 100644
--- a/lib/gitlab/alert_management/fingerprint.rb
+++ b/lib/gitlab/alert_management/fingerprint.rb
@@ -10,11 +10,14 @@ module Gitlab
def generate(data)
return unless data.present?
- if data.is_a?(Array)
- data = flatten_array(data)
- end
+ string = case data
+ when Array then flatten_array(data)
+ when Hash then flatten_hash(data)
+ else
+ data.to_s
+ end
- Digest::SHA1.hexdigest(data.to_s)
+ Digest::SHA1.hexdigest(string)
end
private
@@ -22,6 +25,11 @@ module Gitlab
def flatten_array(array)
array.flatten.map!(&:to_s).join
end
+
+ def flatten_hash(hash)
+ # Sort hash so SHA generated is the same
+ Gitlab::Utils::SafeInlineHash.merge_keys!(hash).sort.to_s
+ end
end
end
end
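
Illustrative use of the extended generator (assuming generate is exposed as a class method, as elsewhere in this class): hash input is inline-flattened and sorted before hashing, so key order does not change the digest, while arrays and scalars behave as before.

    Gitlab::AlertManagement::Fingerprint.generate({ b: 1, a: 2 }) ==
      Gitlab::AlertManagement::Fingerprint.generate({ a: 2, b: 1 })
    # => true (both reduce to the same sorted, flattened string before SHA1)
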
diff --git a/lib/gitlab/alerting/notification_payload_parser.rb b/lib/gitlab/alerting/notification_payload_parser.rb
index d98b9296347..f285dcf507f 100644
--- a/lib/gitlab/alerting/notification_payload_parser.rb
+++ b/lib/gitlab/alerting/notification_payload_parser.rb
@@ -8,12 +8,13 @@ module Gitlab
DEFAULT_TITLE = 'New: Incident'
DEFAULT_SEVERITY = 'critical'
- def initialize(payload)
+ def initialize(payload, project)
@payload = payload.to_h.with_indifferent_access
+ @project = project
end
- def self.call(payload)
- new(payload).call
+ def self.call(payload, project)
+ new(payload, project).call
end
def call
@@ -25,7 +26,7 @@ module Gitlab
private
- attr_reader :payload
+ attr_reader :payload, :project
def title
payload[:title].presence || DEFAULT_TITLE
@@ -84,3 +85,5 @@ module Gitlab
end
end
end
+
+Gitlab::Alerting::NotificationPayloadParser.prepend_if_ee('EE::Gitlab::Alerting::NotificationPayloadParser')
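
Call sites now pass the project through, which the EE module prepended above can use; a minimal sketch (payload and project are assumed to be in scope, e.g. inside a service object):

    parsed = Gitlab::Alerting::NotificationPayloadParser
      .call(payload, project)
      .with_indifferent_access
    parsed[:annotations] # consumed by AlertParams above; title defaults to DEFAULT_TITLE when the payload has none
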
diff --git a/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb b/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb
index e7352a23b99..4d47a17545a 100644
--- a/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb
+++ b/lib/gitlab/analytics/cycle_analytics/records_fetcher.rb
@@ -90,9 +90,7 @@ module Gitlab
end
def ordered_and_limited_query
- query
- .reorder(stage.end_event.timestamp_projection.desc)
- .limit(MAX_RECORDS)
+ order_by_end_event(query).limit(MAX_RECORDS)
end
def records
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events.rb b/lib/gitlab/analytics/cycle_analytics/stage_events.rb
index 5146f92f521..39dc706dff5 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events.rb
@@ -60,7 +60,7 @@ module Gitlab
# hash for defining ActiveRecord enum: identifier => number
def self.to_enum
- enum_mapping.each_with_object({}) { |(k, v), hash| hash[k.identifier] = v }
+ enum_mapping.transform_keys { |k| k.identifier }
end
def self.pairing_rules
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb
index 9f0ca80ba50..c5f843d5f1a 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start.rb
@@ -21,6 +21,11 @@ module Gitlab
issue_metrics_table[:first_mentioned_in_commit_at]
end
+ override :column_list
+ def column_list
+ [timestamp_projection]
+ end
+
# rubocop: disable CodeReuse/ActiveRecord
def apply_query_customization(query)
issue_metrics_join = mr_closing_issues_table
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb
index 0ea98e82ecc..7c1f4436c93 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end.rb
@@ -18,10 +18,15 @@ module Gitlab
end
def timestamp_projection
- Arel::Nodes::NamedFunction.new('COALESCE', [
+ Arel::Nodes::NamedFunction.new('COALESCE', column_list)
+ end
+
+ override :column_list
+ def column_list
+ [
issue_metrics_table[:first_associated_with_milestone_at],
issue_metrics_table[:first_added_to_board_at]
- ])
+ ]
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb
index 4ca8745abe4..fe477490648 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/metrics_based_stage_event.rb
@@ -10,6 +10,11 @@ module Gitlab
query.joins(:metrics)
end
# rubocop: enable CodeReuse/ActiveRecord
+
+ override :column_list
+ def column_list
+ [timestamp_projection]
+ end
end
end
end
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb
index 37168a1fb0f..bddc326de71 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start.rb
@@ -18,10 +18,15 @@ module Gitlab
end
def timestamp_projection
- Arel::Nodes::NamedFunction.new('COALESCE', [
+ Arel::Nodes::NamedFunction.new('COALESCE', column_list)
+ end
+
+ override :column_list
+ def column_list
+ [
issue_metrics_table[:first_associated_with_milestone_at],
issue_metrics_table[:first_added_to_board_at]
- ])
+ ]
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb
index 619b45664fa..cf05ebeb706 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/production_stage_end.rb
@@ -21,6 +21,11 @@ module Gitlab
mr_metrics_table[:first_deployed_to_production_at]
end
+ override :column_list
+ def column_list
+ [timestamp_projection]
+ end
+
# rubocop: disable CodeReuse/ActiveRecord
def apply_query_customization(query)
query.joins(merge_requests_closing_issues: { merge_request: [:metrics] }).where(mr_metrics_table[:first_deployed_to_production_at].gteq(mr_table[:created_at]))
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb b/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb
index 0c75a141c3c..79738747e71 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_events/stage_event.rb
@@ -7,6 +7,7 @@ module Gitlab
# Base class for expressing an event that can be used for a stage.
class StageEvent
include Gitlab::CycleAnalytics::MetricsTables
+ extend Gitlab::Utils::Override
delegate :label_based?, to: :class
@@ -32,6 +33,13 @@ module Gitlab
raise NotImplementedError
end
+ # List of columns that are referenced in the `timestamp_projection` expression
+ # Example timestamp projection: COALESCE(issue_metrics.created_at, issue_metrics.updated_at)
+ # Expected column list: issue_metrics.created_at, issue_metrics.updated_at
+ def column_list
+ []
+ end
+
# Optionally a StageEvent may apply additional filtering or join other tables on the base query.
def apply_query_customization(query)
query
diff --git a/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb b/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb
index 29a2d55df1a..c9a75b39959 100644
--- a/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb
+++ b/lib/gitlab/analytics/cycle_analytics/stage_query_helpers.rb
@@ -22,6 +22,29 @@ module Gitlab
stage.start_event.timestamp_projection
)
end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def order_by_end_event(query)
+ ordered_query = query.reorder(stage.end_event.timestamp_projection.desc)
+
+ # When filtering for more than one label, postgres requires the columns in ORDER BY to be present in the GROUP BY clause
+ if requires_grouping?
+ column_list = [
+ ordered_query.arel_table[:id],
+ *stage.end_event.column_list,
+ *stage.start_event.column_list
+ ]
+
+ ordered_query = ordered_query.group(column_list)
+ end
+
+ ordered_query
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def requires_grouping?
+ Array(params[:label_name]).size > 1
+ end
end
end
end
diff --git a/lib/gitlab/analytics/unique_visits.rb b/lib/gitlab/analytics/unique_visits.rb
new file mode 100644
index 00000000000..9dd7d048eec
--- /dev/null
+++ b/lib/gitlab/analytics/unique_visits.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Analytics
+ class UniqueVisits
+ TARGET_IDS = Set[
+ 'g_analytics_contribution',
+ 'g_analytics_insights',
+ 'g_analytics_issues',
+ 'g_analytics_productivity',
+ 'g_analytics_valuestream',
+ 'p_analytics_pipelines',
+ 'p_analytics_code_reviews',
+ 'p_analytics_valuestream',
+ 'p_analytics_insights',
+ 'p_analytics_issues',
+ 'p_analytics_repo',
+ 'u_analytics_todos',
+ 'i_analytics_cohorts',
+ 'i_analytics_dev_ops_score'
+ ].freeze
+
+ KEY_EXPIRY_LENGTH = 28.days
+
+ def track_visit(visitor_id, target_id, time = Time.zone.now)
+ target_key = key(target_id, time)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.multi do |multi|
+ multi.pfadd(target_key, visitor_id)
+ multi.expire(target_key, KEY_EXPIRY_LENGTH)
+ end
+ end
+ end
+
+ def weekly_unique_visits_for_target(target_id, week_of: 7.days.ago)
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.pfcount(key(target_id, week_of))
+ end
+ end
+
+ def weekly_unique_visits_for_any_target(week_of: 7.days.ago)
+ keys = TARGET_IDS.map { |target_id| key(target_id, week_of) }
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.pfcount(*keys)
+ end
+ end
+
+ private
+
+ def key(target_id, time)
+ raise "Invalid target id #{target_id}" unless TARGET_IDS.include?(target_id.to_s)
+
+ year_week = time.strftime('%G-%V')
+ "#{target_id}-{#{year_week}}"
+ end
+ end
+ end
+end
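
A short usage sketch of the new tracker (runs inside the Rails app since it needs Gitlab::Redis::SharedState). Note that the read methods default to week_of: 7.days.ago, i.e. the previous ISO week, so reading the current week requires passing it explicitly:

    visits = Gitlab::Analytics::UniqueVisits.new
    visits.track_visit('hashed-visitor-id', 'g_analytics_contribution')
    visits.weekly_unique_visits_for_target('g_analytics_contribution', week_of: Time.zone.now) # => 1
    visits.weekly_unique_visits_for_any_target(week_of: Time.zone.now) # HyperLogLog union across all TARGET_IDS
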
diff --git a/lib/gitlab/application_rate_limiter.rb b/lib/gitlab/application_rate_limiter.rb
index 3277ddd9f49..ed963476524 100644
--- a/lib/gitlab/application_rate_limiter.rb
+++ b/lib/gitlab/application_rate_limiter.rb
@@ -19,17 +19,17 @@ module Gitlab
# and only do that when it's needed.
def rate_limits
{
- issues_create: { threshold: -> { Gitlab::CurrentSettings.current_application_settings.issues_create_limit }, interval: 1.minute },
- project_export: { threshold: 30, interval: 5.minutes },
- project_download_export: { threshold: 10, interval: 10.minutes },
+ issues_create: { threshold: -> { application_settings.issues_create_limit }, interval: 1.minute },
+ project_export: { threshold: -> { application_settings.project_export_limit }, interval: 1.minute },
+ project_download_export: { threshold: -> { application_settings.project_download_export_limit }, interval: 1.minute },
project_repositories_archive: { threshold: 5, interval: 1.minute },
- project_generate_new_export: { threshold: 30, interval: 5.minutes },
- project_import: { threshold: 30, interval: 5.minutes },
- play_pipeline_schedule: { threshold: 1, interval: 1.minute },
- show_raw_controller: { threshold: -> { Gitlab::CurrentSettings.current_application_settings.raw_blob_request_limit }, interval: 1.minute },
- group_export: { threshold: 30, interval: 5.minutes },
- group_download_export: { threshold: 10, interval: 10.minutes },
- group_import: { threshold: 30, interval: 5.minutes }
+ project_generate_new_export: { threshold: -> { application_settings.project_export_limit }, interval: 1.minute },
+ project_import: { threshold: -> { application_settings.project_import_limit }, interval: 1.minute },
+ play_pipeline_schedule: { threshold: 1, interval: 1.minute },
+ show_raw_controller: { threshold: -> { application_settings.raw_blob_request_limit }, interval: 1.minute },
+ group_export: { threshold: -> { application_settings.group_export_limit }, interval: 1.minute },
+ group_download_export: { threshold: -> { application_settings.group_download_export_limit }, interval: 1.minute },
+ group_import: { threshold: -> { application_settings.group_import_limit }, interval: 1.minute }
}.freeze
end
@@ -130,6 +130,10 @@ module Gitlab
"application_rate_limiter:#{serialized}"
end
+
+ def application_settings
+ Gitlab::CurrentSettings.current_application_settings
+ end
end
end
end
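
The thresholds that used to be hard-coded integers are now lambdas over the application settings, so they are re-read on every check rather than frozen at boot. A hedged sketch of how such a threshold would be resolved (the limiter's actual lookup method is not shown in this hunk; assuming rate_limits is exposed at the class level as in the surrounding module):

    limit = Gitlab::ApplicationRateLimiter.rate_limits[:project_export][:threshold]
    limit = limit.call if limit.respond_to?(:call) # => application_settings.project_export_limit
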
diff --git a/lib/gitlab/auth/auth_finders.rb b/lib/gitlab/auth/auth_finders.rb
index 93342fbad51..bd5aed0d964 100644
--- a/lib/gitlab/auth/auth_finders.rb
+++ b/lib/gitlab/auth/auth_finders.rb
@@ -54,6 +54,11 @@ module Gitlab
User.find_by_feed_token(token) || raise(UnauthorizedError)
end
+ def find_user_from_bearer_token
+ find_user_from_job_bearer_token ||
+ find_user_from_access_token
+ end
+
def find_user_from_job_token
return unless route_authentication_setting[:job_token_allowed]
return find_user_from_basic_auth_job if route_authentication_setting[:job_token_allowed] == :basic_auth
@@ -92,6 +97,8 @@ module Gitlab
validate_access_token!(scopes: [:api])
+ ::PersonalAccessTokens::LastUsedService.new(access_token).execute
+
access_token.user || raise(UnauthorizedError)
end
@@ -100,6 +107,8 @@ module Gitlab
validate_access_token!
+ ::PersonalAccessTokens::LastUsedService.new(access_token).execute
+
access_token.user || raise(UnauthorizedError)
end
@@ -132,6 +141,9 @@ module Gitlab
end
def validate_access_token!(scopes: [])
+ # return early if we've already authenticated via a job token
+ return if @current_authenticated_job.present? # rubocop:disable Gitlab/ModuleWithInstanceVariables
+
# return early if we've already authenticated via a deploy token
return if @current_authenticated_deploy_token.present? # rubocop:disable Gitlab/ModuleWithInstanceVariables
@@ -151,6 +163,20 @@ module Gitlab
private
+ def find_user_from_job_bearer_token
+ return unless route_authentication_setting[:job_token_allowed]
+
+ token = parsed_oauth_token
+ return unless token
+
+ job = ::Ci::Build.find_by_token(token)
+ return unless job
+
+ @current_authenticated_job = job # rubocop:disable Gitlab/ModuleWithInstanceVariables
+
+ job.user
+ end
+
def route_authentication_setting
return {} unless respond_to?(:route_setting)
diff --git a/lib/gitlab/background_migration.rb b/lib/gitlab/background_migration.rb
index 6a16c37e880..ea0307e8bd6 100644
--- a/lib/gitlab/background_migration.rb
+++ b/lib/gitlab/background_migration.rb
@@ -33,6 +33,7 @@ module Gitlab
next unless job.queue == self.queue
next unless migration_class == steal_class
+ next if block_given? && !(yield migration_args)
begin
perform(migration_class, migration_args) if job.delete
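
This hunk sits in the job-stealing loop and adds an optional filter: when the caller passes a block, queued jobs whose arguments do not satisfy it are skipped. A hypothetical call (assuming the surrounding method is Gitlab::BackgroundMigration.steal, as the queue/steal_class context suggests):

    Gitlab::BackgroundMigration.steal('BackfillNamespaceSettings') do |args|
      args.first >= 1_000 # only drain jobs whose start_id is at least 1_000 (illustrative predicate)
    end
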
diff --git a/lib/gitlab/background_migration/backfill_namespace_settings.rb b/lib/gitlab/background_migration/backfill_namespace_settings.rb
new file mode 100644
index 00000000000..a391d5f4ebe
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_namespace_settings.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+      # Backfill namespace_settings for a range of namespaces
+ class BackfillNamespaceSettings
+ def perform(start_id, end_id)
+ ActiveRecord::Base.connection.execute <<~SQL
+ INSERT INTO namespace_settings (namespace_id, created_at, updated_at)
+ SELECT namespaces.id, now(), now()
+ FROM namespaces
+ WHERE namespaces.id BETWEEN #{start_id} AND #{end_id}
+ ON CONFLICT (namespace_id) DO NOTHING;
+ SQL
+ end
+ end
+ end
+end
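
Each job backfills one ID range with a single INSERT ... ON CONFLICT DO NOTHING, so re-running a range is safe. Running one batch by hand (e.g. from a Rails console; in a post-deployment migration the class name would normally be queued through BackgroundMigrationWorker instead):

    Gitlab::BackgroundMigration::BackfillNamespaceSettings.new.perform(1, 10_000)
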
diff --git a/lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb b/lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb
index 54f77f184d5..91b50c1a493 100644
--- a/lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb
+++ b/lib/gitlab/background_migration/cleanup_concurrent_schema_change.rb
@@ -2,7 +2,7 @@
module Gitlab
module BackgroundMigration
- # Base class for cleaning up concurrent schema changes.
+ # Base class for background migration for rename/type changes.
class CleanupConcurrentSchemaChange
include Database::MigrationHelpers
@@ -10,7 +10,7 @@ module Gitlab
# old_column - The name of the old (to drop) column.
# new_column - The name of the new column.
def perform(table, old_column, new_column)
- return unless column_exists?(table, new_column)
+ return unless column_exists?(table, new_column) && column_exists?(table, old_column)
rows_to_migrate = define_model_for(table)
.where(new_column => nil)
@@ -28,6 +28,10 @@ module Gitlab
end
end
+ def cleanup_concurrent_schema_change(_table, _old_column, _new_column)
+ raise NotImplementedError
+ end
+
# These methods are necessary so we can re-use the migration helpers in
# this class.
def connection
diff --git a/lib/gitlab/background_migration/digest_column.rb b/lib/gitlab/background_migration/digest_column.rb
deleted file mode 100644
index 22a3bb8f8f3..00000000000
--- a/lib/gitlab/background_migration/digest_column.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-# rubocop:disable Style/Documentation
-module Gitlab
- module BackgroundMigration
- class DigestColumn
- class PersonalAccessToken < ActiveRecord::Base
- self.table_name = 'personal_access_tokens'
- end
-
- def perform(model, attribute_from, attribute_to, start_id, stop_id)
- model = model.constantize if model.is_a?(String)
-
- model.transaction do
- relation = model.where(id: start_id..stop_id).where.not(attribute_from => nil).lock
-
- relation.each do |instance|
- instance.update_columns(attribute_to => Gitlab::CryptoHelper.sha256(instance.read_attribute(attribute_from)),
- attribute_from => nil)
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/encrypt_columns.rb b/lib/gitlab/background_migration/encrypt_columns.rb
deleted file mode 100644
index 173543b7c25..00000000000
--- a/lib/gitlab/background_migration/encrypt_columns.rb
+++ /dev/null
@@ -1,104 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # EncryptColumn migrates data from an unencrypted column - `foo`, say - to
- # an encrypted column - `encrypted_foo`, say.
- #
- # To avoid depending on a particular version of the model in app/, add a
- # model to `lib/gitlab/background_migration/models/encrypt_columns` and use
- # it in the migration that enqueues the jobs, so code can be shared.
- #
- # For this background migration to work, the table that is migrated _has_ to
- # have an `id` column as the primary key. Additionally, the encrypted column
- # should be managed by attr_encrypted, and map to an attribute with the same
- # name as the unencrypted column (i.e., the unencrypted column should be
- # shadowed), unless you want to define specific methods / accessors in the
- # temporary model in `/models/encrypt_columns/your_model.rb`.
- #
- class EncryptColumns
- def perform(model, attributes, from, to)
- model = model.constantize if model.is_a?(String)
-
- # If sidekiq hasn't undergone a restart, its idea of what columns are
- # present may be inaccurate, so ensure this is as fresh as possible
- model.reset_column_information
- model.define_attribute_methods
-
- attributes = expand_attributes(model, Array(attributes).map(&:to_sym))
-
- model.transaction do
- # Use SELECT ... FOR UPDATE to prevent the value being changed while
- # we are encrypting it
- relation = model.where(id: from..to).lock
-
- relation.each do |instance|
- encrypt!(instance, attributes)
- end
- end
- end
-
- def clear_migrated_values?
- true
- end
-
- private
-
- # Build a hash of { attribute => encrypted column name }
- def expand_attributes(klass, attributes)
- expanded = attributes.flat_map do |attribute|
- attr_config = klass.encrypted_attributes[attribute]
- crypt_column_name = attr_config&.fetch(:attribute)
-
- raise "Couldn't determine encrypted column for #{klass}##{attribute}" if
- crypt_column_name.nil?
-
- raise "#{klass} source column: #{attribute} is missing" unless
- klass.column_names.include?(attribute.to_s)
-
- # Running the migration without the destination column being present
- # leads to data loss
- raise "#{klass} destination column: #{crypt_column_name} is missing" unless
- klass.column_names.include?(crypt_column_name.to_s)
-
- [attribute, crypt_column_name]
- end
-
- Hash[*expanded]
- end
-
- # Generate ciphertext for each column and update the database
- def encrypt!(instance, attributes)
- to_clear = attributes
- .map { |plain, crypt| apply_attribute!(instance, plain, crypt) }
- .compact
- .flat_map { |plain| [plain, nil] }
-
- to_clear = Hash[*to_clear]
-
- if instance.changed?
- instance.save!
-
- if clear_migrated_values?
- instance.update_columns(to_clear)
- end
- end
- end
-
- def apply_attribute!(instance, plain_column, crypt_column)
- plaintext = instance[plain_column]
- ciphertext = instance[crypt_column]
-
- # No need to do anything if the plaintext is nil, or an encrypted
- # value already exists
- return unless plaintext.present?
- return if ciphertext.present?
-
- # attr_encrypted will calculate and set the expected value for us
- instance.public_send("#{plain_column}=", plaintext) # rubocop:disable GitlabSecurity/PublicSend
-
- plain_column
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/encrypt_runners_tokens.rb b/lib/gitlab/background_migration/encrypt_runners_tokens.rb
deleted file mode 100644
index ec64a73542e..00000000000
--- a/lib/gitlab/background_migration/encrypt_runners_tokens.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # EncryptColumn migrates data from an unencrypted column - `foo`, say - to
- # an encrypted column - `encrypted_foo`, say.
- #
- # We only create a subclass here because we want to isolate this migration
- # (migrating unencrypted runner registration tokens to encrypted columns)
- # from other `EncryptColumns` migration. This class name is going to be
- # serialized and stored in Redis and later picked by Sidekiq, so we need to
- # create a separate class name in order to isolate these migration tasks.
- #
- # We can solve this differently, see tech debt issue:
- #
- # https://gitlab.com/gitlab-org/gitlab-foss/issues/54328
- #
- class EncryptRunnersTokens < EncryptColumns
- def perform(model, from, to)
- resource = "::Gitlab::BackgroundMigration::Models::EncryptColumns::#{model.to_s.capitalize}"
- model = resource.constantize
- attributes = model.encrypted_attributes.keys
-
- super(model, attributes, from, to)
- end
-
- def clear_migrated_values?
- false
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/fix_pages_access_level.rb b/lib/gitlab/background_migration/fix_pages_access_level.rb
index 0d49f3dd8c5..31d2e78b2d2 100644
--- a/lib/gitlab/background_migration/fix_pages_access_level.rb
+++ b/lib/gitlab/background_migration/fix_pages_access_level.rb
@@ -16,7 +16,7 @@ module Gitlab
end
# Namespace
- class Namespace < ApplicationRecord
+ class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
self.inheritance_column = :_type_disabled
diff --git a/lib/gitlab/background_migration/mailers/unconfirm_mailer.rb b/lib/gitlab/background_migration/mailers/unconfirm_mailer.rb
new file mode 100644
index 00000000000..c096dae0631
--- /dev/null
+++ b/lib/gitlab/background_migration/mailers/unconfirm_mailer.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ module Mailers
+ class UnconfirmMailer < ::Notify
+ prepend_view_path(File.join(__dir__, 'views'))
+
+ def unconfirm_notification_email(user)
+ @user = user
+ @verification_from_mail = Gitlab.config.gitlab.email_from
+
+ mail(
+ template_path: 'unconfirm_mailer',
+ template_name: 'unconfirm_notification_email',
+ to: @user.notification_email,
+ subject: subject('GitLab email verification request')
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml
new file mode 100644
index 00000000000..d8f7466a1ca
--- /dev/null
+++ b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml
@@ -0,0 +1,19 @@
+-# haml-lint:disable NoPlainNodes
+%p
+ Dear GitLab user,
+
+%p
+ As part of our commitment to keeping GitLab secure, we have identified and addressed a vulnerability in GitLab that allowed some users to bypass the email verification process in a #{link_to("recent security release", "https://about.gitlab.com/releases/2020/05/27/security-release-13-0-1-released", target: '_blank')}.
+
+%p
+ As a precautionary measure, you will need to re-verify some of your account's email addresses before continuing to use GitLab. Sorry for the inconvenience!
+
+%p
+ We have already sent the re-verification email with a subject line of "Confirmation instructions" from #{@verification_from_mail}. Please feel free to contribute any questions or comments to #{link_to("this issue", "https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/7942", target: '_blank')}.
+
+%p
+ If you are not "#{@user.username}", please #{link_to 'report this to our administrator', new_abuse_report_url(user_id: @user.id)}
+
+%p
+ Thank you for being a GitLab user!
+-# haml-lint:enable NoPlainNodes
diff --git a/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb
new file mode 100644
index 00000000000..d20af9b9803
--- /dev/null
+++ b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb
@@ -0,0 +1,14 @@
+Dear GitLab user,
+
+As part of our commitment to keeping GitLab secure, we have identified and addressed a vulnerability in GitLab that allowed some users to bypass the email verification process in a recent security release.
+
+Security release: https://about.gitlab.com/releases/2020/05/27/security-release-13-0-1-released
+
+As a precautionary measure, you will need to re-verify some of your account's email addresses before continuing to use GitLab. Sorry for the inconvenience!
+
+We have already sent the re-verification email with a subject line of "Confirmation instructions" from <%= @verification_from_mail %>.
+Please feel free to contribute any questions or comments to this issue: https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/7942
+
+If you are not "<%= @user.username %>", please report this to our administrator. Report link: <%= new_abuse_report_url(user_id: @user.id) %>
+
+Thank you for being a GitLab user!
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/namespace.rb b/lib/gitlab/background_migration/models/encrypt_columns/namespace.rb
deleted file mode 100644
index 41f18979d76..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/namespace.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `runners_token` column in `namespaces` table.
- #
- class Namespace < ActiveRecord::Base
- include ::EachBatch
-
- self.table_name = 'namespaces'
- self.inheritance_column = :_type_disabled
-
- def runners_token=(value)
- self.runners_token_encrypted =
- ::Gitlab::CryptoHelper.aes256_gcm_encrypt(value)
- end
-
- def self.encrypted_attributes
- { runners_token: { attribute: :runners_token_encrypted } }
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/project.rb b/lib/gitlab/background_migration/models/encrypt_columns/project.rb
deleted file mode 100644
index bfeae14584d..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/project.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `runners_token` column in `projects` table.
- #
- class Project < ActiveRecord::Base
- include ::EachBatch
-
- self.table_name = 'projects'
- self.inheritance_column = :_type_disabled
-
- def runners_token=(value)
- self.runners_token_encrypted =
- ::Gitlab::CryptoHelper.aes256_gcm_encrypt(value)
- end
-
- def self.encrypted_attributes
- { runners_token: { attribute: :runners_token_encrypted } }
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/runner.rb b/lib/gitlab/background_migration/models/encrypt_columns/runner.rb
deleted file mode 100644
index 14ddce4b147..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/runner.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `token` column in `ci_runners` table.
- #
- class Runner < ActiveRecord::Base
- include ::EachBatch
-
- self.table_name = 'ci_runners'
- self.inheritance_column = :_type_disabled
-
- def token=(value)
- self.token_encrypted =
- ::Gitlab::CryptoHelper.aes256_gcm_encrypt(value)
- end
-
- def self.encrypted_attributes
- { token: { attribute: :token_encrypted } }
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/settings.rb b/lib/gitlab/background_migration/models/encrypt_columns/settings.rb
deleted file mode 100644
index 08ae35c0671..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/settings.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `runners_token` column in `application_settings` table.
- #
- class Settings < ActiveRecord::Base
- include ::EachBatch
- include ::CacheableAttributes
-
- self.table_name = 'application_settings'
- self.inheritance_column = :_type_disabled
-
- after_commit do
- ::ApplicationSetting.expire
- end
-
- def runners_registration_token=(value)
- self.runners_registration_token_encrypted =
- ::Gitlab::CryptoHelper.aes256_gcm_encrypt(value)
- end
-
- def self.encrypted_attributes
- {
- runners_registration_token: {
- attribute: :runners_registration_token_encrypted
- }
- }
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/models/encrypt_columns/web_hook.rb b/lib/gitlab/background_migration/models/encrypt_columns/web_hook.rb
deleted file mode 100644
index 34e72fd9f34..00000000000
--- a/lib/gitlab/background_migration/models/encrypt_columns/web_hook.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- module Models
- module EncryptColumns
- # This model is shared between synchronous and background migrations to
- # encrypt the `token` and `url` columns
- class WebHook < ActiveRecord::Base
- include ::EachBatch
-
- self.table_name = 'web_hooks'
- self.inheritance_column = :_type_disabled
-
- attr_encrypted :token,
- mode: :per_attribute_iv,
- algorithm: 'aes-256-gcm',
- key: ::Settings.attr_encrypted_db_key_base_32
-
- attr_encrypted :url,
- mode: :per_attribute_iv,
- algorithm: 'aes-256-gcm',
- key: ::Settings.attr_encrypted_db_key_base_32
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/populate_project_snippet_statistics.rb b/lib/gitlab/background_migration/populate_project_snippet_statistics.rb
new file mode 100644
index 00000000000..7659b63271f
--- /dev/null
+++ b/lib/gitlab/background_migration/populate_project_snippet_statistics.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # This class creates/updates those project snippets statistics
+ # that haven't been created nor initialized.
+ # It also updates the related project statistics and its root storage namespace stats
+ class PopulateProjectSnippetStatistics
+ def perform(snippet_ids)
+ project_snippets(snippet_ids).group_by(&:namespace_id).each do |namespace_id, namespace_snippets|
+ namespace_snippets.group_by(&:project).each do |project, snippets|
+ upsert_snippet_statistics(snippets)
+ update_project_statistics(project)
+ rescue
+ error_message("Error updating statistics for project #{project.id}")
+ end
+
+ update_namespace_statistics(namespace_snippets.first.project.root_namespace)
+ rescue => e
+ error_message("Error updating statistics for namespace #{namespace_id}: #{e.message}")
+ end
+ end
+
+ private
+
+ def project_snippets(snippet_ids)
+ ProjectSnippet
+ .select('snippets.*, projects.namespace_id')
+ .where(id: snippet_ids)
+ .joins(:project)
+ .includes(:statistics)
+ .includes(snippet_repository: :shard)
+ .includes(project: [:route, :statistics, :namespace])
+ end
+
+ def upsert_snippet_statistics(snippets)
+ snippets.each do |snippet|
+ response = Snippets::UpdateStatisticsService.new(snippet).execute
+
+ error_message("#{response.message} snippet: #{snippet.id}") if response.error?
+ end
+ end
+
+ def logger
+ @logger ||= Gitlab::BackgroundMigration::Logger.build
+ end
+
+ def error_message(message)
+ logger.error(message: "Snippet Statistics Migration: #{message}")
+ end
+
+ def update_project_statistics(project)
+ project.statistics&.refresh!(only: [:snippets_size])
+ end
+
+ def update_namespace_statistics(namespace)
+ Namespaces::StatisticsRefresherService.new.execute(namespace)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/update_vulnerabilities_from_dismissal_feedback.rb b/lib/gitlab/background_migration/update_vulnerabilities_from_dismissal_feedback.rb
new file mode 100644
index 00000000000..bfe9f673b53
--- /dev/null
+++ b/lib/gitlab/background_migration/update_vulnerabilities_from_dismissal_feedback.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # rubocop: disable Style/Documentation
+ class UpdateVulnerabilitiesFromDismissalFeedback
+ def perform(project_id)
+ end
+ end
+ end
+end
+
+Gitlab::BackgroundMigration::UpdateVulnerabilitiesFromDismissalFeedback.prepend_if_ee('EE::Gitlab::BackgroundMigration::UpdateVulnerabilitiesFromDismissalFeedback')
diff --git a/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb b/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb
new file mode 100644
index 00000000000..5f63cf5836e
--- /dev/null
+++ b/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class WrongfullyConfirmedEmailUnconfirmer
+ class UserModel < ActiveRecord::Base
+ alias_method :reset, :reload
+
+ self.table_name = 'users'
+
+ scope :active, -> { where(state: 'active', user_type: nil) } # only humans, skip bots
+
+ devise :confirmable
+ end
+
+ class EmailModel < ActiveRecord::Base
+ alias_method :reset, :reload
+
+ self.table_name = 'emails'
+
+ belongs_to :user
+
+ devise :confirmable
+
+ def self.wrongfully_confirmed_emails(start_id, stop_id)
+ joins(:user)
+ .merge(UserModel.active)
+ .where(id: (start_id..stop_id))
+ .where('emails.confirmed_at IS NOT NULL')
+ .where('emails.confirmed_at = users.confirmed_at')
+ .where('emails.email <> users.email')
+ end
+ end
+
+ def perform(start_id, stop_id)
+ email_records = EmailModel
+ .wrongfully_confirmed_emails(start_id, stop_id)
+ .to_a
+
+ user_ids = email_records.map(&:user_id).uniq
+
+ ActiveRecord::Base.transaction do
+ update_email_records(start_id, stop_id)
+ update_user_records(user_ids)
+ end
+
+ # Refind the records with the "real" Email model so devise will notice that the user / email is unconfirmed
+ unconfirmed_email_records = ::Email.where(id: email_records.map(&:id))
+ ActiveRecord::Associations::Preloader.new.preload(unconfirmed_email_records, [:user])
+
+ send_emails(unconfirmed_email_records)
+ end
+
+ private
+
+ def update_email_records(start_id, stop_id)
+ EmailModel.connection.execute <<-SQL
+ WITH md5_strings as (
+ #{email_query_for_update(start_id, stop_id).to_sql}
+ )
+ UPDATE #{EmailModel.connection.quote_table_name(EmailModel.table_name)}
+ SET confirmed_at = NULL,
+ confirmation_token = md5_strings.md5_string,
+ confirmation_sent_at = NOW()
+ FROM md5_strings
+ WHERE id = md5_strings.email_id
+ SQL
+ end
+
+ def update_user_records(user_ids)
+ UserModel
+ .where(id: user_ids)
+ .update_all("confirmed_at = NULL, confirmation_sent_at = NOW(), unconfirmed_email = NULL, confirmation_token=md5(users.id::varchar || users.created_at || users.encrypted_password || '#{Integer(Time.now.to_i)}')")
+ end
+
+ def email_query_for_update(start_id, stop_id)
+ EmailModel
+ .wrongfully_confirmed_emails(start_id, stop_id)
+ .select('emails.id as email_id', "md5(emails.id::varchar || emails.created_at || users.encrypted_password || '#{Integer(Time.now.to_i)}') as md5_string")
+ end
+
+ def send_emails(email_records)
+ user_records = email_records.map(&:user).uniq
+
+ user_records.each do |user|
+ Gitlab::BackgroundMigration::Mailers::UnconfirmMailer.unconfirm_notification_email(user).deliver_later
+ DeviseMailer.confirmation_instructions(user, user.confirmation_token).deliver_later(wait: 1.minute)
+ end
+
+ email_records.each do |email|
+ DeviseMailer.confirmation_instructions(email, email.confirmation_token).deliver_later(wait: 1.minute)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/bitbucket_import/importer.rb b/lib/gitlab/bitbucket_import/importer.rb
index 5a9fad3be56..e59494c9d9c 100644
--- a/lib/gitlab/bitbucket_import/importer.rb
+++ b/lib/gitlab/bitbucket_import/importer.rb
@@ -3,8 +3,6 @@
module Gitlab
module BitbucketImport
class Importer
- include Gitlab::BitbucketImport::Metrics
-
LABELS = [{ title: 'bug', color: '#FF0000' },
{ title: 'enhancement', color: '#428BCA' },
{ title: 'proposal', color: '#69D100' },
@@ -26,6 +24,7 @@ module Gitlab
import_issues
import_pull_requests
handle_errors
+ metrics.track_finished_import
true
end
@@ -115,6 +114,8 @@ module Gitlab
updated_at: issue.updated_at
)
+ metrics.issues_counter.increment
+
gitlab_issue.labels << @labels[label_name]
import_issue_comments(issue, gitlab_issue) if gitlab_issue.persisted?
@@ -195,6 +196,8 @@ module Gitlab
updated_at: pull_request.updated_at
)
+ metrics.merge_requests_counter.increment
+
import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
rescue StandardError => e
store_pull_request_error(pull_request, e)
@@ -288,6 +291,10 @@ module Gitlab
project_path: project.full_path
}
end
+
+ def metrics
+ @metrics ||= Gitlab::Import::Metrics.new(:bitbucket_importer, @project)
+ end
end
end
end
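
The per-importer Metrics concern is replaced by the new Gitlab::Import::Metrics object (added under lib/gitlab/import/metrics.rb in this commit); the calls below are the ones introduced above, shown together as a minimal sketch:

    metrics = Gitlab::Import::Metrics.new(:bitbucket_importer, project)
    metrics.issues_counter.increment         # per imported issue
    metrics.merge_requests_counter.increment # per imported pull request
    metrics.track_finished_import            # called once the import completes
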
diff --git a/lib/gitlab/bitbucket_import/metrics.rb b/lib/gitlab/bitbucket_import/metrics.rb
deleted file mode 100644
index 25e2d9b211e..00000000000
--- a/lib/gitlab/bitbucket_import/metrics.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BitbucketImport
- module Metrics
- extend ActiveSupport::Concern
-
- IMPORTER = :bitbucket_importer
-
- included do
- prepend Gitlab::Import::Metrics
-
- Gitlab::Import::Metrics.measure(:execute, metrics: {
- "#{IMPORTER}_imported_projects": {
- type: :counter,
- description: 'The number of imported Bitbucket projects'
- },
- "#{IMPORTER}_total_duration_seconds": {
- type: :histogram,
- labels: { importer: IMPORTER },
- description: 'Total time spent importing Bitbucket projects, in seconds'
- }
- })
-
- Gitlab::Import::Metrics.measure(:import_issue, metrics: {
- "#{IMPORTER}_imported_issues": {
- type: :counter,
- description: 'The number of imported Bitbucket issues'
- }
- })
-
- Gitlab::Import::Metrics.measure(:import_pull_request, metrics: {
- "#{IMPORTER}_imported_pull_requests": {
- type: :counter,
- description: 'The number of imported Bitbucket pull requests'
- }
- })
- end
- end
- end
-end
diff --git a/lib/gitlab/bitbucket_server_import/importer.rb b/lib/gitlab/bitbucket_server_import/importer.rb
index 16fe5b46b1f..18a1b64729e 100644
--- a/lib/gitlab/bitbucket_server_import/importer.rb
+++ b/lib/gitlab/bitbucket_server_import/importer.rb
@@ -43,6 +43,7 @@ module Gitlab
import_pull_requests
delete_temp_branches
handle_errors
+ metrics.track_finished_import
log_info(stage: "complete")
@@ -219,7 +220,11 @@ module Gitlab
creator = Gitlab::Import::MergeRequestCreator.new(project)
merge_request = creator.execute(attributes)
- import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
+ if merge_request.persisted?
+ import_pull_request_comments(pull_request, merge_request)
+
+ metrics.merge_requests_counter.increment
+ end
log_info(stage: 'import_bitbucket_pull_requests', message: 'finished', iid: pull_request.iid)
end
@@ -388,6 +393,10 @@ module Gitlab
project_path: project.full_path
}
end
+
+ def metrics
+ @metrics ||= Gitlab::Import::Metrics.new(:bitbucket_server_importer, @project)
+ end
end
end
end
diff --git a/lib/gitlab/cache/ci/project_pipeline_status.rb b/lib/gitlab/cache/ci/project_pipeline_status.rb
index e7a7d23ef7e..d981f263c5e 100644
--- a/lib/gitlab/cache/ci/project_pipeline_status.rb
+++ b/lib/gitlab/cache/ci/project_pipeline_status.rb
@@ -49,7 +49,8 @@ module Gitlab
def load_status
return if loaded?
- return unless commit
+
+ return unless Gitlab::Ci::Features.pipeline_status_omit_commit_sha_in_cache_key?(project) || commit
if has_cache?
load_from_cache
@@ -66,6 +67,8 @@ module Gitlab
end
def load_from_project
+ return unless commit
+
self.sha, self.status, self.ref = commit.sha, commit.status, project.default_branch
end
@@ -114,7 +117,11 @@ module Gitlab
end
def cache_key
- "#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:project:#{project.id}:pipeline_status:#{commit&.sha}"
+ if Gitlab::Ci::Features.pipeline_status_omit_commit_sha_in_cache_key?(project)
+ "#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:project:#{project.id}:pipeline_status"
+ else
+ "#{Gitlab::Redis::Cache::CACHE_NAMESPACE}:project:#{project.id}:pipeline_status:#{commit&.sha}"
+ end
end
def commit
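
With the ci_pipeline_status_omit_commit_sha_in_cache_key flag enabled for a project, the Redis key no longer embeds the commit SHA and the commit check moves from load_status into load_from_project. A sketch of the two key shapes, assuming project id 42 and that the cache namespace resolves to 'cache:gitlab':

    # Flag disabled (previous behaviour): every new head commit starts cold.
    "cache:gitlab:project:42:pipeline_status:4e1243bd22c66e76c2ba9eddc1f91394e57f9f83"

    # Flag enabled: a single key per project, reused across commits.
    "cache:gitlab:project:42:pipeline_status"
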
diff --git a/lib/gitlab/ci/build/releaser.rb b/lib/gitlab/ci/build/releaser.rb
index ba6c7857e96..facb5f619bd 100644
--- a/lib/gitlab/ci/build/releaser.rb
+++ b/lib/gitlab/ci/build/releaser.rb
@@ -5,6 +5,8 @@ module Gitlab
module Build
class Releaser
BASE_COMMAND = 'release-cli create'
+ SINGLE_FLAGS = %i[name description tag_name ref released_at].freeze
+ ARRAY_FLAGS = %i[milestones].freeze
attr_reader :config
@@ -14,9 +16,20 @@ module Gitlab
def script
command = BASE_COMMAND.dup
- config.each { |k, v| command.concat(" --#{k.to_s.dasherize} \"#{v}\"") }
+ single_flags.each { |k, v| command.concat(" --#{k.to_s.dasherize} \"#{v}\"") }
+ array_commands.each { |k, v| v.each { |elem| command.concat(" --#{k.to_s.singularize.dasherize} \"#{elem}\"") } }
- command
+ [command]
+ end
+
+ private
+
+ def single_flags
+ config.slice(*SINGLE_FLAGS)
+ end
+
+ def array_commands
+ config.slice(*ARRAY_FLAGS)
end
end
end
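
Splitting the config into SINGLE_FLAGS and ARRAY_FLAGS means milestones expand into repeated --milestone flags, and script now returns a one-element array. A sketch of the expected output for an assumed config hash (the keyword constructor is not shown in this hunk and is assumed):

    releaser = Gitlab::Ci::Build::Releaser.new(config: {
      name: 'Release v1.0',
      description: 'Release notes',
      tag_name: 'v1.0',
      milestones: %w[13.1 13.2]
    })

    releaser.script
    # => ['release-cli create --name "Release v1.0" --description "Release notes"' \
    #     ' --tag-name "v1.0" --milestone "13.1" --milestone "13.2"']
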
diff --git a/lib/gitlab/ci/config.rb b/lib/gitlab/ci/config.rb
index 10e0f4b8e4d..d81a3fef1f5 100644
--- a/lib/gitlab/ci/config.rb
+++ b/lib/gitlab/ci/config.rb
@@ -39,6 +39,10 @@ module Gitlab
@root.errors
end
+ def warnings
+ @root.warnings
+ end
+
def to_hash
@config
end
@@ -93,7 +97,7 @@ module Gitlab
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error, @context.sentry_payload)
end
- # Overriden in EE
+ # Overridden in EE
def rescue_errors
RESCUE_ERRORS
end
diff --git a/lib/gitlab/ci/config/entry/environment.rb b/lib/gitlab/ci/config/entry/environment.rb
index fc62cca58ff..64e6d48133f 100644
--- a/lib/gitlab/ci/config/entry/environment.rb
+++ b/lib/gitlab/ci/config/entry/environment.rb
@@ -44,7 +44,7 @@ module Gitlab
validates :action,
type: String,
- inclusion: { in: %w[start stop], message: 'should be start or stop' },
+ inclusion: { in: %w[start stop prepare], message: 'should be start, stop or prepare' },
allow_nil: true
validates :on_stop, type: String, allow_nil: true
diff --git a/lib/gitlab/ci/config/entry/job.rb b/lib/gitlab/ci/config/entry/job.rb
index 66050a7bbe0..a615cab1a80 100644
--- a/lib/gitlab/ci/config/entry/job.rb
+++ b/lib/gitlab/ci/config/entry/job.rb
@@ -15,7 +15,7 @@ module Gitlab
allow_failure type when start_in artifacts cache
dependencies before_script needs after_script
environment coverage retry parallel interruptible timeout
- resource_group release].freeze
+ resource_group release secrets].freeze
REQUIRED_BY_NEEDS = %i[stage].freeze
@@ -191,3 +191,5 @@ module Gitlab
end
end
end
+
+::Gitlab::Ci::Config::Entry::Job.prepend_if_ee('::EE::Gitlab::Ci::Config::Entry::Job')
diff --git a/lib/gitlab/ci/config/entry/processable.rb b/lib/gitlab/ci/config/entry/processable.rb
index 81211acbec7..b4539475d88 100644
--- a/lib/gitlab/ci/config/entry/processable.rb
+++ b/lib/gitlab/ci/config/entry/processable.rb
@@ -82,6 +82,10 @@ module Gitlab
@entries.delete(:except) unless except_defined? # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
+ if has_rules? && !has_workflow_rules && Gitlab::Ci::Features.raise_job_rules_without_workflow_rules_warning?
+ add_warning('uses `rules` without defining `workflow:rules`')
+ end
+
# inherit root variables
@root_variables_value = deps&.variables_value # rubocop:disable Gitlab/ModuleWithInstanceVariables
diff --git a/lib/gitlab/ci/config/entry/release.rb b/lib/gitlab/ci/config/entry/release.rb
index b4e4c149730..7e504c24ade 100644
--- a/lib/gitlab/ci/config/entry/release.rb
+++ b/lib/gitlab/ci/config/entry/release.rb
@@ -12,8 +12,9 @@ module Gitlab
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
- ALLOWED_KEYS = %i[tag_name name description assets].freeze
- attributes %i[tag_name name assets].freeze
+ ALLOWED_KEYS = %i[tag_name name description ref released_at milestones assets].freeze
+ attributes %i[tag_name name ref milestones assets].freeze
+ attr_reader :released_at
# Attributable description conflicts with
# ::Gitlab::Config::Entry::Node.description
@@ -29,8 +30,25 @@ module Gitlab
validations do
validates :config, allowed_keys: ALLOWED_KEYS
- validates :tag_name, presence: true
+ validates :tag_name, type: String, presence: true
validates :description, type: String, presence: true
+ validates :milestones, array_of_strings_or_string: true, allow_blank: true
+ validate do
+ next unless config[:released_at]
+
+ begin
+ @released_at = DateTime.iso8601(config[:released_at])
+ rescue ArgumentError
+ errors.add(:released_at, "must be a valid datetime")
+ end
+ end
+ validate do
+ next unless config[:ref]
+ next if Commit.reference_valid?(config[:ref])
+ next if Gitlab::GitRefValidator.validate(config[:ref])
+
+ errors.add(:ref, "must be a valid ref")
+ end
end
def value
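
The new validations accept released_at only when it parses as ISO 8601, and ref only when it looks like a commit SHA or a well-formed Git ref name. A small sketch of the datetime check the validator relies on:

    require 'date'

    DateTime.iso8601('2020-07-15T08:00:00Z')   # parses, value stored in @released_at
    DateTime.iso8601('next tuesday')           # raises ArgumentError, so the entry
                                               # gets "released_at must be a valid datetime"
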
diff --git a/lib/gitlab/ci/config/entry/reports.rb b/lib/gitlab/ci/config/entry/reports.rb
index 74736b24d73..0ae65f43723 100644
--- a/lib/gitlab/ci/config/entry/reports.rb
+++ b/lib/gitlab/ci/config/entry/reports.rb
@@ -13,9 +13,9 @@ module Gitlab
ALLOWED_KEYS =
%i[junit codequality sast secret_detection dependency_scanning container_scanning
- dast performance license_management license_scanning metrics lsif
+ dast performance browser_performance load_performance license_management license_scanning metrics lsif
dotenv cobertura terraform accessibility cluster_applications
- requirements].freeze
+ requirements coverage_fuzzing].freeze
attributes ALLOWED_KEYS
@@ -25,13 +25,16 @@ module Gitlab
with_options allow_nil: true do
validates :junit, array_of_strings_or_string: true
- validates :codequality, array_of_strings_or_string: true
+ validates :coverage_fuzzing, array_of_strings_or_string: true
+      validates :codequality, array_of_strings_or_string: true
validates :sast, array_of_strings_or_string: true
validates :secret_detection, array_of_strings_or_string: true
validates :dependency_scanning, array_of_strings_or_string: true
validates :container_scanning, array_of_strings_or_string: true
validates :dast, array_of_strings_or_string: true
validates :performance, array_of_strings_or_string: true
+ validates :browser_performance, array_of_strings_or_string: true
+ validates :load_performance, array_of_strings_or_string: true
validates :license_management, array_of_strings_or_string: true
validates :license_scanning, array_of_strings_or_string: true
validates :metrics, array_of_strings_or_string: true
diff --git a/lib/gitlab/ci/features.rb b/lib/gitlab/ci/features.rb
index a2eb31369c7..6130baeb9d5 100644
--- a/lib/gitlab/ci/features.rb
+++ b/lib/gitlab/ci/features.rb
@@ -10,20 +10,12 @@ module Gitlab
::Feature.enabled?(:ci_artifacts_exclude, default_enabled: true)
end
- def self.ensure_scheduling_type_enabled?
- ::Feature.enabled?(:ci_ensure_scheduling_type, default_enabled: true)
- end
-
def self.job_heartbeats_runner?(project)
::Feature.enabled?(:ci_job_heartbeats_runner, project, default_enabled: true)
end
- def self.instance_level_variables_limit_enabled?
- ::Feature.enabled?(:ci_instance_level_variables_limit, default_enabled: true)
- end
-
def self.pipeline_fixed_notifications?
- ::Feature.enabled?(:ci_pipeline_fixed_notifications)
+ ::Feature.enabled?(:ci_pipeline_fixed_notifications, default_enabled: true)
end
def self.instance_variables_ui_enabled?
@@ -38,9 +30,51 @@ module Gitlab
::Feature.enabled?(:ci_atomic_processing, project, default_enabled: true)
end
+ def self.pipeline_latest?
+ ::Feature.enabled?(:ci_pipeline_latest, default_enabled: true)
+ end
+
+ def self.pipeline_status_omit_commit_sha_in_cache_key?(project)
+ Feature.enabled?(:ci_pipeline_status_omit_commit_sha_in_cache_key, project)
+ end
+
def self.release_generation_enabled?
- ::Feature.enabled?(:ci_release_generation)
+ ::Feature.enabled?(:ci_release_generation, default_enabled: true)
+ end
+
+ # Remove in https://gitlab.com/gitlab-org/gitlab/-/issues/224199
+ def self.store_pipeline_messages?(project)
+ ::Feature.enabled?(:ci_store_pipeline_messages, project, default_enabled: true)
+ end
+
+ # Remove in https://gitlab.com/gitlab-org/gitlab/-/issues/227052
+ def self.variables_api_filter_environment_scope?
+ ::Feature.enabled?(:ci_variables_api_filter_environment_scope, default_enabled: false)
+ end
+
+      # This FF is only used for development purposes to test that warnings can be
+ # raised and propagated to the UI.
+ def self.raise_job_rules_without_workflow_rules_warning?
+ ::Feature.enabled?(:ci_raise_job_rules_without_workflow_rules_warning)
+ end
+
+ def self.keep_latest_artifacts_for_ref_enabled?(project)
+ ::Feature.enabled?(:keep_latest_artifacts_for_ref, project, default_enabled: false)
+ end
+
+ def self.destroy_only_unlocked_expired_artifacts_enabled?
+ ::Feature.enabled?(:destroy_only_unlocked_expired_artifacts, default_enabled: false)
+ end
+
+ def self.bulk_insert_on_create?(project)
+ ::Feature.enabled?(:ci_bulk_insert_on_create, project, default_enabled: true)
+ end
+
+ def self.allow_to_create_merge_request_pipelines_in_target_project?(target_project)
+ ::Feature.enabled?(:ci_allow_to_create_merge_request_pipelines_in_target_project, target_project)
end
end
end
end
+
+::Gitlab::Ci::Features.prepend_if_ee('::EE::Gitlab::Ci::Features')
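
Each predicate above wraps a single ::Feature.enabled? call so callers never hard-code flag names or default values. For example, the Chain::Build step later in this diff uses keep_latest_artifacts_for_ref_enabled? to pick the initial locked state; a hypothetical helper doing the same would look like:

    # Hypothetical caller: choose the artifact lock state for a new pipeline.
    def initial_locked_state(project)
      if ::Gitlab::Ci::Features.keep_latest_artifacts_for_ref_enabled?(project)
        :artifacts_locked
      else
        :unlocked
      end
    end
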
diff --git a/lib/gitlab/ci/parsers/terraform/tfplan.rb b/lib/gitlab/ci/parsers/terraform/tfplan.rb
index 19f724b79af..abfbe18e23f 100644
--- a/lib/gitlab/ci/parsers/terraform/tfplan.rb
+++ b/lib/gitlab/ci/parsers/terraform/tfplan.rb
@@ -8,15 +8,21 @@ module Gitlab
TfplanParserError = Class.new(Gitlab::Ci::Parsers::ParserError)
def parse!(json_data, terraform_reports, artifact:)
+ job_details = job_details(artifact.job)
+ job_id = job_details['job_id']
plan_data = Gitlab::Json.parse(json_data)
- raise TfplanParserError, 'Tfplan missing required key' unless has_required_keys?(plan_data)
-
- terraform_reports.add_plan(artifact.job.id.to_s, tfplan(plan_data, artifact.job))
+ if has_required_keys?(plan_data)
+ terraform_reports.add_plan(job_id, valid_tfplan(plan_data, job_details))
+ else
+ terraform_reports.add_plan(job_id, invalid_tfplan(:missing_json_keys, job_details))
+ end
rescue JSON::ParserError
- raise TfplanParserError, 'JSON parsing failed'
+ terraform_reports.add_plan(job_id, invalid_tfplan(:invalid_json_format, job_details))
rescue
- raise TfplanParserError, 'Tfplan parsing failed'
+ details = job_details || {}
+ plan_name = job_id || 'failed_tf_plan'
+ terraform_reports.add_plan(plan_name, invalid_tfplan(:unknown_error, details))
end
private
@@ -25,14 +31,24 @@ module Gitlab
(%w[create update delete] - plan_data.keys).empty?
end
- def tfplan(plan_data, artifact_job)
+ def job_details(job)
{
+ 'job_id' => job.id.to_s,
+ 'job_name' => job.options.dig(:artifacts, :name).to_s,
+ 'job_path' => Gitlab::Routing.url_helpers.project_job_path(job.project, job)
+ }
+ end
+
+ def invalid_tfplan(error_type, job_details)
+ job_details.merge('tf_report_error' => error_type)
+ end
+
+ def valid_tfplan(plan_data, job_details)
+ job_details.merge(
'create' => plan_data['create'].to_i,
'delete' => plan_data['delete'].to_i,
- 'job_name' => artifact_job.options.dig(:artifacts, :name).to_s,
- 'job_path' => Gitlab::Routing.url_helpers.project_job_path(artifact_job.project, artifact_job),
'update' => plan_data['update'].to_i
- }
+ )
end
end
end
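
Instead of raising TfplanParserError, the parser now always records a plan keyed by job id and marks failures with tf_report_error, so one bad artifact no longer aborts the whole report. A sketch of the two shapes added to terraform_reports, assuming a job with id 123 named 'plan':

    # Valid payload: job details merged with the resource counts.
    {
      'job_id'   => '123',
      'job_name' => 'plan',
      'job_path' => '/group/project/-/jobs/123',
      'create'   => 0,
      'update'   => 1,
      'delete'   => 0
    }

    # Unparseable payload: the same job details plus an error marker.
    {
      'job_id'   => '123',
      'job_name' => 'plan',
      'job_path' => '/group/project/-/jobs/123',
      'tf_report_error' => :invalid_json_format
    }
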
diff --git a/lib/gitlab/ci/pipeline/chain/build.rb b/lib/gitlab/ci/pipeline/chain/build.rb
index 9662209f88e..4190c40eb66 100644
--- a/lib/gitlab/ci/pipeline/chain/build.rb
+++ b/lib/gitlab/ci/pipeline/chain/build.rb
@@ -20,7 +20,11 @@ module Gitlab
pipeline_schedule: @command.schedule,
merge_request: @command.merge_request,
external_pull_request: @command.external_pull_request,
- variables_attributes: Array(@command.variables_attributes)
+ variables_attributes: Array(@command.variables_attributes),
+ # This should be removed and set on the database column default
+ # level when the keep_latest_artifacts_for_ref feature flag is
+ # removed.
+ locked: ::Gitlab::Ci::Features.keep_latest_artifacts_for_ref_enabled?(@command.project) ? :artifacts_locked : :unlocked
)
end
diff --git a/lib/gitlab/ci/pipeline/chain/command.rb b/lib/gitlab/ci/pipeline/chain/command.rb
index 8118e7b2487..74b28b181bc 100644
--- a/lib/gitlab/ci/pipeline/chain/command.rb
+++ b/lib/gitlab/ci/pipeline/chain/command.rb
@@ -10,7 +10,7 @@ module Gitlab
:trigger_request, :schedule, :merge_request, :external_pull_request,
:ignore_skip_ci, :save_incompleted,
:seeds_block, :variables_attributes, :push_options,
- :chat_data, :allow_mirror_update, :bridge,
+ :chat_data, :allow_mirror_update, :bridge, :content,
# These attributes are set by Chains during processing:
:config_content, :config_processor, :stage_seeds
) do
@@ -78,7 +78,7 @@ module Gitlab
end
def metrics
- @metrics ||= Chain::Metrics.new
+ @metrics ||= ::Gitlab::Ci::Pipeline::Metrics.new
end
def observe_creation_duration(duration)
@@ -90,6 +90,10 @@ module Gitlab
metrics.pipeline_size_histogram
.observe({ source: pipeline.source.to_s }, pipeline.total_size)
end
+
+ def dangling_build?
+ %i[ondemand_dast_scan webide].include?(source)
+ end
end
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/config/content.rb b/lib/gitlab/ci/pipeline/chain/config/content.rb
index 2008010b523..5314fd471c3 100644
--- a/lib/gitlab/ci/pipeline/chain/config/content.rb
+++ b/lib/gitlab/ci/pipeline/chain/config/content.rb
@@ -9,6 +9,7 @@ module Gitlab
include Chain::Helpers
SOURCES = [
+ Gitlab::Ci::Pipeline::Chain::Config::Content::Parameter,
Gitlab::Ci::Pipeline::Chain::Config::Content::Bridge,
Gitlab::Ci::Pipeline::Chain::Config::Content::Repository,
Gitlab::Ci::Pipeline::Chain::Config::Content::ExternalProject,
diff --git a/lib/gitlab/ci/pipeline/chain/config/content/parameter.rb b/lib/gitlab/ci/pipeline/chain/config/content/parameter.rb
new file mode 100644
index 00000000000..3dd216b33d1
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/config/content/parameter.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ module Config
+ class Content
+ class Parameter < Source
+ UnsupportedSourceError = Class.new(StandardError)
+
+ def content
+ strong_memoize(:content) do
+ next unless command.content.present?
+ raise UnsupportedSourceError, "#{command.source} not a dangling build" unless command.dangling_build?
+
+ command.content
+ end
+ end
+
+ def source
+ :parameter_source
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
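
Parameter sits first in the SOURCES list, so config content passed directly on the command wins over the bridge, repository, external and remote sources, but only for dangling builds (the ondemand_dast_scan and webide sources checked by Command#dangling_build? above). A hypothetical caller, assuming the keyword construction used for Chain::Command elsewhere in the codebase:

    # Hypothetical: a web IDE pipeline supplying its config inline.
    command = Gitlab::Ci::Pipeline::Chain::Command.new(
      source: :webide,
      project: project,
      content: webide_yaml_string
    )

    command.dangling_build?   # => true
    # Parameter#content then returns command.content and #source returns
    # :parameter_source; for a :push pipeline the same content would raise
    # UnsupportedSourceError instead.
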
diff --git a/lib/gitlab/ci/pipeline/chain/config/content/source.rb b/lib/gitlab/ci/pipeline/chain/config/content/source.rb
index 3389187473b..8bc172f93d3 100644
--- a/lib/gitlab/ci/pipeline/chain/config/content/source.rb
+++ b/lib/gitlab/ci/pipeline/chain/config/content/source.rb
@@ -11,6 +11,8 @@ module Gitlab
DEFAULT_YAML_FILE = '.gitlab-ci.yml'
+ attr_reader :command
+
def initialize(pipeline, command)
@pipeline = pipeline
@command = command
diff --git a/lib/gitlab/ci/pipeline/chain/config/process.rb b/lib/gitlab/ci/pipeline/chain/config/process.rb
index 1e47be21b93..2cfcb295407 100644
--- a/lib/gitlab/ci/pipeline/chain/config/process.rb
+++ b/lib/gitlab/ci/pipeline/chain/config/process.rb
@@ -19,7 +19,11 @@ module Gitlab
parent_pipeline: parent_pipeline
}
)
+
+ add_warnings_to_pipeline(@command.config_processor.warnings)
rescue Gitlab::Ci::YamlProcessor::ValidationError => ex
+ add_warnings_to_pipeline(ex.warnings)
+
error(ex.message, config_error: true)
rescue => ex
Gitlab::ErrorTracking.track_exception(ex,
@@ -34,6 +38,14 @@ module Gitlab
def break?
@pipeline.errors.any? || @pipeline.persisted?
end
+
+ private
+
+ def add_warnings_to_pipeline(warnings)
+ return unless warnings.present?
+
+ warnings.each { |message| warning(message) }
+ end
end
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/create.rb b/lib/gitlab/ci/pipeline/chain/create.rb
index aa627bdb009..34649fe16f3 100644
--- a/lib/gitlab/ci/pipeline/chain/create.rb
+++ b/lib/gitlab/ci/pipeline/chain/create.rb
@@ -8,7 +8,9 @@ module Gitlab
include Chain::Helpers
def perform!
- pipeline.save!
+ BulkInsertableAssociations.with_bulk_insert(enabled: ::Gitlab::Ci::Features.bulk_insert_on_create?(project)) do
+ pipeline.save!
+ end
rescue ActiveRecord::RecordInvalid => e
error("Failed to persist the pipeline: #{e}")
end
diff --git a/lib/gitlab/ci/pipeline/chain/helpers.rb b/lib/gitlab/ci/pipeline/chain/helpers.rb
index 982ecc0ff51..aba7dab508d 100644
--- a/lib/gitlab/ci/pipeline/chain/helpers.rb
+++ b/lib/gitlab/ci/pipeline/chain/helpers.rb
@@ -11,9 +11,18 @@ module Gitlab
pipeline.yaml_errors = message
end
+ pipeline.add_error_message(message)
pipeline.drop!(drop_reason) if drop_reason
+
+          # TODO: consider not relying on AR errors directly as they can be
+ # polluted with other unrelated errors (e.g. state machine)
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/220823
pipeline.errors.add(:base, message)
end
+
+ def warning(message)
+ pipeline.add_warning_message(message)
+ end
end
end
end
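
error and warning now both persist their messages on the pipeline (add_error_message / add_warning_message), which is what lets Config::Process above surface YAML warnings even when validation fails. A sketch of a chain step that includes these helpers; rules_without_workflow? and broken_config? are placeholder predicates:

    # Inside a Chain step that includes Chain::Helpers:
    def perform!
      warning('uses `rules` without defining `workflow:rules`') if rules_without_workflow?

      return unless broken_config?

      # Stores the message, records it as a yaml/config error on the pipeline,
      # and adds a :base AR error; passing drop_reason: would also drop the pipeline.
      error('Invalid configuration', config_error: true)
    end
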
diff --git a/lib/gitlab/ci/pipeline/chain/metrics.rb b/lib/gitlab/ci/pipeline/chain/metrics.rb
deleted file mode 100644
index 980ab2de9b0..00000000000
--- a/lib/gitlab/ci/pipeline/chain/metrics.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Ci
- module Pipeline
- module Chain
- class Metrics
- include Gitlab::Utils::StrongMemoize
-
- def pipeline_creation_duration_histogram
- strong_memoize(:pipeline_creation_duration_histogram) do
- name = :gitlab_ci_pipeline_creation_duration_seconds
- comment = 'Pipeline creation duration'
- labels = {}
- buckets = [0.01, 0.05, 0.1, 0.5, 1.0, 2.0, 5.0, 20.0, 50.0, 240.0]
-
- ::Gitlab::Metrics.histogram(name, comment, labels, buckets)
- end
- end
-
- def pipeline_size_histogram
- strong_memoize(:pipeline_size_histogram) do
- name = :gitlab_ci_pipeline_size_builds
- comment = 'Pipeline size'
- labels = { source: nil }
- buckets = [0, 1, 5, 10, 20, 50, 100, 200, 500, 1000]
-
- ::Gitlab::Metrics.histogram(name, comment, labels, buckets)
- end
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/ci/pipeline/chain/validate/abilities.rb b/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
index a30b6c6ef0e..769d0dffd0b 100644
--- a/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
+++ b/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
@@ -19,7 +19,7 @@ module Gitlab
end
unless allowed_to_write_ref?
- return error("Insufficient permissions for protected ref '#{command.ref}'")
+ error("Insufficient permissions for protected ref '#{command.ref}'")
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/validate/repository.rb b/lib/gitlab/ci/pipeline/chain/validate/repository.rb
index 8f5445850d7..7977ce90443 100644
--- a/lib/gitlab/ci/pipeline/chain/validate/repository.rb
+++ b/lib/gitlab/ci/pipeline/chain/validate/repository.rb
@@ -18,7 +18,7 @@ module Gitlab
end
if @command.ambiguous_ref?
- return error('Ref is ambiguous')
+ error('Ref is ambiguous')
end
end
diff --git a/lib/gitlab/ci/pipeline/metrics.rb b/lib/gitlab/ci/pipeline/metrics.rb
new file mode 100644
index 00000000000..649da745eea
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/metrics.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Pipeline
+ class Metrics
+ include Gitlab::Utils::StrongMemoize
+
+ def pipeline_creation_duration_histogram
+ strong_memoize(:pipeline_creation_duration_histogram) do
+ name = :gitlab_ci_pipeline_creation_duration_seconds
+ comment = 'Pipeline creation duration'
+ labels = {}
+ buckets = [0.01, 0.05, 0.1, 0.5, 1.0, 2.0, 5.0, 20.0, 50.0, 240.0]
+
+ ::Gitlab::Metrics.histogram(name, comment, labels, buckets)
+ end
+ end
+
+ def pipeline_size_histogram
+ strong_memoize(:pipeline_size_histogram) do
+ name = :gitlab_ci_pipeline_size_builds
+ comment = 'Pipeline size'
+ labels = { source: nil }
+ buckets = [0, 1, 5, 10, 20, 50, 100, 200, 500, 1000]
+
+ ::Gitlab::Metrics.histogram(name, comment, labels, buckets)
+ end
+ end
+
+ def pipeline_processing_events_counter
+ strong_memoize(:pipeline_processing_events_counter) do
+ name = :gitlab_ci_pipeline_processing_events_total
+ comment = 'Total amount of pipeline processing events'
+
+ Gitlab::Metrics.counter(name, comment)
+ end
+ end
+ end
+ end
+ end
+end
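
The relocated class memoizes its Prometheus instruments, so a single instance (as memoized by Chain::Command#metrics above) can be observed repeatedly. A sketch of the calls a caller would make, mirroring the command.rb hunk earlier in this diff; the literal values are illustrative:

    metrics = ::Gitlab::Ci::Pipeline::Metrics.new

    # Seconds spent creating a pipeline (bucketed as defined above).
    metrics.pipeline_creation_duration_histogram.observe({}, 0.42)

    # Builds per pipeline, labelled by source.
    metrics.pipeline_size_histogram.observe({ source: 'push' }, 15)

    # Plain counter for pipeline processing events.
    metrics.pipeline_processing_events_counter.increment
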
diff --git a/lib/gitlab/ci/pipeline/preloader.rb b/lib/gitlab/ci/pipeline/preloader.rb
index db0a1ea4dab..7befc126ca9 100644
--- a/lib/gitlab/ci/pipeline/preloader.rb
+++ b/lib/gitlab/ci/pipeline/preloader.rb
@@ -17,6 +17,7 @@ module Gitlab
pipelines.each do |pipeline|
self.new(pipeline).tap do |preloader|
preloader.preload_commit_authors
+ preloader.preload_ref_commits
preloader.preload_pipeline_warnings
preloader.preload_stages_warnings
end
@@ -27,12 +28,19 @@ module Gitlab
@pipeline = pipeline
end
+ # This also preloads the author of every commit. We're using "lazy_author"
+ # here since "author" immediately loads the data on the first call.
def preload_commit_authors
- # This also preloads the author of every commit. We're using "lazy_author"
- # here since "author" immediately loads the data on the first call.
@pipeline.commit.try(:lazy_author)
end
+ # This preloads latest commits for given refs and therefore makes it
+ # much less expensive to check if a pipeline is a latest one for
+ # given branch.
+ def preload_ref_commits
+ @pipeline.lazy_ref_commit
+ end
+
def preload_pipeline_warnings
# This preloads the number of warnings for every pipeline, ensuring
# that Ci::Pipeline#has_warnings? doesn't execute any additional
@@ -40,10 +48,10 @@ module Gitlab
@pipeline.number_of_warnings
end
+ # This preloads the number of warnings for every stage, ensuring
+ # that Ci::Stage#has_warnings? doesn't execute any additional
+ # queries.
def preload_stages_warnings
- # This preloads the number of warnings for every stage, ensuring
- # that Ci::Stage#has_warnings? doesn't execute any additional
- # queries.
@pipeline.stages.each { |stage| stage.number_of_warnings }
end
end
diff --git a/lib/gitlab/ci/reports/test_report_summary.rb b/lib/gitlab/ci/reports/test_report_summary.rb
new file mode 100644
index 00000000000..85b83b790e7
--- /dev/null
+++ b/lib/gitlab/ci/reports/test_report_summary.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Reports
+ class TestReportSummary
+ attr_reader :all_results
+
+ def initialize(all_results)
+ @all_results = all_results
+ end
+
+ def total
+ TestSuiteSummary.new(all_results)
+ end
+
+ def total_time
+ total.total_time
+ end
+
+ def total_count
+ total.total_count
+ end
+
+ def success_count
+ total.success_count
+ end
+
+ def failed_count
+ total.failed_count
+ end
+
+ def skipped_count
+ total.skipped_count
+ end
+
+ def error_count
+ total.error_count
+ end
+
+ def test_suites
+ all_results
+ .group_by(&:tests_name)
+ .transform_values { |results| TestSuiteSummary.new(results) }
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/reports/test_suite.rb b/lib/gitlab/ci/reports/test_suite.rb
index 8bbf2e0f6cf..28b81e7a471 100644
--- a/lib/gitlab/ci/reports/test_suite.rb
+++ b/lib/gitlab/ci/reports/test_suite.rb
@@ -4,9 +4,9 @@ module Gitlab
module Ci
module Reports
class TestSuite
- attr_reader :name
- attr_reader :test_cases
- attr_reader :total_time
+ attr_accessor :name
+ attr_accessor :test_cases
+ attr_accessor :total_time
attr_reader :suite_error
def initialize(name = nil)
@@ -70,6 +70,14 @@ module Gitlab
@suite_error = msg
end
+ def +(other)
+ self.class.new.tap do |test_suite|
+ test_suite.name = self.name
+ test_suite.test_cases = self.test_cases.deep_merge(other.test_cases)
+ test_suite.total_time = self.total_time + other.total_time
+ end
+ end
+
private
def existing_key?(test_case)
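
The new + operator lets two suites with the same name be merged, which is what summary-style aggregation needs when the same suite ran in several builds. A sketch using the accessors made writable above:

    rspec_head = Gitlab::Ci::Reports::TestSuite.new('rspec')
    rspec_base = Gitlab::Ci::Reports::TestSuite.new('rspec')
    # ... test cases added to each suite elsewhere ...

    combined = rspec_head + rspec_base
    combined.name        # => 'rspec' (taken from the left-hand suite)
    combined.total_time  # sum of both suites' total_time
    # combined.test_cases is the deep merge of both suites' test_cases hashes
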
diff --git a/lib/gitlab/ci/reports/test_suite_summary.rb b/lib/gitlab/ci/reports/test_suite_summary.rb
new file mode 100644
index 00000000000..f9b0bedb712
--- /dev/null
+++ b/lib/gitlab/ci/reports/test_suite_summary.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Reports
+ class TestSuiteSummary
+ attr_reader :results
+
+ def initialize(results)
+ @results = results
+ end
+
+ def name
+ @name ||= results.first.tests_name
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def build_ids
+ results.pluck(:build_id)
+ end
+
+ def total_time
+ @total_time ||= results.sum(&:tests_duration)
+ end
+
+ def success_count
+ @success_count ||= results.sum(&:tests_success)
+ end
+
+ def failed_count
+ @failed_count ||= results.sum(&:tests_failed)
+ end
+
+ def skipped_count
+ @skipped_count ||= results.sum(&:tests_skipped)
+ end
+
+ def error_count
+ @error_count ||= results.sum(&:tests_errored)
+ end
+
+ def total_count
+ @total_count ||= [success_count, failed_count, skipped_count, error_count].sum
+ end
+      # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+ end
+end
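
TestSuiteSummary and TestReportSummary above aggregate pre-computed per-build result rows (each row must respond to tests_name, tests_duration, tests_success, tests_failed, tests_skipped, tests_errored and build_id) rather than parsing JUnit payloads again. A sketch of the reading side; build_report_results stands in for whatever collection the caller loads:

    summary = Gitlab::Ci::Reports::TestReportSummary.new(build_report_results)

    summary.total_count   # success + failed + skipped + error across every row
    summary.total_time    # sum of tests_duration
    summary.test_suites   # => { 'rspec' => TestSuiteSummary, 'karma' => TestSuiteSummary, ... }

    summary.test_suites['rspec'].failed_count
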
diff --git a/lib/gitlab/ci/status/composite.rb b/lib/gitlab/ci/status/composite.rb
index 074651f1040..04a9fc29802 100644
--- a/lib/gitlab/ci/status/composite.rb
+++ b/lib/gitlab/ci/status/composite.rb
@@ -112,13 +112,13 @@ module Gitlab
def success_with_warnings?(status)
@allow_failure_key &&
status[@allow_failure_key] &&
- HasStatus::PASSED_WITH_WARNINGS_STATUSES.include?(status[@status_key])
+ ::Ci::HasStatus::PASSED_WITH_WARNINGS_STATUSES.include?(status[@status_key])
end
def ignored_status?(status)
@allow_failure_key &&
status[@allow_failure_key] &&
- HasStatus::EXCLUDE_IGNORED_STATUSES.include?(status[@status_key])
+ ::Ci::HasStatus::EXCLUDE_IGNORED_STATUSES.include?(status[@status_key])
end
end
end
diff --git a/lib/gitlab/ci/status/factory.rb b/lib/gitlab/ci/status/factory.rb
index 73c73a3b3fc..4a384531c57 100644
--- a/lib/gitlab/ci/status/factory.rb
+++ b/lib/gitlab/ci/status/factory.rb
@@ -7,7 +7,7 @@ module Gitlab
def initialize(subject, user)
@subject = subject
@user = user
- @status = subject.status || HasStatus::DEFAULT_STATUS
+ @status = subject.status || ::Ci::HasStatus::DEFAULT_STATUS
end
def fabricate!
diff --git a/lib/gitlab/ci/status/stage/play_manual.rb b/lib/gitlab/ci/status/stage/play_manual.rb
index ac3fc0912fa..58859a8f191 100644
--- a/lib/gitlab/ci/status/stage/play_manual.rb
+++ b/lib/gitlab/ci/status/stage/play_manual.rb
@@ -18,7 +18,7 @@ module Gitlab
def action_path
pipeline = subject.pipeline
- project_stage_play_manual_path(pipeline.project, pipeline, subject.name)
+ project_pipeline_stage_play_manual_path(pipeline.project, pipeline, subject.name)
end
def action_method
diff --git a/lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml b/lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml
index be584814271..5ebbbf15682 100644
--- a/lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Android-Fastlane.gitlab-ci.yml
@@ -20,7 +20,7 @@ stages:
- docker:dind
script:
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
- - docker pull $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG || true
+ - docker pull --quiet $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG || true
- docker build --cache-from $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG -t $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG .
- docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
diff --git a/lib/gitlab/ci/templates/Android.gitlab-ci.yml b/lib/gitlab/ci/templates/Android.gitlab-ci.yml
index b7194110002..d20dabc0b00 100644
--- a/lib/gitlab/ci/templates/Android.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Android.gitlab-ci.yml
@@ -4,32 +4,65 @@
image: openjdk:8-jdk
variables:
- ANDROID_COMPILE_SDK: "28"
- ANDROID_BUILD_TOOLS: "28.0.2"
- ANDROID_SDK_TOOLS: "4333796"
+ # ANDROID_COMPILE_SDK is the version of Android you're compiling with.
+ # It should match compileSdkVersion.
+ ANDROID_COMPILE_SDK: "29"
+
+ # ANDROID_BUILD_TOOLS is the version of the Android build tools you are using.
+ # It should match buildToolsVersion.
+ ANDROID_BUILD_TOOLS: "29.0.3"
+
+  # ANDROID_SDK_TOOLS is the version of the command line tools we download from the official site:
+  # https://developer.android.com/studio/index.html
+  # Under "Command line tools only", the package name has the format:
+  #       commandlinetools-os_type-ANDROID_SDK_TOOLS_latest.zip
+  # The value below was current when this script was last updated for the latest compileSdkVersion.
+ ANDROID_SDK_TOOLS: "6514223"
+
+# Install required packages before running the script
before_script:
- apt-get --quiet update --yes
- apt-get --quiet install --yes wget tar unzip lib32stdc++6 lib32z1
- - wget --quiet --output-document=android-sdk.zip https://dl.google.com/android/repository/sdk-tools-linux-${ANDROID_SDK_TOOLS}.zip
- - unzip -d android-sdk-linux android-sdk.zip
- - echo y | android-sdk-linux/tools/bin/sdkmanager "platforms;android-${ANDROID_COMPILE_SDK}" >/dev/null
- - echo y | android-sdk-linux/tools/bin/sdkmanager "platform-tools" >/dev/null
- - echo y | android-sdk-linux/tools/bin/sdkmanager "build-tools;${ANDROID_BUILD_TOOLS}" >/dev/null
- - export ANDROID_HOME=$PWD/android-sdk-linux
- - export PATH=$PATH:$PWD/android-sdk-linux/platform-tools/
+
+  # Set up ANDROID_HOME as the path the downloaded SDK is exported into
+ - export ANDROID_HOME="${PWD}/android-home"
+  # Create a new directory at the specified location
+ - install -d $ANDROID_HOME
+  # Download the Android SDK command line tools from the official source
+  # (note that the download URL must match the package format described above),
+  # unzip them, and then run a series of sdkmanager commands to install the
+  # SDK packages needed to build the app
+ - wget --output-document=$ANDROID_HOME/cmdline-tools.zip https://dl.google.com/android/repository/commandlinetools-linux-${ANDROID_SDK_TOOLS}_latest.zip
+  # change into ANDROID_HOME, where the archive was downloaded
+ - pushd $ANDROID_HOME
+ - unzip -d cmdline-tools cmdline-tools.zip
+ - popd
+ - export PATH=$PATH:${ANDROID_HOME}/cmdline-tools/tools/bin/
+
+ # Nothing fancy here, just checking sdkManager version
+ - sdkmanager --version
+
+ # use yes to accept all licenses
+ - yes | sdkmanager --sdk_root=${ANDROID_HOME} --licenses || true
+ - sdkmanager --sdk_root=${ANDROID_HOME} "platforms;android-${ANDROID_COMPILE_SDK}"
+ - sdkmanager --sdk_root=${ANDROID_HOME} "platform-tools"
+ - sdkmanager --sdk_root=${ANDROID_HOME} "build-tools;${ANDROID_BUILD_TOOLS}"
+
+  # Not strictly necessary, but just to be safe
- chmod +x ./gradlew
- # temporarily disable checking for EPIPE error and use yes to accept all licenses
- - set +o pipefail
- - yes | android-sdk-linux/tools/bin/sdkmanager --licenses
- - set -o pipefail
+# Basic android and gradle stuff
+# Check linting
lintDebug:
+ interruptible: true
stage: build
script:
- ./gradlew -Pci --console=plain :app:lintDebug -PbuildDir=lint
+# Make Project
assembleDebug:
+ interruptible: true
stage: build
script:
- ./gradlew assembleDebug
@@ -37,7 +70,9 @@ assembleDebug:
paths:
- app/build/outputs/
+# Run all tests; if any fail, the pipeline is interrupted (fails)
debugTests:
+ interruptible: true
stage: test
script:
- ./gradlew -Pci --console=plain :app:testDebug
diff --git a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
index e37cd14d1d1..c10d87a537b 100644
--- a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
@@ -12,6 +12,7 @@
# * code_quality: CODE_QUALITY_DISABLED
# * license_management: LICENSE_MANAGEMENT_DISABLED
# * performance: PERFORMANCE_DISABLED
+# * load_performance: LOAD_PERFORMANCE_DISABLED
# * sast: SAST_DISABLED
# * secret_detection: SECRET_DETECTION_DISABLED
# * dependency_scanning: DEPENDENCY_SCANNING_DISABLED
@@ -74,7 +75,7 @@ stages:
workflow:
rules:
- - if: '$BUILDPACK_URL || $AUTO_DEVOPS_EXPLICITLY_ENABLED == "1"'
+ - if: '$BUILDPACK_URL || $AUTO_DEVOPS_EXPLICITLY_ENABLED == "1" || $DOCKERFILE_PATH'
- exists:
- Dockerfile
diff --git a/lib/gitlab/ci/templates/Composer.gitlab-ci.yml b/lib/gitlab/ci/templates/Composer.gitlab-ci.yml
new file mode 100644
index 00000000000..5d9c68d3031
--- /dev/null
+++ b/lib/gitlab/ci/templates/Composer.gitlab-ci.yml
@@ -0,0 +1,19 @@
+# Publishes a tag/branch to Composer Packages of the current project
+publish:
+ image: curlimages/curl:latest
+ stage: build
+ variables:
+ URL: "$CI_SERVER_PROTOCOL://$CI_SERVER_HOST:$CI_SERVER_PORT/api/v4/projects/$CI_PROJECT_ID/packages/composer?job_token=$CI_JOB_TOKEN"
+ script:
+ - version=$([[ -z "$CI_COMMIT_TAG" ]] && echo "branch=$CI_COMMIT_REF_NAME" || echo "tag=$CI_COMMIT_TAG")
+ - insecure=$([ "$CI_SERVER_PROTOCOL" = "http" ] && echo "--insecure" || echo "")
+ - response=$(curl -s -w "\n%{http_code}" $insecure --data $version $URL)
+ - code=$(echo "$response" | tail -n 1)
+ - body=$(echo "$response" | head -n 1)
+ # Output state information
+ - if [ $code -eq 201 ]; then
+ echo "Package created - Code $code - $body";
+ else
+ echo "Could not create package - Code $code - $body";
+ exit 1;
+ fi
diff --git a/lib/gitlab/ci/templates/Dart.gitlab-ci.yml b/lib/gitlab/ci/templates/Dart.gitlab-ci.yml
new file mode 100644
index 00000000000..cc383f89b0c
--- /dev/null
+++ b/lib/gitlab/ci/templates/Dart.gitlab-ci.yml
@@ -0,0 +1,22 @@
+# https://hub.docker.com/r/google/dart
+image: google/dart:2.8.4
+
+variables:
+  # To learn more about these options, run:
+  #   pub run test --help
+ PUB_VARS: "--platform vm --timeout 30s --concurrency=6 --test-randomize-ordering-seed=random --reporter=expanded"
+
+# Cache downloaded dependencies and plugins between builds.
+# To keep cache across branches add 'key: "$CI_JOB_NAME"'
+cache:
+ paths:
+ - .pub-cache/global_packages
+
+before_script:
+ - export PATH="$PATH":"~/.pub-cache/bin"
+ - pub get --no-precompile
+
+test:
+ stage: test
+ script:
+ - pub run test $PUB_VARS
diff --git a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
index 9a34f8cb113..8553a940bd7 100644
--- a/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
@@ -1,11 +1,16 @@
+# Read more about the feature here: https://docs.gitlab.com/ee/user/project/merge_requests/browser_performance_testing.html
+
performance:
stage: performance
- image: docker:19.03.11
+ image: docker:19.03.12
allow_failure: true
variables:
DOCKER_TLS_CERTDIR: ""
+ SITESPEED_IMAGE: sitespeedio/sitespeed.io
+ SITESPEED_VERSION: 13.3.0
+ SITESPEED_OPTIONS: ''
services:
- - docker:19.03.11-dind
+ - docker:19.03.12-dind
script:
- |
if ! docker info &>/dev/null; then
@@ -15,21 +20,22 @@ performance:
fi
- export CI_ENVIRONMENT_URL=$(cat environment_url.txt)
- mkdir gitlab-exporter
- - wget -O gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/1.0.0/index.js
+ - wget -O gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/1.0.1/index.js
- mkdir sitespeed-results
- |
if [ -f .gitlab-urls.txt ]
then
sed -i -e 's@^@'"$CI_ENVIRONMENT_URL"'@' .gitlab-urls.txt
- docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:11.2.0 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results .gitlab-urls.txt
+ docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io $SITESPEED_IMAGE:$SITESPEED_VERSION --plugins.add ./gitlab-exporter --outputFolder sitespeed-results .gitlab-urls.txt $SITESPEED_OPTIONS
else
- docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:11.2.0 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results "$CI_ENVIRONMENT_URL"
+ docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io $SITESPEED_IMAGE:$SITESPEED_VERSION --plugins.add ./gitlab-exporter --outputFolder sitespeed-results "$CI_ENVIRONMENT_URL" $SITESPEED_OPTIONS
fi
- - mv sitespeed-results/data/performance.json performance.json
+ - mv sitespeed-results/data/performance.json browser-performance.json
artifacts:
paths:
- - performance.json
- sitespeed-results/
+ reports:
+ browser_performance: browser-performance.json
rules:
- if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
when: never
diff --git a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
index b5550461482..dbe870953ae 100644
--- a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
@@ -1,10 +1,10 @@
build:
stage: build
- image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:v0.2.3"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:v0.3.1"
variables:
DOCKER_TLS_CERTDIR: ""
services:
- - docker:19.03.11-dind
+ - docker:19.03.12-dind
script:
- |
if [[ -z "$CI_COMMIT_TAG" ]]; then
diff --git a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
index bde6f185d3a..6b76d7e0c9b 100644
--- a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
@@ -1,13 +1,14 @@
code_quality:
stage: test
- image: docker:19.03.11
+ image: docker:19.03.12
allow_failure: true
services:
- - docker:19.03.11-dind
+ - docker:19.03.12-dind
variables:
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
- CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/ci-cd/codequality:0.85.9"
+ CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/ci-cd/codequality:0.85.10"
+ needs: []
script:
- |
if ! docker info &>/dev/null; then
diff --git a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
index bab4fae67f0..d7d927ac8ee 100644
--- a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml
@@ -1,5 +1,5 @@
.dast-auto-deploy:
- image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.17.0"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.17.2"
dast_environment_deploy:
extends: .dast-auto-deploy
@@ -51,3 +51,4 @@ stop_dast_environment:
- if: $CI_COMMIT_BRANCH &&
$CI_KUBERNETES_ACTIVE &&
$GITLAB_FEATURES =~ /\bdast\b/
+ when: always
diff --git a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
index 97b5f3fd7f5..66c60e85892 100644
--- a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
@@ -1,5 +1,6 @@
.auto-deploy:
- image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.17.0"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.17.2"
+ dependencies: []
include:
- template: Jobs/Deploy/ECS.gitlab-ci.yml
@@ -20,7 +21,8 @@ review:
url: http://$CI_PROJECT_ID-$CI_ENVIRONMENT_SLUG.$KUBE_INGRESS_BASE_DOMAIN
on_stop: stop_review
artifacts:
- paths: [environment_url.txt]
+ paths: [environment_url.txt, tiller.log]
+ when: always
rules:
- if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
when: never
@@ -41,7 +43,6 @@ stop_review:
environment:
name: review/$CI_COMMIT_REF_NAME
action: stop
- dependencies: []
allow_failure: true
rules:
- if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
@@ -122,7 +123,8 @@ canary:
name: production
url: http://$CI_PROJECT_PATH_SLUG.$KUBE_INGRESS_BASE_DOMAIN
artifacts:
- paths: [environment_url.txt]
+ paths: [environment_url.txt, tiller.log]
+ when: always
production:
<<: *production_template
@@ -172,7 +174,8 @@ production_manual:
name: production
url: http://$CI_PROJECT_PATH_SLUG.$KUBE_INGRESS_BASE_DOMAIN
artifacts:
- paths: [environment_url.txt]
+ paths: [environment_url.txt, tiller.log]
+ when: always
.manual_rollout_template: &manual_rollout_template
<<: *rollout_template
diff --git a/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml
index bb3d5526f3a..da474f8ac88 100644
--- a/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml
@@ -13,11 +13,20 @@
script:
- ecs update-task-definition
-review_ecs:
- extends: .deploy_to_ecs
+.review_ecs_base:
stage: review
+ extends: .deploy_to_ecs
environment:
name: review/$CI_COMMIT_REF_NAME
+
+.production_ecs_base:
+ stage: production
+ extends: .deploy_to_ecs
+ environment:
+ name: production
+
+review_ecs:
+ extends: .review_ecs_base
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "ECS"'
when: never
@@ -29,11 +38,21 @@ review_ecs:
when: never
- if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
+review_fargate:
+ extends: .review_ecs_base
+ rules:
+ - if: '$AUTO_DEVOPS_PLATFORM_TARGET != "FARGATE"'
+ when: never
+ - if: '$CI_KUBERNETES_ACTIVE'
+ when: never
+ - if: '$REVIEW_DISABLED'
+ when: never
+ - if: '$CI_COMMIT_BRANCH == "master"'
+ when: never
+ - if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
+
production_ecs:
- extends: .deploy_to_ecs
- stage: production
- environment:
- name: production
+ extends: .production_ecs_base
rules:
- if: '$AUTO_DEVOPS_PLATFORM_TARGET != "ECS"'
when: never
@@ -42,3 +61,14 @@ production_ecs:
- if: '$CI_COMMIT_BRANCH != "master"'
when: never
- if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
+
+production_fargate:
+ extends: .production_ecs_base
+ rules:
+ - if: '$AUTO_DEVOPS_PLATFORM_TARGET != "FARGATE"'
+ when: never
+ - if: '$CI_KUBERNETES_ACTIVE'
+ when: never
+ - if: '$CI_COMMIT_BRANCH != "master"'
+ when: never
+ - if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
diff --git a/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml
new file mode 100644
index 00000000000..b437ddbd734
--- /dev/null
+++ b/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml
@@ -0,0 +1,29 @@
+load_performance:
+ stage: performance
+ image: docker:19.03.11
+ allow_failure: true
+ variables:
+ DOCKER_TLS_CERTDIR: ""
+ K6_IMAGE: loadimpact/k6
+ K6_VERSION: 0.26.2
+ K6_TEST_FILE: github.com/loadimpact/k6/samples/http_get.js
+ K6_OPTIONS: ''
+ services:
+ - docker:19.03.11-dind
+ script:
+ - |
+ if ! docker info &>/dev/null; then
+ if [ -z "$DOCKER_HOST" -a "$KUBERNETES_PORT" ]; then
+ export DOCKER_HOST='tcp://localhost:2375'
+ fi
+ fi
+ - docker run --rm -v "$(pwd)":/k6 -w /k6 $K6_IMAGE:$K6_VERSION run $K6_TEST_FILE --summary-export=load-performance.json $K6_OPTIONS
+ artifacts:
+ reports:
+ load_performance: load-performance.json
+ rules:
+ - if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
+ when: never
+ - if: '$LOAD_PERFORMANCE_DISABLED'
+ when: never
+ - if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH'
diff --git a/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml b/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml
index 316647b5921..3d0bacda853 100644
--- a/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Managed-Cluster-Applications.gitlab-ci.yml
@@ -1,6 +1,6 @@
apply:
stage: deploy
- image: "registry.gitlab.com/gitlab-org/cluster-integration/cluster-applications:v0.20.0"
+ image: "registry.gitlab.com/gitlab-org/cluster-integration/cluster-applications:v0.24.2"
environment:
name: production
variables:
diff --git a/lib/gitlab/ci/templates/Security/Coverage-Fuzzing.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Coverage-Fuzzing.gitlab-ci.yml
new file mode 100644
index 00000000000..2fab8b95a3d
--- /dev/null
+++ b/lib/gitlab/ci/templates/Security/Coverage-Fuzzing.gitlab-ci.yml
@@ -0,0 +1,34 @@
+# Read more about this feature https://docs.gitlab.com/ee/user/application_security/coverage_fuzzing
+
+variables:
+  # The branch on which to run full-fledged, long-running fuzzing jobs.
+  # All other branches run fuzzing regression tests only.
+  COVERAGE_FUZZING_BRANCH: "$CI_DEFAULT_BRANCH"
+  # Uses semantic versioning; this always downloads the latest v1 gitlab-cov-fuzz release.
+  COVERAGE_FUZZING_VERSION: v1
+  # For offline environments: mirror the gitlab-cov-fuzz release binaries
+  # to your own server and point this prefix at it.
+ COVERAGE_FUZZING_URL_PREFIX: "https://gitlab.com/gitlab-org/security-products/analyzers/gitlab-cov-fuzz/-/raw"
+
+.fuzz_base:
+ stage: fuzz
+ allow_failure: true
+ before_script:
+ - if [ -x "$(command -v apt-get)" ] ; then apt-get update && apt-get install -y wget; fi
+ - wget -O gitlab-cov-fuzz "${COVERAGE_FUZZING_URL_PREFIX}"/"${COVERAGE_FUZZING_VERSION}"/binaries/gitlab-cov-fuzz_Linux_x86_64
+ - chmod a+x gitlab-cov-fuzz
+ - export REGRESSION=true
+ - if [[ $CI_COMMIT_BRANCH = $COVERAGE_FUZZING_BRANCH ]]; then REGRESSION=false; fi;
+ artifacts:
+ paths:
+ - corpus
+ - crashes
+ - gl-coverage-fuzzing-report.json
+ reports:
+ coverage_fuzzing: gl-coverage-fuzzing-report.json
+ when: always
+ rules:
+ - if: $COVERAGE_FUZZING_DISABLED
+ when: never
+ - if: $GITLAB_FEATURES =~ /\bcoverage_fuzzing\b/
+ - if: $CI_RUNNER_EXECUTABLE_ARCH == "linux"
diff --git a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
index 07399216597..7abecfb7e49 100644
--- a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
@@ -41,4 +41,11 @@ dast:
$DAST_API_SPECIFICATION == null
when: never
- if: $CI_COMMIT_BRANCH &&
+ $CI_KUBERNETES_ACTIVE &&
$GITLAB_FEATURES =~ /\bdast\b/
+ - if: $CI_COMMIT_BRANCH &&
+ $GITLAB_FEATURES =~ /\bdast\b/ &&
+ $DAST_WEBSITE
+ - if: $CI_COMMIT_BRANCH &&
+ $GITLAB_FEATURES =~ /\bdast\b/ &&
+ $DAST_API_SPECIFICATION
diff --git a/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
index fa8ccb7cf93..37f6cd216ca 100644
--- a/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
@@ -9,9 +9,6 @@ variables:
# (SAST, Dependency Scanning, ...)
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
- # Deprecated, use SECURE_ANALYZERS_PREFIX instead
- DS_ANALYZER_IMAGE_PREFIX: "$SECURE_ANALYZERS_PREFIX"
-
DS_DEFAULT_ANALYZERS: "bundler-audit, retire.js, gemnasium, gemnasium-maven, gemnasium-python"
DS_EXCLUDED_PATHS: "spec, test, tests, tmp"
DS_MAJOR_VERSION: 2
@@ -45,7 +42,7 @@ dependency_scanning:
docker run \
$(propagate_env_vars \
DS_ANALYZER_IMAGES \
- DS_ANALYZER_IMAGE_PREFIX \
+ SECURE_ANALYZERS_PREFIX \
DS_ANALYZER_IMAGE_TAG \
DS_DEFAULT_ANALYZERS \
DS_EXCLUDED_PATHS \
@@ -55,6 +52,7 @@ dependency_scanning:
DS_PYTHON_VERSION \
DS_PIP_VERSION \
DS_PIP_DEPENDENCY_PATH \
+ DS_JAVA_VERSION \
GEMNASIUM_DB_LOCAL_PATH \
GEMNASIUM_DB_REMOTE_URL \
GEMNASIUM_DB_REF_NAME \
@@ -98,7 +96,7 @@ dependency_scanning:
gemnasium-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/gemnasium:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/gemnasium:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
@@ -117,7 +115,7 @@ gemnasium-dependency_scanning:
gemnasium-maven-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/gemnasium-maven:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/gemnasium-maven:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
@@ -133,7 +131,7 @@ gemnasium-maven-dependency_scanning:
gemnasium-python-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/gemnasium-python:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/gemnasium-python:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
@@ -156,7 +154,7 @@ gemnasium-python-dependency_scanning:
bundler-audit-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/bundler-audit:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/bundler-audit:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
@@ -169,7 +167,7 @@ bundler-audit-dependency_scanning:
retire-js-dependency_scanning:
extends: .ds-analyzer
image:
- name: "$DS_ANALYZER_IMAGE_PREFIX/retire.js:$DS_MAJOR_VERSION"
+ name: "$SECURE_ANALYZERS_PREFIX/retire.js:$DS_MAJOR_VERSION"
rules:
- if: $DEPENDENCY_SCANNING_DISABLED || $DS_DISABLE_DIND == 'false'
when: never
diff --git a/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml
index b0c75b0aab0..cc34d23decc 100644
--- a/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/License-Scanning.gitlab-ci.yml
@@ -18,15 +18,15 @@ license_scanning:
name: "$SECURE_ANALYZERS_PREFIX/license-finder:$LICENSE_MANAGEMENT_VERSION"
entrypoint: [""]
variables:
- LM_REPORT_FILE: gl-license-scanning-report.json
LM_REPORT_VERSION: '2.1'
SETUP_CMD: $LICENSE_MANAGEMENT_SETUP_CMD
allow_failure: true
+ needs: []
script:
- /run.sh analyze .
artifacts:
reports:
- license_scanning: $LM_REPORT_FILE
+ license_scanning: gl-license-scanning-report.json
dependencies: []
rules:
- if: $LICENSE_MANAGEMENT_DISABLED
diff --git a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
index ec7b34d17b5..f0e2f48dd5c 100644
--- a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
@@ -9,10 +9,7 @@ variables:
# (SAST, Dependency Scanning, ...)
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
- # Deprecated, use SECURE_ANALYZERS_PREFIX instead
- SAST_ANALYZER_IMAGE_PREFIX: "$SECURE_ANALYZERS_PREFIX"
-
- SAST_DEFAULT_ANALYZERS: "bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, tslint, secrets, sobelow, pmd-apex, kubesec"
+ SAST_DEFAULT_ANALYZERS: "bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, secrets, sobelow, pmd-apex, kubesec"
SAST_EXCLUDED_PATHS: "spec, test, tests, tmp"
SAST_ANALYZER_IMAGE_TAG: 2
SAST_DISABLE_DIND: "true"
@@ -63,7 +60,7 @@ sast:
bandit-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/bandit:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/bandit:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -76,7 +73,7 @@ bandit-sast:
brakeman-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/brakeman:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/brakeman:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -88,21 +85,23 @@ brakeman-sast:
eslint-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/eslint:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/eslint:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
- if: $CI_COMMIT_BRANCH &&
- $GITLAB_FEATURES =~ /\bsast\b/ &&
$SAST_DEFAULT_ANALYZERS =~ /eslint/
exists:
- '**/*.html'
- '**/*.js'
+ - '**/*.jsx'
+ - '**/*.ts'
+ - '**/*.tsx'
flawfinder-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/flawfinder:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/flawfinder:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -116,7 +115,7 @@ flawfinder-sast:
kubesec-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/kubesec:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/kubesec:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -128,7 +127,7 @@ kubesec-sast:
gosec-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/gosec:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/gosec:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -141,7 +140,7 @@ gosec-sast:
nodejs-scan-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/nodejs-scan:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/nodejs-scan:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -154,7 +153,7 @@ nodejs-scan-sast:
phpcs-security-audit-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/phpcs-security-audit:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/phpcs-security-audit:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -167,7 +166,7 @@ phpcs-security-audit-sast:
pmd-apex-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/pmd-apex:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/pmd-apex:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -180,7 +179,7 @@ pmd-apex-sast:
secrets-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/secrets:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/secrets:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -191,7 +190,7 @@ secrets-sast:
security-code-scan-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/security-code-scan:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/security-code-scan:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -205,7 +204,7 @@ security-code-scan-sast:
sobelow-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/sobelow:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/sobelow:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -218,7 +217,7 @@ sobelow-sast:
spotbugs-sast:
extends: .sast-analyzer
image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/spotbugs:$SAST_ANALYZER_IMAGE_TAG"
+ name: "$SECURE_ANALYZERS_PREFIX/spotbugs:$SAST_ANALYZER_IMAGE_TAG"
rules:
- if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
when: never
@@ -229,16 +228,3 @@ spotbugs-sast:
- '**/*.groovy'
- '**/*.java'
- '**/*.scala'
-
-tslint-sast:
- extends: .sast-analyzer
- image:
- name: "$SAST_ANALYZER_IMAGE_PREFIX/tslint:$SAST_ANALYZER_IMAGE_TAG"
- rules:
- - if: $SAST_DISABLED || $SAST_DISABLE_DIND == 'false'
- when: never
- - if: $CI_COMMIT_BRANCH &&
- $GITLAB_FEATURES =~ /\bsast\b/ &&
- $SAST_DEFAULT_ANALYZERS =~ /tslint/
- exists:
- - '**/*.ts'
diff --git a/lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml
index e18f89cadd7..441a57048e1 100644
--- a/lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Secret-Detection.gitlab-ci.yml
@@ -8,17 +8,33 @@ variables:
SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers"
SECRETS_ANALYZER_VERSION: "3"
-secret_detection:
+.secret-analyzer:
stage: test
image: "$SECURE_ANALYZERS_PREFIX/secrets:$SECRETS_ANALYZER_VERSION"
services: []
- rules:
- - if: $SECRET_DETECTION_DISABLED
- when: never
- - if: $CI_COMMIT_BRANCH && $GITLAB_FEATURES =~ /\bsecret_detection\b/
- when: on_success
artifacts:
reports:
secret_detection: gl-secret-detection-report.json
+
+secret_detection_default_branch:
+ extends: .secret-analyzer
+ rules:
+ - if: $SECRET_DETECTION_DISABLED
+ when: never
+ - if: $CI_DEFAULT_BRANCH == $CI_COMMIT_BRANCH &&
+ $GITLAB_FEATURES =~ /\bsecret_detection\b/
+ script:
+ - /analyzer run
+
+secret_detection:
+ extends: .secret-analyzer
+ rules:
+ - if: $SECRET_DETECTION_DISABLED
+ when: never
+ - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH &&
+ $GITLAB_FEATURES =~ /\bsecret_detection\b/
script:
+ - git fetch origin $CI_DEFAULT_BRANCH $CI_BUILD_REF_NAME
+ - export SECRET_DETECTION_COMMIT_TO=$(git log --left-right --cherry-pick --pretty=format:"%H" refs/remotes/origin/$CI_DEFAULT_BRANCH...refs/remotes/origin/$CI_BUILD_REF_NAME | tail -n 1)
+ - export SECRET_DETECTION_COMMIT_FROM=$CI_COMMIT_SHA
- /analyzer run
diff --git a/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml
index b6c05c61db1..2d2e0859373 100644
--- a/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml
@@ -13,7 +13,7 @@
variables:
SECURE_BINARIES_ANALYZERS: >-
- bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, tslint, secrets, sobelow, pmd-apex, kubesec,
+ bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, secrets, sobelow, pmd-apex, kubesec,
bundler-audit, retire.js, gemnasium, gemnasium-maven, gemnasium-python,
klar, clair-vulnerabilities-db,
license-finder,
@@ -40,7 +40,7 @@ variables:
- docker info
- env
- if [ -z "$SECURE_BINARIES_IMAGE" ]; then export SECURE_BINARIES_IMAGE=${SECURE_BINARIES_IMAGE:-"registry.gitlab.com/gitlab-org/security-products/analyzers/${CI_JOB_NAME}:${SECURE_BINARIES_ANALYZER_VERSION}"}; fi
- - docker pull ${SECURE_BINARIES_IMAGE}
+ - docker pull --quiet ${SECURE_BINARIES_IMAGE}
- mkdir -p output/$(dirname ${CI_JOB_NAME})
- |
if [ "$SECURE_BINARIES_SAVE_ARTIFACTS" = "true" ]; then
@@ -125,13 +125,6 @@ eslint:
- $SECURE_BINARIES_DOWNLOAD_IMAGES == "true" &&
$SECURE_BINARIES_ANALYZERS =~ /\beslint\b/
-tslint:
- extends: .download_images
- only:
- variables:
- - $SECURE_BINARIES_DOWNLOAD_IMAGES == "true" &&
- $SECURE_BINARIES_ANALYZERS =~ /\btslint\b/
-
secrets:
extends: .download_images
only:
diff --git a/lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml b/lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml
index e6097ae322e..9dbd9b679a8 100644
--- a/lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Verify/Browser-Performance.gitlab-ci.yml
@@ -10,8 +10,9 @@ performance:
stage: performance
image: docker:git
variables:
- URL: https://example.com
- SITESPEED_VERSION: 11.2.0
+ URL: ''
+ SITESPEED_IMAGE: sitespeedio/sitespeed.io
+ SITESPEED_VERSION: 13.3.0
SITESPEED_OPTIONS: ''
services:
- docker:stable-dind
@@ -19,11 +20,10 @@ performance:
- mkdir gitlab-exporter
- wget -O ./gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/master/index.js
- mkdir sitespeed-results
- - docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:$SITESPEED_VERSION --plugins.add ./gitlab-exporter --outputFolder sitespeed-results $URL $SITESPEED_OPTIONS
- - mv sitespeed-results/data/performance.json performance.json
+ - docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io $SITESPEED_IMAGE:$SITESPEED_VERSION --plugins.add ./gitlab-exporter --outputFolder sitespeed-results $URL $SITESPEED_OPTIONS
+ - mv sitespeed-results/data/performance.json browser-performance.json
artifacts:
paths:
- - performance.json
- sitespeed-results/
reports:
- performance: performance.json
+ browser_performance: browser-performance.json
diff --git a/lib/gitlab/ci/templates/Verify/Load-Performance-Testing.gitlab-ci.yml b/lib/gitlab/ci/templates/Verify/Load-Performance-Testing.gitlab-ci.yml
new file mode 100644
index 00000000000..d39bd234020
--- /dev/null
+++ b/lib/gitlab/ci/templates/Verify/Load-Performance-Testing.gitlab-ci.yml
@@ -0,0 +1,23 @@
+# Read more about the feature here: https://docs.gitlab.com/ee/user/project/merge_requests/load_performance_testing.html
+
+stages:
+ - build
+ - test
+ - deploy
+ - performance
+
+load_performance:
+ stage: performance
+ image: docker:git
+ variables:
+ K6_IMAGE: loadimpact/k6
+ K6_VERSION: 0.26.2
+ K6_TEST_FILE: github.com/loadimpact/k6/samples/http_get.js
+ K6_OPTIONS: ''
+ services:
+ - docker:stable-dind
+ script:
+ - docker run --rm -v "$(pwd)":/k6 -w /k6 $K6_IMAGE:$K6_VERSION run $K6_TEST_FILE --summary-export=load-performance.json $K6_OPTIONS
+ artifacts:
+ reports:
+ load_performance: load-performance.json
diff --git a/lib/gitlab/ci/templates/index.md b/lib/gitlab/ci/templates/index.md
new file mode 100644
index 00000000000..ff151dd4d1a
--- /dev/null
+++ b/lib/gitlab/ci/templates/index.md
@@ -0,0 +1,3 @@
+# Development guide for GitLab CI templates
+
+Please follow [the development guidelines](../../../../doc/development/cicd/templates.md).
diff --git a/lib/gitlab/ci/templates/npm.gitlab-ci.yml b/lib/gitlab/ci/templates/npm.gitlab-ci.yml
new file mode 100644
index 00000000000..035ba52da84
--- /dev/null
+++ b/lib/gitlab/ci/templates/npm.gitlab-ci.yml
@@ -0,0 +1,59 @@
+default:
+ image: node:latest
+
+ # Validate that the repository contains a package.json and extract a few values from it.
+ before_script:
+ - |
+ if [[ ! -f package.json ]]; then
+ echo "No package.json found! A package.json file is required to publish a package to GitLab's NPM registry."
+ echo 'For more information, see https://docs.gitlab.com/ee/user/packages/npm_registry/#creating-a-project'
+ exit 1
+ fi
+ - NPM_PACKAGE_NAME=$(node -p "require('./package.json').name")
+ - NPM_PACKAGE_VERSION=$(node -p "require('./package.json').version")
+
+# Validate that the package name is properly scoped to the project's root namespace.
+# For more information, see https://docs.gitlab.com/ee/user/packages/npm_registry/#package-naming-convention
+validate_package_scope:
+ stage: build
+ script:
+ - |
+ if [[ ! $NPM_PACKAGE_NAME =~ ^@$CI_PROJECT_ROOT_NAMESPACE/ ]]; then
+ echo "Invalid package scope! Packages must be scoped in the root namespace of the project, e.g. \"@${CI_PROJECT_ROOT_NAMESPACE}/${CI_PROJECT_NAME}\""
+ echo 'For more information, see https://docs.gitlab.com/ee/user/packages/npm_registry/#package-naming-convention'
+ exit 1
+ fi
+
+# If no .npmrc is included in the repo, generate a temporary one to use during the publish step
+# that is configured to publish to GitLab's NPM registry
+create_npmrc:
+ stage: build
+ script:
+ - |
+ if [[ ! -f .npmrc ]]; then
+ echo 'No .npmrc found! Creating one now. Please review the following link for more information: https://docs.gitlab.com/ee/user/packages/npm_registry/index.html#authenticating-with-a-ci-job-token'
+
+ {
+ echo '@${CI_PROJECT_ROOT_NAMESPACE}:registry=${CI_SERVER_PROTOCOL}://${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/${CI_PROJECT_ID}/packages/npm/'
+ echo '//${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/packages/npm/:_authToken=${CI_JOB_TOKEN}'
+ echo '//${CI_SERVER_HOST}:${CI_SERVER_PORT}/api/v4/projects/${CI_PROJECT_ID}/packages/npm/:_authToken=${CI_JOB_TOKEN}'
+ } >> .npmrc
+
+ fi
+ artifacts:
+ paths:
+ - .npmrc
+
+# Publish the package. If the version in package.json has not yet been published, it will be
+# published to GitLab's NPM registry. If the version already exists, the publish command
+# will fail and the existing package will not be updated.
+publish_package:
+ stage: deploy
+ script:
+ - |
+ {
+ npm publish &&
+ echo "Successfully published version ${NPM_PACKAGE_VERSION} of ${NPM_PACKAGE_NAME} to GitLab's NPM registry: ${CI_PROJECT_URL}/-/packages"
+ } || {
+ echo "No new version of ${NPM_PACKAGE_NAME} published. This is most likely because version ${NPM_PACKAGE_VERSION} already exists in GitLab's NPM registry."
+ }
diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb
index 6a9b7b2fc85..8cf355bbfc1 100644
--- a/lib/gitlab/ci/yaml_processor.rb
+++ b/lib/gitlab/ci/yaml_processor.rb
@@ -3,15 +3,33 @@
module Gitlab
module Ci
class YamlProcessor
- ValidationError = Class.new(StandardError)
+ # ValidationError is treated like a result object in the form of an exception.
+ # We can return any warnings raised during config validation along with
+ # the error object, until we support returning multiple messages.
+ class ValidationError < StandardError
+ attr_reader :warnings
+
+ def initialize(message, warnings: [])
+ @warnings = warnings
+ super(message)
+ end
+ end
include Gitlab::Config::Entry::LegacyValidationHelpers
attr_reader :stages, :jobs
- ResultWithErrors = Struct.new(:content, :errors) do
+ class Result
+ attr_reader :config, :errors, :warnings
+
+ def initialize(config: nil, errors: [], warnings: [])
+ @config = config
+ @errors = errors
+ @warnings = warnings
+ end
+
def valid?
- errors.empty?
+ config.present? && errors.empty?
end
end
@@ -20,24 +38,32 @@ module Gitlab
@config = @ci_config.to_hash
unless @ci_config.valid?
- raise ValidationError, @ci_config.errors.first
+ error!(@ci_config.errors.first)
end
initial_parsing
rescue Gitlab::Ci::Config::ConfigError => e
- raise ValidationError, e.message
+ error!(e.message)
end
def self.new_with_validation_errors(content, opts = {})
- return ResultWithErrors.new('', ['Please provide content of .gitlab-ci.yml']) if content.blank?
+ return Result.new(errors: ['Please provide content of .gitlab-ci.yml']) if content.blank?
config = Gitlab::Ci::Config.new(content, **opts)
- return ResultWithErrors.new("", config.errors) unless config.valid?
+ return Result.new(errors: config.errors, warnings: config.warnings) unless config.valid?
config = Gitlab::Ci::YamlProcessor.new(content, opts)
- ResultWithErrors.new(config, [])
- rescue ValidationError, Gitlab::Ci::Config::ConfigError => e
- ResultWithErrors.new('', [e.message])
+ Result.new(config: config, warnings: config.warnings)
+
+ rescue ValidationError => e
+ Result.new(errors: [e.message], warnings: e.warnings)
+
+ rescue Gitlab::Ci::Config::ConfigError => e
+ Result.new(errors: [e.message])
+ end
+
+ def warnings
+ @ci_config&.warnings || []
end
def builds
@@ -66,6 +92,7 @@ module Gitlab
cache: job[:cache],
resource_group_key: job[:resource_group],
scheduling_type: job[:scheduling_type],
+ secrets: job[:secrets],
options: {
image: job[:image],
services: job[:services],
@@ -157,10 +184,14 @@ module Gitlab
return unless job[:stage]
unless job[:stage].is_a?(String) && job[:stage].in?(@stages)
- raise ValidationError, "#{name} job: chosen stage does not exist; available stages are #{@stages.join(", ")}"
+ error!("#{name} job: chosen stage does not exist; available stages are #{@stages.join(", ")}")
end
end
+ def error!(message)
+ raise ValidationError.new(message, warnings: warnings)
+ end
+
def validate_job_dependencies!(name, job)
return unless job[:dependencies]
@@ -190,7 +221,7 @@ module Gitlab
def validate_job_dependency!(name, dependency, dependency_type = 'dependency')
unless @jobs[dependency.to_sym]
- raise ValidationError, "#{name} job: undefined #{dependency_type}: #{dependency}"
+ error!("#{name} job: undefined #{dependency_type}: #{dependency}")
end
job_stage_index = stage_index(name)
@@ -199,7 +230,7 @@ module Gitlab
# A dependency might be defined later in the configuration
# with a stage that does not exist
unless dependency_stage_index.present? && dependency_stage_index < job_stage_index
- raise ValidationError, "#{name} job: #{dependency_type} #{dependency} is not defined in prior stages"
+ error!("#{name} job: #{dependency_type} #{dependency} is not defined in prior stages")
end
end
@@ -221,19 +252,19 @@ module Gitlab
on_stop_job = @jobs[on_stop.to_sym]
unless on_stop_job
- raise ValidationError, "#{name} job: on_stop job #{on_stop} is not defined"
+ error!("#{name} job: on_stop job #{on_stop} is not defined")
end
unless on_stop_job[:environment]
- raise ValidationError, "#{name} job: on_stop job #{on_stop} does not have environment defined"
+ error!("#{name} job: on_stop job #{on_stop} does not have environment defined")
end
unless on_stop_job[:environment][:name] == environment[:name]
- raise ValidationError, "#{name} job: on_stop job #{on_stop} have different environment name"
+ error!("#{name} job: on_stop job #{on_stop} have different environment name")
end
unless on_stop_job[:environment][:action] == 'stop'
- raise ValidationError, "#{name} job: on_stop job #{on_stop} needs to have action stop defined"
+ error!("#{name} job: on_stop job #{on_stop} needs to have action stop defined")
end
end
end
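
A minimal usage sketch, not part of the commit, showing how a caller might consume the new Result object and the warnings carried by ValidationError; the YAML content is illustrative only.

content = <<~YAML
  rspec:
    script: bundle exec rspec
YAML

result = Gitlab::Ci::YamlProcessor.new_with_validation_errors(content)

if result.valid?
  # Warnings are now surfaced even when the config parses successfully.
  result.warnings.each { |warning| puts "warning: #{warning}" }
  result.config.builds
else
  puts result.errors # e.g. ["Please provide content of .gitlab-ci.yml"]
end
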
diff --git a/lib/gitlab/class_attributes.rb b/lib/gitlab/class_attributes.rb
new file mode 100644
index 00000000000..6560c97b2e6
--- /dev/null
+++ b/lib/gitlab/class_attributes.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ClassAttributes
+ extend ActiveSupport::Concern
+
+ class_methods do
+ protected
+
+ # Returns an attribute declared on this class or its parent class.
+ # This approach allows declared attributes to be inherited by
+ # child classes.
+ def get_class_attribute(name)
+ class_attributes[name] || superclass_attributes(name)
+ end
+
+ private
+
+ def class_attributes
+ @class_attributes ||= {}
+ end
+
+ def superclass_attributes(name)
+ return unless superclass.include? Gitlab::ClassAttributes
+
+ superclass.get_class_attribute(name)
+ end
+ end
+ end
+end
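
A hypothetical sketch, not from the commit, of how a class hierarchy could declare and read an inherited attribute through get_class_attribute; the feature_category accessor here is invented for illustration.

class BaseTracker
  include Gitlab::ClassAttributes

  class << self
    # Illustrative reader/writer; the concern itself only ships the protected reader.
    def feature_category(name = nil)
      return get_class_attribute(:feature_category) unless name

      class_attributes[:feature_category] = name
    end
  end
end

class ChildTracker < BaseTracker; end

BaseTracker.feature_category(:continuous_integration)
ChildTracker.feature_category # => :continuous_integration, inherited from the parent class
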
diff --git a/lib/gitlab/code_navigation_path.rb b/lib/gitlab/code_navigation_path.rb
index faf623faccf..909d0536b5f 100644
--- a/lib/gitlab/code_navigation_path.rb
+++ b/lib/gitlab/code_navigation_path.rb
@@ -5,7 +5,7 @@ module Gitlab
include Gitlab::Utils::StrongMemoize
include Gitlab::Routing
- LATEST_COMMITS_LIMIT = 10
+ LATEST_COMMITS_LIMIT = 2
def initialize(project, commit_sha)
@project = project
@@ -28,11 +28,11 @@ module Gitlab
latest_commits_shas =
project.repository.commits(commit_sha, limit: LATEST_COMMITS_LIMIT).map(&:sha)
- artifact =
- ::Ci::JobArtifact
- .with_file_types(['lsif'])
- .for_sha(latest_commits_shas, project.id)
- .last
+ pipeline = @project.ci_pipelines.for_sha(latest_commits_shas).last
+
+ next unless pipeline
+
+ artifact = pipeline.job_artifacts.with_file_types(['lsif']).last
artifact&.job
end
diff --git a/lib/gitlab/conan_token.rb b/lib/gitlab/conan_token.rb
new file mode 100644
index 00000000000..7526c10b608
--- /dev/null
+++ b/lib/gitlab/conan_token.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+# The Conan client uses a JWT for authenticating with remotes.
+# This class encodes and decodes a user's personal access token or
+# CI_JOB_TOKEN into a JWT that is used by the Conan client to
+# authenticate with GitLab
+
+module Gitlab
+ class ConanToken
+ HMAC_KEY = 'gitlab-conan-packages'.freeze
+
+ attr_reader :access_token_id, :user_id
+
+ class << self
+ def from_personal_access_token(access_token)
+ new(access_token_id: access_token.id, user_id: access_token.user_id)
+ end
+
+ def from_job(job)
+ new(access_token_id: job.token, user_id: job.user.id)
+ end
+
+ def from_deploy_token(deploy_token)
+ new(access_token_id: deploy_token.token, user_id: deploy_token.username)
+ end
+
+ def decode(jwt)
+ payload = JSONWebToken::HMACToken.decode(jwt, secret).first
+
+ new(access_token_id: payload['access_token'], user_id: payload['user_id'])
+ rescue JWT::DecodeError, JWT::ExpiredSignature, JWT::ImmatureSignature
+ # we return on expired and errored tokens because the Conan client
+ # will request a new token automatically.
+ end
+
+ def secret
+ OpenSSL::HMAC.hexdigest(
+ OpenSSL::Digest::SHA256.new,
+ ::Settings.attr_encrypted_db_key_base,
+ HMAC_KEY
+ )
+ end
+ end
+
+ def initialize(access_token_id:, user_id:)
+ @access_token_id = access_token_id
+ @user_id = user_id
+ end
+
+ def to_jwt
+ hmac_token.encoded
+ end
+
+ private
+
+ def hmac_token
+ JSONWebToken::HMACToken.new(self.class.secret).tap do |token|
+ token['access_token'] = access_token_id
+ token['user_id'] = user_id
+ token.expire_time = token.issued_at + 1.hour
+ end
+ end
+ end
+end
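
A small usage sketch, illustrative rather than part of the commit, of the round trip between a personal access token and the JWT handed to the Conan client; it assumes a PersonalAccessToken record is at hand, e.g. in a Rails console.

access_token = PersonalAccessToken.active.first

conan_token = Gitlab::ConanToken.from_personal_access_token(access_token)
jwt = conan_token.to_jwt # handed back to the Conan client as its auth token

# Later, an API request presents the JWT and the original ids are recovered:
decoded = Gitlab::ConanToken.decode(jwt)
decoded.access_token_id == access_token.id # => true
decoded.user_id == access_token.user_id    # => true

# Expired or malformed tokens decode to nil; the Conan client simply requests
# a fresh token in that case.
Gitlab::ConanToken.decode('not-a-jwt') # => nil
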
diff --git a/lib/gitlab/config/entry/configurable.rb b/lib/gitlab/config/entry/configurable.rb
index 571e7a5127e..6bf77ebaa5b 100644
--- a/lib/gitlab/config/entry/configurable.rb
+++ b/lib/gitlab/config/entry/configurable.rb
@@ -62,7 +62,9 @@ module Gitlab
class_methods do
def nodes
- Hash[(@nodes || {}).map { |key, factory| [key, factory.dup] }]
+ return {} unless @nodes
+
+ @nodes.transform_values(&:dup)
end
def reserved_node_names
diff --git a/lib/gitlab/config/entry/node.rb b/lib/gitlab/config/entry/node.rb
index 84d3409ed91..32912cb1046 100644
--- a/lib/gitlab/config/entry/node.rb
+++ b/lib/gitlab/config/entry/node.rb
@@ -16,6 +16,7 @@ module Gitlab
@config = config
@metadata = metadata
@entries = {}
+ @warnings = []
yield(self) if block_given?
@@ -60,6 +61,14 @@ module Gitlab
[]
end
+ def warnings
+ @warnings + descendants.flat_map(&:warnings)
+ end
+
+ def add_warning(message)
+ @warnings << "#{location} #{message}"
+ end
+
def value
if leaf?
@config
@@ -68,7 +77,7 @@ module Gitlab
value.specified? && value.relevant?
end
- Hash[meaningful.map { |key, entry| [key, entry.value] }]
+ meaningful.transform_values { |entry| entry.value }
end
end
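
A rough sketch of the new warning plumbing; SomeEntry stands in for any Node subclass and the "jobs:rspec" location prefix is illustrative.

entry = SomeEntry.new(config_hash) # any Gitlab::Config::Entry::Node subclass
entry.add_warning('will be removed in a future release')

entry.warnings
# => ["jobs:rspec will be removed in a future release", ...plus descendant warnings]
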
diff --git a/lib/gitlab/config/loader/yaml.rb b/lib/gitlab/config/loader/yaml.rb
index e001742a7f8..cb3fc49944c 100644
--- a/lib/gitlab/config/loader/yaml.rb
+++ b/lib/gitlab/config/loader/yaml.rb
@@ -5,6 +5,7 @@ module Gitlab
module Loader
class Yaml
DataTooLargeError = Class.new(Loader::FormatError)
+ NotHashError = Class.new(Loader::FormatError)
include Gitlab::Utils::StrongMemoize
@@ -23,7 +24,7 @@ module Gitlab
def load_raw!
raise DataTooLargeError, 'The parsed YAML is too big' if too_big?
- raise Loader::FormatError, 'Invalid configuration format' unless hash?
+ raise NotHashError, 'Invalid configuration format' unless hash?
@config
end
diff --git a/lib/gitlab/config_checker/external_database_checker.rb b/lib/gitlab/config_checker/external_database_checker.rb
index 795082a10a0..c08dd0351f3 100644
--- a/lib/gitlab/config_checker/external_database_checker.rb
+++ b/lib/gitlab/config_checker/external_database_checker.rb
@@ -5,22 +5,43 @@ module Gitlab
module ExternalDatabaseChecker
extend self
- # DB is considered deprecated if it is below version 11
- def db_version_deprecated?
- Gitlab::Database.version.to_f < 11
- end
-
def check
- return [] unless db_version_deprecated?
+ notices = []
+
+ unless Gitlab::Database.postgresql_minimum_supported_version?
+ notices <<
+ {
+ type: 'warning',
+ message: _('You are using PostgreSQL %{pg_version_current}, but PostgreSQL ' \
+ '%{pg_version_minimum} is required for this version of GitLab. ' \
+ 'Please upgrade your environment to a supported PostgreSQL version, ' \
+ 'see %{pg_requirements_url} for details.') % {
+ pg_version_current: Gitlab::Database.version,
+ pg_version_minimum: Gitlab::Database::MINIMUM_POSTGRES_VERSION,
+ pg_requirements_url: '<a href="https://docs.gitlab.com/ee/install/requirements.html#database">database requirements</a>'
+ }
+ }
+ end
+
+ if Gitlab::Database.postgresql_upcoming_deprecation?
+ upcoming_deprecation = Gitlab::Database::UPCOMING_POSTGRES_VERSION_DETAILS
+
+ notices <<
+ {
+ type: 'warning',
+ message: _('Note that PostgreSQL %{pg_version_upcoming} will become the minimum required ' \
+ 'version in GitLab %{gl_version_upcoming} (%{gl_version_upcoming_date}). Please ' \
+ 'consider upgrading your environment to a supported PostgreSQL version soon, ' \
+ 'see <a href="%{pg_version_upcoming_url}">the related epic</a> for details.') % {
+ pg_version_upcoming: upcoming_deprecation[:pg_version_minimum],
+ gl_version_upcoming: upcoming_deprecation[:gl_version],
+ gl_version_upcoming_date: upcoming_deprecation[:gl_version_date],
+ pg_version_upcoming_url: upcoming_deprecation[:url]
+ }
+ }
+ end
- [
- {
- type: 'warning',
- message: _('Note that PostgreSQL 11 will become the minimum required PostgreSQL version in GitLab 13.0 (May 2020). '\
- 'PostgreSQL 9.6 and PostgreSQL 10 will no longer be supported in GitLab 13.0. '\
- 'Please consider upgrading your PostgreSQL version (%{db_version}) soon.') % { db_version: Gitlab::Database.version.to_s }
- }
- ]
+ notices
end
end
end
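
For illustration only: what a caller sees from the reworked check, which now returns zero, one, or two warning notices instead of a single deprecation entry.

notices = Gitlab::ConfigChecker::ExternalDatabaseChecker.check

notices.each do |notice|
  # Each notice is a hash like { type: 'warning', message: '...' }.
  Gitlab::AppLogger.warn(notice[:message]) if notice[:type] == 'warning'
end
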
diff --git a/lib/gitlab/danger/changelog.rb b/lib/gitlab/danger/changelog.rb
index 85f386594be..4427c331b8e 100644
--- a/lib/gitlab/danger/changelog.rb
+++ b/lib/gitlab/danger/changelog.rb
@@ -3,11 +3,17 @@
module Gitlab
module Danger
module Changelog
- NO_CHANGELOG_LABELS = %w[backstage ci-build meta].freeze
+ NO_CHANGELOG_LABELS = [
+ 'tooling',
+ 'tooling::pipelines',
+ 'tooling::workflow',
+ 'ci-build',
+ 'meta'
+ ].freeze
NO_CHANGELOG_CATEGORIES = %i[docs none].freeze
def needed?
- categories_need_changelog? && (gitlab.mr_labels & NO_CHANGELOG_LABELS).empty?
+ categories_need_changelog? && without_no_changelog_label?
end
def found
@@ -27,6 +33,10 @@ module Gitlab
def categories_need_changelog?
(helper.changes_by_category.keys - NO_CHANGELOG_CATEGORIES).any?
end
+
+ def without_no_changelog_label?
+ (gitlab.mr_labels & NO_CHANGELOG_LABELS).empty?
+ end
end
end
end
diff --git a/lib/gitlab/danger/commit_linter.rb b/lib/gitlab/danger/commit_linter.rb
index 58db2b58560..954934518d7 100644
--- a/lib/gitlab/danger/commit_linter.rb
+++ b/lib/gitlab/danger/commit_linter.rb
@@ -8,8 +8,6 @@ module Gitlab
class CommitLinter
MIN_SUBJECT_WORDS_COUNT = 3
MAX_LINE_LENGTH = 72
- WARN_SUBJECT_LENGTH = 50
- URL_LIMIT_SUBJECT = "https://chris.beams.io/posts/git-commit/#limit-50"
MAX_CHANGED_FILES_IN_COMMIT = 3
MAX_CHANGED_LINES_IN_COMMIT = 30
SHORT_REFERENCE_REGEX = %r{([\w\-\/]+)?(#|!|&|%)\d+\b}.freeze
@@ -18,7 +16,6 @@ module Gitlab
PROBLEMS = {
subject_too_short: "The %s must contain at least #{MIN_SUBJECT_WORDS_COUNT} words",
subject_too_long: "The %s may not be longer than #{MAX_LINE_LENGTH} characters",
- subject_above_warning: "The %s length is acceptable, but please try to [reduce it to #{WARN_SUBJECT_LENGTH} characters](#{URL_LIMIT_SUBJECT})",
subject_starts_with_lowercase: "The %s must start with a capital letter",
subject_ends_with_a_period: "The %s must not end with a period",
separator_missing: "The commit subject and body must be separated by a blank line",
@@ -88,8 +85,6 @@ module Gitlab
if subject_too_long?
add_problem(:subject_too_long, subject_description)
- elsif subject_above_warning?
- add_problem(:subject_above_warning, subject_description)
end
if subject_starts_with_lowercase?
@@ -195,10 +190,6 @@ module Gitlab
line_too_long?(subject)
end
- def subject_above_warning?
- subject.length > WARN_SUBJECT_LENGTH
- end
-
def subject_starts_with_lowercase?
first_char = subject.sub(/\A\[.+\]\s/, '')[0]
first_char_downcased = first_char.downcase
diff --git a/lib/gitlab/danger/helper.rb b/lib/gitlab/danger/helper.rb
index 327418ad100..db799c094b2 100644
--- a/lib/gitlab/danger/helper.rb
+++ b/lib/gitlab/danger/helper.rb
@@ -6,6 +6,7 @@ module Gitlab
module Danger
module Helper
RELEASE_TOOLS_BOT = 'gitlab-release-tools-bot'
+ DRAFT_REGEX = /\A*#{Regexp.union(/(?i)(\[WIP\]\s*|WIP:\s*|WIP$)/, /(?i)(\[draft\]|\(draft\)|draft:|draft\s\-\s|draft$)/)}+\s*/i.freeze
# Returns a list of all files that have been added, modified or renamed.
# `git.modified_files` might contain paths that already have been renamed,
@@ -34,6 +35,18 @@ module Gitlab
.sort
end
+ # Returns a string containing changed lines as git diff
+ #
+ # For example, when changing a line in lib/gitlab/usage_data.rb it will return:
+ #
+ # [ "--- a/lib/gitlab/usage_data.rb",
+ # "+++ b/lib/gitlab/usage_data.rb",
+ # "+ # Test change",
+ # "- # Old change" ]
+ def changed_lines(changed_file)
+ git.diff_for_file(changed_file).patch.split("\n").select { |line| %r{^[+-]}.match?(line) }
+ end
+
def all_ee_changes
all_changed_files.grep(%r{\Aee/})
end
@@ -73,16 +86,25 @@ module Gitlab
# @return [Hash<String,Array<String>>]
def changes_by_category
all_changed_files.each_with_object(Hash.new { |h, k| h[k] = [] }) do |file, hash|
- hash[category_for_file(file)] << file
+ categories_for_file(file).each { |category| hash[category] << file }
end
end
- # Determines the category a file is in, e.g., `:frontend` or `:backend`
- # @return[Symbol]
- def category_for_file(file)
- _, category = CATEGORIES.find { |regexp, _| regexp.match?(file) }
+ # Determines the categories a file is in, e.g., `[:frontend]`, `[:backend]`, or `%i[frontend engineering_productivity]`
+ # using filename regex and specific change regex if given.
+ #
+ # @return Array<Symbol>
+ def categories_for_file(file)
+ _, categories = CATEGORIES.find do |key, _|
+ filename_regex, changes_regex = Array(key)
+
+ found = filename_regex.match?(file)
+ found &&= changed_lines(file).any? { |changed_line| changes_regex.match?(changed_line) } if changes_regex
- category || :unknown
+ found
+ end
+
+ Array(categories || :unknown)
end
# Returns the GFM for a category label, making its best guess if it's not
@@ -102,8 +124,10 @@ module Gitlab
}.freeze
# First-match win, so be sure to put more specific regex at the top...
CATEGORIES = {
- %r{\Adoc/} => :none, # To reinstate roulette for documentation, set to `:docs`.
- %r{\A(CONTRIBUTING|LICENSE|MAINTENANCE|PHILOSOPHY|PROCESS|README)(\.md)?\z} => :none, # To reinstate roulette for documentation, set to `:docs`.
+ [%r{usage_data}, %r{^(\+|-).*(count|distinct_count)\(.*\)(.*)$}] => [:database, :backend],
+
+ %r{\Adoc/.*(\.(md|png|gif|jpg))\z} => :docs,
+ %r{\A(CONTRIBUTING|LICENSE|MAINTENANCE|PHILOSOPHY|PROCESS|README)(\.md)?\z} => :docs,
%r{\A(ee/)?app/(assets|views)/} => :frontend,
%r{\A(ee/)?public/} => :frontend,
@@ -125,10 +149,13 @@ module Gitlab
jest\.config\.js |
package\.json |
yarn\.lock |
- config/.+\.js |
- \.gitlab/ci/frontend\.gitlab-ci\.yml
+ config/.+\.js
)\z}x => :frontend,
+ %r{(\A|/)(
+ \.gitlab/ci/frontend\.gitlab-ci\.yml
+ )\z}x => %i[frontend engineering_productivity],
+
%r{\A(ee/)?db/(?!fixtures)[^/]+} => :database,
%r{\A(ee/)?lib/gitlab/(database|background_migration|sql|github_import)(/|\.rb)} => :database,
%r{\A(app/models/project_authorization|app/services/users/refresh_authorized_projects_service)(/|\.rb)} => :database,
@@ -136,13 +163,13 @@ module Gitlab
%r{\Arubocop/cop/migration(/|\.rb)} => :database,
%r{\A(\.gitlab-ci\.yml\z|\.gitlab\/ci)} => :engineering_productivity,
+ %r{\A\.codeclimate\.yml\z} => :engineering_productivity,
%r{\A\.overcommit\.yml\.example\z} => :engineering_productivity,
- %r{\Atooling/overcommit/} => :engineering_productivity,
- %r{\A.editorconfig\z} => :engineering_productivity,
+ %r{\A\.editorconfig\z} => :engineering_productivity,
%r{Dangerfile\z} => :engineering_productivity,
%r{\A(ee/)?(danger/|lib/gitlab/danger/)} => :engineering_productivity,
%r{\A(ee/)?scripts/} => :engineering_productivity,
- %r{\A\.codeclimate\.yml\z} => :engineering_productivity,
+ %r{\Atooling/} => :engineering_productivity,
%r{\A(ee/)?app/(?!assets|views)[^/]+} => :backend,
%r{\A(ee/)?(bin|config|generator_templates|lib|rubocop)/} => :backend,
@@ -184,7 +211,7 @@ module Gitlab
end
def sanitize_mr_title(title)
- title.gsub(/^WIP: */, '').gsub(/`/, '\\\`')
+ title.gsub(DRAFT_REGEX, '').gsub(/`/, '\\\`')
end
def security_mr?
@@ -193,6 +220,18 @@ module Gitlab
gitlab_helper.mr_json['web_url'].include?('/gitlab-org/security/')
end
+ def cherry_pick_mr?
+ return false unless gitlab_helper
+
+ /cherry[\s-]*pick/i.match?(gitlab_helper.mr_json['title'])
+ end
+
+ def stable_branch?
+ return false unless gitlab_helper
+
+ /\A\d+-\d+-stable-ee/i.match?(gitlab_helper.mr_json['target_branch'])
+ end
+
def mr_has_labels?(*labels)
return false unless gitlab_helper
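
An illustrative sketch of the new multi-category behaviour, based on the examples in the comments above; a Danger context providing `helper` is assumed.

# Inside a Dangerfile, where `helper` mixes in Gitlab::Danger::Helper.
helper.categories_for_file('lib/gitlab/danger/roulette.rb')
# => [:engineering_productivity]

# A usage_data change whose diff touches a count(...) call now maps to two
# categories at once:
helper.categories_for_file('lib/gitlab/usage_data.rb')
# => [:database, :backend] (when the changed lines match the count regex)

helper.changes_by_category
# => { backend: [...], database: [...], unknown: [...] }
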
diff --git a/lib/gitlab/danger/roulette.rb b/lib/gitlab/danger/roulette.rb
index 9f7980dc20a..ed4af3f4a43 100644
--- a/lib/gitlab/danger/roulette.rb
+++ b/lib/gitlab/danger/roulette.rb
@@ -5,8 +5,12 @@ require_relative 'teammate'
module Gitlab
module Danger
module Roulette
- ROULETTE_DATA_URL = 'https://about.gitlab.com/roulette.json'
- OPTIONAL_CATEGORIES = [:qa, :test].freeze
+ ROULETTE_DATA_URL = 'https://gitlab-org.gitlab.io/gitlab-roulette/roulette.json'
+ HOURS_WHEN_PERSON_CAN_BE_PICKED = (6..14).freeze
+
+ INCLUDE_TIMEZONE_FOR_CATEGORY = {
+ database: false
+ }.freeze
Spin = Struct.new(:category, :reviewer, :maintainer, :optional_role)
@@ -14,7 +18,7 @@ module Gitlab
# for each change category that a Merge Request contains.
#
# @return [Array<Spin>]
- def spin(project, categories, branch_name)
+ def spin(project, categories, branch_name, timezone_experiment: false)
team =
begin
project_team(project)
@@ -26,7 +30,9 @@ module Gitlab
canonical_branch_name = canonical_branch_name(branch_name)
spin_per_category = categories.each_with_object({}) do |category, memo|
- memo[category] = spin_for_category(team, project, category, canonical_branch_name)
+ including_timezone = INCLUDE_TIMEZONE_FOR_CATEGORY.fetch(category, timezone_experiment)
+
+ memo[category] = spin_for_category(team, project, category, canonical_branch_name, timezone_experiment: including_timezone)
end
spin_per_category.map do |category, spin|
@@ -80,9 +86,14 @@ module Gitlab
# Known issue: If someone is rejected due to OOO, and then becomes not OOO, the
# selection will change on next spin
# @param [Array<Teammate>] people
- def spin_for_person(people, random:)
- people.shuffle(random: random)
- .find(&method(:valid_person?))
+ def spin_for_person(people, random:, timezone_experiment: false)
+ shuffled_people = people.shuffle(random: random)
+
+ if timezone_experiment
+ shuffled_people.find(&method(:valid_person_with_timezone?))
+ else
+ shuffled_people.find(&method(:valid_person?))
+ end
end
private
@@ -90,7 +101,13 @@ module Gitlab
# @param [Teammate] person
# @return [Boolean]
def valid_person?(person)
- !mr_author?(person) && person.available?
+ !mr_author?(person) && person.available
+ end
+
+ # @param [Teammate] person
+ # @return [Boolean]
+ def valid_person_with_timezone?(person)
+ valid_person?(person) && HOURS_WHEN_PERSON_CAN_BE_PICKED.cover?(person.local_hour)
end
# @param [Teammate] person
@@ -105,7 +122,7 @@ module Gitlab
end
end
- def spin_for_category(team, project, category, branch_name)
+ def spin_for_category(team, project, category, branch_name, timezone_experiment: false)
reviewers, traintainers, maintainers =
%i[reviewer traintainer maintainer].map do |role|
spin_role_for_category(team, role, project, category)
@@ -116,14 +133,10 @@ module Gitlab
# Make traintainers have triple the chance to be picked as a reviewer
random = new_random(branch_name)
- reviewer = spin_for_person(reviewers + traintainers + traintainers, random: random)
- maintainer = spin_for_person(maintainers, random: random)
+ reviewer = spin_for_person(reviewers + traintainers + traintainers, random: random, timezone_experiment: timezone_experiment)
+ maintainer = spin_for_person(maintainers, random: random, timezone_experiment: timezone_experiment)
- Spin.new(category, reviewer, maintainer).tap do |spin|
- if OPTIONAL_CATEGORIES.include?(category)
- spin.optional_role = :maintainer
- end
- end
+ Spin.new(category, reviewer, maintainer)
end
end
end
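
An illustrative Dangerfile-style call of the new keyword; `helper.project_name` and the surrounding Danger objects are assumed to be available from the context.

spins = roulette.spin(helper.project_name, helper.changes_by_category.keys,
                      gitlab.mr_json['source_branch'], timezone_experiment: true)

# Database spins ignore the experiment via INCLUDE_TIMEZONE_FOR_CATEGORY.
spins.each do |spin|
  puts "#{spin.category}: reviewer #{spin.reviewer&.username}, maintainer #{spin.maintainer&.username}"
end
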
diff --git a/lib/gitlab/danger/sidekiq_queues.rb b/lib/gitlab/danger/sidekiq_queues.rb
new file mode 100644
index 00000000000..726b6134abf
--- /dev/null
+++ b/lib/gitlab/danger/sidekiq_queues.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Danger
+ module SidekiqQueues
+ def changed_queue_files
+ @changed_queue_files ||= git.modified_files.grep(%r{\A(ee/)?app/workers/all_queues\.yml})
+ end
+
+ def added_queue_names
+ @added_queue_names ||= new_queues.keys - old_queues.keys
+ end
+
+ def changed_queue_names
+ @changed_queue_names ||=
+ (new_queues.values_at(*old_queues.keys) - old_queues.values)
+ .compact.map { |queue| queue[:name] }
+ end
+
+ private
+
+ def old_queues
+ @old_queues ||= queues_for(gitlab.base_commit)
+ end
+
+ def new_queues
+ @new_queues ||= queues_for(gitlab.head_commit)
+ end
+
+ def queues_for(branch)
+ changed_queue_files
+ .flat_map { |file| YAML.safe_load(`git show #{branch}:#{file}`, permitted_classes: [Symbol]) }
+ .to_h { |queue| [queue[:name], queue] }
+ end
+ end
+ end
+end
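
A sketch assuming the module is mixed into a Danger context that provides `git`, `gitlab`, and `warn`; the messages are illustrative.

if changed_queue_files.any?
  warn "New Sidekiq queues: #{added_queue_names.join(', ')}" if added_queue_names.any?
  warn "Changed Sidekiq queues: #{changed_queue_names.join(', ')}" if changed_queue_names.any?
end
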
diff --git a/lib/gitlab/danger/teammate.rb b/lib/gitlab/danger/teammate.rb
index 651b002d2bf..f7da66e77cd 100644
--- a/lib/gitlab/danger/teammate.rb
+++ b/lib/gitlab/danger/teammate.rb
@@ -1,28 +1,19 @@
# frozen_string_literal: true
-require 'cgi'
-require 'set'
-
module Gitlab
module Danger
class Teammate
- attr_reader :name, :username, :role, :projects
-
- AT_CAPACITY_EMOJI = Set.new(%w[red_circle]).freeze
- OOO_EMOJI = Set.new(%w[
- palm_tree
- beach beach_umbrella beach_with_umbrella
- ]).freeze
+ attr_reader :username, :name, :role, :projects, :available, :tz_offset_hours
+ # The options data are produced by https://gitlab.com/gitlab-org/gitlab-roulette/-/blob/master/lib/team_member.rb
def initialize(options = {})
@username = options['username']
- @name = options['name'] || @username
+ @name = options['name']
+ @markdown_name = options['markdown_name']
@role = options['role']
@projects = options['projects']
- end
-
- def markdown_name
- "[#{name}](https://gitlab.com/#{username}) (`@#{username}`)"
+ @available = options['available']
+ @tz_offset_hours = options['tz_offset_hours']
end
def in_project?(name)
@@ -43,40 +34,47 @@ module Gitlab
has_capability?(project, category, :maintainer, labels)
end
- def status
- return @status if defined?(@status)
+ def markdown_name(timezone_experiment: false, author: nil)
+ return @markdown_name unless timezone_experiment
- @status ||=
- begin
- Gitlab::Danger::RequestHelper.http_get_json(status_api_endpoint)
- rescue Gitlab::Danger::RequestHelper::HTTPError, JSON::ParserError
- nil # better no status than a crashing Danger
- end
+ "#{@markdown_name} (#{utc_offset_text(author)})"
end
- # @return [Boolean]
- def available?
- !out_of_office? && has_capacity?
+ def local_hour
+ (Time.now.utc + tz_offset_hours * 3600).hour
end
- private
+ protected
- def status_api_endpoint
- "https://gitlab.com/api/v4/users/#{CGI.escape(username)}/status"
- end
+ def floored_offset_hours
+ floored_offset = tz_offset_hours.floor(0)
- def status_emoji
- status&.dig("emoji")
+ floored_offset == tz_offset_hours ? floored_offset : tz_offset_hours
end
- # @return [Boolean]
- def out_of_office?
- status&.dig("message")&.match?(/OOO/i) || OOO_EMOJI.include?(status_emoji)
+ private
+
+ def utc_offset_text(author = nil)
+ offset_text =
+ if floored_offset_hours >= 0
+ "UTC+#{floored_offset_hours}"
+ else
+ "UTC#{floored_offset_hours}"
+ end
+
+ return offset_text unless author
+
+ "#{offset_text}, #{offset_diff_compared_to_author(author)}"
end
- # @return [Boolean]
- def has_capacity?
- !AT_CAPACITY_EMOJI.include?(status_emoji)
+ def offset_diff_compared_to_author(author)
+ diff = floored_offset_hours - author.floored_offset_hours
+ return "same timezone as `@#{author.username}`" if diff.zero?
+
+ ahead_or_behind = diff < 0 ? 'behind' : 'ahead'
+ pluralized_hours = pluralize(diff.abs, 'hour', 'hours')
+
+ "#{pluralized_hours} #{ahead_or_behind} `@#{author.username}`"
end
def has_capability?(project, category, kind, labels)
@@ -98,6 +96,12 @@ module Gitlab
def capabilities(project)
Array(projects.fetch(project, []))
end
+
+ def pluralize(count, singular, plural)
+ word = count == 1 || count.to_s =~ /^1(\.0+)?$/ ? singular : plural
+
+ "#{count || 0} #{word}"
+ end
end
end
end
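
A small sketch of the new availability and timezone fields; the hash mirrors the roulette payload shape referenced above, with invented values.

reviewer = Gitlab::Danger::Teammate.new(
  'username' => 'reviewer',
  'name' => 'Example Reviewer',
  'markdown_name' => '[Example Reviewer](https://gitlab.com/reviewer) (`@reviewer`)',
  'role' => 'Backend Engineer',
  'projects' => { 'gitlab' => 'reviewer backend' },
  'available' => true,
  'tz_offset_hours' => -5.0
)

reviewer.available  # => true (no status API call anymore)
reviewer.local_hour # current hour at UTC-5
reviewer.markdown_name(timezone_experiment: true)
# => "[Example Reviewer](https://gitlab.com/reviewer) (`@reviewer`) (UTC-5)"
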
diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb
index 02005be1f6a..2bfb6c32886 100644
--- a/lib/gitlab/database.rb
+++ b/lib/gitlab/database.rb
@@ -4,6 +4,20 @@ module Gitlab
module Database
include Gitlab::Metrics::Methods
+ # Minimum PostgreSQL version requirement per documentation:
+ # https://docs.gitlab.com/ee/install/requirements.html#postgresql-requirements
+ MINIMUM_POSTGRES_VERSION = 11
+
+ # Upcoming PostgreSQL version requirements
+ # Allows a soft warning about an upcoming minimum version requirement
+ # so administrators can prepare to upgrade
+ UPCOMING_POSTGRES_VERSION_DETAILS = {
+ gl_version: '13.6.0',
+ gl_version_date: 'November 2020',
+ pg_version_minimum: 12,
+ url: 'https://gitlab.com/groups/gitlab-org/-/epics/2374'
+ }.freeze
+
# https://www.postgresql.org/docs/9.2/static/datatype-numeric.html
MAX_INT_VALUE = 2147483647
@@ -22,6 +36,16 @@ module Gitlab
MIN_SCHEMA_VERSION = 20190506135400
MIN_SCHEMA_GITLAB_VERSION = '11.11.0'
+ # Schema we store dynamically managed partitions in (e.g. for time partitioning)
+ DYNAMIC_PARTITIONS_SCHEMA = :gitlab_partitions_dynamic
+
+ # Schema we store static partitions in (e.g. for hash partitioning)
+ STATIC_PARTITIONS_SCHEMA = :gitlab_partitions_static
+
+ # This is an extensive list of postgres schemas owned by GitLab
+ # It does not include the default public schema
+ EXTRA_SCHEMAS = [DYNAMIC_PARTITIONS_SCHEMA, STATIC_PARTITIONS_SCHEMA].freeze
+
define_histogram :gitlab_database_transaction_seconds do
docstring "Time spent in database transactions, in seconds"
end
@@ -87,16 +111,39 @@ module Gitlab
version.to_f < 10
end
- def self.replication_slots_supported?
- version.to_f >= 9.4
+ def self.postgresql_minimum_supported_version?
+ version.to_f >= MINIMUM_POSTGRES_VERSION
end
- def self.postgresql_minimum_supported_version?
- version.to_f >= 9.6
+ def self.postgresql_upcoming_deprecation?
+ version.to_f < UPCOMING_POSTGRES_VERSION_DETAILS[:pg_version_minimum]
end
- def self.upsert_supported?
- version.to_f >= 9.5
+ def self.check_postgres_version_and_print_warning
+ return if Gitlab::Database.postgresql_minimum_supported_version?
+ return if Gitlab::Runtime.rails_runner?
+
+ Kernel.warn ERB.new(Rainbow.new.wrap(<<~EOS).red).result
+
+ ██  ██  █████  ██████  ███  ██ ██ ███  ██  ██████ 
+ ██  ██ ██   ██ ██   ██ ████  ██ ██ ████  ██ ██      
+ ██  █  ██ ███████ ██████  ██ ██  ██ ██ ██ ██  ██ ██  ███ 
+ ██ ███ ██ ██   ██ ██   ██ ██  ██ ██ ██ ██  ██ ██ ██  ██ 
+  ███ ███  ██  ██ ██  ██ ██   ████ ██ ██   ████  ██████  
+
+ ******************************************************************************
+ You are using PostgreSQL <%= Gitlab::Database.version %>, but PostgreSQL >= <%= Gitlab::Database::MINIMUM_POSTGRES_VERSION %>
+ is required for this version of GitLab.
+ <% if Rails.env.development? || Rails.env.test? %>
+ If using gitlab-development-kit, please find the relevant steps here:
+ https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/master/doc/howto/postgresql.md#upgrade-postgresql
+ <% end %>
+ Please upgrade your environment to a supported PostgreSQL version, see
+ https://docs.gitlab.com/ee/install/requirements.html#database for details.
+ ******************************************************************************
+ EOS
+ rescue ActiveRecord::ActiveRecordError, PG::Error
+ # ignore - happens when Rake tasks have yet to create the database, e.g. for testing
end
# map some of the function names that changed between PostgreSQL 9 and 10
@@ -182,9 +229,7 @@ module Gitlab
VALUES #{tuples.map { |tuple| "(#{tuple.join(', ')})" }.join(', ')}
EOF
- if upsert_supported? && on_conflict == :do_nothing
- sql = "#{sql} ON CONFLICT DO NOTHING"
- end
+ sql = "#{sql} ON CONFLICT DO NOTHING" if on_conflict == :do_nothing
sql = "#{sql} RETURNING id" if return_ids
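
Illustrative only: how the new version predicates and the startup warning behave; output depends on the connected PostgreSQL version.

Gitlab::Database.version                                # e.g. "11.8"
Gitlab::Database.postgresql_minimum_supported_version?  # => true when version >= 11
Gitlab::Database.postgresql_upcoming_deprecation?       # => true when version < 12

# Prints the red banner to stderr when the running PostgreSQL is below
# MINIMUM_POSTGRES_VERSION; a no-op under `rails runner` or on supported versions.
Gitlab::Database.check_postgres_version_and_print_warning
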
diff --git a/lib/gitlab/database/background_migration_job.rb b/lib/gitlab/database/background_migration_job.rb
new file mode 100644
index 00000000000..445735b232a
--- /dev/null
+++ b/lib/gitlab/database/background_migration_job.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ class BackgroundMigrationJob < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord
+ self.table_name = :background_migration_jobs
+
+ scope :for_migration_class, -> (class_name) { where(class_name: normalize_class_name(class_name)) }
+ scope :for_migration_execution, -> (class_name, arguments) do
+ for_migration_class(class_name).where('arguments = ?', arguments.to_json)
+ end
+
+ scope :for_partitioning_migration, -> (class_name, table_name) do
+ for_migration_class(class_name).where('arguments ->> 2 = ?', table_name)
+ end
+
+ enum status: {
+ pending: 0,
+ succeeded: 1
+ }
+
+ def self.mark_all_as_succeeded(class_name, arguments)
+ self.pending.for_migration_execution(class_name, arguments)
+ .update_all("status = #{statuses[:succeeded]}, updated_at = NOW()")
+ end
+
+ def self.normalize_class_name(class_name)
+ return class_name unless class_name.present? && class_name.start_with?('::')
+
+ class_name[2..]
+ end
+
+ def class_name=(value)
+ write_attribute(:class_name, self.class.normalize_class_name(value))
+ end
+ end
+ end
+end
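
A sketch of the intended tracking flow; the migration class and arguments are hypothetical, and the rows are created by the track_jobs option added further down in this commit.

# Hypothetical background migration that cleans up its own tracking rows.
class Gitlab::BackgroundMigration::ExampleMigration
  def perform(start_id, end_id)
    # ... do the work for the id range ...

    Gitlab::Database::BackgroundMigrationJob
      .mark_all_as_succeeded('ExampleMigration', [start_id, end_id])
  end
end

# Remaining work can then be inspected or retried from a console:
Gitlab::Database::BackgroundMigrationJob
  .for_migration_class('ExampleMigration').pending
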
diff --git a/lib/gitlab/database/dynamic_model_helpers.rb b/lib/gitlab/database/dynamic_model_helpers.rb
new file mode 100644
index 00000000000..892f8291780
--- /dev/null
+++ b/lib/gitlab/database/dynamic_model_helpers.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module DynamicModelHelpers
+ def define_batchable_model(table_name)
+ Class.new(ActiveRecord::Base) do
+ include EachBatch
+
+ self.table_name = table_name
+ self.inheritance_column = :_type_disabled
+ end
+ end
+ end
+ end
+end
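
A minimal sketch of using define_batchable_model inside a migration; the table, batch size, and UPDATE are illustrative.

class ExampleDataMigration < ActiveRecord::Migration[6.0]
  include Gitlab::Database::DynamicModelHelpers

  def up
    # Anonymous, migration-local model: no dependency on app/models classes.
    define_batchable_model('routes').each_batch(of: 1_000) do |batch|
      batch.update_all("name = TRIM(name)")
    end
  end

  def down
    # no-op
  end
end
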
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index fd09c31e994..006a24da8fe 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -3,10 +3,10 @@
module Gitlab
module Database
module MigrationHelpers
+ include Migrations::BackgroundMigrationHelpers
+
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
MAX_IDENTIFIER_NAME_LENGTH = 63
- BACKGROUND_MIGRATION_BATCH_SIZE = 1000 # Number of rows to process per job
- BACKGROUND_MIGRATION_JOB_BUFFER_SIZE = 1000 # Number of jobs to bulk queue at a time
PERMITTED_TIMESTAMP_COLUMNS = %i[created_at updated_at deleted_at].to_set.freeze
DEFAULT_TIMESTAMP_COLUMNS = %i[created_at updated_at].freeze
@@ -136,6 +136,10 @@ module Gitlab
'in the body of your migration class'
end
+ index_name = index_name[:name] if index_name.is_a?(Hash)
+
+ raise 'remove_concurrent_index_by_name must get an index name as the second argument' if index_name.blank?
+
options = options.merge({ algorithm: :concurrently })
unless index_exists_by_name?(table_name, index_name)
@@ -477,7 +481,7 @@ module Gitlab
# type is used.
# batch_column_name - option is for tables without primary key, in this
# case another unique integer column can be used. Example: :user_id
- def rename_column_concurrently(table, old, new, type: nil, batch_column_name: :id)
+ def rename_column_concurrently(table, old, new, type: nil, type_cast_function: nil, batch_column_name: :id)
unless column_exists?(table, batch_column_name)
raise "Column #{batch_column_name} does not exist on #{table}"
end
@@ -488,7 +492,7 @@ module Gitlab
check_trigger_permissions!(table)
- create_column_from(table, old, new, type: type, batch_column_name: batch_column_name)
+ create_column_from(table, old, new, type: type, batch_column_name: batch_column_name, type_cast_function: type_cast_function)
install_rename_triggers(table, old, new)
end
@@ -536,10 +540,10 @@ module Gitlab
# table - The table containing the column.
# column - The name of the column to change.
# new_type - The new column type.
- def change_column_type_concurrently(table, column, new_type)
+ def change_column_type_concurrently(table, column, new_type, type_cast_function: nil)
temp_column = "#{column}_for_type_change"
- rename_column_concurrently(table, column, temp_column, type: new_type)
+ rename_column_concurrently(table, column, temp_column, type: new_type, type_cast_function: type_cast_function)
end
# Performs cleanup of a concurrent type change.
@@ -786,10 +790,6 @@ module Gitlab
end
end
- def perform_background_migration_inline?
- Rails.env.test? || Rails.env.development?
- end
-
# Performs a concurrent column rename when using PostgreSQL.
def install_rename_triggers_for_postgresql(trigger, table, old, new)
execute <<-EOF.strip_heredoc
@@ -973,106 +973,6 @@ into similar problems in the future (e.g. when new tables are created).
end
end
- # Bulk queues background migration jobs for an entire table, batched by ID range.
- # "Bulk" meaning many jobs will be pushed at a time for efficiency.
- # If you need a delay interval per job, then use `queue_background_migration_jobs_by_range_at_intervals`.
- #
- # model_class - The table being iterated over
- # job_class_name - The background migration job class as a string
- # batch_size - The maximum number of rows per job
- #
- # Example:
- #
- # class Route < ActiveRecord::Base
- # include EachBatch
- # self.table_name = 'routes'
- # end
- #
- # bulk_queue_background_migration_jobs_by_range(Route, 'ProcessRoutes')
- #
- # Where the model_class includes EachBatch, and the background migration exists:
- #
- # class Gitlab::BackgroundMigration::ProcessRoutes
- # def perform(start_id, end_id)
- # # do something
- # end
- # end
- def bulk_queue_background_migration_jobs_by_range(model_class, job_class_name, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE)
- raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
-
- jobs = []
- table_name = model_class.quoted_table_name
-
- model_class.each_batch(of: batch_size) do |relation|
- start_id, end_id = relation.pluck("MIN(#{table_name}.id)", "MAX(#{table_name}.id)").first
-
- if jobs.length >= BACKGROUND_MIGRATION_JOB_BUFFER_SIZE
- # Note: This code path generally only helps with many millions of rows
- # We push multiple jobs at a time to reduce the time spent in
- # Sidekiq/Redis operations. We're using this buffer based approach so we
- # don't need to run additional queries for every range.
- bulk_migrate_async(jobs)
- jobs.clear
- end
-
- jobs << [job_class_name, [start_id, end_id]]
- end
-
- bulk_migrate_async(jobs) unless jobs.empty?
- end
-
- # Queues background migration jobs for an entire table, batched by ID range.
- # Each job is scheduled with a `delay_interval` in between.
- # If you use a small interval, then some jobs may run at the same time.
- #
- # model_class - The table or relation being iterated over
- # job_class_name - The background migration job class as a string
- # delay_interval - The duration between each job's scheduled time (must respond to `to_f`)
- # batch_size - The maximum number of rows per job
- # other_arguments - Other arguments to send to the job
- #
- # *Returns the final migration delay*
- #
- # Example:
- #
- # class Route < ActiveRecord::Base
- # include EachBatch
- # self.table_name = 'routes'
- # end
- #
- # queue_background_migration_jobs_by_range_at_intervals(Route, 'ProcessRoutes', 1.minute)
- #
- # Where the model_class includes EachBatch, and the background migration exists:
- #
- # class Gitlab::BackgroundMigration::ProcessRoutes
- # def perform(start_id, end_id)
- # # do something
- # end
- # end
- def queue_background_migration_jobs_by_range_at_intervals(model_class, job_class_name, delay_interval, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE, other_job_arguments: [], initial_delay: 0)
- raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
-
- # To not overload the worker too much we enforce a minimum interval both
- # when scheduling and performing jobs.
- if delay_interval < BackgroundMigrationWorker.minimum_interval
- delay_interval = BackgroundMigrationWorker.minimum_interval
- end
-
- final_delay = 0
-
- model_class.each_batch(of: batch_size) do |relation, index|
- start_id, end_id = relation.pluck(Arel.sql('MIN(id), MAX(id)')).first
-
- # `BackgroundMigrationWorker.bulk_perform_in` schedules all jobs for
- # the same time, which is not helpful in most cases where we wish to
- # spread the work over time.
- final_delay = initial_delay + delay_interval * index
- migrate_in(final_delay, job_class_name, [start_id, end_id] + other_job_arguments)
- end
-
- final_delay
- end
-
# Fetches indexes on a column by name for postgres.
#
# This will include indexes using an expression on the column, for example:
@@ -1131,30 +1031,6 @@ into similar problems in the future (e.g. when new tables are created).
execute(sql)
end
- def migrate_async(*args)
- with_migration_context do
- BackgroundMigrationWorker.perform_async(*args)
- end
- end
-
- def migrate_in(*args)
- with_migration_context do
- BackgroundMigrationWorker.perform_in(*args)
- end
- end
-
- def bulk_migrate_in(*args)
- with_migration_context do
- BackgroundMigrationWorker.bulk_perform_in(*args)
- end
- end
-
- def bulk_migrate_async(*args)
- with_migration_context do
- BackgroundMigrationWorker.bulk_perform_async(*args)
- end
- end
-
# Returns the name for a check constraint
#
# type:
@@ -1396,7 +1272,7 @@ into similar problems in the future (e.g. when new tables are created).
"ON DELETE #{on_delete.upcase}"
end
- def create_column_from(table, old, new, type: nil, batch_column_name: :id)
+ def create_column_from(table, old, new, type: nil, batch_column_name: :id, type_cast_function: nil)
old_col = column_for(table, old)
new_type = type || old_col.type
@@ -1410,7 +1286,13 @@ into similar problems in the future (e.g. when new tables are created).
# necessary since we copy over old values further down.
change_column_default(table, new, old_col.default) unless old_col.default.nil?
- update_column_in_batches(table, new, Arel::Table.new(table)[old], batch_column_name: batch_column_name)
+ old_value = Arel::Table.new(table)[old]
+
+ if type_cast_function.present?
+ old_value = Arel::Nodes::NamedFunction.new(type_cast_function, [old_value])
+ end
+
+ update_column_in_batches(table, new, old_value, batch_column_name: batch_column_name)
add_not_null_constraint(table, new) unless old_col.null
@@ -1437,10 +1319,6 @@ into similar problems in the future (e.g. when new tables are created).
your migration class
ERROR
end
-
- def with_migration_context(&block)
- Gitlab::ApplicationContext.with_context(caller_id: self.class.to_s, &block)
- end
end
end
end
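
A sketch of the new type_cast_function: option when changing a column type concurrently; the table, column, and cast function are illustrative.

class ChangeExamplePayloadToJsonb < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  disable_ddl_transaction!

  def up
    # Backfills the temporary column by wrapping the old value in
    # to_jsonb(payload) before copying, then installs rename triggers.
    change_column_type_concurrently :example_events, :payload, :jsonb,
      type_cast_function: 'to_jsonb'
  end

  def down
    cleanup_concurrent_column_type_change :example_events, :payload
  end
end
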
diff --git a/lib/gitlab/database/migrations/background_migration_helpers.rb b/lib/gitlab/database/migrations/background_migration_helpers.rb
new file mode 100644
index 00000000000..a6cc03aa9eb
--- /dev/null
+++ b/lib/gitlab/database/migrations/background_migration_helpers.rb
@@ -0,0 +1,157 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Migrations
+ module BackgroundMigrationHelpers
+ BACKGROUND_MIGRATION_BATCH_SIZE = 1_000 # Number of rows to process per job
+ BACKGROUND_MIGRATION_JOB_BUFFER_SIZE = 1_000 # Number of jobs to bulk queue at a time
+
+ # Bulk queues background migration jobs for an entire table, batched by ID range.
+ # "Bulk" meaning many jobs will be pushed at a time for efficiency.
+ # If you need a delay interval per job, then use `queue_background_migration_jobs_by_range_at_intervals`.
+ #
+ # model_class - The table being iterated over
+ # job_class_name - The background migration job class as a string
+ # batch_size - The maximum number of rows per job
+ #
+ # Example:
+ #
+ # class Route < ActiveRecord::Base
+ # include EachBatch
+ # self.table_name = 'routes'
+ # end
+ #
+ # bulk_queue_background_migration_jobs_by_range(Route, 'ProcessRoutes')
+ #
+ # Where the model_class includes EachBatch, and the background migration exists:
+ #
+ # class Gitlab::BackgroundMigration::ProcessRoutes
+ # def perform(start_id, end_id)
+ # # do something
+ # end
+ # end
+ def bulk_queue_background_migration_jobs_by_range(model_class, job_class_name, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE)
+ raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
+
+ jobs = []
+ table_name = model_class.quoted_table_name
+
+ model_class.each_batch(of: batch_size) do |relation|
+ start_id, end_id = relation.pluck("MIN(#{table_name}.id)", "MAX(#{table_name}.id)").first
+
+ if jobs.length >= BACKGROUND_MIGRATION_JOB_BUFFER_SIZE
+ # Note: This code path generally only helps with many millions of rows
+ # We push multiple jobs at a time to reduce the time spent in
+ # Sidekiq/Redis operations. We're using this buffer based approach so we
+ # don't need to run additional queries for every range.
+ bulk_migrate_async(jobs)
+ jobs.clear
+ end
+
+ jobs << [job_class_name, [start_id, end_id]]
+ end
+
+ bulk_migrate_async(jobs) unless jobs.empty?
+ end
+
+ # Queues background migration jobs for an entire table, batched by ID range.
+ # Each job is scheduled with a `delay_interval` in between.
+ # If you use a small interval, then some jobs may run at the same time.
+ #
+ # model_class - The table or relation being iterated over
+ # job_class_name - The background migration job class as a string
+ # delay_interval - The duration between each job's scheduled time (must respond to `to_f`)
+ # batch_size - The maximum number of rows per job
+ # other_arguments - Other arguments to send to the job
+ # track_jobs - When this flag is set, creates a record in the background_migration_jobs table for each job that
+ # is scheduled to be run. These records can be used to trace execution of the background job, but there is no
+ # built-in support to manage that automatically at this time. You should only set this flag if you are aware of
+ # how it works, and intend to manually clean up the database records in your background job.
+ #
+ # *Returns the final migration delay*
+ #
+ # Example:
+ #
+ # class Route < ActiveRecord::Base
+ # include EachBatch
+ # self.table_name = 'routes'
+ # end
+ #
+ # queue_background_migration_jobs_by_range_at_intervals(Route, 'ProcessRoutes', 1.minute)
+ #
+ # Where the model_class includes EachBatch, and the background migration exists:
+ #
+ # class Gitlab::BackgroundMigration::ProcessRoutes
+ # def perform(start_id, end_id)
+ # # do something
+ # end
+ # end
+ def queue_background_migration_jobs_by_range_at_intervals(model_class, job_class_name, delay_interval, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE, other_job_arguments: [], initial_delay: 0, track_jobs: false)
+ raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
+
+ # To not overload the worker too much we enforce a minimum interval both
+ # when scheduling and performing jobs.
+ if delay_interval < BackgroundMigrationWorker.minimum_interval
+ delay_interval = BackgroundMigrationWorker.minimum_interval
+ end
+
+ final_delay = 0
+
+ model_class.each_batch(of: batch_size) do |relation, index|
+ start_id, end_id = relation.pluck(Arel.sql('MIN(id), MAX(id)')).first
+
+ # `BackgroundMigrationWorker.bulk_perform_in` schedules all jobs for
+ # the same time, which is not helpful in most cases where we wish to
+ # spread the work over time.
+ final_delay = initial_delay + delay_interval * index
+ full_job_arguments = [start_id, end_id] + other_job_arguments
+
+ track_in_database(job_class_name, full_job_arguments) if track_jobs
+ migrate_in(final_delay, job_class_name, full_job_arguments)
+ end
+
+ final_delay
+ end
+
+ def perform_background_migration_inline?
+ Rails.env.test? || Rails.env.development?
+ end
+
+ def migrate_async(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.perform_async(*args)
+ end
+ end
+
+ def migrate_in(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.perform_in(*args)
+ end
+ end
+
+ def bulk_migrate_in(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.bulk_perform_in(*args)
+ end
+ end
+
+ def bulk_migrate_async(*args)
+ with_migration_context do
+ BackgroundMigrationWorker.bulk_perform_async(*args)
+ end
+ end
+
+ def with_migration_context(&block)
+ Gitlab::ApplicationContext.with_context(caller_id: self.class.to_s, &block)
+ end
+
+ private
+
+ def track_in_database(class_name, arguments)
+ Gitlab::Database::BackgroundMigrationJob.create!(class_name: class_name, arguments: arguments)
+ end
+ end
+ end
+ end
+end
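
A sketch of scheduling tracked jobs with the new track_jobs: flag, following the Route example in the comments above; the job class name, interval, and batch size are illustrative.

class ScheduleExampleMigration < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  MIGRATION = 'ExampleMigration'

  class Route < ActiveRecord::Base
    include EachBatch
    self.table_name = 'routes'
  end

  def up
    # Each queued job also gets a pending row in background_migration_jobs.
    queue_background_migration_jobs_by_range_at_intervals(
      Route, MIGRATION, 2.minutes, batch_size: 10_000, track_jobs: true
    )
  end

  def down
    # no-op
  end
end
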
diff --git a/lib/gitlab/database/partitioning/monthly_strategy.rb b/lib/gitlab/database/partitioning/monthly_strategy.rb
new file mode 100644
index 00000000000..ecc05d9654a
--- /dev/null
+++ b/lib/gitlab/database/partitioning/monthly_strategy.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Partitioning
+ class MonthlyStrategy
+ attr_reader :model, :partitioning_key
+
+ # We create this many partitions in the future
+ HEADROOM = 6.months
+
+ delegate :table_name, to: :model
+
+ def initialize(model, partitioning_key)
+ @model = model
+ @partitioning_key = partitioning_key
+ end
+
+ def current_partitions
+ result = connection.select_all(<<~SQL)
+ select
+ pg_class.relname,
+ parent_class.relname as base_table,
+ pg_get_expr(pg_class.relpartbound, inhrelid) as condition
+ from pg_class
+ inner join pg_inherits i on pg_class.oid = inhrelid
+ inner join pg_class parent_class on parent_class.oid = inhparent
+ inner join pg_namespace ON pg_namespace.oid = pg_class.relnamespace
+ where pg_namespace.nspname = #{connection.quote(Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)}
+ and parent_class.relname = #{connection.quote(table_name)}
+ and pg_class.relispartition
+ order by pg_class.relname
+ SQL
+
+ result.map do |record|
+ TimePartition.from_sql(table_name, record['relname'], record['condition'])
+ end
+ end
+
+ # Check the currently existing partitions and determine which ones are missing
+ def missing_partitions
+ desired_partitions - current_partitions
+ end
+
+ private
+
+ def desired_partitions
+ [].tap do |parts|
+ min_date, max_date = relevant_range
+
+ parts << partition_for(upper_bound: min_date)
+
+ while min_date < max_date
+ next_date = min_date.next_month
+
+ parts << partition_for(lower_bound: min_date, upper_bound: next_date)
+
+ min_date = next_date
+ end
+ end
+ end
+
+ # This determines the relevant time range for which we expect to have data
+ # (and therefore need to create partitions for).
+ #
+ # Note: We typically expect the first partition to be half-unbounded, i.e.
+ # to start from MINVALUE to a specific date `x`. The range returned
+ # does not include the range of the first, half-unbounded partition.
+ def relevant_range
+ if first_partition = current_partitions.min
+ # Case 1: First partition starts with MINVALUE, i.e. from is nil -> start with first real partition
+ # Case 2: Rather unexpectedly, first partition does not start with MINVALUE, i.e. from is not nil
+ # In this case, use first partition beginning as a start
+ min_date = first_partition.from || first_partition.to
+ end
+
+ # In case we don't have a partition yet
+ min_date ||= Date.today
+ min_date = min_date.beginning_of_month
+
+ max_date = Date.today.end_of_month + HEADROOM
+
+ [min_date, max_date]
+ end
+
+ def partition_for(lower_bound: nil, upper_bound:)
+ TimePartition.new(table_name, lower_bound, upper_bound)
+ end
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/partitioning/partition_creator.rb b/lib/gitlab/database/partitioning/partition_creator.rb
new file mode 100644
index 00000000000..348dd1ba660
--- /dev/null
+++ b/lib/gitlab/database/partitioning/partition_creator.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Partitioning
+ class PartitionCreator
+ def self.register(model)
+ raise ArgumentError, "Only models with a #partitioning_strategy can be registered." unless model.respond_to?(:partitioning_strategy)
+
+ models << model
+ end
+
+ def self.models
+ @models ||= Set.new
+ end
+
+ LEASE_TIMEOUT = 1.minute
+ LEASE_KEY = 'database_partition_creation_%s'
+
+ attr_reader :models
+
+ def initialize(models = self.class.models)
+ @models = models
+ end
+
+ def create_partitions
+ return unless Feature.enabled?(:postgres_dynamic_partition_creation, default_enabled: true)
+
+ models.each do |model|
+ # Double-check before obtaining the lease:
+ # in the common case there are no missing partitions, so we skip the table without taking the lease
+ next if missing_partitions(model).empty?
+
+ only_with_exclusive_lease(model) do
+ partitions_to_create = missing_partitions(model)
+
+ next if partitions_to_create.empty?
+
+ create(model, partitions_to_create)
+ end
+ rescue => e
+ Gitlab::AppLogger.error("Failed to create partition(s) for #{model.table_name}: #{e.class}: #{e.message}")
+ end
+ end
+
+ private
+
+ def missing_partitions(model)
+ return [] unless connection.table_exists?(model.table_name)
+
+ model.partitioning_strategy.missing_partitions
+ end
+
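+ # Take a short, per-table exclusive lease so concurrent processes don't try to
+ # create partitions for the same table at the same time.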
+ def only_with_exclusive_lease(model)
+ lease = Gitlab::ExclusiveLease.new(LEASE_KEY % model.table_name, timeout: LEASE_TIMEOUT)
+
+ yield if lease.try_obtain
+ ensure
+ lease&.cancel
+ end
+
+ def create(model, partitions)
+ connection.transaction do
+ with_lock_retries do
+ partitions.each do |partition|
+ connection.execute partition.to_sql
+
+ Gitlab::AppLogger.info("Created partition #{partition.partition_name} for table #{partition.table}")
+ end
+ end
+ end
+ end
+
+ def with_lock_retries(&block)
+ Gitlab::Database::WithLockRetries.new({
+ klass: self.class,
+ logger: Gitlab::AppLogger
+ }).run(&block)
+ end
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/partitioning/time_partition.rb b/lib/gitlab/database/partitioning/time_partition.rb
new file mode 100644
index 00000000000..7dca60c0854
--- /dev/null
+++ b/lib/gitlab/database/partitioning/time_partition.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module Partitioning
+ class TimePartition
+ include Comparable
+
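+ # Builds a TimePartition from a partition bound definition as returned by
+ # pg_get_expr, e.g. "FOR VALUES FROM ('2020-05-01') TO ('2020-06-01')".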
+ def self.from_sql(table, partition_name, definition)
+ matches = definition.match(/FOR VALUES FROM \('?(?<from>.+)'?\) TO \('?(?<to>.+)'?\)/)
+
+ raise ArgumentError, "Unknown partition definition: #{definition}" unless matches
+
+ raise NotImplementedError, "Open-end time partitions with MAXVALUE are not supported yet" if matches[:to] == 'MAXVALUE'
+
+ from = matches[:from] == 'MINVALUE' ? nil : matches[:from]
+ to = matches[:to]
+
+ new(table, from, to, partition_name: partition_name)
+ end
+
+ attr_reader :table, :from, :to
+
+ def initialize(table, from, to, partition_name: nil)
+ @table = table.to_s
+ @from = date_or_nil(from)
+ @to = date_or_nil(to)
+ @partition_name = partition_name
+ end
+
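+ # e.g. "audit_events_202005" for a partition starting on 2020-05-01, or
+ # "audit_events_000000" for the half-unbounded first partition.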
+ def partition_name
+ return @partition_name if @partition_name
+
+ suffix = from&.strftime('%Y%m') || '000000'
+
+ "#{table}_#{suffix}"
+ end
+
+ def to_sql
+ from_sql = from ? conn.quote(from.strftime('%Y-%m-%d')) : 'MINVALUE'
+ to_sql = conn.quote(to.strftime('%Y-%m-%d'))
+
+ <<~SQL
+ CREATE TABLE IF NOT EXISTS #{fully_qualified_partition}
+ PARTITION OF #{conn.quote_table_name(table)}
+ FOR VALUES FROM (#{from_sql}) TO (#{to_sql})
+ SQL
+ end
+
+ def ==(other)
+ table == other.table && partition_name == other.partition_name && from == other.from && to == other.to
+ end
+ alias_method :eql?, :==
+
+ def hash
+ [table, partition_name, from, to].hash
+ end
+
+ def <=>(other)
+ return if table != other.table
+
+ partition_name <=> other.partition_name
+ end
+
+ private
+
+ def date_or_nil(obj)
+ return unless obj
+ return obj if obj.is_a?(Date)
+
+ Date.parse(obj)
+ end
+
+ def fully_qualified_partition
+ "%s.%s" % [conn.quote_table_name(Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA), conn.quote_table_name(partition_name)]
+ end
+
+ def conn
+ @conn ||= ActiveRecord::Base.connection
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb b/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb
new file mode 100644
index 00000000000..f9ad1e60776
--- /dev/null
+++ b/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module PartitioningMigrationHelpers
+ # Class that will generically copy data from a given table into its corresponding partitioned table
+ class BackfillPartitionedTable
+ include ::Gitlab::Database::DynamicModelHelpers
+
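+ # Each job's range is copied in sub-batches of SUB_BATCH_SIZE rows, sleeping
+ # PAUSE_SECONDS between sub-batches to reduce pressure on the database.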
+ SUB_BATCH_SIZE = 2_500
+ PAUSE_SECONDS = 0.25
+
+ def perform(start_id, stop_id, source_table, partitioned_table, source_column)
+ return unless Feature.enabled?(:backfill_partitioned_audit_events, default_enabled: true)
+
+ if transaction_open?
+ raise "Aborting job to backfill partitioned #{source_table} table! Do not run this job in a transaction block!"
+ end
+
+ unless table_exists?(partitioned_table)
+ logger.warn "exiting backfill migration because partitioned table #{partitioned_table} does not exist. " \
+ "This could be due to the migration being rolled back after migration jobs were enqueued in sidekiq"
+ return
+ end
+
+ bulk_copy = BulkCopy.new(source_table, partitioned_table, source_column)
+ parent_batch_relation = relation_scoped_to_range(source_table, source_column, start_id, stop_id)
+
+ parent_batch_relation.each_batch(of: SUB_BATCH_SIZE) do |sub_batch|
+ sub_start_id, sub_stop_id = sub_batch.pluck(Arel.sql("MIN(#{source_column}), MAX(#{source_column})")).first
+
+ bulk_copy.copy_between(sub_start_id, sub_stop_id)
+ sleep(PAUSE_SECONDS)
+ end
+
+ mark_jobs_as_succeeded(start_id, stop_id, source_table, partitioned_table, source_column)
+ end
+
+ private
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+
+ def transaction_open?
+ connection.transaction_open?
+ end
+
+ def table_exists?(table)
+ connection.table_exists?(table)
+ end
+
+ def logger
+ @logger ||= ::Gitlab::BackgroundMigration::Logger.build
+ end
+
+ def relation_scoped_to_range(source_table, source_key_column, start_id, stop_id)
+ define_batchable_model(source_table).where(source_key_column => start_id..stop_id)
+ end
+
+ def mark_jobs_as_succeeded(*arguments)
+ BackgroundMigrationJob.mark_all_as_succeeded(self.class.name, arguments)
+ end
+
+ # Helper class to copy data between two tables using
+ # INSERT INTO ... SELECT ... ON CONFLICT DO NOTHING, so rows that already exist in the destination are skipped
+ class BulkCopy
+ DELIMITER = ', '
+
+ attr_reader :source_table, :destination_table, :source_column
+
+ def initialize(source_table, destination_table, source_column)
+ @source_table = source_table
+ @destination_table = destination_table
+ @source_column = source_column
+ end
+
+ def copy_between(start_id, stop_id)
+ connection.execute(<<~SQL)
+ INSERT INTO #{destination_table} (#{column_listing})
+ SELECT #{column_listing}
+ FROM #{source_table}
+ WHERE #{source_column} BETWEEN #{start_id} AND #{stop_id}
+ FOR UPDATE
+ ON CONFLICT (#{conflict_targets}) DO NOTHING
+ SQL
+ end
+
+ private
+
+ def connection
+ @connection ||= ActiveRecord::Base.connection
+ end
+
+ def column_listing
+ @column_listing ||= connection.columns(source_table).map(&:name).join(DELIMITER)
+ end
+
+ def conflict_targets
+ connection.primary_key(destination_table).join(DELIMITER)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb b/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb
index 9e687009cd7..1fb9476b7d9 100644
--- a/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb
+++ b/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers.rb
@@ -99,7 +99,7 @@ module Gitlab
drop_function(fn_name, if_exists: true)
else
create_or_replace_fk_function(fn_name, final_keys)
- create_trigger(trigger_name, fn_name, fires: "AFTER DELETE ON #{to_table}")
+ create_trigger(to_table, trigger_name, fn_name, fires: 'AFTER DELETE')
end
end
end
diff --git a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
index f77fbe98df1..b676767f41d 100644
--- a/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
+++ b/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers.rb
@@ -5,10 +5,16 @@ module Gitlab
module PartitioningMigrationHelpers
module TableManagementHelpers
include ::Gitlab::Database::SchemaHelpers
+ include ::Gitlab::Database::DynamicModelHelpers
+ include ::Gitlab::Database::Migrations::BackgroundMigrationHelpers
- WHITELISTED_TABLES = %w[audit_events].freeze
+ ALLOWED_TABLES = %w[audit_events].freeze
ERROR_SCOPE = 'table partitioning'
+ MIGRATION_CLASS_NAME = "::#{module_parent_name}::BackfillPartitionedTable"
+ BATCH_INTERVAL = 2.minutes.freeze
+ BATCH_SIZE = 50_000
+
# Creates a partitioned copy of an existing table, using a RANGE partitioning strategy on a timestamp column.
# One partition is created per month between the given `min_date` and `max_date`.
#
@@ -18,14 +24,25 @@ module Gitlab
#
# partition_table_by_date :audit_events, :created_at, min_date: Date.new(2020, 1), max_date: Date.new(2020, 6)
#
- # Required options are:
+ # Options are:
# :min_date - a date specifying the lower bounds of the partition range
- # :max_date - a date specifying the upper bounds of the partitioning range
+ # :max_date - a date specifying the upper bounds of the partitioning range, defaults to today + 1 month
#
- def partition_table_by_date(table_name, column_name, min_date:, max_date:)
- assert_table_is_whitelisted(table_name)
+ # Unless min_date is specified explicitly, we default to
+ # 1. The minimum value for the partitioning column in the table
+ # 2. If no data is present yet, the current month
+ def partition_table_by_date(table_name, column_name, min_date: nil, max_date: nil)
+ assert_table_is_allowed(table_name)
+
assert_not_in_transaction_block(scope: ERROR_SCOPE)
+ max_date ||= Date.today + 1.month
+
+ min_date ||= connection.select_one(<<~SQL)['minimum'] || max_date - 1.month
+ SELECT date_trunc('MONTH', MIN(#{column_name})) AS minimum
+ FROM #{table_name}
+ SQL
+
raise "max_date #{max_date} must be greater than min_date #{min_date}" if min_date >= max_date
primary_key = connection.primary_key(table_name)
@@ -34,10 +51,12 @@ module Gitlab
partition_column = find_column_definition(table_name, column_name)
raise "partition column #{column_name} does not exist on #{table_name}" if partition_column.nil?
- new_table_name = partitioned_table_name(table_name)
- create_range_partitioned_copy(new_table_name, table_name, partition_column, primary_key)
- create_daterange_partitions(new_table_name, partition_column.name, min_date, max_date)
- create_sync_trigger(table_name, new_table_name, primary_key)
+ partitioned_table_name = make_partitioned_table_name(table_name)
+
+ create_range_partitioned_copy(table_name, partitioned_table_name, partition_column, primary_key)
+ create_daterange_partitions(partitioned_table_name, partition_column.name, min_date, max_date)
+ create_trigger_to_sync_tables(table_name, partitioned_table_name, primary_key)
+ enqueue_background_migration(table_name, partitioned_table_name, primary_key)
end
# Clean up a partitioned copy of an existing table. This deletes the partitioned table and all partitions.
@@ -47,39 +66,58 @@ module Gitlab
# drop_partitioned_table_for :audit_events
#
def drop_partitioned_table_for(table_name)
- assert_table_is_whitelisted(table_name)
+ assert_table_is_allowed(table_name)
assert_not_in_transaction_block(scope: ERROR_SCOPE)
+ cleanup_migration_jobs(table_name)
+
with_lock_retries do
- trigger_name = sync_trigger_name(table_name)
+ trigger_name = make_sync_trigger_name(table_name)
drop_trigger(table_name, trigger_name)
end
- function_name = sync_function_name(table_name)
+ function_name = make_sync_function_name(table_name)
drop_function(function_name)
- part_table_name = partitioned_table_name(table_name)
- drop_table(part_table_name)
+ partitioned_table_name = make_partitioned_table_name(table_name)
+ drop_table(partitioned_table_name)
+ end
+
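+ # Creates the given number of hash partitions for a table in the static
+ # partitions schema, e.g. 16 partitions yields <table_name>_00 .. <table_name>_15.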
+ def create_hash_partitions(table_name, number_of_partitions)
+ transaction do
+ (0..number_of_partitions - 1).each do |partition|
+ decimals = Math.log10(number_of_partitions).ceil
+ suffix = "%0#{decimals}d" % partition
+ partition_name = "#{table_name}_#{suffix}"
+ schema = Gitlab::Database::STATIC_PARTITIONS_SCHEMA
+
+ execute(<<~SQL)
+ CREATE TABLE #{schema}.#{partition_name}
+ PARTITION OF #{table_name}
+ FOR VALUES WITH (MODULUS #{number_of_partitions}, REMAINDER #{partition});
+ SQL
+ end
+ end
end
private
- def assert_table_is_whitelisted(table_name)
- return if WHITELISTED_TABLES.include?(table_name.to_s)
+ def assert_table_is_allowed(table_name)
+ return if ALLOWED_TABLES.include?(table_name.to_s)
- raise "partitioning helpers are in active development, and #{table_name} is not whitelisted for use, " \
+ raise "partitioning helpers are in active development, and #{table_name} is not allowed for use, " \
"for more information please contact the database team"
end
- def partitioned_table_name(table)
+ def make_partitioned_table_name(table)
tmp_table_name("#{table}_part")
end
- def sync_function_name(table)
+ def make_sync_function_name(table)
object_name(table, 'table_sync_function')
end
- def sync_trigger_name(table)
+ def make_sync_trigger_name(table)
object_name(table, 'table_sync_trigger')
end
@@ -87,11 +125,11 @@ module Gitlab
connection.columns(table).find { |c| c.name == column.to_s }
end
- def create_range_partitioned_copy(table_name, template_table_name, partition_column, primary_key)
- if table_exists?(table_name)
+ def create_range_partitioned_copy(source_table_name, partitioned_table_name, partition_column, primary_key)
+ if table_exists?(partitioned_table_name)
# rubocop:disable Gitlab/RailsLogger
Rails.logger.warn "Partitioned table not created because it already exists" \
- " (this may be due to an aborted migration or similar): table_name: #{table_name} "
+ " (this may be due to an aborted migration or similar): table_name: #{partitioned_table_name} "
# rubocop:enable Gitlab/RailsLogger
return
end
@@ -99,20 +137,20 @@ module Gitlab
tmp_column_name = object_name(partition_column.name, 'partition_key')
transaction do
execute(<<~SQL)
- CREATE TABLE #{table_name} (
- LIKE #{template_table_name} INCLUDING ALL EXCLUDING INDEXES,
+ CREATE TABLE #{partitioned_table_name} (
+ LIKE #{source_table_name} INCLUDING ALL EXCLUDING INDEXES,
#{tmp_column_name} #{partition_column.sql_type} NOT NULL,
PRIMARY KEY (#{[primary_key, tmp_column_name].join(", ")})
) PARTITION BY RANGE (#{tmp_column_name})
SQL
- remove_column(table_name, partition_column.name)
- rename_column(table_name, tmp_column_name, partition_column.name)
- change_column_default(table_name, primary_key, nil)
+ remove_column(partitioned_table_name, partition_column.name)
+ rename_column(partitioned_table_name, tmp_column_name, partition_column.name)
+ change_column_default(partitioned_table_name, primary_key, nil)
- if column_of_type?(table_name, primary_key, :integer)
+ if column_of_type?(partitioned_table_name, primary_key, :integer)
# Default to int8 primary keys to prevent overflow
- change_column(table_name, primary_key, :bigint)
+ change_column(partitioned_table_name, primary_key, :bigint)
end
end
end
@@ -125,7 +163,8 @@ module Gitlab
min_date = min_date.beginning_of_month.to_date
max_date = max_date.next_month.beginning_of_month.to_date
- create_range_partition_safely("#{table_name}_000000", table_name, 'MINVALUE', to_sql_date_literal(min_date))
+ upper_bound = to_sql_date_literal(min_date)
+ create_range_partition_safely("#{table_name}_000000", table_name, 'MINVALUE', upper_bound)
while min_date < max_date
partition_name = "#{table_name}_#{min_date.strftime('%Y%m')}"
@@ -143,7 +182,7 @@ module Gitlab
end
def create_range_partition_safely(partition_name, table_name, lower_bound, upper_bound)
- if table_exists?(partition_name)
+ if table_exists?(table_for_range_partition(partition_name))
# rubocop:disable Gitlab/RailsLogger
Rails.logger.warn "Partition not created because it already exists" \
" (this may be due to an aborted migration or similar): partition_name: #{partition_name}"
@@ -154,34 +193,42 @@ module Gitlab
create_range_partition(partition_name, table_name, lower_bound, upper_bound)
end
- def create_sync_trigger(source_table, target_table, unique_key)
- function_name = sync_function_name(source_table)
- trigger_name = sync_trigger_name(source_table)
+ def create_trigger_to_sync_tables(source_table_name, partitioned_table_name, unique_key)
+ function_name = make_sync_function_name(source_table_name)
+ trigger_name = make_sync_trigger_name(source_table_name)
with_lock_retries do
- create_sync_function(function_name, target_table, unique_key)
- create_comment('FUNCTION', function_name, "Partitioning migration: table sync for #{source_table} table")
+ create_sync_function(function_name, partitioned_table_name, unique_key)
+ create_comment('FUNCTION', function_name, "Partitioning migration: table sync for #{source_table_name} table")
- create_trigger(trigger_name, function_name, fires: "AFTER INSERT OR UPDATE OR DELETE ON #{source_table}")
+ create_sync_trigger(source_table_name, trigger_name, function_name)
end
end
- def create_sync_function(name, target_table, unique_key)
+ def create_sync_function(name, partitioned_table_name, unique_key)
+ if function_exists?(name)
+ # rubocop:disable Gitlab/RailsLogger
+ Rails.logger.warn "Partitioning sync function not created because it already exists" \
+ " (this may be due to an aborted migration or similar): function name: #{name}"
+ # rubocop:enable Gitlab/RailsLogger
+ return
+ end
+
delimiter = ",\n "
- column_names = connection.columns(target_table).map(&:name)
+ column_names = connection.columns(partitioned_table_name).map(&:name)
set_statements = build_set_statements(column_names, unique_key)
insert_values = column_names.map { |name| "NEW.#{name}" }
create_trigger_function(name, replace: false) do
<<~SQL
IF (TG_OP = 'DELETE') THEN
- DELETE FROM #{target_table} where #{unique_key} = OLD.#{unique_key};
+ DELETE FROM #{partitioned_table_name} where #{unique_key} = OLD.#{unique_key};
ELSIF (TG_OP = 'UPDATE') THEN
- UPDATE #{target_table}
+ UPDATE #{partitioned_table_name}
SET #{set_statements.join(delimiter)}
- WHERE #{target_table}.#{unique_key} = NEW.#{unique_key};
+ WHERE #{partitioned_table_name}.#{unique_key} = NEW.#{unique_key};
ELSIF (TG_OP = 'INSERT') THEN
- INSERT INTO #{target_table} (#{column_names.join(delimiter)})
+ INSERT INTO #{partitioned_table_name} (#{column_names.join(delimiter)})
VALUES (#{insert_values.join(delimiter)});
END IF;
RETURN NULL;
@@ -190,7 +237,35 @@ module Gitlab
end
def build_set_statements(column_names, unique_key)
- column_names.reject { |name| name == unique_key }.map { |column_name| "#{column_name} = NEW.#{column_name}" }
+ column_names.reject { |name| name == unique_key }.map { |name| "#{name} = NEW.#{name}" }
+ end
+
+ def create_sync_trigger(table_name, trigger_name, function_name)
+ if trigger_exists?(table_name, trigger_name)
+ # rubocop:disable Gitlab/RailsLogger
+ Rails.logger.warn "Partitioning sync trigger not created because it already exists" \
+ " (this may be due to an aborted migration or similar): trigger name: #{trigger_name}"
+ # rubocop:enable Gitlab/RailsLogger
+ return
+ end
+
+ create_trigger(table_name, trigger_name, function_name, fires: 'AFTER INSERT OR UPDATE OR DELETE')
+ end
+
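+ # Enqueues BackfillPartitionedTable background migration jobs over the source
+ # table in batches of BATCH_SIZE rows, spaced BATCH_INTERVAL apart, tracking the
+ # jobs so they can be cleaned up if the partitioned table is dropped.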
+ def enqueue_background_migration(source_table_name, partitioned_table_name, source_key)
+ source_model = define_batchable_model(source_table_name)
+
+ queue_background_migration_jobs_by_range_at_intervals(
+ source_model,
+ MIGRATION_CLASS_NAME,
+ BATCH_INTERVAL,
+ batch_size: BATCH_SIZE,
+ other_job_arguments: [source_table_name.to_s, partitioned_table_name, source_key],
+ track_jobs: true)
+ end
+
+ def cleanup_migration_jobs(table_name)
+ ::Gitlab::Database::BackgroundMigrationJob.for_partitioning_migration(MIGRATION_CLASS_NAME, table_name).delete_all
end
end
end
diff --git a/lib/gitlab/database/schema_helpers.rb b/lib/gitlab/database/schema_helpers.rb
index 8e544307d81..34daafd06de 100644
--- a/lib/gitlab/database/schema_helpers.rb
+++ b/lib/gitlab/database/schema_helpers.rb
@@ -16,15 +16,30 @@ module Gitlab
SQL
end
- def create_trigger(name, function_name, fires: nil)
+ def function_exists?(name)
+ connection.select_value("SELECT 1 FROM pg_proc WHERE proname = '#{name}'")
+ end
+
+ def create_trigger(table_name, name, function_name, fires:)
execute(<<~SQL)
CREATE TRIGGER #{name}
- #{fires}
+ #{fires} ON #{table_name}
FOR EACH ROW
EXECUTE PROCEDURE #{function_name}()
SQL
end
+ def trigger_exists?(table_name, name)
+ connection.select_value(<<~SQL)
+ SELECT 1
+ FROM pg_trigger
+ INNER JOIN pg_class
+ ON pg_trigger.tgrelid = pg_class.oid
+ WHERE pg_class.relname = '#{table_name}'
+ AND pg_trigger.tgname = '#{name}'
+ SQL
+ end
+
def drop_function(name, if_exists: true)
exists_clause = optional_clause(if_exists, "IF EXISTS")
execute("DROP FUNCTION #{exists_clause} #{name}()")
@@ -69,9 +84,13 @@ module Gitlab
private
+ def table_for_range_partition(partition_name)
+ "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{partition_name}"
+ end
+
def create_range_partition(partition_name, table_name, lower_bound, upper_bound)
execute(<<~SQL)
- CREATE TABLE #{partition_name} PARTITION OF #{table_name}
+ CREATE TABLE #{table_for_range_partition(partition_name)} PARTITION OF #{table_name}
FOR VALUES FROM (#{lower_bound}) TO (#{upper_bound})
SQL
end
diff --git a/lib/gitlab/diff/file.rb b/lib/gitlab/diff/file.rb
index 72dcc4fde71..dcd4bbdabf5 100644
--- a/lib/gitlab/diff/file.rb
+++ b/lib/gitlab/diff/file.rb
@@ -230,11 +230,15 @@ module Gitlab
end
def added_lines
- @stats&.additions || diff_lines.count(&:added?)
+ strong_memoize(:added_lines) do
+ @stats&.additions || diff_lines.count(&:added?)
+ end
end
def removed_lines
- @stats&.deletions || diff_lines.count(&:removed?)
+ strong_memoize(:removed_lines) do
+ @stats&.deletions || diff_lines.count(&:removed?)
+ end
end
def file_identifier
diff --git a/lib/gitlab/diff/file_collection/base.rb b/lib/gitlab/diff/file_collection/base.rb
index 38b636e4e5a..cf0611e44da 100644
--- a/lib/gitlab/diff/file_collection/base.rb
+++ b/lib/gitlab/diff/file_collection/base.rb
@@ -60,12 +60,20 @@ module Gitlab
end
end
- def diff_file_with_old_path(old_path)
- diff_files.find { |diff_file| diff_file.old_path == old_path }
+ def diff_file_with_old_path(old_path, a_mode = nil)
+ if Feature.enabled?(:file_identifier_hash) && a_mode.present?
+ diff_files.find { |diff_file| diff_file.old_path == old_path && diff_file.a_mode == a_mode }
+ else
+ diff_files.find { |diff_file| diff_file.old_path == old_path }
+ end
end
- def diff_file_with_new_path(new_path)
- diff_files.find { |diff_file| diff_file.new_path == new_path }
+ def diff_file_with_new_path(new_path, b_mode = nil)
+ if Feature.enabled?(:file_identifier_hash) && b_mode.present?
+ diff_files.find { |diff_file| diff_file.new_path == new_path && diff_file.b_mode == b_mode }
+ else
+ diff_files.find { |diff_file| diff_file.new_path == new_path }
+ end
end
def clear_cache
@@ -80,15 +88,18 @@ module Gitlab
def diff_stats_collection
strong_memoize(:diff_stats) do
- # There are scenarios where we don't need to request Diff Stats,
- # when caching for instance.
- next unless @include_stats
- next unless diff_refs
+ next unless fetch_diff_stats?
@repository.diff_stats(diff_refs.base_sha, diff_refs.head_sha)
end
end
+ def fetch_diff_stats?
+ # There are scenarios where we don't need to request Diff Stats,
+ # for instance when caching.
+ @include_stats && diff_refs
+ end
+
def decorate_diff!(diff)
return diff if diff.is_a?(File)
diff --git a/lib/gitlab/diff/file_collection/merge_request_diff_base.rb b/lib/gitlab/diff/file_collection/merge_request_diff_base.rb
index d126fdb2be2..d54e1aad19a 100644
--- a/lib/gitlab/diff/file_collection/merge_request_diff_base.rb
+++ b/lib/gitlab/diff/file_collection/merge_request_diff_base.rb
@@ -20,7 +20,7 @@ module Gitlab
strong_memoize(:diff_files) do
diff_files = super
- diff_files.each { |diff_file| cache.decorate(diff_file) }
+ diff_files.each { |diff_file| highlight_cache.decorate(diff_file) }
diff_files
end
@@ -28,16 +28,14 @@ module Gitlab
override :write_cache
def write_cache
- cache.write_if_empty
+ highlight_cache.write_if_empty
+ diff_stats_cache.write_if_empty(diff_stats_collection)
end
override :clear_cache
def clear_cache
- cache.clear
- end
-
- def cache_key
- cache.key
+ highlight_cache.clear
+ diff_stats_cache.clear
end
def real_size
@@ -46,8 +44,25 @@ module Gitlab
private
- def cache
- @cache ||= Gitlab::Diff::HighlightCache.new(self)
+ def highlight_cache
+ strong_memoize(:highlight_cache) do
+ Gitlab::Diff::HighlightCache.new(self)
+ end
+ end
+
+ def diff_stats_cache
+ strong_memoize(:diff_stats_cache) do
+ Gitlab::Diff::StatsCache.new(cachable_key: @merge_request_diff.cache_key)
+ end
+ end
+
+ override :diff_stats_collection
+ def diff_stats_collection
+ strong_memoize(:diff_stats) do
+ next unless fetch_diff_stats?
+
+ diff_stats_cache.read || super
+ end
end
end
end
diff --git a/lib/gitlab/diff/file_collection/wiki_page.rb b/lib/gitlab/diff/file_collection/wiki_page.rb
new file mode 100644
index 00000000000..7873e85a0eb
--- /dev/null
+++ b/lib/gitlab/diff/file_collection/wiki_page.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Diff
+ module FileCollection
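+ # Builds the diff file collection for a single wiki page version, restricted
+ # to the page's path and with the diff fully expanded.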
+ class WikiPage < Base
+ def initialize(page, diff_options:)
+ commit = page.wiki.commit(page.version.commit)
+ diff_options = diff_options.merge(
+ expanded: true,
+ paths: [page.path]
+ )
+
+ super(commit,
+ # TODO: Uncouple diffing from projects
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/217752
+ project: page.wiki,
+ diff_options: diff_options,
+ diff_refs: commit.diff_refs)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/diff/position_tracer.rb b/lib/gitlab/diff/position_tracer.rb
index a1c82ce9afc..1c21c35fa60 100644
--- a/lib/gitlab/diff/position_tracer.rb
+++ b/lib/gitlab/diff/position_tracer.rb
@@ -42,6 +42,10 @@ module Gitlab
@cd_diffs ||= compare(new_diff_refs.start_sha, new_diff_refs.head_sha)
end
+ def diff_file(position)
+ position.diff_file(project.repository)
+ end
+
private
def compare(start_sha, head_sha, straight: false)
diff --git a/lib/gitlab/diff/position_tracer/base_strategy.rb b/lib/gitlab/diff/position_tracer/base_strategy.rb
index 65049daabf4..61250bd2473 100644
--- a/lib/gitlab/diff/position_tracer/base_strategy.rb
+++ b/lib/gitlab/diff/position_tracer/base_strategy.rb
@@ -8,6 +8,7 @@ module Gitlab
delegate \
:project,
+ :diff_file,
:ac_diffs,
:bd_diffs,
:cd_diffs,
diff --git a/lib/gitlab/diff/position_tracer/image_strategy.rb b/lib/gitlab/diff/position_tracer/image_strategy.rb
index 79244a17951..046a6782dda 100644
--- a/lib/gitlab/diff/position_tracer/image_strategy.rb
+++ b/lib/gitlab/diff/position_tracer/image_strategy.rb
@@ -5,22 +5,26 @@ module Gitlab
class PositionTracer
class ImageStrategy < BaseStrategy
def trace(position)
+ a_path = position.old_path
b_path = position.new_path
+ diff_file = diff_file(position)
+ a_mode = diff_file&.a_mode
+ b_mode = diff_file&.b_mode
# If file exists in B->D (e.g. updated, renamed, removed), let the
# note become outdated.
- bd_diff = bd_diffs.diff_file_with_old_path(b_path)
+ bd_diff = bd_diffs.diff_file_with_old_path(b_path, b_mode)
return { position: new_position(position, bd_diff), outdated: true } if bd_diff
# If file still exists in the new diff, update the position.
- cd_diff = cd_diffs.diff_file_with_new_path(bd_diff&.new_path || b_path)
+ cd_diff = cd_diffs.diff_file_with_new_path(b_path, b_mode)
return { position: new_position(position, cd_diff), outdated: false } if cd_diff
# If file exists in A->C (e.g. rebased and same changes were present
# in target branch), let the note become outdated.
- ac_diff = ac_diffs.diff_file_with_old_path(position.old_path)
+ ac_diff = ac_diffs.diff_file_with_old_path(a_path, a_mode)
return { position: new_position(position, ac_diff), outdated: true } if ac_diff
diff --git a/lib/gitlab/diff/position_tracer/line_strategy.rb b/lib/gitlab/diff/position_tracer/line_strategy.rb
index 8db0fc6f963..e3c1e549b96 100644
--- a/lib/gitlab/diff/position_tracer/line_strategy.rb
+++ b/lib/gitlab/diff/position_tracer/line_strategy.rb
@@ -76,16 +76,20 @@ module Gitlab
def trace_added_line(position)
b_path = position.new_path
b_line = position.new_line
+ diff_file = diff_file(position)
+ b_mode = diff_file&.b_mode
- bd_diff = bd_diffs.diff_file_with_old_path(b_path)
+ bd_diff = bd_diffs.diff_file_with_old_path(b_path, b_mode)
d_path = bd_diff&.new_path || b_path
+ d_mode = bd_diff&.b_mode || b_mode
d_line = LineMapper.new(bd_diff).old_to_new(b_line)
if d_line
- cd_diff = cd_diffs.diff_file_with_new_path(d_path)
+ cd_diff = cd_diffs.diff_file_with_new_path(d_path, d_mode)
c_path = cd_diff&.old_path || d_path
+ c_mode = cd_diff&.a_mode || d_mode
c_line = LineMapper.new(cd_diff).new_to_old(d_line)
if c_line
@@ -98,7 +102,7 @@ module Gitlab
else
# If the line is no longer in the MR, we unfortunately cannot show
# the current state on the CD diff, so we treat it as outdated.
- ac_diff = ac_diffs.diff_file_with_new_path(c_path)
+ ac_diff = ac_diffs.diff_file_with_new_path(c_path, c_mode)
{ position: new_position(ac_diff, nil, c_line), outdated: true }
end
@@ -115,22 +119,26 @@ module Gitlab
def trace_removed_line(position)
a_path = position.old_path
a_line = position.old_line
+ diff_file = diff_file(position)
+ a_mode = diff_file&.a_mode
- ac_diff = ac_diffs.diff_file_with_old_path(a_path)
+ ac_diff = ac_diffs.diff_file_with_old_path(a_path, a_mode)
c_path = ac_diff&.new_path || a_path
+ c_mode = ac_diff&.b_mode || a_mode
c_line = LineMapper.new(ac_diff).old_to_new(a_line)
if c_line
- cd_diff = cd_diffs.diff_file_with_old_path(c_path)
+ cd_diff = cd_diffs.diff_file_with_old_path(c_path, c_mode)
d_path = cd_diff&.new_path || c_path
+ d_mode = cd_diff&.b_mode || c_mode
d_line = LineMapper.new(cd_diff).old_to_new(c_line)
if d_line
# If the line is still in C but also in D, it has turned from a
# removed line into an unchanged one.
- bd_diff = bd_diffs.diff_file_with_new_path(d_path)
+ bd_diff = bd_diffs.diff_file_with_new_path(d_path, d_mode)
{ position: new_position(bd_diff, nil, d_line), outdated: true }
else
@@ -148,17 +156,21 @@ module Gitlab
a_line = position.old_line
b_path = position.new_path
b_line = position.new_line
+ diff_file = diff_file(position)
+ a_mode = diff_file&.a_mode
+ b_mode = diff_file&.b_mode
- ac_diff = ac_diffs.diff_file_with_old_path(a_path)
+ ac_diff = ac_diffs.diff_file_with_old_path(a_path, a_mode)
c_path = ac_diff&.new_path || a_path
+ c_mode = ac_diff&.b_mode || a_mode
c_line = LineMapper.new(ac_diff).old_to_new(a_line)
- bd_diff = bd_diffs.diff_file_with_old_path(b_path)
+ bd_diff = bd_diffs.diff_file_with_old_path(b_path, b_mode)
d_line = LineMapper.new(bd_diff).old_to_new(b_line)
- cd_diff = cd_diffs.diff_file_with_old_path(c_path)
+ cd_diff = cd_diffs.diff_file_with_old_path(c_path, c_mode)
if c_line && d_line
# If the line is still in C and D, it is still unchanged.
diff --git a/lib/gitlab/diff/stats_cache.rb b/lib/gitlab/diff/stats_cache.rb
new file mode 100644
index 00000000000..f38fb21d497
--- /dev/null
+++ b/lib/gitlab/diff/stats_cache.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+#
+module Gitlab
+ module Diff
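+ # Caches Gitaly diff stats for a diffable (keyed by cachable_key) in
+ # Rails.cache for a week, so they don't have to be re-fetched on every request.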
+ class StatsCache
+ include Gitlab::Metrics::Methods
+ include Gitlab::Utils::StrongMemoize
+
+ EXPIRATION = 1.week
+ VERSION = 1
+
+ def initialize(cachable_key:)
+ @cachable_key = cachable_key
+ end
+
+ def read
+ strong_memoize(:cached_values) do
+ content = cache.fetch(key)
+
+ next unless content
+
+ stats = content.map { |stat| Gitaly::DiffStats.new(stat) }
+
+ Gitlab::Git::DiffStatsCollection.new(stats)
+ end
+ end
+
+ def write_if_empty(stats)
+ return if cache.exist?(key)
+ return unless stats
+
+ cache.write(key, stats.as_json, expires_in: EXPIRATION)
+ end
+
+ def clear
+ cache.delete(key)
+ end
+
+ private
+
+ attr_reader :cachable_key
+
+ def cache
+ Rails.cache
+ end
+
+ def key
+ strong_memoize(:redis_key) do
+ ['diff_stats', cachable_key, VERSION].join(":")
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/discussions_diff/file_collection.rb b/lib/gitlab/discussions_diff/file_collection.rb
index 7a9d4c5c0c2..60b3a1738f1 100644
--- a/lib/gitlab/discussions_diff/file_collection.rb
+++ b/lib/gitlab/discussions_diff/file_collection.rb
@@ -70,8 +70,8 @@ module Gitlab
#
# Returns a Hash with { id => [Array of Gitlab::Diff::Line], ... }
def highlighted_lines_by_ids(ids)
- diff_files_indexed_by_id.slice(*ids).each_with_object({}) do |(id, file), hash|
- hash[id] = file.highlighted_diff_lines.map(&:to_hash)
+ diff_files_indexed_by_id.slice(*ids).transform_values do |file|
+ file.highlighted_diff_lines.map(&:to_hash)
end
end
end
diff --git a/lib/gitlab/discussions_diff/highlight_cache.rb b/lib/gitlab/discussions_diff/highlight_cache.rb
index 75d5a5df74b..4bec6467c1a 100644
--- a/lib/gitlab/discussions_diff/highlight_cache.rb
+++ b/lib/gitlab/discussions_diff/highlight_cache.rb
@@ -36,7 +36,9 @@ module Gitlab
content =
Redis::Cache.with do |redis|
- redis.mget(keys)
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis.mget(keys)
+ end
end
content.map! do |lines|
@@ -58,7 +60,11 @@ module Gitlab
keys = raw_keys.map { |id| cache_key_for(id) }
- Redis::Cache.with { |redis| redis.del(keys) }
+ Redis::Cache.with do |redis|
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis.del(keys)
+ end
+ end
end
def cache_key_for(raw_key)
diff --git a/lib/gitlab/email/handler.rb b/lib/gitlab/email/handler.rb
index 7f8dd815103..1b8421d34f3 100644
--- a/lib/gitlab/email/handler.rb
+++ b/lib/gitlab/email/handler.rb
@@ -12,7 +12,8 @@ module Gitlab
CreateNoteHandler,
CreateIssueHandler,
UnsubscribeHandler,
- CreateMergeRequestHandler
+ CreateMergeRequestHandler,
+ ServiceDeskHandler
]
end
@@ -25,5 +26,3 @@ module Gitlab
end
end
end
-
-Gitlab::Email::Handler.prepend_if_ee('::EE::Gitlab::Email::Handler')
diff --git a/lib/gitlab/email/handler/reply_processing.rb b/lib/gitlab/email/handler/reply_processing.rb
index 312a9fdfbae..1beea4f9054 100644
--- a/lib/gitlab/email/handler/reply_processing.rb
+++ b/lib/gitlab/email/handler/reply_processing.rb
@@ -37,7 +37,11 @@ module Gitlab
def process_message(**kwargs)
message = ReplyParser.new(mail, **kwargs).execute.strip
- add_attachments(message)
+ message_with_attachments = add_attachments(message)
+
+ # The Support Bot is specifically forbidden
+ # from using quick actions (slash commands).
+ strip_quick_actions(message_with_attachments)
end
def add_attachments(reply)
@@ -82,6 +86,15 @@ module Gitlab
def valid_project_slug?(found_project)
project_slug == found_project.full_path_slug
end
+
+ def strip_quick_actions(content)
+ return content unless author.support_bot?
+
+ command_definitions = ::QuickActions::InterpretService.command_definitions
+ extractor = ::Gitlab::QuickActions::Extractor.new(command_definitions)
+
+ extractor.redact_commands(content)
+ end
end
end
end
diff --git a/lib/gitlab/email/handler/service_desk_handler.rb b/lib/gitlab/email/handler/service_desk_handler.rb
new file mode 100644
index 00000000000..bcd8b98a06f
--- /dev/null
+++ b/lib/gitlab/email/handler/service_desk_handler.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+# handles service desk issue creation emails with these formats:
+# incoming+gitlab-org-gitlab-ce-20-issue-@incoming.gitlab.com
+# incoming+gitlab-org/gitlab-ce@incoming.gitlab.com (legacy)
+module Gitlab
+ module Email
+ module Handler
+ class ServiceDeskHandler < BaseHandler
+ include ReplyProcessing
+ include Gitlab::Utils::StrongMemoize
+
+ HANDLER_REGEX = /\A#{HANDLER_ACTION_BASE_REGEX}-issue-\z/.freeze
+ HANDLER_REGEX_LEGACY = /\A(?<project_path>[^\+]*)\z/.freeze
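+ # Matches custom service desk addresses of the form <project_full_path_slug>-<project_key>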
+ PROJECT_KEY_PATTERN = /\A(?<slug>.+)-(?<key>[a-z0-9_]+)\z/.freeze
+
+ def initialize(mail, mail_key, service_desk_key: nil)
+ super(mail, mail_key)
+
+ if service_desk_key.present?
+ @service_desk_key = service_desk_key
+ elsif !mail_key&.include?('/') && (matched = HANDLER_REGEX.match(mail_key.to_s))
+ @project_slug = matched[:project_slug]
+ @project_id = matched[:project_id]&.to_i
+ elsif matched = HANDLER_REGEX_LEGACY.match(mail_key.to_s)
+ @project_path = matched[:project_path]
+ end
+ end
+
+ def can_handle?
+ Gitlab::ServiceDesk.supported? && (project_id || can_handle_legacy_format? || service_desk_key)
+ end
+
+ def execute
+ raise ProjectNotFound if project.nil?
+
+ create_issue!
+ send_thank_you_email! if from_address
+ end
+
+ def metrics_params
+ super.merge(project: project&.full_path)
+ end
+
+ def metrics_event
+ :receive_email_service_desk
+ end
+
+ private
+
+ attr_reader :project_id, :project_path, :service_desk_key
+
+ def project
+ strong_memoize(:project) do
+ @project = service_desk_key ? project_from_key : super
+ @project = nil unless @project&.service_desk_enabled?
+ @project
+ end
+ end
+
+ def project_from_key
+ return unless match = service_desk_key.match(PROJECT_KEY_PATTERN)
+
+ project = Project.find_by_service_desk_project_key(match[:key])
+ return unless valid_project_key?(project, match[:slug])
+
+ project
+ end
+
+ def valid_project_key?(project, slug)
+ project.present? && slug == project.full_path_slug && Feature.enabled?(:service_desk_custom_address, project)
+ end
+
+ def create_issue!
+ @issue = Issues::CreateService.new(
+ project,
+ User.support_bot,
+ title: issue_title,
+ description: message_including_template,
+ confidential: true,
+ service_desk_reply_to: from_address
+ ).execute
+
+ raise InvalidIssueError unless @issue.persisted?
+
+ if service_desk_setting&.issue_template_missing?
+ create_template_not_found_note(@issue)
+ end
+ end
+
+ def send_thank_you_email!
+ Notify.service_desk_thank_you_email(@issue.id).deliver_later!
+ end
+
+ def message_including_template
+ description = message_including_reply
+ template_content = service_desk_setting&.issue_template_content
+
+ if template_content.present?
+ description += " \n" + template_content
+ end
+
+ description
+ end
+
+ def service_desk_setting
+ strong_memoize(:service_desk_setting) do
+ project.service_desk_setting
+ end
+ end
+
+ def create_template_not_found_note(issue)
+ issue_template_key = service_desk_setting&.issue_template_key
+
+ warning_note = <<-MD.strip_heredoc
+ WARNING: The template file #{issue_template_key}.md used for service desk issues is empty or could not be found.
+ Please check service desk settings and update the file to be used.
+ MD
+
+ note_params = {
+ noteable: issue,
+ note: warning_note
+ }
+
+ ::Notes::CreateService.new(
+ project,
+ User.support_bot,
+ note_params
+ ).execute
+ end
+
+ def from_address
+ (mail.reply_to || []).first || mail.from.first || mail.sender
+ end
+
+ def issue_title
+ from = "(from #{from_address})" if from_address
+
+ "Service Desk #{from}: #{mail.subject}"
+ end
+
+ def can_handle_legacy_format?
+ project_path && project_path.include?('/') && !mail_key.include?('+')
+ end
+
+ def author
+ User.support_bot
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/email/service_desk_receiver.rb b/lib/gitlab/email/service_desk_receiver.rb
new file mode 100644
index 00000000000..1ee5c10097b
--- /dev/null
+++ b/lib/gitlab/email/service_desk_receiver.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Email
+ class ServiceDeskReceiver < Receiver
+ private
+
+ def find_handler(mail)
+ key = service_desk_key(mail)
+ return unless key
+
+ Gitlab::Email::Handler::ServiceDeskHandler.new(mail, nil, service_desk_key: key)
+ end
+
+ def service_desk_key(mail)
+ mail.to.find do |address|
+ key = ::Gitlab::ServiceDeskEmail.key_from_address(address)
+ break key if key
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/emoji.rb b/lib/gitlab/emoji.rb
index bcf92b35720..cab21d875ab 100644
--- a/lib/gitlab/emoji.rb
+++ b/lib/gitlab/emoji.rb
@@ -44,6 +44,10 @@ module Gitlab
"<img class='emoji' title=':#{name}:' alt=':#{name}:' src='#{src}' height='20' width='20' align='absmiddle' />"
end
+ def emoji_exists?(name)
+ emojis.has_key?(name)
+ end
+
# CSS sprite fallback takes precedence over image fallback
def gl_emoji_tag(name, options = {})
emoji_name = emojis_aliases[name] || name
diff --git a/lib/gitlab/error_tracking.rb b/lib/gitlab/error_tracking.rb
index a19ce22e53f..8d5611411c9 100644
--- a/lib/gitlab/error_tracking.rb
+++ b/lib/gitlab/error_tracking.rb
@@ -10,7 +10,6 @@ module Gitlab
Acme::Client::Error::Timeout
Acme::Client::Error::UnsupportedOperation
ActiveRecord::ConnectionTimeoutError
- ActiveRecord::QueryCanceled
Gitlab::RequestContext::RequestDeadlineExceeded
GRPC::DeadlineExceeded
JIRA::HTTPError
@@ -29,7 +28,7 @@ module Gitlab
config.processors << ::Gitlab::ErrorTracking::Processor::SidekiqProcessor
# Sanitize authentication headers
config.sanitize_http_headers = %w[Authorization Private-Token]
- config.tags = { program: Gitlab.process_name }
+ config.tags = extra_tags_from_env.merge(program: Gitlab.process_name)
config.before_send = method(:before_send)
yield config if block_given?
@@ -166,6 +165,15 @@ module Gitlab
}
end
+ # Static tags that are set on application start
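+ # e.g. GITLAB_SENTRY_EXTRA_TAGS='{"stage": "canary"}' adds a "stage" tag to all events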
+ def extra_tags_from_env
+ Gitlab::Json.parse(ENV.fetch('GITLAB_SENTRY_EXTRA_TAGS', '{}')).to_hash
+ rescue => e
+ Gitlab::AppLogger.debug("GITLAB_SENTRY_EXTRA_TAGS could not be parsed as JSON: #{e.class.name}: #{e.message}")
+
+ {}
+ end
+
# Debugging for https://gitlab.com/gitlab-org/gitlab-foss/issues/57727
def add_context_from_exception_type(event, hint)
if ActiveModel::MissingAttributeError === hint[:exception]
@@ -173,8 +181,7 @@ module Gitlab
.connection
.schema_cache
.instance_variable_get(:@columns_hash)
- .map { |k, v| [k, v.map(&:first)] }
- .to_h
+ .transform_values { |v| v.map(&:first) }
event.extra.merge!(columns_hash)
end
diff --git a/lib/gitlab/error_tracking/detailed_error.rb b/lib/gitlab/error_tracking/detailed_error.rb
index b49f2472e01..5d272efa64a 100644
--- a/lib/gitlab/error_tracking/detailed_error.rb
+++ b/lib/gitlab/error_tracking/detailed_error.rb
@@ -22,6 +22,7 @@ module Gitlab
:id,
:last_release_last_commit,
:last_release_short_version,
+ :last_release_version,
:last_seen,
:message,
:project_id,
diff --git a/lib/gitlab/file_finder.rb b/lib/gitlab/file_finder.rb
index d438b0415fa..6225955a930 100644
--- a/lib/gitlab/file_finder.rb
+++ b/lib/gitlab/file_finder.rb
@@ -42,7 +42,7 @@ module Gitlab
end
end
- # Overriden in Gitlab::WikiFileFinder
+ # Overridden in Gitlab::WikiFileFinder
def search_paths(query)
repository.search_files_by_name(query, ref)
end
diff --git a/lib/gitlab/git/commit.rb b/lib/gitlab/git/commit.rb
index 17d0a62ba8c..8db73ecc480 100644
--- a/lib/gitlab/git/commit.rb
+++ b/lib/gitlab/git/commit.rb
@@ -90,14 +90,15 @@ module Gitlab
#
# Commit.last_for_path(repo, 'master', 'Gemfile')
#
- def last_for_path(repo, ref, path = nil)
+ def last_for_path(repo, ref, path = nil, literal_pathspec: false)
# rubocop: disable Rails/FindBy
# This is not where..first from ActiveRecord
where(
repo: repo,
ref: ref,
path: path,
- limit: 1
+ limit: 1,
+ literal_pathspec: literal_pathspec
).first
# rubocop: enable Rails/FindBy
end
diff --git a/lib/gitlab/git/diff.rb b/lib/gitlab/git/diff.rb
index bb845f11181..09a49b6c1ca 100644
--- a/lib/gitlab/git/diff.rb
+++ b/lib/gitlab/git/diff.rb
@@ -224,18 +224,18 @@ module Gitlab
end
end
- def init_from_gitaly(diff)
- @diff = diff.respond_to?(:patch) ? encode!(diff.patch) : ''
- @new_path = encode!(diff.to_path.dup)
- @old_path = encode!(diff.from_path.dup)
- @a_mode = diff.old_mode.to_s(8)
- @b_mode = diff.new_mode.to_s(8)
- @new_file = diff.from_id == BLANK_SHA
- @renamed_file = diff.from_path != diff.to_path
- @deleted_file = diff.to_id == BLANK_SHA
- @too_large = diff.too_large if diff.respond_to?(:too_large)
-
- collapse! if diff.respond_to?(:collapsed) && diff.collapsed
+ def init_from_gitaly(gitaly_diff)
+ @diff = gitaly_diff.respond_to?(:patch) ? encode!(gitaly_diff.patch) : ''
+ @new_path = encode!(gitaly_diff.to_path.dup)
+ @old_path = encode!(gitaly_diff.from_path.dup)
+ @a_mode = gitaly_diff.old_mode.to_s(8)
+ @b_mode = gitaly_diff.new_mode.to_s(8)
+ @new_file = gitaly_diff.from_id == BLANK_SHA
+ @renamed_file = gitaly_diff.from_path != gitaly_diff.to_path
+ @deleted_file = gitaly_diff.to_id == BLANK_SHA
+ @too_large = gitaly_diff.too_large if gitaly_diff.respond_to?(:too_large)
+
+ collapse! if gitaly_diff.respond_to?(:collapsed) && gitaly_diff.collapsed
end
def prune_diff_if_eligible
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index ed746163748..ea7a6e84195 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -127,9 +127,9 @@ module Gitlab
end
end
- def local_branches(sort_by: nil)
+ def local_branches(sort_by: nil, pagination_params: nil)
wrapped_gitaly_errors do
- gitaly_ref_client.local_branches(sort_by: sort_by)
+ gitaly_ref_client.local_branches(sort_by: sort_by, pagination_params: pagination_params)
end
end
@@ -1002,15 +1002,21 @@ module Gitlab
end
end
- def list_last_commits_for_tree(sha, path, offset: 0, limit: 25)
+ def list_last_commits_for_tree(sha, path, offset: 0, limit: 25, literal_pathspec: false)
wrapped_gitaly_errors do
- gitaly_commit_client.list_last_commits_for_tree(sha, path, offset: offset, limit: limit)
+ gitaly_commit_client.list_last_commits_for_tree(sha, path, offset: offset, limit: limit, literal_pathspec: literal_pathspec)
end
end
- def last_commit_for_path(sha, path)
+ def list_commits_by_ref_name(refs)
wrapped_gitaly_errors do
- gitaly_commit_client.last_commit_for_path(sha, path)
+ gitaly_commit_client.list_commits_by_ref_name(refs)
+ end
+ end
+
+ def last_commit_for_path(sha, path, literal_pathspec: false)
+ wrapped_gitaly_errors do
+ gitaly_commit_client.last_commit_for_path(sha, path, literal_pathspec: literal_pathspec)
end
end
diff --git a/lib/gitlab/git/wiki.rb b/lib/gitlab/git/wiki.rb
index 3025fc6bfdb..76771f0417b 100644
--- a/lib/gitlab/git/wiki.rb
+++ b/lib/gitlab/git/wiki.rb
@@ -101,6 +101,10 @@ module Gitlab
wrapped_gitaly_errors do
gitaly_find_page(title: title, version: version, dir: dir)
end
+ rescue Gitlab::Git::CommandError
+ # Return nil for invalid versions.
+ # This can be removed once https://gitlab.com/gitlab-org/gitaly/-/merge_requests/2323 is in place.
+ nil
end
def file(name, version)
diff --git a/lib/gitlab/git_ref_validator.rb b/lib/gitlab/git_ref_validator.rb
index dfff6823689..1330b06bf9c 100644
--- a/lib/gitlab/git_ref_validator.rb
+++ b/lib/gitlab/git_ref_validator.rb
@@ -19,7 +19,7 @@ module Gitlab
begin
Rugged::Reference.valid_name?("refs/heads/#{ref_name}")
rescue ArgumentError
- return false
+ false
end
end
@@ -35,7 +35,7 @@ module Gitlab
begin
Rugged::Reference.valid_name?(expanded_name)
rescue ArgumentError
- return false
+ false
end
end
end
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index bed99ef0ed4..b284aadc107 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -166,20 +166,7 @@ module Gitlab
# "gitaly-2 is at network address tcp://10.0.1.2:8075".
#
def self.call(storage, service, rpc, request, remote_storage: nil, timeout: default_timeout, &block)
- self.measure_timings(service, rpc, request) do
- self.execute(storage, service, rpc, request, remote_storage: remote_storage, timeout: timeout, &block)
- end
- end
-
- # This method is like GitalyClient.call but should be used with
- # Gitaly streaming RPCs. It measures how long the the RPC took to
- # produce the full response, not just the initial response.
- def self.streaming_call(storage, service, rpc, request, remote_storage: nil, timeout: default_timeout)
- self.measure_timings(service, rpc, request) do
- response = self.execute(storage, service, rpc, request, remote_storage: remote_storage, timeout: timeout)
-
- yield(response)
- end
+ Gitlab::GitalyClient::Call.new(storage, service, rpc, request, remote_storage, timeout).call(&block)
end
def self.execute(storage, service, rpc, request, remote_storage:, timeout:)
@@ -192,23 +179,6 @@ module Gitlab
stub(service, storage).__send__(rpc, request, kwargs) # rubocop:disable GitlabSecurity/PublicSend
end
- def self.measure_timings(service, rpc, request)
- start = Gitlab::Metrics::System.monotonic_time
-
- yield
- ensure
- duration = Gitlab::Metrics::System.monotonic_time - start
- request_hash = request.is_a?(Google::Protobuf::MessageExts) ? request.to_h : {}
-
- # Keep track, separately, for the performance bar
- self.add_query_time(duration)
-
- if Gitlab::PerformanceBar.enabled_for_request?
- add_call_details(feature: "#{service}##{rpc}", duration: duration, request: request_hash, rpc: rpc,
- backtrace: Gitlab::BacktraceCleaner.clean_backtrace(caller))
- end
- end
-
def self.query_time
query_time = Gitlab::SafeRequestStore[:gitaly_query_time] || 0
query_time.round(Gitlab::InstrumentationHelper::DURATION_PRECISION)
diff --git a/lib/gitlab/gitaly_client/blob_service.rb b/lib/gitlab/gitaly_client/blob_service.rb
index 8c704c2ceea..c66b3335d89 100644
--- a/lib/gitlab/gitaly_client/blob_service.rb
+++ b/lib/gitlab/gitaly_client/blob_service.rb
@@ -16,27 +16,7 @@ module Gitlab
limit: limit
)
response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_blob, request, timeout: GitalyClient.fast_timeout)
-
- data = []
- blob = nil
- response.each do |msg|
- if blob.nil?
- blob = msg
- end
-
- data << msg.data
- end
-
- return if blob.oid.blank?
-
- data = data.join
-
- Gitlab::Git::Blob.new(
- id: blob.oid,
- size: blob.size,
- data: data,
- binary: Gitlab::Git::Blob.binary?(data)
- )
+ consume_blob_response(response)
end
def batch_lfs_pointers(blob_ids)
@@ -48,7 +28,6 @@ module Gitlab
)
response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_lfs_pointers, request, timeout: GitalyClient.medium_timeout)
-
map_lfs_pointers(response)
end
@@ -70,8 +49,7 @@ module Gitlab
:blob_service,
:get_blobs,
request,
- timeout: GitalyClient.fast_timeout
- )
+ timeout: GitalyClient.fast_timeout)
GitalyClient::BlobsStitcher.new(response)
end
@@ -96,7 +74,6 @@ module Gitlab
request,
timeout: GitalyClient.fast_timeout
)
-
map_blob_types(response)
end
@@ -127,7 +104,6 @@ module Gitlab
request,
timeout: timeout
)
-
map_lfs_pointers(response)
end
@@ -137,12 +113,34 @@ module Gitlab
)
response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_all_lfs_pointers, request, timeout: GitalyClient.medium_timeout)
-
map_lfs_pointers(response)
end
private
+ def consume_blob_response(response)
+ data = []
+ blob = nil
+ response.each do |msg|
+ if blob.nil?
+ blob = msg
+ end
+
+ data << msg.data
+ end
+
+ return if blob.oid.blank?
+
+ data = data.join
+
+ Gitlab::Git::Blob.new(
+ id: blob.oid,
+ size: blob.size,
+ data: data,
+ binary: Gitlab::Git::Blob.binary?(data)
+ )
+ end
+
def map_lfs_pointers(response)
response.flat_map do |message|
message.lfs_pointers.map do |lfs_pointer|
diff --git a/lib/gitlab/gitaly_client/call.rb b/lib/gitlab/gitaly_client/call.rb
new file mode 100644
index 00000000000..9d4d86997ad
--- /dev/null
+++ b/lib/gitlab/gitaly_client/call.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GitalyClient
+ class Call
+ def initialize(storage, service, rpc, request, remote_storage, timeout)
+ @storage = storage
+ @service = service
+ @rpc = rpc
+ @request = request
+ @remote_storage = remote_storage
+ @timeout = timeout
+ @duration = 0
+ end
+
+ def call(&block)
+ response = recording_request do
+ GitalyClient.execute(@storage, @service, @rpc, @request, remote_storage: @remote_storage, timeout: @timeout, &block)
+ end
+
+ if response.is_a?(Enumerator)
+ # When the given response is an enumerator (coming from streamed
+ # responses), we wrap it in order to properly measure the stream
+ # consumption as it happens.
+ #
+ # store_timings is not called in that scenario, as it needs to be
+ # handled lazily in the custom Enumerator context.
+ instrument_stream(response)
+ else
+ store_timings
+ response
+ end
+ rescue => err
+ store_timings
+ raise err
+ end
+
+ private
+
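+ # Wraps a streamed Gitaly response so the time spent consuming each message is
+ # added to the accumulated duration; timings are stored once the stream is
+ # fully consumed or the consumer stops early.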
+ def instrument_stream(response)
+ Enumerator.new do |yielder|
+ loop do
+ value = recording_request { response.next }
+
+ yielder.yield(value)
+ end
+ ensure
+ store_timings
+ end
+ end
+
+ def recording_request
+ start = Gitlab::Metrics::System.monotonic_time
+
+ yield
+ ensure
+ @duration += Gitlab::Metrics::System.monotonic_time - start
+ end
+
+ def store_timings
+ GitalyClient.add_query_time(@duration)
+
+ return unless Gitlab::PerformanceBar.enabled_for_request?
+
+ request_hash = @request.is_a?(Google::Protobuf::MessageExts) ? @request.to_h : {}
+
+ GitalyClient.add_call_details(feature: "#{@service}##{@rpc}", duration: @duration, request: request_hash, rpc: @rpc,
+ backtrace: Gitlab::BacktraceCleaner.clean_backtrace(caller))
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/gitaly_client/cleanup_service.rb b/lib/gitlab/gitaly_client/cleanup_service.rb
index e2293d3121a..649aaa46362 100644
--- a/lib/gitlab/gitaly_client/cleanup_service.rb
+++ b/lib/gitlab/gitaly_client/cleanup_service.rb
@@ -13,15 +13,14 @@ module Gitlab
end
def apply_bfg_object_map_stream(io, &blk)
- responses = GitalyClient.call(
+ response = GitalyClient.call(
storage,
:cleanup_service,
:apply_bfg_object_map_stream,
build_object_map_enum(io),
timeout: GitalyClient.long_timeout
)
-
- responses.each(&blk)
+ response.each(&blk)
end
private
diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb
index aed132aaca0..464d2519b27 100644
--- a/lib/gitlab/gitaly_client/commit_service.rb
+++ b/lib/gitlab/gitaly_client/commit_service.rb
@@ -73,7 +73,6 @@ module Gitlab
def commit_deltas(commit)
request = Gitaly::CommitDeltaRequest.new(diff_from_parent_request_params(commit))
response = GitalyClient.call(@repository.storage, :diff_service, :commit_delta, request, timeout: GitalyClient.fast_timeout)
-
response.flat_map { |msg| msg.deltas }
end
@@ -162,13 +161,14 @@ module Gitlab
[response.left_count, response.right_count]
end
- def list_last_commits_for_tree(revision, path, offset: 0, limit: 25)
+ def list_last_commits_for_tree(revision, path, offset: 0, limit: 25, literal_pathspec: false)
request = Gitaly::ListLastCommitsForTreeRequest.new(
repository: @gitaly_repo,
revision: encode_binary(revision),
path: encode_binary(path.to_s),
offset: offset,
- limit: limit
+ limit: limit,
+ global_options: parse_global_options!(literal_pathspec: literal_pathspec)
)
response = GitalyClient.call(@repository.storage, :commit_service, :list_last_commits_for_tree, request, timeout: GitalyClient.medium_timeout)
@@ -180,11 +180,12 @@ module Gitlab
end
end
- def last_commit_for_path(revision, path)
+ def last_commit_for_path(revision, path, literal_pathspec: false)
request = Gitaly::LastCommitForPathRequest.new(
repository: @gitaly_repo,
revision: encode_binary(revision),
- path: encode_binary(path.to_s)
+ path: encode_binary(path.to_s),
+ global_options: parse_global_options!(literal_pathspec: literal_pathspec)
)
gitaly_commit = GitalyClient.call(@repository.storage, :commit_service, :last_commit_for_path, request, timeout: GitalyClient.fast_timeout).commit
@@ -200,9 +201,8 @@ module Gitlab
to: to
)
- GitalyClient.streaming_call(@repository.storage, :commit_service, :commits_between, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :commits_between, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
end
def diff_stats(left_commit_sha, right_commit_sha)
@@ -212,9 +212,8 @@ module Gitlab
right_commit_id: right_commit_sha
)
- GitalyClient.streaming_call(@repository.storage, :diff_service, :diff_stats, request, timeout: GitalyClient.medium_timeout) do |response|
- response.flat_map(&:stats)
- end
+ response = GitalyClient.call(@repository.storage, :diff_service, :diff_stats, request, timeout: GitalyClient.medium_timeout)
+ response.flat_map(&:stats)
end
def find_all_commits(opts = {})
@@ -226,9 +225,8 @@ module Gitlab
)
request.order = opts[:order].upcase if opts[:order].present?
- GitalyClient.streaming_call(@repository.storage, :commit_service, :find_all_commits, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :find_all_commits, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
end
def list_commits_by_oid(oids)
@@ -236,26 +234,25 @@ module Gitlab
request = Gitaly::ListCommitsByOidRequest.new(repository: @gitaly_repo, oid: oids)
- GitalyClient.streaming_call(@repository.storage, :commit_service, :list_commits_by_oid, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :list_commits_by_oid, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
rescue GRPC::NotFound # If no repository is found, happens mainly during testing
[]
end
- def commits_by_message(query, revision: '', path: '', limit: 1000, offset: 0)
+ def commits_by_message(query, revision: '', path: '', limit: 1000, offset: 0, literal_pathspec: true)
request = Gitaly::CommitsByMessageRequest.new(
repository: @gitaly_repo,
query: query,
revision: encode_binary(revision),
path: encode_binary(path),
limit: limit.to_i,
- offset: offset.to_i
+ offset: offset.to_i,
+ global_options: parse_global_options!(literal_pathspec: literal_pathspec)
)
- GitalyClient.streaming_call(@repository.storage, :commit_service, :commits_by_message, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :commits_by_message, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
end
def languages(ref = nil)
@@ -320,6 +317,7 @@ module Gitlab
skip_merges: options[:skip_merges],
all: !!options[:all],
first_parent: !!options[:first_parent],
+ global_options: parse_global_options!(options),
disable_walk: true # This option is deprecated. The 'walk' implementation is being removed.
)
request.after = GitalyClient.timestamp(options[:after]) if options[:after]
@@ -330,9 +328,8 @@ module Gitlab
request.paths = encode_repeated(Array(options[:path])) if options[:path].present?
- GitalyClient.streaming_call(@repository.storage, :commit_service, :find_commits, request, timeout: GitalyClient.medium_timeout) do |response|
- consume_commits_response(response)
- end
+ response = GitalyClient.call(@repository.storage, :commit_service, :find_commits, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
end
def filter_shas_with_signatures(shas)
@@ -349,7 +346,6 @@ module Gitlab
end
response = GitalyClient.call(@repository.storage, :commit_service, :filter_shas_with_signatures, enum, timeout: GitalyClient.fast_timeout)
-
response.flat_map do |msg|
msg.shas.map { |sha| EncodingHelper.encode!(sha) }
end
@@ -390,8 +386,28 @@ module Gitlab
messages
end
+ def list_commits_by_ref_name(refs)
+ request = Gitaly::ListCommitsByRefNameRequest
+ .new(repository: @gitaly_repo, ref_names: refs.map { |ref| encode_binary(ref) })
+
+ response = GitalyClient.call(@repository.storage, :commit_service, :list_commits_by_ref_name, request, timeout: GitalyClient.medium_timeout)
+
+ commit_refs = response.flat_map do |message|
+ message.commit_refs.map do |commit_ref|
+ [encode_utf8(commit_ref.ref_name), Gitlab::Git::Commit.new(@repository, commit_ref.commit)]
+ end
+ end
+
+ Hash[commit_refs]
+ end
+
private
+ def parse_global_options!(options)
+ literal_pathspec = options.delete(:literal_pathspec)
+ Gitaly::GlobalOptions.new(literal_pathspecs: literal_pathspec)
+ end
+
def call_commit_diff(request_params, options = {})
request_params[:ignore_whitespace_change] = options.fetch(:ignore_whitespace_change, false)
request_params[:enforce_limits] = options.fetch(:limits, true)
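
For illustration, a minimal sketch of what the new parse_global_options! helper produces and how a caller opts in (the option hash and the commit_service receiver are illustrative):

options = { literal_pathspec: true }
global_options = Gitaly::GlobalOptions.new(literal_pathspecs: options.delete(:literal_pathspec))
global_options.literal_pathspecs   # => true
options                            # => {} — the key is consumed so it is not forwarded twice

# e.g. a caller of the updated signature:
commit_service.last_commit_for_path('master', 'README.md', literal_pathspec: true)
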
diff --git a/lib/gitlab/gitaly_client/conflicts_service.rb b/lib/gitlab/gitaly_client/conflicts_service.rb
index f7eb4b45197..6f08dcc69b6 100644
--- a/lib/gitlab/gitaly_client/conflicts_service.rb
+++ b/lib/gitlab/gitaly_client/conflicts_service.rb
@@ -21,7 +21,6 @@ module Gitlab
their_commit_oid: @their_commit_oid
)
response = GitalyClient.call(@repository.storage, :conflicts_service, :list_conflict_files, request, timeout: GitalyClient.long_timeout)
-
GitalyClient::ConflictFilesStitcher.new(response, @gitaly_repo)
end
diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb
index 9ed4b2da09a..87505418ae9 100644
--- a/lib/gitlab/gitaly_client/operation_service.rb
+++ b/lib/gitlab/gitaly_client/operation_service.rb
@@ -178,6 +178,10 @@ module Gitlab
timeout: GitalyClient.long_timeout
)
+ if response.pre_receive_error.present?
+ raise Gitlab::Git::PreReceiveError.new("GL-HOOK-ERR: pre-receive hook failed.")
+ end
+
Gitlab::Git::OperationService::BranchUpdate.from_gitaly(response.branch_update)
rescue GRPC::FailedPrecondition => e
raise Gitlab::Git::CommitError, e
diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb
index 63def4e29c9..97b6813c080 100644
--- a/lib/gitlab/gitaly_client/ref_service.rb
+++ b/lib/gitlab/gitaly_client/ref_service.rb
@@ -15,14 +15,12 @@ module Gitlab
def branches
request = Gitaly::FindAllBranchesRequest.new(repository: @gitaly_repo)
response = GitalyClient.call(@storage, :ref_service, :find_all_branches, request, timeout: GitalyClient.fast_timeout)
-
consume_find_all_branches_response(response)
end
def remote_branches(remote_name)
request = Gitaly::FindAllRemoteBranchesRequest.new(repository: @gitaly_repo, remote_name: remote_name)
- response = GitalyClient.call(@repository.storage, :ref_service, :find_all_remote_branches, request, timeout: GitalyClient.medium_timeout)
-
+ response = GitalyClient.call(@storage, :ref_service, :find_all_remote_branches, request, timeout: GitalyClient.medium_timeout)
consume_find_all_remote_branches_response(remote_name, response)
end
@@ -33,7 +31,6 @@ module Gitlab
merged_branches: branch_names.map { |s| encode_binary(s) }
)
response = GitalyClient.call(@storage, :ref_service, :find_all_branches, request, timeout: GitalyClient.fast_timeout)
-
consume_find_all_branches_response(response)
end
@@ -71,10 +68,9 @@ module Gitlab
commit_id: newrev
)
- response = GitalyClient
- .call(@storage, :ref_service, :list_new_commits, request, timeout: GitalyClient.medium_timeout)
-
commits = []
+
+ response = GitalyClient.call(@storage, :ref_service, :list_new_commits, request, timeout: GitalyClient.medium_timeout)
response.each do |msg|
msg.commits.each do |c|
commits << Gitlab::Git::Commit.new(@repository, c)
@@ -98,9 +94,7 @@ module Gitlab
GitalyClient.medium_timeout
end
- response = GitalyClient
- .call(@storage, :ref_service, :list_new_blobs, request, timeout: timeout)
-
+ response = GitalyClient.call(@storage, :ref_service, :list_new_blobs, request, timeout: timeout)
response.flat_map do |msg|
# Returns an Array of Gitaly::NewBlobObject objects
# Available methods are: #size, #oid and #path
@@ -116,8 +110,8 @@ module Gitlab
branch_names.count
end
- def local_branches(sort_by: nil)
- request = Gitaly::FindLocalBranchesRequest.new(repository: @gitaly_repo)
+ def local_branches(sort_by: nil, pagination_params: nil)
+ request = Gitaly::FindLocalBranchesRequest.new(repository: @gitaly_repo, pagination_params: pagination_params)
request.sort_by = sort_by_param(sort_by) if sort_by
response = GitalyClient.call(@storage, :ref_service, :find_local_branches, request, timeout: GitalyClient.fast_timeout)
consume_find_local_branches_response(response)
@@ -171,9 +165,8 @@ module Gitlab
limit: limit
)
- stream = GitalyClient.call(@repository.storage, :ref_service, :list_tag_names_containing_commit, request, timeout: GitalyClient.medium_timeout)
-
- consume_ref_contains_sha_response(stream, :tag_names)
+ response = GitalyClient.call(@storage, :ref_service, :list_tag_names_containing_commit, request, timeout: GitalyClient.medium_timeout)
+ consume_ref_contains_sha_response(response, :tag_names)
end
# Limit: 0 implies no limit, thus all tag names will be returned
@@ -184,18 +177,16 @@ module Gitlab
limit: limit
)
- stream = GitalyClient.call(@repository.storage, :ref_service, :list_branch_names_containing_commit, request, timeout: GitalyClient.medium_timeout)
-
- consume_ref_contains_sha_response(stream, :branch_names)
+ response = GitalyClient.call(@storage, :ref_service, :list_branch_names_containing_commit, request, timeout: GitalyClient.medium_timeout)
+ consume_ref_contains_sha_response(response, :branch_names)
end
def get_tag_messages(tag_ids)
request = Gitaly::GetTagMessagesRequest.new(repository: @gitaly_repo, tag_ids: tag_ids)
- response = GitalyClient.call(@repository.storage, :ref_service, :get_tag_messages, request, timeout: GitalyClient.fast_timeout)
-
messages = Hash.new { |h, k| h[k] = +''.b }
current_tag_id = nil
+ response = GitalyClient.call(@storage, :ref_service, :get_tag_messages, request, timeout: GitalyClient.fast_timeout)
response.each do |rpc_message|
current_tag_id = rpc_message.tag_id if rpc_message.tag_id.present?
diff --git a/lib/gitlab/gitaly_client/remote_service.rb b/lib/gitlab/gitaly_client/remote_service.rb
index 4566c59bbe0..06aaf460751 100644
--- a/lib/gitlab/gitaly_client/remote_service.rb
+++ b/lib/gitlab/gitaly_client/remote_service.rb
@@ -8,9 +8,11 @@ module Gitlab
MAX_MSG_SIZE = 128.kilobytes.freeze
def self.exists?(remote_url)
- request = Gitaly::FindRemoteRepositoryRequest.new(remote: remote_url)
+ storage = GitalyClient.random_storage
- response = GitalyClient.call(GitalyClient.random_storage,
+ request = Gitaly::FindRemoteRepositoryRequest.new(remote: remote_url, storage_name: storage)
+
+ response = GitalyClient.call(storage,
:remote_service,
:find_remote_repository, request,
timeout: GitalyClient.medium_timeout)
diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb
index f74c9ea4192..20ad6d0184b 100644
--- a/lib/gitlab/gitaly_client/repository_service.rb
+++ b/lib/gitlab/gitaly_client/repository_service.rb
@@ -201,9 +201,9 @@ module Gitlab
response = GitalyClient.call(@storage, :repository_service, :fsck, request, timeout: GitalyClient.long_timeout)
if response.error.empty?
- return "", 0
+ ["", 0]
else
- return response.error.b, 1
+ [response.error.b, 1]
end
end
@@ -335,7 +335,6 @@ module Gitlab
def search_files_by_content(ref, query, options = {})
request = Gitaly::SearchFilesByContentRequest.new(repository: @gitaly_repo, ref: ref, query: query)
response = GitalyClient.call(@storage, :repository_service, :search_files_by_content, request, timeout: GitalyClient.default_timeout)
-
search_results_from_response(response, options)
end
@@ -410,7 +409,10 @@ module Gitlab
request,
timeout: timeout
)
+ write_stream_to_file(response, save_path)
+ end
+ def write_stream_to_file(response, save_path)
File.open(save_path, 'wb') do |f|
response.each do |message|
f.write(message.data)
diff --git a/lib/gitlab/gl_repository.rb b/lib/gitlab/gl_repository.rb
index abd4e847a50..7346de13626 100644
--- a/lib/gitlab/gl_repository.rb
+++ b/lib/gitlab/gl_repository.rb
@@ -43,10 +43,10 @@ module Gitlab
end
def self.parse(gl_repository)
- result = ::Gitlab::GlRepository::Identifier.new(gl_repository)
+ identifier = ::Gitlab::GlRepository::Identifier.parse(gl_repository)
- repo_type = result.repo_type
- container = result.fetch_container!
+ repo_type = identifier.repo_type
+ container = identifier.container
[container, repo_type.project_for(container), repo_type]
end
diff --git a/lib/gitlab/gl_repository/identifier.rb b/lib/gitlab/gl_repository/identifier.rb
index dc3e7931696..57350b1edb0 100644
--- a/lib/gitlab/gl_repository/identifier.rb
+++ b/lib/gitlab/gl_repository/identifier.rb
@@ -3,71 +3,83 @@
module Gitlab
class GlRepository
class Identifier
- attr_reader :gl_repository, :repo_type
+ include Gitlab::Utils::StrongMemoize
- def initialize(gl_repository)
- @gl_repository = gl_repository
- @segments = gl_repository.split('-')
+ InvalidIdentifier = Class.new(ArgumentError)
- raise_error if segments.size > 3
+ def self.parse(gl_repository)
+ segments = gl_repository&.split('-')
- @repo_type = find_repo_type
- @container_id = find_container_id
- @container_class = find_container_class
- end
+ # gl_repository can either have 2 or 3 segments:
+ #
+ # TODO: convert all 2-segment format to 3-segment:
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/219192
+ identifier = case segments&.size
+ when 2
+ TwoPartIdentifier.new(*segments)
+ when 3
+ ThreePartIdentifier.new(*segments)
+ end
+
+ return identifier if identifier&.valid?
- def fetch_container!
- container_class.find_by_id(container_id)
+ raise InvalidIdentifier, %Q(Invalid GL Repository "#{gl_repository}")
end
- private
+ # The older 2-segment format, where the container is implied.
+ # e.g. project-1, wiki-1
+ class TwoPartIdentifier < Identifier
+ def initialize(repo_type_name, container_id_str)
+ @container_id_str = container_id_str
+ @repo_type_name = repo_type_name
+ end
- attr_reader :segments, :container_class, :container_id
+ private
- def find_repo_type
- type_name = three_segments_format? ? segments.last : segments.first
- type = Gitlab::GlRepository.types[type_name]
+ def container_class
+ repo_type.container_class
+ end
+ end
- raise_error unless type
+ # The newer 3-segment format, where the container is explicit.
+ # e.g. group-1-wiki, project-1-wiki
+ class ThreePartIdentifier < Identifier
+ def initialize(container_type, container_id_str, repo_type_name)
+ @container_id_str = container_id_str
+ @container_type = container_type
+ @repo_type_name = repo_type_name
+ end
- type
- end
+ private
- def find_container_class
- if three_segments_format?
- case segments[0]
+ def container_class
+ case @container_type
when 'project'
Project
when 'group'
Group
- else
- raise_error
end
- else
- repo_type.container_class
end
end
- def find_container_id
- id = Integer(segments[1], 10, exception: false)
-
- raise_error unless id
+ def repo_type
+ strong_memoize(:repo_type) { Gitlab::GlRepository.types[repo_type_name] }
+ end
- id
+ def container
+ strong_memoize(:container) { container_class.find_by_id(container_id) }
end
- # gl_repository can either have 2 or 3 segments:
- # "wiki-1" is the older 2-segment format, where container is implied.
- # "group-1-wiki" is the newer 3-segment format, including container information.
- #
- # TODO: convert all 2-segment format to 3-segment:
- # https://gitlab.com/gitlab-org/gitlab/-/issues/219192
- def three_segments_format?
- segments.size == 3
+ def valid?
+ repo_type.present? && container_class.present? && container_id&.positive?
end
- def raise_error
- raise ArgumentError, "Invalid GL Repository \"#{gl_repository}\""
+ private
+
+ attr_reader :container_id_str, :repo_type_name
+
+ def container_id
+ strong_memoize(:container_id) { Integer(container_id_str, 10, exception: false) }
end
end
end
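
A usage sketch for the refactored identifier parsing above (the IDs are illustrative):

identifier = Gitlab::GlRepository::Identifier.parse('project-1')   # TwoPartIdentifier
identifier.repo_type    # => the 'project' repo type from Gitlab::GlRepository.types
identifier.container    # => Project.find_by_id(1), memoized

Gitlab::GlRepository::Identifier.parse('group-7-wiki')             # ThreePartIdentifier for Group 7's wiki
Gitlab::GlRepository::Identifier.parse('not-a-repo-id-4')
# => raises Gitlab::GlRepository::Identifier::InvalidIdentifier
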
diff --git a/lib/gitlab/global_id.rb b/lib/gitlab/global_id.rb
index cc82b6c5897..e8a6006dce1 100644
--- a/lib/gitlab/global_id.rb
+++ b/lib/gitlab/global_id.rb
@@ -2,6 +2,8 @@
module Gitlab
module GlobalId
+ CoerceError = Class.new(ArgumentError)
+
def self.build(object = nil, model_name: nil, id: nil, params: nil)
if object
model_name ||= object.class.name
@@ -10,5 +12,20 @@ module Gitlab
::URI::GID.build(app: GlobalID.app, model_name: model_name, model_id: id, params: params)
end
+
+ def self.as_global_id(value, model_name: nil)
+ case value
+ when GlobalID
+ value
+ when URI::GID
+ GlobalID.new(value)
+ when Integer
+ raise CoerceError, 'Cannot coerce Integer' unless model_name.present?
+
+ GlobalID.new(::Gitlab::GlobalId.build(model_name: model_name, id: value))
+ else
+ raise CoerceError, "Invalid ID. Cannot coerce instances of #{value.class}"
+ end
+ end
end
end
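
A usage sketch of the new coercion helper (IDs and model names are illustrative):

Gitlab::GlobalId.as_global_id(GlobalID.parse('gid://gitlab/Project/1'))  # => passed through unchanged
Gitlab::GlobalId.as_global_id(42, model_name: 'User')                    # => GlobalID for gid://gitlab/User/42
Gitlab::GlobalId.as_global_id(42)                                        # raises CoerceError: integers need a model_name
Gitlab::GlobalId.as_global_id('42')                                      # raises CoerceError: plain strings are not coerced
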
diff --git a/lib/gitlab/graphql/authorize/authorize_resource.rb b/lib/gitlab/graphql/authorize/authorize_resource.rb
index 94871498cf8..27673e5c27a 100644
--- a/lib/gitlab/graphql/authorize/authorize_resource.rb
+++ b/lib/gitlab/graphql/authorize/authorize_resource.rb
@@ -30,8 +30,7 @@ module Gitlab
end
def authorized_find!(*args)
- object = find_object(*args)
- object = object.sync if object.respond_to?(:sync)
+ object = Graphql::Lazy.force(find_object(*args))
authorize!(object)
diff --git a/lib/gitlab/graphql/lazy.rb b/lib/gitlab/graphql/lazy.rb
new file mode 100644
index 00000000000..a7f7610a041
--- /dev/null
+++ b/lib/gitlab/graphql/lazy.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ class Lazy
+ # Force evaluation of a (possibly) lazy value
+ def self.force(value)
+ case value
+ when ::BatchLoader::GraphQL
+ value.sync
+ when ::Concurrent::Promise
+ value.execute.value
+ else
+ value
+ end
+ end
+ end
+ end
+end
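
A small usage sketch for the new helper (the promise body is illustrative):

require 'concurrent'

Gitlab::Graphql::Lazy.force(42)               # => 42, plain values pass through untouched
promise = Concurrent::Promise.new { 2 + 2 }   # any deferred work
Gitlab::Graphql::Lazy.force(promise)          # => 4, the promise is executed and its value returned
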
diff --git a/lib/gitlab/graphql/loaders/issuable_loader.rb b/lib/gitlab/graphql/loaders/issuable_loader.rb
new file mode 100644
index 00000000000..1cc0fbe215f
--- /dev/null
+++ b/lib/gitlab/graphql/loaders/issuable_loader.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module Loaders
+ class IssuableLoader
+ attr_reader :parent, :issuable_finder
+
+ BatchKey = Struct.new(:parent, :finder_class, :current_user)
+
+ def initialize(parent, issuable_finder)
+ @parent = parent
+ @issuable_finder = issuable_finder
+ end
+
+ def batching_find_all(&with_query)
+ if issuable_finder.params.keys == ['iids']
+ batch_load_issuables(issuable_finder.params[:iids], with_query)
+ else
+ post_process(find_all, with_query)
+ end
+ end
+
+ def find_all
+ issuable_finder.params[parent_param] = parent if parent
+
+ issuable_finder.execute
+ end
+
+ private
+
+ def parent_param
+ case parent
+ when Project
+ :project_id
+ when Group
+ :group_id
+ else
+ raise "Unexpected parent: #{parent.class}"
+ end
+ end
+
+ def post_process(query, with_query)
+ if with_query
+ with_query.call(query)
+ else
+ query
+ end
+ end
+
+ def batch_load_issuables(iids, with_query)
+ Array.wrap(iids).map { |iid| batch_load(iid, with_query) }
+ end
+
+ def batch_load(iid, with_query)
+ return if parent.nil?
+
+ BatchLoader::GraphQL
+ .for([parent_param, iid.to_s])
+ .batch(key: batch_key) do |params, loader, args|
+ batch_key = args[:key]
+ user = batch_key.current_user
+
+ params.group_by(&:first).each do |key, group|
+ iids = group.map(&:second).uniq
+ args = { key => batch_key.parent, iids: iids }
+ query = batch_key.finder_class.new(user, args).execute
+
+ post_process(query, with_query).each do |item|
+ loader.call([key, item.iid.to_s], item)
+ end
+ end
+ end
+ end
+
+ def batch_key
+ BatchKey.new(parent, issuable_finder.class, issuable_finder.current_user)
+ end
+ end
+ end
+ end
+end
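
A sketch of how a GraphQL resolver might drive the loader; the finder construction is illustrative and current_user/project are assumed to be in scope:

finder = IssuesFinder.new(current_user, iids: ['42'])   # illustrative finder
loader = Gitlab::Graphql::Loaders::IssuableLoader.new(project, finder)

# When the finder params contain nothing but iids, each lookup becomes a
# BatchLoader::GraphQL placeholder and every iid sharing the same
# (parent, finder class, user) batch key resolves in one query; any other
# params fall straight through to find_all.
loader.batching_find_all { |relation| relation.preload(:author) }
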
diff --git a/lib/gitlab/graphql/mount_mutation.rb b/lib/gitlab/graphql/mount_mutation.rb
index b10e963170a..8e507ba4531 100644
--- a/lib/gitlab/graphql/mount_mutation.rb
+++ b/lib/gitlab/graphql/mount_mutation.rb
@@ -13,6 +13,14 @@ module Gitlab
mutation: mutation_class,
**custom_kwargs
end
+
+ def mount_aliased_mutation(alias_name, mutation_class, **custom_kwargs)
+ aliased_mutation_class = Class.new(mutation_class) do
+ graphql_name alias_name
+ end
+
+ mount_mutation(aliased_mutation_class, **custom_kwargs)
+ end
end
end
end
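
A sketch of using the new helper from a mutation root type; the alias name and mutation class are illustrative:

module Types
  class MutationType < BaseObject
    include Gitlab::Graphql::MountMutation

    # Exposes the same mutation under a second GraphQL name by subclassing it
    # and overriding graphql_name, as mount_aliased_mutation does above.
    mount_aliased_mutation 'MarkAsWip', Mutations::MergeRequests::SetWip
  end
end
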
diff --git a/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb b/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb
index 6f705239fa3..6b6bb72eb31 100644
--- a/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb
+++ b/lib/gitlab/graphql/query_analyzers/logger_analyzer.rb
@@ -52,8 +52,7 @@ module Gitlab
end
def duration(time_started)
- nanoseconds = Gitlab::Metrics::System.monotonic_time - time_started
- nanoseconds * 1000000
+ Gitlab::Metrics::System.monotonic_time - time_started
end
def default_initial_values(query)
diff --git a/lib/gitlab/health_checks/probes/collection.rb b/lib/gitlab/health_checks/probes/collection.rb
index db3ef4834c2..08b6d82291e 100644
--- a/lib/gitlab/health_checks/probes/collection.rb
+++ b/lib/gitlab/health_checks/probes/collection.rb
@@ -20,6 +20,12 @@ module Gitlab
success ? 200 : 503,
status(success).merge(payload(readiness))
)
+ rescue => e
+ exception_payload = { message: "#{e.class} : #{e.message}" }
+
+ Probes::Status.new(
+ 500,
+ status(false).merge(exception_payload))
end
private
diff --git a/lib/gitlab/import/metrics.rb b/lib/gitlab/import/metrics.rb
index 76638a8cf86..2692ab2fa12 100644
--- a/lib/gitlab/import/metrics.rb
+++ b/lib/gitlab/import/metrics.rb
@@ -1,59 +1,54 @@
# frozen_string_literal: true
-# Prepend `Gitlab::Import::Metrics` to a class in order
-# to measure and emit `Gitlab::Metrics` metrics of specified methods.
-#
-# @example
-# class Importer
-# prepend Gitlab::Import::Metrics
-#
-# Gitlab::ImportExport::Metrics.measure :execute, metrics: {
-# importer_counter: {
-# type: :counter,
-# description: 'counter'
-# },
-# importer_histogram: {
-# type: :histogram,
-# labels: { importer: 'importer' },
-# description: 'histogram'
-# }
-# }
-#
-# def execute
-# ...
-# end
-# end
-#
-# Each call to `#execute` increments `importer_counter` as well as
-# measures `#execute` duration and reports histogram `importer_histogram`
module Gitlab
module Import
- module Metrics
- def self.measure(method_name, metrics:)
- define_method "#{method_name}" do |*args|
- start_time = Time.zone.now
+ class Metrics
+ IMPORT_DURATION_BUCKETS = [0.5, 1, 3, 5, 10, 60, 120, 240, 360, 720, 1440].freeze
- result = super(*args)
+ attr_reader :importer
- end_time = Time.zone.now
+ def initialize(importer, project)
+ @importer = importer
+ @project = project
+ end
+
+ def track_finished_import
+ duration = Time.zone.now - @project.created_at
+
+ duration_histogram.observe({ importer: importer }, duration)
+ projects_counter.increment
+ end
- report_measurement_metrics(metrics, end_time - start_time)
+ def projects_counter
+ @projects_counter ||= Gitlab::Metrics.counter(
+ :"#{importer}_imported_projects_total",
+ 'The number of imported projects'
+ )
+ end
+
+ def issues_counter
+ @issues_counter ||= Gitlab::Metrics.counter(
+ :"#{importer}_imported_issues_total",
+ 'The number of imported issues'
+ )
+ end
- result
- end
+ def merge_requests_counter
+ @merge_requests_counter ||= Gitlab::Metrics.counter(
+ :"#{importer}_imported_merge_requests_total",
+ 'The number of imported merge (pull) requests'
+ )
end
- def report_measurement_metrics(metrics, duration)
- metrics.each do |metric_name, metric_value|
- case metric_value[:type]
- when :counter
- Gitlab::Metrics.counter(metric_name, metric_value[:description]).increment
- when :histogram
- Gitlab::Metrics.histogram(metric_name, metric_value[:description]).observe(metric_value[:labels], duration)
- else
- nil
- end
- end
+ private
+
+ def duration_histogram
+ @duration_histogram ||= Gitlab::Metrics.histogram(
+ :"#{importer}_total_duration_seconds",
+ 'Total time spent importing projects, in seconds',
+ {},
+ IMPORT_DURATION_BUCKETS
+ )
end
end
end
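
A usage sketch for the reworked metrics class (the importer symbol is illustrative and `project` is assumed to be in scope):

metrics = Gitlab::Import::Metrics.new(:github_importer, project)

metrics.issues_counter.increment           # :github_importer_imported_issues_total
metrics.merge_requests_counter.increment   # :github_importer_imported_merge_requests_total
metrics.track_finished_import              # observes time since project.created_at and bumps the projects counter
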
diff --git a/lib/gitlab/import_export/json/streaming_serializer.rb b/lib/gitlab/import_export/json/streaming_serializer.rb
index 20f9c668b9c..05b7679e0ff 100644
--- a/lib/gitlab/import_export/json/streaming_serializer.rb
+++ b/lib/gitlab/import_export/json/streaming_serializer.rb
@@ -7,7 +7,7 @@ module Gitlab
include Gitlab::ImportExport::CommandLineUtil
BATCH_SIZE = 100
- SMALLER_BATCH_SIZE = 20
+ SMALLER_BATCH_SIZE = 2
def self.batch_size(exportable)
if Feature.enabled?(:export_reduce_relation_batch_size, exportable)
@@ -69,8 +69,16 @@ module Gitlab
key_preloads = preloads&.dig(key)
records = records.preload(key_preloads) if key_preloads
- records.find_each(batch_size: batch_size) do |record|
- items << Raw.new(record.to_json(options))
+ records.in_batches(of: batch_size) do |batch| # rubocop:disable Cop/InBatches
+ # Order each batch by its primary key to ensure
+ # consistent and predictable ordering of each exported relation,
+ # as additional `WHERE` clauses can impact the order in which data is
+ # returned by the database when no `ORDER` is specified.
+ batch = batch.reorder(batch.klass.primary_key)
+
+ batch.each do |record|
+ items << Raw.new(record.to_json(options))
+ end
end
end
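
The same batching pattern in isolation, as a sketch against a plain ActiveRecord relation (the model, filter and batch size are illustrative):

Issue.where(project_id: 1).in_batches(of: 100) do |batch|
  batch = batch.reorder(batch.klass.primary_key)   # deterministic order within each batch
  batch.each { |record| puts record.to_json(only: [:id, :title]) }
end
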
diff --git a/lib/gitlab/import_export/project/import_export.yml b/lib/gitlab/import_export/project/import_export.yml
index f0b733d7e95..aa961bd8d19 100644
--- a/lib/gitlab/import_export/project/import_export.yml
+++ b/lib/gitlab/import_export/project/import_export.yml
@@ -89,7 +89,6 @@ tree:
- :triggers
- :pipeline_schedules
- :container_expiration_policy
- - :services
- protected_branches:
- :merge_access_levels
- :push_access_levels
@@ -169,6 +168,7 @@ excluded_attributes:
- :marked_for_deletion_by_user_id
- :compliance_framework_setting
- :show_default_award_emojis
+ - :services
namespaces:
- :runners_token
- :runners_token_encrypted
@@ -261,10 +261,6 @@ excluded_attributes:
runners:
- :token
- :token_encrypted
- services:
- - :inherit_from_id
- - :instance
- - :template
error_tracking_setting:
- :encrypted_token
- :encrypted_token_iv
@@ -313,12 +309,14 @@ excluded_attributes:
- :merge_request_id
- :external_pull_request_id
- :ci_ref_id
+ - :locked
stages:
- :pipeline_id
merge_access_levels:
- :protected_branch_id
push_access_levels:
- :protected_branch_id
+ - :deploy_key_id
unprotect_access_levels:
- :protected_branch_id
create_access_levels:
@@ -353,8 +351,6 @@ methods:
- :type
statuses:
- :type
- services:
- - :type
merge_request_diff_files:
- :utf8_diff
merge_requests:
diff --git a/lib/gitlab/import_export/project/relation_factory.rb b/lib/gitlab/import_export/project/relation_factory.rb
index 3ab9f2c4bfa..ae92228276e 100644
--- a/lib/gitlab/import_export/project/relation_factory.rb
+++ b/lib/gitlab/import_export/project/relation_factory.rb
@@ -70,10 +70,8 @@ module Gitlab
private
def invalid_relation?
- # Do not create relation if it is:
- # - An unknown service
- # - A legacy trigger
- unknown_service? || legacy_trigger?
+ # Do not create relation if it is a legacy trigger
+ legacy_trigger?
end
def setup_models
@@ -137,11 +135,6 @@ module Gitlab
end
end
- def unknown_service?
- @relation_name == :services && parsed_relation_hash['type'] &&
- !Object.const_defined?(parsed_relation_hash['type'])
- end
-
def legacy_trigger?
@relation_name == :'Ci::Trigger' && @relation_hash['owner_id'].nil?
end
diff --git a/lib/gitlab/import_export/snippet_repo_restorer.rb b/lib/gitlab/import_export/snippet_repo_restorer.rb
index 31b1a37bbe1..2d0aa05fc3c 100644
--- a/lib/gitlab/import_export/snippet_repo_restorer.rb
+++ b/lib/gitlab/import_export/snippet_repo_restorer.rb
@@ -42,6 +42,8 @@ module Gitlab
snippet.repository.expire_exists_cache
raise SnippetRepositoryError, _("Invalid repository bundle for snippet with id %{snippet_id}") % { snippet_id: snippet.id }
+ else
+ Snippets::UpdateStatisticsService.new(snippet).execute
end
end
diff --git a/lib/gitlab/incident_management/pager_duty/incident_issue_description.rb b/lib/gitlab/incident_management/pager_duty/incident_issue_description.rb
new file mode 100644
index 00000000000..cd947b15154
--- /dev/null
+++ b/lib/gitlab/incident_management/pager_duty/incident_issue_description.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module IncidentManagement
+ module PagerDuty
+ class IncidentIssueDescription
+ def initialize(incident_payload)
+ @incident_payload = incident_payload
+ end
+
+ def to_s
+ markdown_line_break = " \n"
+
+ [
+ "**Incident:** #{markdown_incident}",
+ "**Incident number:** #{incident_payload['incident_number']}",
+ "**Urgency:** #{incident_payload['urgency']}",
+ "**Status:** #{incident_payload['status']}",
+ "**Incident key:** #{incident_payload['incident_key']}",
+ "**Created at:** #{markdown_incident_created_at}",
+ "**Assignees:** #{markdown_assignees.join(', ')}",
+ "**Impacted services:** #{markdown_impacted_services.join(', ')}"
+ ].join(markdown_line_break)
+ end
+
+ private
+
+ attr_reader :incident_payload
+
+ def markdown_incident
+ markdown_link(incident_payload['title'], incident_payload['url'])
+ end
+
+ def incident_created_at
+ Time.parse(incident_payload['created_at'])
+ rescue
+ Time.current.utc # PagerDuty provides time in UTC
+ end
+
+ def markdown_incident_created_at
+ incident_created_at.strftime('%d %B %Y, %-l:%M%p (%Z)')
+ end
+
+ def markdown_assignees
+ Array(incident_payload['assignees']).map do |assignee|
+ markdown_link(assignee['summary'], assignee['url'])
+ end
+ end
+
+ def markdown_impacted_services
+ Array(incident_payload['impacted_services']).map do |is|
+ markdown_link(is['summary'], is['url'])
+ end
+ end
+
+ def markdown_link(label, url)
+ return label if url.blank?
+
+ "[#{label}](#{url})"
+ end
+ end
+ end
+ end
+end
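
A sketch of rendering an issue description from a made-up, truncated PagerDuty payload (all values below are illustrative):

payload = {
  'title'             => 'Service is down',
  'url'               => 'https://mycompany.pagerduty.com/incidents/PQR123',
  'incident_number'   => 42,
  'urgency'           => 'high',
  'status'            => 'triggered',
  'incident_key'      => 'abc-123',
  'created_at'        => '2020-07-09T11:56:28Z',
  'assignees'         => [{ 'summary' => 'Laura Haley', 'url' => 'https://mycompany.pagerduty.com/users/L123' }],
  'impacted_services' => [{ 'summary' => 'Production', 'url' => 'https://mycompany.pagerduty.com/services/S456' }]
}

Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription.new(payload).to_s
# => "**Incident:** [Service is down](https://...)  \n**Incident number:** 42  \n..." joined with markdown line breaks
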
diff --git a/lib/gitlab/instrumentation/elasticsearch_transport.rb b/lib/gitlab/instrumentation/elasticsearch_transport.rb
index deee0127c0c..56179eda22d 100644
--- a/lib/gitlab/instrumentation/elasticsearch_transport.rb
+++ b/lib/gitlab/instrumentation/elasticsearch_transport.rb
@@ -5,8 +5,10 @@ require 'elasticsearch-transport'
module Gitlab
module Instrumentation
module ElasticsearchTransportInterceptor
- def perform_request(*args)
+ def perform_request(method, path, params = {}, body = nil, headers = nil)
start = Time.now
+ headers = (headers || {})
+ .reverse_merge({ 'X-Opaque-Id': Labkit::Correlation::CorrelationId.current_or_new_id })
super
ensure
if ::Gitlab::SafeRequestStore.active?
@@ -14,7 +16,7 @@ module Gitlab
::Gitlab::Instrumentation::ElasticsearchTransport.increment_request_count
::Gitlab::Instrumentation::ElasticsearchTransport.add_duration(duration)
- ::Gitlab::Instrumentation::ElasticsearchTransport.add_call_details(duration, args)
+ ::Gitlab::Instrumentation::ElasticsearchTransport.add_call_details(duration, method, path, params, body)
end
end
end
@@ -47,14 +49,14 @@ module Gitlab
::Gitlab::SafeRequestStore[ELASTICSEARCH_CALL_DURATION] += duration
end
- def self.add_call_details(duration, args)
+ def self.add_call_details(duration, method, path, params, body)
return unless Gitlab::PerformanceBar.enabled_for_request?
detail_store << {
- method: args[0],
- path: args[1],
- params: args[2],
- body: args[3],
+ method: method,
+ path: path,
+ params: params,
+ body: body,
duration: duration,
backtrace: ::Gitlab::BacktraceCleaner.clean_backtrace(caller)
}
diff --git a/lib/gitlab/instrumentation/redis.rb b/lib/gitlab/instrumentation/redis.rb
index 82b4701872f..4a85a313fd7 100644
--- a/lib/gitlab/instrumentation/redis.rb
+++ b/lib/gitlab/instrumentation/redis.rb
@@ -5,9 +5,9 @@ module Gitlab
# Aggregates Redis measurements from different request storage sources.
class Redis
ActionCable = Class.new(RedisBase)
- Cache = Class.new(RedisBase)
+ Cache = Class.new(RedisBase).enable_redis_cluster_validation
Queues = Class.new(RedisBase)
- SharedState = Class.new(RedisBase)
+ SharedState = Class.new(RedisBase).enable_redis_cluster_validation
STORAGES = [ActionCable, Cache, Queues, SharedState].freeze
diff --git a/lib/gitlab/instrumentation/redis_base.rb b/lib/gitlab/instrumentation/redis_base.rb
index 012543e1645..1df899747e0 100644
--- a/lib/gitlab/instrumentation/redis_base.rb
+++ b/lib/gitlab/instrumentation/redis_base.rb
@@ -25,9 +25,6 @@ module Gitlab
# redis-rb passes an array (e.g. [[:get, key]])
return unless args.length == 1
- # TODO: Add information about current Redis client
- # being instrumented.
- # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/316.
detail_store << {
cmd: args.first,
duration: duration,
@@ -71,6 +68,40 @@ module Gitlab
query_time.round(::Gitlab::InstrumentationHelper::DURATION_PRECISION)
end
+ def redis_cluster_validate!(command)
+ ::Gitlab::Instrumentation::RedisClusterValidator.validate!(command) if @redis_cluster_validation
+ end
+
+ def enable_redis_cluster_validation
+ @redis_cluster_validation = true
+
+ self
+ end
+
+ def instance_count_request
+ @request_counter ||= Gitlab::Metrics.counter(:gitlab_redis_client_requests_total, 'Client side Redis request count, per Redis server')
+ @request_counter.increment({ storage: storage_key })
+ end
+
+ def instance_count_exception(ex)
+ # This metric is meant to give a client side view of how the Redis
+ # server is doing. Redis itself does not expose error counts. This
+ # metric can be used for Redis alerting and service health monitoring.
+ @exception_counter ||= Gitlab::Metrics.counter(:gitlab_redis_client_exceptions_total, 'Client side Redis exception count, per Redis server, per exception class')
+ @exception_counter.increment({ storage: storage_key, exception: ex.class.to_s })
+ end
+
+ def instance_observe_duration(duration)
+ @request_latency_histogram ||= Gitlab::Metrics.histogram(
+ :gitlab_redis_client_requests_duration_seconds,
+ 'Client side Redis request latency, per Redis server, excluding blocking commands',
+ {},
+ [0.005, 0.01, 0.1, 0.5]
+ )
+
+ @request_latency_histogram.observe({ storage: storage_key }, duration)
+ end
+
private
def request_count_key
diff --git a/lib/gitlab/instrumentation/redis_cluster_validator.rb b/lib/gitlab/instrumentation/redis_cluster_validator.rb
new file mode 100644
index 00000000000..6800e5667f6
--- /dev/null
+++ b/lib/gitlab/instrumentation/redis_cluster_validator.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'rails'
+require 'redis'
+
+module Gitlab
+ module Instrumentation
+ module RedisClusterValidator
+ # Generate with:
+ #
+ # Gitlab::Redis::Cache
+ # .with { |redis| redis.call('COMMAND') }
+ # .select { |command| command[3] != command[4] }
+ # .map { |command| [command[0].upcase, { first: command[3], last: command[4], step: command[5] }] }
+ # .sort_by(&:first)
+ # .to_h
+ #
+ MULTI_KEY_COMMANDS = {
+ "BITOP" => { first: 2, last: -1, step: 1 },
+ "BLPOP" => { first: 1, last: -2, step: 1 },
+ "BRPOP" => { first: 1, last: -2, step: 1 },
+ "BRPOPLPUSH" => { first: 1, last: 2, step: 1 },
+ "BZPOPMAX" => { first: 1, last: -2, step: 1 },
+ "BZPOPMIN" => { first: 1, last: -2, step: 1 },
+ "DEL" => { first: 1, last: -1, step: 1 },
+ "EXISTS" => { first: 1, last: -1, step: 1 },
+ "MGET" => { first: 1, last: -1, step: 1 },
+ "MSET" => { first: 1, last: -1, step: 2 },
+ "MSETNX" => { first: 1, last: -1, step: 2 },
+ "PFCOUNT" => { first: 1, last: -1, step: 1 },
+ "PFMERGE" => { first: 1, last: -1, step: 1 },
+ "RENAME" => { first: 1, last: 2, step: 1 },
+ "RENAMENX" => { first: 1, last: 2, step: 1 },
+ "RPOPLPUSH" => { first: 1, last: 2, step: 1 },
+ "SDIFF" => { first: 1, last: -1, step: 1 },
+ "SDIFFSTORE" => { first: 1, last: -1, step: 1 },
+ "SINTER" => { first: 1, last: -1, step: 1 },
+ "SINTERSTORE" => { first: 1, last: -1, step: 1 },
+ "SMOVE" => { first: 1, last: 2, step: 1 },
+ "SUNION" => { first: 1, last: -1, step: 1 },
+ "SUNIONSTORE" => { first: 1, last: -1, step: 1 },
+ "UNLINK" => { first: 1, last: -1, step: 1 },
+ "WATCH" => { first: 1, last: -1, step: 1 }
+ }.freeze
+
+ CrossSlotError = Class.new(StandardError)
+
+ class << self
+ def validate!(command)
+ return unless Rails.env.development? || Rails.env.test?
+ return if allow_cross_slot_commands?
+
+ command_name = command.first.to_s.upcase
+ argument_positions = MULTI_KEY_COMMANDS[command_name]
+
+ return unless argument_positions
+
+ arguments = command.flatten[argument_positions[:first]..argument_positions[:last]]
+
+ key_slots = arguments.each_slice(argument_positions[:step]).map do |args|
+ key_slot(args.first)
+ end
+
+ unless key_slots.uniq.length == 1
+ raise CrossSlotError.new("Redis command #{command_name} arguments hash to different slots. See https://docs.gitlab.com/ee/development/redis.html#multi-key-commands")
+ end
+ end
+
+ # Keep track of the call stack to allow nested calls to work.
+ def allow_cross_slot_commands
+ Thread.current[:allow_cross_slot_commands] ||= 0
+ Thread.current[:allow_cross_slot_commands] += 1
+
+ yield
+ ensure
+ Thread.current[:allow_cross_slot_commands] -= 1
+ end
+
+ private
+
+ def allow_cross_slot_commands?
+ Thread.current[:allow_cross_slot_commands].to_i > 0
+ end
+
+ def key_slot(key)
+ ::Redis::Cluster::KeySlotConverter.convert(extract_hash_tag(key))
+ end
+
+ # This is almost identical to Redis::Cluster::Command#extract_hash_tag,
+ # except that it returns the original string if no hash tag is found.
+ #
+ def extract_hash_tag(key)
+ s = key.index('{')
+
+ return key unless s
+
+ e = key.index('}', s + 1)
+
+ return key unless e
+
+ key[s + 1..e - 1]
+ end
+ end
+ end
+ end
+end
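
A behaviour sketch for the validator above; it is only enforced in development and test, and the keys are illustrative:

Gitlab::Instrumentation::RedisClusterValidator.validate!([:mget, 'cache:{ns}:a', 'cache:{ns}:b'])
# => nil, the shared "{ns}" hash tag maps both keys to the same slot

Gitlab::Instrumentation::RedisClusterValidator.validate!([:mget, 'key-a', 'key-b'])
# => raises CrossSlotError, the keys hash to different slots

Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
  Gitlab::Redis::Cache.with { |redis| redis.mget('key-a', 'key-b') }   # explicitly allowed inside this block
end
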
diff --git a/lib/gitlab/instrumentation/redis_interceptor.rb b/lib/gitlab/instrumentation/redis_interceptor.rb
index a36aade59c3..b5a5f8fd984 100644
--- a/lib/gitlab/instrumentation/redis_interceptor.rb
+++ b/lib/gitlab/instrumentation/redis_interceptor.rb
@@ -5,13 +5,26 @@ require 'redis'
module Gitlab
module Instrumentation
module RedisInterceptor
+ APDEX_EXCLUDE = %w[brpop blpop brpoplpush bzpopmin bzpopmax xread xreadgroup].freeze
+
def call(*args, &block)
- start = Time.now
+ start = Time.now # must come first so that 'start' is always defined
+ instrumentation_class.instance_count_request
+ instrumentation_class.redis_cluster_validate!(args.first)
+
super(*args, &block)
+ rescue ::Redis::BaseError => ex
+ instrumentation_class.instance_count_exception(ex)
+ raise ex
ensure
- duration = (Time.now - start)
+ duration = Time.now - start
+
+ unless APDEX_EXCLUDE.include?(command_from_args(args))
+ instrumentation_class.instance_observe_duration(duration)
+ end
if ::RequestStore.active?
+ # These metrics measure total Redis usage per Rails request / job.
instrumentation_class.increment_request_count
instrumentation_class.add_duration(duration)
instrumentation_class.add_call_details(duration, args)
@@ -77,6 +90,12 @@ module Gitlab
def instrumentation_class
@options[:instrumentation_class] # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
+
+ def command_from_args(args)
+ command = args[0]
+ command = command[0] if command.is_a?(Array)
+ command.to_s.downcase
+ end
end
end
end
diff --git a/lib/gitlab/issuable_metadata.rb b/lib/gitlab/issuable_metadata.rb
index e946fc00c4d..f96c937aec3 100644
--- a/lib/gitlab/issuable_metadata.rb
+++ b/lib/gitlab/issuable_metadata.rb
@@ -7,11 +7,13 @@ module Gitlab
# data structure to store issuable meta data like
# upvotes, downvotes, notes and closing merge requests counts for issues and merge requests
# this avoids N+1 queries when loading issuable collections on the frontend
- IssuableMeta = Struct.new(:upvotes, :downvotes, :user_notes_count, :mrs_count) do
- def merge_requests_count(user = nil)
- mrs_count
- end
- end
+ IssuableMeta = Struct.new(
+ :upvotes,
+ :downvotes,
+ :user_notes_count,
+ :merge_requests_count,
+ :blocking_issues_count # EE-ONLY
+ )
attr_reader :current_user, :issuable_collection
@@ -95,3 +97,5 @@ module Gitlab
end
end
end
+
+Gitlab::IssuableMetadata.prepend_if_ee('EE::Gitlab::IssuableMetadata')
diff --git a/lib/gitlab/jira_import/issue_serializer.rb b/lib/gitlab/jira_import/issue_serializer.rb
index df57680073e..43280606bb6 100644
--- a/lib/gitlab/jira_import/issue_serializer.rb
+++ b/lib/gitlab/jira_import/issue_serializer.rb
@@ -52,7 +52,9 @@ module Gitlab
end
def map_user_id(jira_user)
- Gitlab::JiraImport::UserMapper.new(project, jira_user).execute&.id
+ return unless jira_user&.dig('accountId')
+
+ Gitlab::JiraImport.get_user_mapping(project.id, jira_user['accountId'])
end
def reporter
diff --git a/lib/gitlab/jira_import/user_mapper.rb b/lib/gitlab/jira_import/user_mapper.rb
deleted file mode 100644
index 208ee49b724..00000000000
--- a/lib/gitlab/jira_import/user_mapper.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module JiraImport
- class UserMapper
- include ::Gitlab::Utils::StrongMemoize
-
- def initialize(project, jira_user)
- @project = project
- @jira_user = jira_user
- end
-
- def execute
- return unless jira_user
-
- email = jira_user['emailAddress']
-
- # We also include emails that are not yet confirmed
- users = User.by_any_email(email).to_a
-
- user = users.first
-
- # this event should never happen but we should log it in case we have invalid data
- log_user_mapping_message('Multiple users found for an email address', email) if users.count > 1
-
- unless project.project_member(user) || project.group&.group_member(user)
- log_user_mapping_message('Jira user not found', email)
-
- return
- end
-
- user
- end
-
- private
-
- attr_reader :project, :jira_user, :params
-
- def log_user_mapping_message(message, email)
- logger.info(
- project_id: project.id,
- project_path: project.full_path,
- user_email: email,
- message: message
- )
- end
-
- def logger
- @logger ||= Gitlab::Import::Logger.build
- end
- end
- end
-end
diff --git a/lib/gitlab/json.rb b/lib/gitlab/json.rb
index 5b6689dbefe..21f837c58bb 100644
--- a/lib/gitlab/json.rb
+++ b/lib/gitlab/json.rb
@@ -1,59 +1,224 @@
# frozen_string_literal: true
+# This is a GitLab-specific JSON interface. You should use this instead
+# of using `JSON` directly. This allows us to swap the adapter and handle
+# legacy issues.
+
module Gitlab
module Json
INVALID_LEGACY_TYPES = [String, TrueClass, FalseClass].freeze
class << self
- def parse(string, *args, **named_args)
- legacy_mode = legacy_mode_enabled?(named_args.delete(:legacy_mode))
- data = adapter.parse(string, *args, **named_args)
+ # Parse a string and convert it to a Ruby object
+ #
+ # @param string [String] the JSON string to convert to Ruby objects
+ # @param opts [Hash] an options hash in the standard JSON gem format
+ # @return [Boolean, String, Array, Hash]
+ # @raise [JSON::ParserError] raised if parsing fails
+ def parse(string, opts = {})
+ # First we should ensure this really is a string, not some other
+ # type which purports to be a string. This handles some legacy
+ # usage of the JSON class.
+ string = string.to_s unless string.is_a?(String)
+
+ legacy_mode = legacy_mode_enabled?(opts.delete(:legacy_mode))
+ data = adapter_load(string, opts)
handle_legacy_mode!(data) if legacy_mode
data
end
- def parse!(string, *args, **named_args)
- legacy_mode = legacy_mode_enabled?(named_args.delete(:legacy_mode))
- data = adapter.parse!(string, *args, **named_args)
+ alias_method :parse!, :parse
+
+ # Restricted method for converting a Ruby object to JSON. If you
+ # need to pass options to this, you should use `.generate` instead,
+ # as the underlying implementation of this varies wildly based on
+ # the adapter in use.
+ #
+ # @param object [Object] the object to convert to JSON
+ # @return [String]
+ def dump(object)
+ adapter_dump(object)
+ end
- handle_legacy_mode!(data) if legacy_mode
+ # Generates JSON for an object. In Oj this takes fewer options than .dump,
+ # in the JSON gem this is the only method which takes an options argument.
+ #
+ # @param object [Hash, Array, Object] must be hash, array, or an object that responds to .to_h or .to_json
+ # @param opts [Hash] an options hash with fewer supported settings than .dump
+ # @return [String]
+ def generate(object, opts = {})
+ adapter_generate(object, opts)
+ end
- data
+ # Generates JSON for an object and makes it look purdy
+ #
+ # The Oj variant in this looks seriously weird but these are the settings
+ # needed to emulate the style generated by the JSON gem.
+ #
+ # NOTE: This currently ignores Oj, because Oj doesn't generate identical
+ # formatting, issue: https://github.com/ohler55/oj/issues/608
+ #
+ # @param object [Hash, Array, Object] must be hash, array, or an object that responds to .to_h or .to_json
+ # @param opts [Hash] an options hash with fewer supported settings than .dump
+ # @return [String]
+ def pretty_generate(object, opts = {})
+ ::JSON.pretty_generate(object, opts)
end
- def dump(*args)
- adapter.dump(*args)
+ # Feature detection for using Oj instead of the `json` gem.
+ #
+ # @return [Boolean]
+ def enable_oj?
+ return false unless feature_table_exists?
+
+ Feature.enabled?(:oj_json, default_enabled: true)
end
- def generate(*args)
- adapter.generate(*args)
+ private
+
+ # Convert JSON string into Ruby through toggleable adapters.
+ #
+ # Must rescue adapter-specific errors and return `parser_error`, and
+ # must also standardize the options hash to support each adapter as
+ # they all take different options.
+ #
+ # @param string [String] the JSON string to convert to Ruby objects
+ # @param opts [Hash] an options hash in the standard JSON gem format
+ # @return [Boolean, String, Array, Hash]
+ # @raise [JSON::ParserError]
+ def adapter_load(string, *args, **opts)
+ opts = standardize_opts(opts)
+
+ if enable_oj?
+ Oj.load(string, opts)
+ else
+ ::JSON.parse(string, opts)
+ end
+ rescue Oj::ParseError, Encoding::UndefinedConversionError => ex
+ raise parser_error.new(ex)
end
- def pretty_generate(*args)
- adapter.pretty_generate(*args)
+ # Take a Ruby object and convert it to a string. This method varies
+ # based on the underlying JSON interpreter. Oj treats this like JSON
+ # treats `.generate`. JSON.dump takes no options.
+ #
+ # This supports these options to ensure this difference is recorded here,
+ # as it's very surprising. The public interface is more restrictive to
+ # prevent adapter-specific options being passed.
+ #
+ # @overload adapter_dump(object, opts)
+ # @param object [Object] the object to convert to JSON
+ # @param opts [Hash] options as named arguments, only supported by Oj
+ #
+ # @overload adapter_dump(object, anIO, limit)
+ # @param object [Object] the object, will have JSON.generate called on it
+ # @param anIO [Object] an IO-like object that responds to .write, default nil
+ # @param limit [Fixnum] the nested array/object limit, default nil
+ # @raise [ArgumentError] when depth limit exceeded
+ #
+ # @return [String]
+ def adapter_dump(object, *args, **opts)
+ if enable_oj?
+ Oj.dump(object, opts)
+ else
+ ::JSON.dump(object, *args)
+ end
end
- private
+ # Generates JSON for an object but with fewer options, using toggleable adapters.
+ #
+ # @param object [Hash, Array, Object] must be hash, array, or an object that responds to .to_h or .to_json
+ # @param opts [Hash] an options hash with fewer supported settings than .dump
+ # @return [String]
+ def adapter_generate(object, opts = {})
+ opts = standardize_opts(opts)
+
+ if enable_oj?
+ Oj.generate(object, opts)
+ else
+ ::JSON.generate(object, opts)
+ end
+ end
+
+ # Take a JSON standard options hash and standardize it to work across adapters
+ # An example of this is Oj taking :symbol_keys instead of :symbolize_names
+ #
+ # @param opts [Hash, Nil]
+ # @return [Hash]
+ def standardize_opts(opts)
+ opts ||= {}
- def adapter
- ::JSON
+ if enable_oj?
+ opts[:mode] = :rails
+ opts[:symbol_keys] = opts[:symbolize_keys] || opts[:symbolize_names]
+ end
+
+ opts
end
+ # The standard parser error we should be returning. Defined in a method
+ # so we can potentially override it later.
+ #
+ # @return [JSON::ParserError]
def parser_error
::JSON::ParserError
end
+ # @param [Nil, Boolean] an extracted :legacy_mode key from the opts hash
+ # @return [Boolean]
def legacy_mode_enabled?(arg_value)
arg_value.nil? ? false : arg_value
end
+ # If legacy mode is enabled, we need to raise an error depending on the values
+ # provided in the string. This will be deprecated.
+ #
+ # @param data [Boolean, String, Array, Hash, Object]
+ # @return [Boolean, String, Array, Hash, Object]
+ # @raise [JSON::ParserError]
def handle_legacy_mode!(data)
+ return data unless feature_table_exists?
return data unless Feature.enabled?(:json_wrapper_legacy_mode, default_enabled: true)
raise parser_error if INVALID_LEGACY_TYPES.any? { |type| data.is_a?(type) }
end
+
+ # There are a variety of database errors possible when checking the feature
+ # flags at the wrong time during boot, e.g. during migrations. We don't care
+ # about these errors; we just need to ensure that we skip feature detection
+ # if they would fail.
+ #
+ # @return [Boolean]
+ def feature_table_exists?
+ Feature::FlipperFeature.table_exists?
+ rescue
+ false
+ end
+ end
+
+ # GrapeFormatter is a JSON formatter for the Grape API.
+ # This is set in lib/api/api.rb
+
+ class GrapeFormatter
+ # Convert an object to JSON.
+ #
+ # This will default to the built-in Grape formatter if either :oj_json or :grape_gitlab_json
+ # flags are disabled.
+ #
+ # The `env` param is ignored because it's not needed in either our formatter or Grape's,
+ # but it is passed through for consistency.
+ #
+ # @param object [Object]
+ # @return [String]
+ def self.call(object, env = nil)
+ if Gitlab::Json.enable_oj? && Feature.enabled?(:grape_gitlab_json, default_enabled: true)
+ Gitlab::Json.dump(object)
+ else
+ Grape::Formatter::Json.call(object, env)
+ end
+ end
end
end
end
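
A usage sketch of the interface documented above (the values are illustrative; behaviour is the same with either adapter):

Gitlab::Json.parse('{"id":1}')                          # => { "id" => 1 }
Gitlab::Json.parse('{"id":1}', symbolize_names: true)   # => { id: 1 }, standardize_opts maps the option for Oj
Gitlab::Json.parse('true', legacy_mode: true)           # raises JSON::ParserError while json_wrapper_legacy_mode is enabled (its default)
Gitlab::Json.dump(id: 1)                                # => "{\"id\":1}"
Gitlab::Json.pretty_generate(id: 1)                     # always the JSON gem formatter, per the Oj note above
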
diff --git a/lib/gitlab/json_logger.rb b/lib/gitlab/json_logger.rb
index ab34fb03158..3a74df8dc8f 100644
--- a/lib/gitlab/json_logger.rb
+++ b/lib/gitlab/json_logger.rb
@@ -19,7 +19,7 @@ module Gitlab
data.merge!(message)
end
- data.to_json + "\n"
+ Gitlab::Json.dump(data) + "\n"
end
end
end
diff --git a/lib/gitlab/kubernetes/helm.rb b/lib/gitlab/kubernetes/helm.rb
index 9507f7bc117..39bd8d5a01f 100644
--- a/lib/gitlab/kubernetes/helm.rb
+++ b/lib/gitlab/kubernetes/helm.rb
@@ -3,7 +3,7 @@
module Gitlab
module Kubernetes
module Helm
- HELM_VERSION = '2.16.6'
+ HELM_VERSION = '2.16.9'
KUBECTL_VERSION = '1.13.12'
NAMESPACE = 'gitlab-managed-apps'
NAMESPACE_LABELS = { 'app.gitlab.com/managed_by' => :gitlab }.freeze
diff --git a/lib/gitlab/kubernetes/node.rb b/lib/gitlab/kubernetes/node.rb
new file mode 100644
index 00000000000..bd765ef3852
--- /dev/null
+++ b/lib/gitlab/kubernetes/node.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Kubernetes
+ class Node
+ def initialize(cluster)
+ @cluster = cluster
+ end
+
+ def all
+ nodes.map do |node|
+ attributes = node(node)
+ attributes.merge(node_metrics(node))
+ end
+ end
+
+ private
+
+ attr_reader :cluster
+
+ def nodes_from_cluster
+ graceful_request { cluster.kubeclient.get_nodes }
+ end
+
+ def nodes_metrics_from_cluster
+ graceful_request { cluster.kubeclient.metrics_client.get_nodes }
+ end
+
+ def nodes
+ @nodes ||= nodes_from_cluster[:response].to_a
+ end
+
+ def nodes_metrics
+ @nodes_metrics ||= nodes_metrics_from_cluster[:response].to_a
+ end
+
+ def node_metrics_from_node(node)
+ nodes_metrics.find do |node_metric|
+ node_metric.metadata.name == node.metadata.name
+ end
+ end
+
+ def graceful_request(&block)
+ ::Gitlab::Kubernetes::KubeClient.graceful_request(cluster.id, &block)
+ end
+
+ def node(node)
+ {
+ 'metadata' => {
+ 'name' => node.metadata.name
+ },
+ 'status' => {
+ 'capacity' => {
+ 'cpu' => node.status.capacity.cpu,
+ 'memory' => node.status.capacity.memory
+ },
+ 'allocatable' => {
+ 'cpu' => node.status.allocatable.cpu,
+ 'memory' => node.status.allocatable.memory
+ }
+ }
+ }
+ end
+
+ def node_metrics(node)
+ node_metrics = node_metrics_from_node(node)
+ return {} unless node_metrics
+
+ {
+ 'usage' => {
+ 'cpu' => node_metrics.usage.cpu,
+ 'memory' => node_metrics.usage.memory
+ }
+ }
+ end
+ end
+ end
+end
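
A sketch of consuming the new node collection; the cluster lookup is illustrative, and the values mirror what Kubernetes reports:

cluster = Clusters::Cluster.find(1)   # hypothetical cluster with a configured kubeclient
Gitlab::Kubernetes::Node.new(cluster).all
# => [{ 'metadata' => { 'name' => 'node-1' },
#       'status'   => { 'capacity'    => { 'cpu' => '2', 'memory' => '7657228Ki' },
#                       'allocatable' => { 'cpu' => '1930m', 'memory' => '5777164Ki' } },
#       'usage'    => { 'cpu' => '144208668n', 'memory' => '1789048Ki' } }]
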
diff --git a/lib/gitlab/lograge/custom_options.rb b/lib/gitlab/lograge/custom_options.rb
index 17a36c292c0..e6dd87a8bec 100644
--- a/lib/gitlab/lograge/custom_options.rb
+++ b/lib/gitlab/lograge/custom_options.rb
@@ -20,8 +20,6 @@ module Gitlab
username: event.payload[:username],
ua: event.payload[:ua]
}
- add_db_counters!(payload)
-
payload.merge!(event.payload[:metadata]) if event.payload[:metadata]
::Gitlab::InstrumentationHelper.add_instrumentation_data(payload)
@@ -46,16 +44,6 @@ module Gitlab
payload
end
-
- def self.add_db_counters!(payload)
- current_transaction = Gitlab::Metrics::Transaction.current
- if current_transaction
- payload[:db_count] = current_transaction.get(:db_count, :counter).to_i
- payload[:db_write_count] = current_transaction.get(:db_write_count, :counter).to_i
- payload[:db_cached_count] = current_transaction.get(:db_cached_count, :counter).to_i
- end
- end
- private_class_method :add_db_counters!
end
end
end
diff --git a/lib/gitlab/marginalia/comment.rb b/lib/gitlab/marginalia/comment.rb
index a0eee823763..d5dae5ef4b3 100644
--- a/lib/gitlab/marginalia/comment.rb
+++ b/lib/gitlab/marginalia/comment.rb
@@ -26,9 +26,9 @@ module Gitlab
job = ::Marginalia::Comment.marginalia_job
# We are using 'Marginalia::SidekiqInstrumentation' which does not support 'ActiveJob::Base'.
- # Gitlab also uses 'ActionMailer::DeliveryJob' which inherits from ActiveJob::Base.
+ # Gitlab also uses 'ActionMailer::MailDeliveryJob' which inherits from ActiveJob::Base.
# So the condition below is used to return metadata for such jobs.
- if job && job.is_a?(ActionMailer::DeliveryJob)
+ if job.is_a?(ActionMailer::MailDeliveryJob) || job.is_a?(ActionMailer::DeliveryJob)
{
"class" => job.arguments.first,
"jid" => job.job_id
diff --git a/lib/gitlab/markdown_cache/redis/extension.rb b/lib/gitlab/markdown_cache/redis/extension.rb
index af3237f4ba6..add71fa120e 100644
--- a/lib/gitlab/markdown_cache/redis/extension.rb
+++ b/lib/gitlab/markdown_cache/redis/extension.rb
@@ -22,16 +22,32 @@ module Gitlab
end
end
- private
+ prepended do
+ def self.preload_markdown_cache!(objects)
+ fields = Gitlab::MarkdownCache::Redis::Store.bulk_read(objects)
- def save_markdown(updates)
- markdown_store.save(updates)
+ objects.each do |object|
+ fields[object.cache_key].value.each do |field_name, value|
+ object.write_markdown_field(field_name, value)
+ end
+ end
+ end
end
def write_markdown_field(field_name, value)
+ # The value read from redis is a string, so we're converting it back
+ # to an int.
+ value = value.to_i if field_name == :cached_markdown_version
+
instance_variable_set("@#{field_name}", value)
end
+ private
+
+ def save_markdown(updates)
+ markdown_store.save(updates)
+ end
+
def markdown_field_changed?(field_name)
false
end
diff --git a/lib/gitlab/markdown_cache/redis/store.rb b/lib/gitlab/markdown_cache/redis/store.rb
index 0f954404808..5a8efa34097 100644
--- a/lib/gitlab/markdown_cache/redis/store.rb
+++ b/lib/gitlab/markdown_cache/redis/store.rb
@@ -6,6 +6,20 @@ module Gitlab
class Store
EXPIRES_IN = 1.day
+ def self.bulk_read(subjects)
+ results = {}
+
+ Gitlab::Redis::Cache.with do |r|
+ r.pipelined do
+ subjects.each do |subject|
+ results[subject.cache_key] = new(subject).read
+ end
+ end
+ end
+
+ results
+ end
+
def initialize(subject)
@subject = subject
@loaded = false
@@ -23,13 +37,9 @@ module Gitlab
def read
@loaded = true
- results = Gitlab::Redis::Cache.with do |r|
+ Gitlab::Redis::Cache.with do |r|
r.mapped_hmget(markdown_cache_key, *fields)
end
- # The value read from redis is a string, so we're converting it back
- # to an int.
- results[:cached_markdown_version] = results[:cached_markdown_version].to_i
- results
end
def loaded?
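
A sketch of the new pipelined bulk read; `subjects` is an illustrative collection of objects that cache their markdown in Redis (each must respond to #cache_key):

fields = Gitlab::MarkdownCache::Redis::Store.bulk_read(subjects)

# Each entry is a Redis::Future because the reads run inside one pipelined
# round trip — hence the .value call in preload_markdown_cache! above.
fields[subjects.first.cache_key].value
# => a hash of markdown cache fields to string values; write_markdown_field
#    converts cached_markdown_version back to an Integer
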
diff --git a/lib/gitlab/metrics/background_transaction.rb b/lib/gitlab/metrics/background_transaction.rb
index fe1722b1095..7b05ae29b02 100644
--- a/lib/gitlab/metrics/background_transaction.rb
+++ b/lib/gitlab/metrics/background_transaction.rb
@@ -9,7 +9,7 @@ module Gitlab
end
def labels
- { controller: @worker_class.name, action: 'perform' }
+ { controller: @worker_class.name, action: 'perform', feature_category: @worker_class.try(:get_feature_category).to_s }
end
end
end
diff --git a/lib/gitlab/metrics/dashboard/errors.rb b/lib/gitlab/metrics/dashboard/errors.rb
index 264ea0488e7..07ddd315bcc 100644
--- a/lib/gitlab/metrics/dashboard/errors.rb
+++ b/lib/gitlab/metrics/dashboard/errors.rb
@@ -20,20 +20,20 @@ module Gitlab
when DashboardProcessingError
error(error.message, :unprocessable_entity)
when NOT_FOUND_ERROR
- error("#{dashboard_path} could not be found.", :not_found)
+ error(_("%{dashboard_path} could not be found.") % { dashboard_path: dashboard_path }, :not_found)
when PanelNotFoundError
error(error.message, :not_found)
when ::Grafana::Client::Error
error(error.message, :service_unavailable)
when MissingIntegrationError
- error('Proxy support for this API is not available currently', :bad_request)
+ error(_('Proxy support for this API is not available currently'), :bad_request)
else
raise error
end
end
def panels_not_found!(opts)
- raise PanelNotFoundError.new("No panels matching properties #{opts}")
+ raise PanelNotFoundError.new(_("No panels matching properties %{opts}") % { opts: opts })
end
end
end
diff --git a/lib/gitlab/metrics/dashboard/finder.rb b/lib/gitlab/metrics/dashboard/finder.rb
index d80985e0a0e..5e2d78e10a4 100644
--- a/lib/gitlab/metrics/dashboard/finder.rb
+++ b/lib/gitlab/metrics/dashboard/finder.rb
@@ -7,6 +7,19 @@ module Gitlab
module Metrics
module Dashboard
class Finder
+ # Dashboards that should not be part of the list of all dashboards
+ # displayed on the metrics dashboard page.
+ PREDEFINED_DASHBOARD_EXCLUSION_LIST = [
+ # This dashboard is only useful in the self monitoring project.
+ ::Metrics::Dashboard::SelfMonitoringDashboardService,
+
+ # This dashboard is displayed on the K8s cluster settings health page.
+ ::Metrics::Dashboard::ClusterDashboardService,
+
+ # This dashboard is not yet ready for the world.
+ ::Metrics::Dashboard::PodDashboardService
+ ].freeze
+
class << self
# Returns a formatted dashboard packed with DB info.
# @param project [Project]
@@ -67,12 +80,32 @@ module Gitlab
def find_all_paths_from_source(project)
Gitlab::Metrics::Dashboard::Cache.delete_all!
- default_dashboard_path(project)
- .+ project_service.all_dashboard_paths(project)
+ user_facing_dashboard_services(project).flat_map do |service|
+ service.all_dashboard_paths(project)
+ end
end
private
+ def user_facing_dashboard_services(project)
+ predefined_dashboard_services_for(project) + [project_service]
+ end
+
+ def predefined_dashboard_services_for(project)
+ # Only list the self monitoring dashboard on the self monitoring project,
+ # since it is the only dashboard (at time of writing) that shows data
+ # about GitLab itself.
+ if project.self_monitoring?
+ return [self_monitoring_service]
+ end
+
+ predefined_dashboard_services
+ end
+
+ def predefined_dashboard_services
+ ::Metrics::Dashboard::PredefinedDashboardService.descendants - PREDEFINED_DASHBOARD_EXCLUSION_LIST
+ end
+
def system_service
::Metrics::Dashboard::SystemDashboardService
end
@@ -85,14 +118,6 @@ module Gitlab
::Metrics::Dashboard::SelfMonitoringDashboardService
end
- def default_dashboard_path(project)
- if project.self_monitoring?
- self_monitoring_service.all_dashboard_paths(project)
- else
- system_service.all_dashboard_paths(project)
- end
- end
-
def service_for(options)
Gitlab::Metrics::Dashboard::ServiceSelector.call(options)
end
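
The new lookup boils down to "every predefined dashboard service minus an explicit exclusion list". A standalone sketch of that pattern (class names are illustrative; the real code uses ActiveSupport's `Class#descendants` rather than ObjectSpace):

class PredefinedDashboardService; end
class SystemDashboardService         < PredefinedDashboardService; end
class PodHealthDashboardService      < PredefinedDashboardService; end
class SelfMonitoringDashboardService < PredefinedDashboardService; end

EXCLUSION_LIST = [SelfMonitoringDashboardService].freeze

def predefined_dashboard_services
  descendants = ObjectSpace.each_object(Class).select { |k| k < PredefinedDashboardService }
  descendants - EXCLUSION_LIST
end

puts predefined_dashboard_services.map(&:name).inspect
# => ["SystemDashboardService", "PodHealthDashboardService"] (order not guaranteed)
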
diff --git a/lib/gitlab/metrics/dashboard/service_selector.rb b/lib/gitlab/metrics/dashboard/service_selector.rb
index 49682da320c..641c0c76f8f 100644
--- a/lib/gitlab/metrics/dashboard/service_selector.rb
+++ b/lib/gitlab/metrics/dashboard/service_selector.rb
@@ -13,6 +13,8 @@ module Gitlab
include Gitlab::Utils::StrongMemoize
SERVICES = [
+ ::Metrics::Dashboard::ClusterMetricsEmbedService,
+ ::Metrics::Dashboard::ClusterDashboardService,
::Metrics::Dashboard::GitlabAlertEmbedService,
::Metrics::Dashboard::CustomMetricEmbedService,
::Metrics::Dashboard::GrafanaMetricEmbedService,
@@ -51,5 +53,3 @@ module Gitlab
end
end
end
-
-Gitlab::Metrics::Dashboard::ServiceSelector.prepend_if_ee('EE::Gitlab::Metrics::Dashboard::ServiceSelector')
diff --git a/lib/gitlab/metrics/dashboard/stages/base_stage.rb b/lib/gitlab/metrics/dashboard/stages/base_stage.rb
index 622d5aa8cdb..ee2d36621b4 100644
--- a/lib/gitlab/metrics/dashboard/stages/base_stage.rb
+++ b/lib/gitlab/metrics/dashboard/stages/base_stage.rb
@@ -48,6 +48,14 @@ module Gitlab
end
end
+ def for_variables
+ return unless dashboard.dig(:templating, :variables).is_a?(Hash)
+
+ dashboard.dig(:templating, :variables).each do |variable_name, variable|
+ yield variable_name, variable
+ end
+ end
+
def for_panel_groups
dashboard[:panel_groups].each do |panel_group|
yield panel_group
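
The new `for_variables` helper expects the dashboard hash to carry a `templating: { variables: { ... } }` section keyed by variable name. A standalone sketch with illustrative data:

dashboard = {
  templating: {
    variables: {
      pod_name: { type: 'metric_label_values',
                  options: { series_selector: 'up{env="production"}' } }
    }
  }
}

def for_variables(dashboard)
  variables = dashboard.dig(:templating, :variables)
  return unless variables.is_a?(Hash)

  variables.each { |name, variable| yield name, variable }
end

for_variables(dashboard) { |name, variable| puts "#{name}: #{variable[:type]}" }
# => pod_name: metric_label_values
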
diff --git a/lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb b/lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb
new file mode 100644
index 00000000000..a12082b704c
--- /dev/null
+++ b/lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Metrics
+ module Dashboard
+ module Stages
+ class ClusterEndpointInserter < BaseStage
+ def transform!
+ verify_params
+
+ for_metrics do |metric|
+ metric[:prometheus_endpoint_path] = endpoint_for_metric(metric)
+ end
+ end
+
+ private
+
+ def admin_url(metric)
+ Gitlab::Routing.url_helpers.prometheus_api_admin_cluster_path(
+ params[:cluster],
+ proxy_path: query_type(metric),
+ query: query_for_metric(metric)
+ )
+ end
+
+ def endpoint_for_metric(metric)
+ case params[:cluster_type]
+ when :admin
+ admin_url(metric)
+ when :group
+ error!(_('Group is required when cluster_type is :group')) unless params[:group]
+ group_url(metric)
+ when :project
+ error!(_('Project is required when cluster_type is :project')) unless project
+ project_url(metric)
+ else
+ error!(_('Unrecognized cluster type'))
+ end
+ end
+
+ def error!(message)
+ raise Errors::DashboardProcessingError.new(message)
+ end
+
+ def group_url(metric)
+ Gitlab::Routing.url_helpers.prometheus_api_group_cluster_path(
+ params[:group],
+ params[:cluster],
+ proxy_path: query_type(metric),
+ query: query_for_metric(metric)
+ )
+ end
+
+ def project_url(metric)
+ Gitlab::Routing.url_helpers.prometheus_api_project_cluster_path(
+ project,
+ params[:cluster],
+ proxy_path: query_type(metric),
+ query: query_for_metric(metric)
+ )
+ end
+
+ def query_type(metric)
+ metric[:query] ? :query : :query_range
+ end
+
+ def query_for_metric(metric)
+ query = metric[query_type(metric)]
+
+ raise Errors::MissingQueryError.new('Each "metric" must define one of :query or :query_range') unless query
+
+ query
+ end
+
+ def verify_params
+ raise Errors::DashboardProcessingError.new(_('Cluster is required for Stages::ClusterEndpointInserter')) unless params[:cluster]
+            raise Errors::DashboardProcessingError.new(_('Cluster type must be specified for Stages::ClusterEndpointInserter')) unless params[:cluster_type]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/dashboard/stages/endpoint_inserter.rb b/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter.rb
index e085f551952..c48a7ff25a5 100644
--- a/lib/gitlab/metrics/dashboard/stages/endpoint_inserter.rb
+++ b/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter.rb
@@ -4,9 +4,9 @@ module Gitlab
module Metrics
module Dashboard
module Stages
- class EndpointInserter < BaseStage
+ class MetricEndpointInserter < BaseStage
def transform!
- raise Errors::DashboardProcessingError.new('Environment is required for Stages::EndpointInserter') unless params[:environment]
+ raise Errors::DashboardProcessingError.new(_('Environment is required for Stages::MetricEndpointInserter')) unless params[:environment]
for_metrics do |metric|
metric[:prometheus_endpoint_path] = endpoint_for_metric(metric)
@@ -33,7 +33,11 @@ module Gitlab
end
def query_type(metric)
- metric[:query] ? :query : :query_range
+ if metric[:query]
+ ::Prometheus::ProxyService::PROMETHEUS_QUERY_API.to_sym
+ else
+ ::Prometheus::ProxyService::PROMETHEUS_QUERY_RANGE_API.to_sym
+ end
end
def query_for_metric(metric)
diff --git a/lib/gitlab/metrics/dashboard/stages/sorter.rb b/lib/gitlab/metrics/dashboard/stages/sorter.rb
index ba5aa78059c..882211e1441 100644
--- a/lib/gitlab/metrics/dashboard/stages/sorter.rb
+++ b/lib/gitlab/metrics/dashboard/stages/sorter.rb
@@ -16,7 +16,7 @@ module Gitlab
# Sorts the groups in the dashboard by the :priority key
def sort_groups!
- dashboard[:panel_groups] = dashboard[:panel_groups].sort_by { |group| -group[:priority].to_i }
+ dashboard[:panel_groups] = Gitlab::Utils.stable_sort_by(dashboard[:panel_groups]) { |group| -group[:priority].to_i }
end
# Sorts the panels in the dashboard by the :weight key
@@ -24,7 +24,7 @@ module Gitlab
dashboard[:panel_groups].each do |group|
missing_panels! unless group[:panels].is_a? Array
- group[:panels] = group[:panels].sort_by { |panel| -panel[:weight].to_i }
+ group[:panels] = Gitlab::Utils.stable_sort_by(group[:panels]) { |panel| -panel[:weight].to_i }
end
end
end
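
Switching to a stable sort keeps panels that share a weight in their authored order. A minimal sketch of what a stable sort helper has to guarantee (pure Ruby; not the actual `Gitlab::Utils.stable_sort_by` implementation):

# A stable descending sort can be emulated by adding the original index as a
# tie-breaker, so equal weights keep their relative positions.
def stable_sort_by(list)
  list.each_with_index.sort_by { |item, index| [yield(item), index] }.map(&:first)
end

panels = [
  { title: 'CPU',    weight: 2 },
  { title: 'Memory', weight: 1 },
  { title: 'Disk',   weight: 1 } # keeps its position after Memory
]

puts stable_sort_by(panels) { |panel| -panel[:weight].to_i }.inspect
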
diff --git a/lib/gitlab/metrics/dashboard/stages/url_validator.rb b/lib/gitlab/metrics/dashboard/stages/url_validator.rb
index ff36f7b605e..9e2bb0d1a70 100644
--- a/lib/gitlab/metrics/dashboard/stages/url_validator.rb
+++ b/lib/gitlab/metrics/dashboard/stages/url_validator.rb
@@ -6,8 +6,47 @@ module Gitlab
module Stages
class UrlValidator < BaseStage
def transform!
- dashboard[:links]&.each do |link|
- Gitlab::UrlBlocker.validate!(link[:url])
+ validate_dashboard_links(dashboard)
+
+ validate_chart_links(dashboard)
+ end
+
+ private
+
+ def blocker_args
+ {
+ schemes: %w(http https),
+ ports: [],
+ allow_localhost: allow_setting_local_requests?,
+ allow_local_network: allow_setting_local_requests?,
+ ascii_only: false,
+ enforce_user: false,
+ enforce_sanitization: false,
+ dns_rebind_protection: true
+ }
+ end
+
+ def allow_setting_local_requests?
+ Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
+ end
+
+ def validate_dashboard_links(dashboard)
+ validate_links(dashboard[:links])
+ end
+
+ def validate_chart_links(dashboard)
+ dashboard[:panel_groups].each do |panel_group|
+ panel_group[:panels].each do |panel|
+ validate_links(panel[:links])
+ end
+ end
+ end
+
+ def validate_links(links)
+ links&.each do |link|
+ next unless link.is_a? Hash
+
+ Gitlab::UrlBlocker.validate!(link[:url], blocker_args)
rescue Gitlab::UrlBlocker::BlockedUrlError
link[:url] = ''
end
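
The net effect of `validate_links` is to skip non-hash entries and blank out any URL the blocker rejects. A standalone sketch with a stand-in validator (the real code calls `Gitlab::UrlBlocker.validate!` with the options built above):

BlockedUrlError = Class.new(StandardError)

def validate!(url)
  # Stand-in for Gitlab::UrlBlocker.validate!: reject local URLs.
  raise BlockedUrlError if url.to_s.start_with?('http://localhost')
end

links = [{ url: 'https://example.com/runbook' }, { url: 'http://localhost:9090' }, 'not-a-hash']

links.each do |link|
  next unless link.is_a?(Hash)

  begin
    validate!(link[:url])
  rescue BlockedUrlError
    link[:url] = ''
  end
end

puts links.inspect
# => [{:url=>"https://example.com/runbook"}, {:url=>""}, "not-a-hash"]
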
diff --git a/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter.rb b/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter.rb
new file mode 100644
index 00000000000..20e7fe477e5
--- /dev/null
+++ b/lib/gitlab/metrics/dashboard/stages/variable_endpoint_inserter.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Metrics
+ module Dashboard
+ module Stages
+ class VariableEndpointInserter < BaseStage
+ VARIABLE_TYPE_METRIC_LABEL_VALUES = 'metric_label_values'
+
+ def transform!
+ raise Errors::DashboardProcessingError.new(_('Environment is required for Stages::VariableEndpointInserter')) unless params[:environment]
+
+ for_variables do |variable_name, variable|
+ if variable.is_a?(Hash) && variable[:type] == VARIABLE_TYPE_METRIC_LABEL_VALUES
+ variable[:options][:prometheus_endpoint_path] = endpoint_for_variable(variable.dig(:options, :series_selector))
+ end
+ end
+ end
+
+ private
+
+ def endpoint_for_variable(series_selector)
+ Gitlab::Routing.url_helpers.prometheus_api_project_environment_path(
+ project,
+ params[:environment],
+ proxy_path: ::Prometheus::ProxyService::PROMETHEUS_SERIES_API,
+ match: Array(series_selector)
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/dashboard/url.rb b/lib/gitlab/metrics/dashboard/url.rb
index 31670a3f533..10a2f3c2397 100644
--- a/lib/gitlab/metrics/dashboard/url.rb
+++ b/lib/gitlab/metrics/dashboard/url.rb
@@ -60,6 +60,22 @@ module Gitlab
Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_environment_url(*args)
end
+ # Matches dashboard urls for a metric chart embed
+ # for cluster metrics
+ #
+ # EX - https://<host>/<namespace>/<project>/-/clusters/<cluster_id>/?group=Cluster%20Health&title=Memory%20Usage&y_label=Memory%20(GiB)
+ def clusters_regex
+ strong_memoize(:clusters_regex) do
+ regex_for_project_metrics(
+ %r{
+ /clusters
+ /(?<cluster_id>\d+)
+ /?
+ }x
+ )
+ end
+ end
+
private
def regex_for_project_metrics(path_suffix_pattern)
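
A standalone sketch of extracting the cluster id from the embed URL shape shown in the comment above (only the path suffix; the real helper composes this with a host/namespace/project prefix via `regex_for_project_metrics`):

CLUSTERS_SUFFIX = %r{
  /clusters
  /(?<cluster_id>\d+)
  /?
}x.freeze

url = 'https://gitlab.example.com/group/project/-/clusters/42/?group=Cluster%20Health'
puts url.match(CLUSTERS_SUFFIX)[:cluster_id] # => "42"
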
diff --git a/lib/gitlab/metrics/methods.rb b/lib/gitlab/metrics/methods.rb
index 5955987541c..83a7b925392 100644
--- a/lib/gitlab/metrics/methods.rb
+++ b/lib/gitlab/metrics/methods.rb
@@ -35,7 +35,7 @@ module Gitlab
end
def init_metric(type, name, opts = {}, &block)
- options = MetricOptions.new(opts)
+ options = ::Gitlab::Metrics::Methods::MetricOptions.new(opts)
options.evaluate(&block)
if disabled_by_feature(options)
diff --git a/lib/gitlab/metrics/sidekiq_middleware.rb b/lib/gitlab/metrics/sidekiq_middleware.rb
index de8e1ca3256..1c99e1e730c 100644
--- a/lib/gitlab/metrics/sidekiq_middleware.rb
+++ b/lib/gitlab/metrics/sidekiq_middleware.rb
@@ -26,9 +26,7 @@ module Gitlab
private
def add_info_to_payload(payload, trans)
- payload[:db_count] = trans.get(:db_count, :counter).to_i
- payload[:db_write_count] = trans.get(:db_write_count, :counter).to_i
- payload[:db_cached_count] = trans.get(:db_cached_count, :counter).to_i
+ payload.merge!(::Gitlab::Metrics::Subscribers::ActiveRecord.db_counter_payload)
end
end
end
diff --git a/lib/gitlab/metrics/subscribers/active_record.rb b/lib/gitlab/metrics/subscribers/active_record.rb
index 1628eeb5a95..d2736882432 100644
--- a/lib/gitlab/metrics/subscribers/active_record.rb
+++ b/lib/gitlab/metrics/subscribers/active_record.rb
@@ -23,6 +23,14 @@ module Gitlab
increment_db_counters(payload)
end
+ def self.db_counter_payload
+ return {} unless Gitlab::SafeRequestStore.active?
+
+ DB_COUNTERS.map do |counter|
+ [counter, Gitlab::SafeRequestStore[counter].to_i]
+ end.to_h
+ end
+
private
define_histogram :gitlab_sql_duration_seconds do
@@ -36,13 +44,21 @@ module Gitlab
end
def increment_db_counters(payload)
- current_transaction.increment(:db_count, 1)
+ increment(:db_count)
if payload.fetch(:cached, payload[:name] == 'CACHE')
- current_transaction.increment(:db_cached_count, 1)
+ increment(:db_cached_count)
end
- current_transaction.increment(:db_write_count, 1) unless select_sql_command?(payload)
+ increment(:db_write_count) unless select_sql_command?(payload)
+ end
+
+ def increment(counter)
+ current_transaction.increment(counter, 1)
+
+ if Gitlab::SafeRequestStore.active?
+ Gitlab::SafeRequestStore[counter] = Gitlab::SafeRequestStore[counter].to_i + 1
+ end
end
def current_transaction
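
The request-store bookkeeping above keeps one integer per counter for the current request. A sketch with a plain Hash standing in for `Gitlab::SafeRequestStore`:

store = Hash.new(0)

def increment(store, counter)
  store[counter] += 1
end

def record_sql(store, cached:, write:)
  # Every SQL event bumps db_count; cached reads and writes bump their own counters.
  increment(store, :db_count)
  increment(store, :db_cached_count) if cached
  increment(store, :db_write_count) if write
end

record_sql(store, cached: false, write: true)   # INSERT
record_sql(store, cached: true,  write: false)  # cached SELECT
record_sql(store, cached: false, write: false)  # SELECT

puts store.inspect # => {:db_count=>3, :db_write_count=>1, :db_cached_count=>1}
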
diff --git a/lib/gitlab/metrics/transaction.rb b/lib/gitlab/metrics/transaction.rb
index 822f5243e9d..da06be9c79c 100644
--- a/lib/gitlab/metrics/transaction.rb
+++ b/lib/gitlab/metrics/transaction.rb
@@ -7,7 +7,7 @@ module Gitlab
include Gitlab::Metrics::Methods
# base labels shared among all transactions
- BASE_LABELS = { controller: nil, action: nil }.freeze
+ BASE_LABELS = { controller: nil, action: nil, feature_category: nil }.freeze
# labels that potentially contain sensitive information and will be filtered
FILTERED_LABELS = [:branch, :path].freeze
@@ -92,12 +92,6 @@ module Gitlab
self.class.transaction_metric(name, :gauge).set(labels, value) if use_prometheus
end
- def get(name, type, tags = {})
- metric = self.class.transaction_metric(name, type)
-
- metric.get(filter_tags(tags).merge(labels))
- end
-
def labels
BASE_LABELS
end
diff --git a/lib/gitlab/metrics/web_transaction.rb b/lib/gitlab/metrics/web_transaction.rb
index fa17548723e..2064f9290d3 100644
--- a/lib/gitlab/metrics/web_transaction.rb
+++ b/lib/gitlab/metrics/web_transaction.rb
@@ -32,6 +32,10 @@ module Gitlab
action = "#{controller.action_name}"
+ # Try to get the feature category, but don't fail when the controller is
+ # not an ApplicationController.
+ feature_category = controller.class.try(:feature_category_for_action, action).to_s
+
# Devise exposes a method called "request_format" that does the below.
# However, this method is not available to all controllers (e.g. certain
# Doorkeeper controllers). As such we use the underlying code directly.
@@ -45,7 +49,7 @@ module Gitlab
action = "#{action}.#{suffix}"
end
- { controller: controller.class.name, action: action }
+ { controller: controller.class.name, action: action, feature_category: feature_category }
end
def labels_from_endpoint
@@ -61,7 +65,10 @@ module Gitlab
if route
path = endpoint_paths_cache[route.request_method][route.path]
- { controller: 'Grape', action: "#{route.request_method} #{path}" }
+
+ # Feature categories will be added for grape endpoints in
+ # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/462
+ { controller: 'Grape', action: "#{route.request_method} #{path}", feature_category: '' }
end
end
diff --git a/lib/gitlab/middleware/go.rb b/lib/gitlab/middleware/go.rb
index abdbccd3aa8..47d0b9ba8cb 100644
--- a/lib/gitlab/middleware/go.rb
+++ b/lib/gitlab/middleware/go.rb
@@ -101,7 +101,7 @@ module Gitlab
if project
# If a project is found and the user has access, we return the full project path
- return project.full_path, project.default_branch
+ [project.full_path, project.default_branch]
else
# If not, we return the first two components as if it were a simple `namespace/project` path,
# so that we don't reveal the existence of a nested project the user doesn't have access to.
@@ -112,7 +112,7 @@ module Gitlab
# `go get gitlab.com/group/subgroup/project/subpackage` will not work for private projects.
# `go get gitlab.com/group/subgroup/project.git/subpackage` will work, since Go is smart enough
# to figure that out. `import 'gitlab.com/...'` behaves the same as `go get`.
- return simple_project_path, 'master'
+ [simple_project_path, 'master']
end
end
diff --git a/lib/gitlab/middleware/multipart.rb b/lib/gitlab/middleware/multipart.rb
index 3c45f841653..c0b671abd44 100644
--- a/lib/gitlab/middleware/multipart.rb
+++ b/lib/gitlab/middleware/multipart.rb
@@ -105,6 +105,21 @@ module Gitlab
private
+ def package_allowed_paths
+ packages_config = ::Gitlab.config.packages
+ return [] unless allow_packages_storage_path?(packages_config)
+
+ [::Packages::PackageFileUploader.workhorse_upload_path]
+ end
+
+ def allow_packages_storage_path?(packages_config)
+ return false unless packages_config.enabled
+ return false unless packages_config['storage_path']
+ return false if packages_config.object_store.enabled && packages_config.object_store.direct_upload
+
+ true
+ end
+
def allowed_paths
[
::FileUploader.root,
@@ -112,7 +127,7 @@ module Gitlab
JobArtifactUploader.workhorse_upload_path,
LfsObjectUploader.workhorse_upload_path,
File.join(Rails.root, 'public/uploads/tmp')
- ]
+ ] + package_allowed_paths
end
end
@@ -135,5 +150,3 @@ module Gitlab
end
end
end
-
-::Gitlab::Middleware::Multipart::Handler.prepend_if_ee('EE::Gitlab::Middleware::Multipart::Handler')
diff --git a/lib/gitlab/project_template.rb b/lib/gitlab/project_template.rb
index fdb3fbc03bc..e6e599e079d 100644
--- a/lib/gitlab/project_template.rb
+++ b/lib/gitlab/project_template.rb
@@ -59,6 +59,7 @@ module Gitlab
ProjectTemplate.new('nfhexo', 'Netlify/Hexo', _('A Hexo site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfhexo', 'illustrations/logos/netlify.svg'),
ProjectTemplate.new('salesforcedx', 'SalesforceDX', _('A project boilerplate for Salesforce App development with Salesforce Developer tools.'), 'https://gitlab.com/gitlab-org/project-templates/salesforcedx'),
ProjectTemplate.new('serverless_framework', 'Serverless Framework/JS', _('A basic page and serverless function that uses AWS Lambda, AWS API Gateway, and GitLab Pages'), 'https://gitlab.com/gitlab-org/project-templates/serverless-framework', 'illustrations/logos/serverless_framework.svg'),
+ ProjectTemplate.new('jsonnet', 'Jsonnet for Dynamic Child Pipelines', _('An example showing how to use Jsonnet with GitLab dynamic child pipelines'), 'https://gitlab.com/gitlab-org/project-templates/jsonnet'),
ProjectTemplate.new('cluster_management', 'GitLab Cluster Management', _('An example project for managing Kubernetes clusters integrated with GitLab.'), 'https://gitlab.com/gitlab-org/project-templates/cluster-management')
].freeze
end
diff --git a/lib/gitlab/prometheus_client.rb b/lib/gitlab/prometheus_client.rb
index 213e3ba835d..69499b5494e 100644
--- a/lib/gitlab/prometheus_client.rb
+++ b/lib/gitlab/prometheus_client.rb
@@ -5,6 +5,8 @@ module Gitlab
class PrometheusClient
include Gitlab::Utils::StrongMemoize
Error = Class.new(StandardError)
+ ConnectionError = Class.new(Gitlab::PrometheusClient::Error)
+ UnexpectedResponseError = Class.new(Gitlab::PrometheusClient::Error)
QueryError = Class.new(Gitlab::PrometheusClient::Error)
HEALTHY_RESPONSE = "Prometheus is Healthy.\n"
@@ -44,7 +46,7 @@ module Gitlab
path = api_path(type)
get(path, args)
rescue Gitlab::HTTP::ResponseError => ex
- raise PrometheusClient::Error, "Network connection error" unless ex.response && ex.response.try(:code)
+ raise PrometheusClient::ConnectionError, "Network connection error" unless ex.response && ex.response.try(:code)
handle_querying_api_response(ex.response)
end
@@ -115,7 +117,7 @@ module Gitlab
response = get(path, args)
handle_querying_api_response(response)
rescue Gitlab::HTTP::ResponseError => ex
- raise PrometheusClient::Error, "Network connection error" unless ex.response && ex.response.try(:code)
+ raise PrometheusClient::ConnectionError, "Network connection error" unless ex.response && ex.response.try(:code)
handle_querying_api_response(ex.response)
end
@@ -137,18 +139,18 @@ module Gitlab
def get(path, args)
Gitlab::HTTP.get(path, { query: args }.merge(http_options) )
rescue SocketError
- raise PrometheusClient::Error, "Can't connect to #{api_url}"
+ raise PrometheusClient::ConnectionError, "Can't connect to #{api_url}"
rescue OpenSSL::SSL::SSLError
- raise PrometheusClient::Error, "#{api_url} contains invalid SSL data"
+ raise PrometheusClient::ConnectionError, "#{api_url} contains invalid SSL data"
rescue Errno::ECONNREFUSED
- raise PrometheusClient::Error, 'Connection refused'
+ raise PrometheusClient::ConnectionError, 'Connection refused'
end
def handle_management_api_response(response)
if response.code == 200
response.body
else
- raise PrometheusClient::Error, "#{response.code} - #{response.body}"
+ raise PrometheusClient::UnexpectedResponseError, "#{response.code} - #{response.body}"
end
end
@@ -156,7 +158,7 @@ module Gitlab
response_code = response.try(:code)
response_body = response.try(:body)
- raise PrometheusClient::Error, "#{response_code} - #{response_body}" unless response_code
+ raise PrometheusClient::UnexpectedResponseError, "#{response_code} - #{response_body}" unless response_code
json_data = parse_json(response_body) if [200, 400].include?(response_code)
@@ -166,7 +168,7 @@ module Gitlab
when 400
raise PrometheusClient::QueryError, json_data['error'] || 'Bad data received'
else
- raise PrometheusClient::Error, "#{response_code} - #{response_body}"
+ raise PrometheusClient::UnexpectedResponseError, "#{response_code} - #{response_body}"
end
end
@@ -178,7 +180,7 @@ module Gitlab
def parse_json(response_body)
Gitlab::Json.parse(response_body, legacy_mode: true)
rescue JSON::ParserError
- raise PrometheusClient::Error, 'Parsing response failed'
+ raise PrometheusClient::UnexpectedResponseError, 'Parsing response failed'
end
end
end
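
Because both new classes inherit from `PrometheusClient::Error`, existing `rescue PrometheusClient::Error` call sites keep working while new callers can treat connection failures differently from unexpected responses. A small sketch:

Error = Class.new(StandardError)
ConnectionError = Class.new(Error)
UnexpectedResponseError = Class.new(Error)

def query
  # Stand-in for a failing HTTP call.
  raise ConnectionError, 'Connection refused'
end

begin
  query
rescue ConnectionError => e
  puts "retryable connection problem: #{e.message}"
rescue Error => e
  puts "other Prometheus client error: #{e.message}"
end
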
diff --git a/lib/gitlab/regex.rb b/lib/gitlab/regex.rb
index 4caff8ae679..784f8b48f3c 100644
--- a/lib/gitlab/regex.rb
+++ b/lib/gitlab/regex.rb
@@ -43,6 +43,10 @@ module Gitlab
@maven_app_name_regex ||= /\A[\w\-\.]+\z/.freeze
end
+ def maven_version_regex
+ @maven_version_regex ||= /\A(\.?[\w\+-]+\.?)+\z/.freeze
+ end
+
def maven_app_group_regex
maven_app_name_regex
end
@@ -246,6 +250,14 @@ module Gitlab
@utc_date_regex ||= /\A[0-9]{4}-[0-9]{2}-[0-9]{2}\z/.freeze
end
+ def merge_request_wip
+ /(?i)(\[WIP\]\s*|WIP:\s*|WIP$)/
+ end
+
+ def merge_request_draft
+ /(?i)(\[draft\]|\(draft\)|draft:|draft\s\-\s|draft$)/
+ end
+
def issue
@issue ||= /(?<issue>\d+\b)/
end
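
A quick check of which titles the two patterns above accept (patterns copied from the diff; the titles are made up):

wip_regex   = /(?i)(\[WIP\]\s*|WIP:\s*|WIP$)/
draft_regex = /(?i)(\[draft\]|\(draft\)|draft:|draft\s\-\s|draft$)/

['WIP: fix login', '[Draft] new API', 'Draft: tidy specs', 'Ready to merge'].each do |title|
  puts "#{title.inspect} wip=#{!!(title =~ wip_regex)} draft=#{!!(title =~ draft_regex)}"
end
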
diff --git a/lib/gitlab/runtime.rb b/lib/gitlab/runtime.rb
index abf6ee07d53..8b40aaa101a 100644
--- a/lib/gitlab/runtime.rb
+++ b/lib/gitlab/runtime.rb
@@ -37,7 +37,7 @@ module Gitlab
end
def puma?
- !!defined?(::Puma) && !defined?(ACTION_CABLE_SERVER)
+ !!defined?(::Puma)
end
# For unicorn, we need to check for actual server instances to avoid false positives.
@@ -70,11 +70,11 @@ module Gitlab
end
def web_server?
- puma? || unicorn? || action_cable?
+ puma? || unicorn?
end
def action_cable?
- !!defined?(ACTION_CABLE_SERVER)
+ web_server? && (!!defined?(ACTION_CABLE_SERVER) || Gitlab::ActionCable::Config.in_app?)
end
def multi_threaded?
@@ -82,19 +82,21 @@ module Gitlab
end
def max_threads
- main_thread = 1
+ threads = 1 # main thread
- if action_cable?
- Gitlab::Application.config.action_cable.worker_pool_size
- elsif puma?
- Puma.cli_config.options[:max_threads]
+ if puma?
+ threads += Puma.cli_config.options[:max_threads]
elsif sidekiq?
# An extra thread for the poller in Sidekiq Cron:
# https://github.com/ondrejbartas/sidekiq-cron#under-the-hood
- Sidekiq.options[:concurrency] + 1
- else
- 0
- end + main_thread
+ threads += Sidekiq.options[:concurrency] + 1
+ end
+
+ if action_cable?
+ threads += Gitlab::ActionCable::Config.worker_pool_size
+ end
+
+ threads
end
end
end
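
The rewritten `max_threads` adds up contributions instead of picking a single source. A worked sketch with illustrative numbers (the real method reads Puma, Sidekiq, and Action Cable configuration):

def max_threads(puma_max_threads: nil, sidekiq_concurrency: nil, action_cable_pool: nil)
  threads = 1 # main thread
  threads += puma_max_threads if puma_max_threads
  threads += sidekiq_concurrency + 1 if sidekiq_concurrency # +1 for the Sidekiq Cron poller
  threads += action_cable_pool if action_cable_pool
  threads
end

puts max_threads(puma_max_threads: 4, action_cable_pool: 4) # => 9
puts max_threads(sidekiq_concurrency: 25)                   # => 27
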
diff --git a/lib/gitlab/search_results.rb b/lib/gitlab/search_results.rb
index 6239158ef06..3d5f64ce05b 100644
--- a/lib/gitlab/search_results.rb
+++ b/lib/gitlab/search_results.rb
@@ -27,6 +27,7 @@ module Gitlab
end
def objects(scope, page: nil, per_page: DEFAULT_PER_PAGE, without_count: true, preload_method: nil)
+ should_preload = preload_method.present?
collection = case scope
when 'projects'
projects
@@ -39,9 +40,11 @@ module Gitlab
when 'users'
users
else
+ should_preload = false
Kaminari.paginate_array([])
end
+ collection = collection.public_send(preload_method) if should_preload # rubocop:disable GitlabSecurity/PublicSend
collection = collection.page(page).per(per_page)
without_count ? collection.without_count : collection
diff --git a/lib/gitlab/seeder.rb b/lib/gitlab/seeder.rb
index 53cbd5b21ea..d652719721e 100644
--- a/lib/gitlab/seeder.rb
+++ b/lib/gitlab/seeder.rb
@@ -18,6 +18,7 @@ module Gitlab
MASS_INSERT_PROJECT_START = 'mass_insert_project_'
MASS_INSERT_USER_START = 'mass_insert_user_'
+ REPORTED_USER_START = 'reported_user_'
ESTIMATED_INSERT_PER_MINUTE = 2_000_000
MASS_INSERT_ENV = 'MASS_INSERT'
@@ -36,7 +37,7 @@ module Gitlab
included do
scope :not_mass_generated, -> do
- where.not("username LIKE '#{MASS_INSERT_USER_START}%'")
+ where.not("username LIKE '#{MASS_INSERT_USER_START}%' OR username LIKE '#{REPORTED_USER_START}%'")
end
end
end
diff --git a/lib/gitlab/service_desk.rb b/lib/gitlab/service_desk.rb
new file mode 100644
index 00000000000..b3d6e890e03
--- /dev/null
+++ b/lib/gitlab/service_desk.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ServiceDesk
+ # Check whether a project or GitLab instance can support the Service Desk
+ # feature. Use `project.service_desk_enabled?` to check whether it is
+ # enabled for a particular project.
+ def self.enabled?(project:)
+ supported? && project[:service_desk_enabled]
+ end
+
+ def self.supported?
+ Gitlab::IncomingEmail.enabled? && Gitlab::IncomingEmail.supports_wildcard?
+ end
+ end
+end
diff --git a/lib/gitlab/service_desk_email.rb b/lib/gitlab/service_desk_email.rb
new file mode 100644
index 00000000000..f8dba82cb40
--- /dev/null
+++ b/lib/gitlab/service_desk_email.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ServiceDeskEmail
+ class << self
+ def enabled?
+ !!config&.enabled && config&.address.present?
+ end
+
+ def key_from_address(address)
+ wildcard_address = config&.address
+ return unless wildcard_address
+
+ Gitlab::IncomingEmail.key_from_address(address, wildcard_address: wildcard_address)
+ end
+
+ def config
+ Gitlab.config.service_desk_email
+ end
+ end
+ end
+end
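
`key_from_address` ultimately matches the incoming address against the configured wildcard. A standalone sketch assuming the usual `%{key}` placeholder convention (the real code delegates to `Gitlab::IncomingEmail.key_from_address`; the addresses are made up):

def key_from_address(address, wildcard_address)
  # Turn e.g. "contact+%{key}@example.com" into /contact\+(.+)@example\.com/.
  pattern = Regexp.new(Regexp.escape(wildcard_address).sub('%\{key\}', '(.+)'))
  match = address.match(pattern)
  match && match[1]
end

puts key_from_address('contact+my-group-my-project-12-issue-@example.com',
                      'contact+%{key}@example.com')
# => my-group-my-project-12-issue-
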
diff --git a/lib/gitlab/set_cache.rb b/lib/gitlab/set_cache.rb
index e891b805879..6ba9ee26634 100644
--- a/lib/gitlab/set_cache.rb
+++ b/lib/gitlab/set_cache.rb
@@ -20,7 +20,10 @@ module Gitlab
with do |redis|
keys = keys.map { |key| cache_key(key) }
- unlink_or_delete(redis, keys)
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ unlink_or_delete(redis, keys)
+ end
end
end
diff --git a/lib/gitlab/sidekiq_logging/deduplication_logger.rb b/lib/gitlab/sidekiq_logging/deduplication_logger.rb
index 01810e474dc..c5654819ffb 100644
--- a/lib/gitlab/sidekiq_logging/deduplication_logger.rb
+++ b/lib/gitlab/sidekiq_logging/deduplication_logger.rb
@@ -6,11 +6,14 @@ module Gitlab
include Singleton
include LogsJobs
- def log(job, deduplication_type)
+ def log(job, deduplication_type, deduplication_options = {})
payload = parse_job(job)
payload['job_status'] = 'deduplicated'
payload['message'] = "#{base_message(payload)}: deduplicated: #{deduplication_type}"
- payload['deduplication_type'] = deduplication_type
+ payload['deduplication.type'] = deduplication_type
+ # removing nil values from deduplication options
+ payload.merge!(
+ deduplication_options.compact.transform_keys { |k| "deduplication.options.#{k}" })
Sidekiq.logger.info payload
end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb
index 0ed4912c4cc..46ce0eb4a91 100644
--- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing.rb
@@ -17,7 +17,8 @@ module Gitlab
job['duplicate-of'] = duplicate_job.existing_jid
if duplicate_job.droppable?
- Gitlab::SidekiqLogging::DeduplicationLogger.instance.log(job, "dropped until executing")
+ Gitlab::SidekiqLogging::DeduplicationLogger.instance.log(
+ job, "dropped until executing", duplicate_job.options)
return false
end
end
diff --git a/lib/gitlab/static_site_editor/config.rb b/lib/gitlab/static_site_editor/config.rb
index 65c567ec2a6..08ed6599a6e 100644
--- a/lib/gitlab/static_site_editor/config.rb
+++ b/lib/gitlab/static_site_editor/config.rb
@@ -3,7 +3,7 @@
module Gitlab
module StaticSiteEditor
class Config
- SUPPORTED_EXTENSIONS = %w[.md].freeze
+ SUPPORTED_EXTENSIONS = %w[.md .md.erb].freeze
def initialize(repository, ref, file_path, return_url)
@repository = repository
@@ -20,7 +20,7 @@ module Gitlab
commit_id: commit_id,
project_id: project.id,
project: project.path,
- namespace: project.namespace.path,
+ namespace: project.namespace.full_path,
return_url: sanitize_url(return_url),
is_supported_content: supported_content?.to_s,
base_url: Gitlab::Routing.url_helpers.project_show_sse_path(project, full_path)
@@ -42,11 +42,11 @@ module Gitlab
end
def extension_supported?
- File.extname(file_path).in?(SUPPORTED_EXTENSIONS)
+ SUPPORTED_EXTENSIONS.any? { |ext| file_path.end_with?(ext) }
end
def file_exists?
- commit_id.present? && repository.blob_at(commit_id, file_path).present?
+ commit_id.present? && !repository.blob_at(commit_id, file_path).nil?
end
def full_path
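
The switch to `end_with?` matters because `File.extname` only reports the final extension, which would reject `.md.erb` files:

SUPPORTED_EXTENSIONS = %w[.md .md.erb].freeze

path = 'source/index.md.erb'
puts File.extname(path)                                      # => ".erb"
puts SUPPORTED_EXTENSIONS.any? { |ext| path.end_with?(ext) } # => true
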
diff --git a/lib/gitlab/suggestions/file_suggestion.rb b/lib/gitlab/suggestions/file_suggestion.rb
index 73b9800f0b8..7805b27902d 100644
--- a/lib/gitlab/suggestions/file_suggestion.rb
+++ b/lib/gitlab/suggestions/file_suggestion.rb
@@ -7,17 +7,14 @@ module Gitlab
SuggestionForDifferentFileError = Class.new(StandardError)
- def initialize
- @suggestions = []
- end
-
- def add_suggestion(new_suggestion)
- if for_different_file?(new_suggestion)
- raise SuggestionForDifferentFileError,
- 'Only add suggestions for the same file.'
- end
+ attr_reader :file_path
+ attr_reader :blob
+ attr_reader :suggestions
- suggestions << new_suggestion
+ def initialize(file_path, suggestions)
+ @file_path = file_path
+ @suggestions = suggestions.sort_by(&:from_line_index)
+ @blob = suggestions.first&.diff_file&.new_blob
end
def line_conflict?
@@ -30,18 +27,8 @@ module Gitlab
@new_content ||= _new_content
end
- def file_path
- @file_path ||= _file_path
- end
-
private
- attr_accessor :suggestions
-
- def blob
- first_suggestion&.diff_file&.new_blob
- end
-
def blob_data_lines
blob.load_all_data!
blob.data.lines
@@ -53,31 +40,19 @@ module Gitlab
def _new_content
current_content.tap do |content|
+ # NOTE: We need to cater for line number changes when the range is more than one line.
+ offset = 0
+
suggestions.each do |suggestion|
- range = line_range(suggestion)
+ range = line_range(suggestion, offset)
content[range] = suggestion.to_content
+ offset += range.count - 1
end
end.join
end
- def line_range(suggestion)
- suggestion.from_line_index..suggestion.to_line_index
- end
-
- def for_different_file?(suggestion)
- file_path && file_path != suggestion_file_path(suggestion)
- end
-
- def suggestion_file_path(suggestion)
- suggestion&.diff_file&.file_path
- end
-
- def first_suggestion
- suggestions.first
- end
-
- def _file_path
- suggestion_file_path(first_suggestion)
+ def line_range(suggestion, offset = 0)
+ (suggestion.from_line_index - offset)..(suggestion.to_line_index - offset)
end
def _line_conflict?
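
The offset bookkeeping above compensates for earlier suggestions that change the file's line count. A worked standalone sketch (the data is illustrative; the real code operates on `Suggestion` objects and their diff files):

lines = ["a\n", "b\n", "c\n", "d\n", "e\n"]
suggestions = [
  { from: 0, to: 2, content: "abc\n" }, # collapses lines 0..2 into one line
  { from: 4, to: 4, content: "E\n" }    # originally targeted index 4 ("e")
]

offset = 0
suggestions.each do |s|
  # Shift later ranges up by however many lines earlier suggestions removed.
  range = (s[:from] - offset)..(s[:to] - offset)
  lines[range] = s[:content]
  offset += range.count - 1
end

puts lines.join # => "abc\nd\nE\n"
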
diff --git a/lib/gitlab/suggestions/suggestion_set.rb b/lib/gitlab/suggestions/suggestion_set.rb
index 22abef98bf0..abb05ba56a7 100644
--- a/lib/gitlab/suggestions/suggestion_set.rb
+++ b/lib/gitlab/suggestions/suggestion_set.rb
@@ -26,10 +26,10 @@ module Gitlab
end
def actions
- @actions ||= suggestions_per_file.map do |file_path, file_suggestion|
+ @actions ||= suggestions_per_file.map do |file_suggestion|
{
action: 'update',
- file_path: file_path,
+ file_path: file_suggestion.file_path,
content: file_suggestion.new_content
}
end
@@ -50,19 +50,9 @@ module Gitlab
end
def _suggestions_per_file
- suggestions.each_with_object({}) do |suggestion, result|
- file_path = suggestion.diff_file.file_path
- file_suggestion = result[file_path] ||= FileSuggestion.new
- file_suggestion.add_suggestion(suggestion)
- end
- end
-
- def file_suggestions
- suggestions_per_file.values
- end
-
- def first_file_suggestion
- file_suggestions.first
+ suggestions
+ .group_by { |suggestion| suggestion.diff_file.file_path }
+ .map { |file_path, group| FileSuggestion.new(file_path, group) }
end
def _error_message
@@ -72,7 +62,7 @@ module Gitlab
return message if message
end
- has_line_conflict = file_suggestions.any? do |file_suggestion|
+ has_line_conflict = suggestions_per_file.any? do |file_suggestion|
file_suggestion.line_conflict?
end
diff --git a/lib/gitlab/template/service_desk_template.rb b/lib/gitlab/template/service_desk_template.rb
new file mode 100644
index 00000000000..edc62a92004
--- /dev/null
+++ b/lib/gitlab/template/service_desk_template.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Template
+ class ServiceDeskTemplate < BaseTemplate
+ class << self
+ def extension
+ '.md'
+ end
+
+ def base_dir
+ '.gitlab/service_desk_templates/'
+ end
+
+ def finder(project)
+ Gitlab::Template::Finders::RepoTemplateFinder.new(project, self.base_dir, self.extension, self.categories)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/tracking/incident_management.rb b/lib/gitlab/tracking/incident_management.rb
index bd8d1669dd3..5fa819b3696 100644
--- a/lib/gitlab/tracking/incident_management.rb
+++ b/lib/gitlab/tracking/incident_management.rb
@@ -32,6 +32,9 @@ module Gitlab
},
send_email: {
name: 'sending_emails'
+ },
+ pagerduty_active: {
+ name: 'pagerduty_webhook'
}
}.with_indifferent_access.freeze
end
diff --git a/lib/gitlab/tree_summary.rb b/lib/gitlab/tree_summary.rb
index 4ec43e62c19..9b67599668a 100644
--- a/lib/gitlab/tree_summary.rb
+++ b/lib/gitlab/tree_summary.rb
@@ -97,7 +97,7 @@ module Gitlab
File.join(*[path, ""])
end
- commits_hsh = repository.list_last_commits_for_tree(commit.id, ensured_path, offset: offset, limit: limit)
+ commits_hsh = repository.list_last_commits_for_tree(commit.id, ensured_path, offset: offset, limit: limit, literal_pathspec: true)
prerender_commit_full_titles!(commits_hsh.values)
entries.each do |entry|
diff --git a/lib/gitlab/updated_notes_paginator.rb b/lib/gitlab/updated_notes_paginator.rb
new file mode 100644
index 00000000000..3d3d0e5bf9e
--- /dev/null
+++ b/lib/gitlab/updated_notes_paginator.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module Gitlab
+ # UpdatedNotesPaginator implements a rudimentary form of keyset pagination on
+ # top of a notes relation that has been initialized with a `last_fetched_at`
+ # value. This class will attempt to limit the number of notes returned, and
+ # specify a new value for `last_fetched_at` that will pick up where the last
+ # page of notes left off.
+ class UpdatedNotesPaginator
+ LIMIT = 50
+ MICROSECOND = 1_000_000
+
+ attr_reader :next_fetched_at, :notes
+
+ def initialize(relation, last_fetched_at:)
+ @last_fetched_at = last_fetched_at
+ @now = Time.current
+
+ notes, more = fetch_page(relation)
+ if more
+ init_middle_page(notes)
+ else
+ init_final_page(notes)
+ end
+ end
+
+ def metadata
+ { last_fetched_at: next_fetched_at_microseconds, more: more }
+ end
+
+ private
+
+ attr_reader :last_fetched_at, :more, :now
+
+ def next_fetched_at_microseconds
+ (next_fetched_at.to_i * MICROSECOND) + next_fetched_at.usec
+ end
+
+ def fetch_page(relation)
+ relation = relation.by_updated_at
+ notes = relation.at_most(LIMIT + 1).to_a
+
+ return [notes, false] unless notes.size > LIMIT
+
+ marker = notes.pop # Remove the marker note
+
+ # Although very unlikely, it is possible that more notes with the same
+ # updated_at may exist, e.g., if created in bulk. Add them all to the page
+ # if this is detected, so pagination won't get stuck indefinitely
+ if notes.last.updated_at == marker.updated_at
+ notes += relation
+ .with_updated_at(marker.updated_at)
+ .id_not_in(notes.map(&:id))
+ .to_a
+ end
+
+ [notes, true]
+ end
+
+ def init_middle_page(notes)
+ @more = true
+
+ # The fetch overlap can be ignored if we're in an intermediate page.
+ @next_fetched_at = notes.last.updated_at + NotesFinder::FETCH_OVERLAP
+ @notes = notes
+ end
+
+ def init_final_page(notes)
+ @more = false
+ @next_fetched_at = now
+ @notes = notes
+ end
+ end
+end
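
The `last_fetched_at` cursor returned in `metadata` is the timestamp encoded as integer microseconds, so no sub-second precision is lost over the wire. A small round-trip sketch (the timestamp is illustrative):

require 'time'

MICROSECOND = 1_000_000

last_note_updated_at = Time.parse('2020-07-20 12:26:25.123456 UTC')

cursor = (last_note_updated_at.to_i * MICROSECOND) + last_note_updated_at.usec
puts cursor

# Decoding the cursor recovers the full sub-second precision.
puts Time.at(cursor / MICROSECOND, cursor % MICROSECOND).utc.iso8601(6)
# => "2020-07-20T12:26:25.123456Z"
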
diff --git a/lib/gitlab/url_builder.rb b/lib/gitlab/url_builder.rb
index cd15130cee6..1e522ae63b6 100644
--- a/lib/gitlab/url_builder.rb
+++ b/lib/gitlab/url_builder.rb
@@ -71,7 +71,11 @@ module Gitlab
end
def snippet_url(snippet, **options)
- if options.delete(:raw).present?
+ if options[:file].present?
+ file, ref = options.values_at(:file, :ref)
+
+ instance.gitlab_raw_snippet_blob_url(snippet, file, ref)
+ elsif options.delete(:raw).present?
instance.gitlab_raw_snippet_url(snippet, **options)
else
instance.gitlab_snippet_url(snippet, **options)
@@ -81,9 +85,11 @@ module Gitlab
def wiki_url(wiki, **options)
return wiki_page_url(wiki, Wiki::HOMEPAGE, **options) unless options[:action]
- options[:controller] = 'projects/wikis'
- options[:namespace_id] = wiki.container.namespace
- options[:project_id] = wiki.container
+ if wiki.container.is_a?(Project)
+ options[:controller] = 'projects/wikis'
+ options[:namespace_id] = wiki.container.namespace
+ options[:project_id] = wiki.container
+ end
instance.url_for(**options)
end
diff --git a/lib/gitlab/usage_data.rb b/lib/gitlab/usage_data.rb
index 7b6f5e69ee1..9d7e6536608 100644
--- a/lib/gitlab/usage_data.rb
+++ b/lib/gitlab/usage_data.rb
@@ -18,7 +18,6 @@ module Gitlab
class << self
include Gitlab::Utils::UsageData
include Gitlab::Utils::StrongMemoize
- include Gitlab::UsageDataConcerns::Topology
def data(force_refresh: false)
Rails.cache.fetch('usage_data', force: force_refresh, expires_in: 2.weeks) do
@@ -27,16 +26,21 @@ module Gitlab
end
def uncached_data
- clear_memoized_limits
-
- license_usage_data
- .merge(system_usage_data)
- .merge(features_usage_data)
- .merge(components_usage_data)
- .merge(cycle_analytics_usage_data)
- .merge(object_store_usage_data)
- .merge(topology_usage_data)
- .merge(recording_ce_finish_data)
+ clear_memoized
+
+ with_finished_at(:recording_ce_finished_at) do
+ license_usage_data
+ .merge(system_usage_data)
+ .merge(system_usage_data_monthly)
+ .merge(features_usage_data)
+ .merge(components_usage_data)
+ .merge(cycle_analytics_usage_data)
+ .merge(object_store_usage_data)
+ .merge(topology_usage_data)
+ .merge(usage_activity_by_stage)
+ .merge(usage_activity_by_stage(:usage_activity_by_stage_monthly, last_28_days_time_period))
+ .merge(analytics_unique_visits_data)
+ end
end
def to_json(force_refresh: false)
@@ -59,17 +63,11 @@ module Gitlab
Time.now
end
- def recording_ce_finish_data
- {
- recording_ce_finished_at: Time.now
- }
- end
-
# rubocop: disable Metrics/AbcSize
# rubocop: disable CodeReuse/ActiveRecord
def system_usage_data
- alert_bot_incident_count = count(::Issue.authored(::User.alert_bot))
- issues_created_manually_from_alerts = count(Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot))
+ alert_bot_incident_count = count(::Issue.authored(::User.alert_bot), start: issue_minimum_id, finish: issue_maximum_id)
+ issues_created_manually_from_alerts = count(Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot), start: issue_minimum_id, finish: issue_maximum_id)
{
counts: {
@@ -86,9 +84,9 @@ module Gitlab
auto_devops_enabled: count(::ProjectAutoDevops.enabled),
auto_devops_disabled: count(::ProjectAutoDevops.disabled),
deploy_keys: count(DeployKey),
- deployments: count(Deployment),
- successful_deployments: count(Deployment.success),
- failed_deployments: count(Deployment.failed),
+ deployments: deployment_count(Deployment),
+ successful_deployments: deployment_count(Deployment.success),
+ failed_deployments: deployment_count(Deployment.failed),
environments: count(::Environment),
clusters: count(::Clusters::Cluster),
clusters_enabled: count(::Clusters::Cluster.enabled),
@@ -111,11 +109,12 @@ module Gitlab
clusters_applications_knative: count(::Clusters::Applications::Knative.available),
clusters_applications_elastic_stack: count(::Clusters::Applications::ElasticStack.available),
clusters_applications_jupyter: count(::Clusters::Applications::Jupyter.available),
+ clusters_applications_cilium: count(::Clusters::Applications::Cilium.available),
clusters_management_project: count(::Clusters::Cluster.with_management_project),
in_review_folder: count(::Environment.in_review_folder),
grafana_integrated_projects: count(GrafanaIntegration.enabled),
groups: count(Group),
- issues: count(Issue),
+ issues: count(Issue, start: issue_minimum_id, finish: issue_maximum_id),
issues_created_from_gitlab_error_tracking_ui: count(SentryIssue),
issues_with_associated_zoom_link: count(ZoomMeeting.added_to_issue),
issues_using_zoom_quick_actions: distinct_count(ZoomMeeting, :issue_id),
@@ -125,7 +124,7 @@ module Gitlab
issues_created_manually_from_alerts: issues_created_manually_from_alerts,
incident_issues: alert_bot_incident_count,
alert_bot_incident_issues: alert_bot_incident_count,
- incident_labeled_issues: count(::Issue.with_label_attributes(IncidentManagement::CreateIssueService::INCIDENT_LABEL)),
+ incident_labeled_issues: count(::Issue.with_label_attributes(::IncidentManagement::CreateIncidentLabelService::LABEL_PROPERTIES), start: issue_minimum_id, finish: issue_maximum_id),
keys: count(Key),
label_lists: count(List.label),
lfs_objects: count(LfsObject),
@@ -144,7 +143,6 @@ module Gitlab
protected_branches: count(ProtectedBranch),
releases: count(Release),
remote_mirrors: count(RemoteMirror),
- snippets: count(Snippet),
personal_snippets: count(PersonalSnippet),
project_snippets: count(ProjectSnippet),
suggestions: count(Suggestion),
@@ -161,14 +159,29 @@ module Gitlab
usage_counters,
user_preferences_usage,
ingress_modsecurity_usage,
- container_expiration_policies_usage,
- merge_requests_usage(default_time_period)
- )
+ container_expiration_policies_usage
+ ).tap do |data|
+ data[:snippets] = data[:personal_snippets] + data[:project_snippets]
+ end
}
end
- # rubocop: enable CodeReuse/ActiveRecord
# rubocop: enable Metrics/AbcSize
+ def system_usage_data_monthly
+ {
+ counts_monthly: {
+ deployments: deployment_count(Deployment.where(last_28_days_time_period)),
+ successful_deployments: deployment_count(Deployment.success.where(last_28_days_time_period)),
+ failed_deployments: deployment_count(Deployment.failed.where(last_28_days_time_period)),
+ personal_snippets: count(PersonalSnippet.where(last_28_days_time_period)),
+ project_snippets: count(ProjectSnippet.where(last_28_days_time_period))
+ }.tap do |data|
+ data[:snippets] = data[:personal_snippets] + data[:project_snippets]
+ end
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
def cycle_analytics_usage_data
Gitlab::CycleAnalytics::UsageData.new.to_json
rescue ActiveRecord::StatementInvalid
@@ -197,6 +210,7 @@ module Gitlab
ldap_enabled: alt_usage_data(fallback: nil) { Gitlab.config.ldap.enabled },
mattermost_enabled: alt_usage_data(fallback: nil) { Gitlab.config.mattermost.enabled },
omniauth_enabled: alt_usage_data(fallback: nil) { Gitlab::Auth.omniauth_enabled? },
+ prometheus_enabled: alt_usage_data(fallback: nil) { Gitlab::Prometheus::Internal.prometheus_enabled? },
prometheus_metrics_enabled: alt_usage_data(fallback: nil) { Gitlab::Metrics.prometheus_metrics_enabled? },
reply_by_email_enabled: alt_usage_data(fallback: nil) { Gitlab::IncomingEmail.enabled? },
signup_enabled: alt_usage_data(fallback: nil) { Gitlab::CurrentSettings.allow_signup? },
@@ -290,6 +304,10 @@ module Gitlab
}
end
+ def topology_usage_data
+ Gitlab::UsageData::Topology.new.topology_usage_data
+ end
+
def ingress_modsecurity_usage
##
# This method measures usage of the Modsecurity Web Application Firewall across the entire
@@ -336,15 +354,9 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def services_usage
- results = Service.available_services_names.without('jira').each_with_object({}) do |service_name, response|
+ Service.available_services_names.without('jira').each_with_object({}) do |service_name, response|
response["projects_#{service_name}_active".to_sym] = count(Service.active.where(template: false, type: "#{service_name}_service".camelize))
- end
-
- # Keep old Slack keys for backward compatibility, https://gitlab.com/gitlab-data/analytics/issues/3241
- results[:projects_slack_notifications_active] = results[:projects_slack_active]
- results[:projects_slack_slash_active] = results[:projects_slack_slash_commands_active]
-
- results.merge(jira_usage).merge(jira_import_usage)
+ end.merge(jira_usage).merge(jira_import_usage)
end
def jira_usage
@@ -357,18 +369,15 @@ module Gitlab
projects_jira_active: 0
}
- Service.active
- .by_type(:JiraService)
- .includes(:jira_tracker_data)
- .find_in_batches(batch_size: BATCH_SIZE) do |services|
+ JiraService.active.includes(:jira_tracker_data).find_in_batches(batch_size: BATCH_SIZE) do |services|
counts = services.group_by do |service|
# TODO: Simplify as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
service_url = service.data_fields&.url || (service.properties && service.properties['url'])
service_url&.include?('.atlassian.net') ? :cloud : :server
end
- results[:projects_jira_server_active] += counts[:server].count if counts[:server]
- results[:projects_jira_cloud_active] += counts[:cloud].count if counts[:cloud]
+ results[:projects_jira_server_active] += counts[:server].size if counts[:server]
+ results[:projects_jira_cloud_active] += counts[:cloud].size if counts[:cloud]
results[:projects_jira_active] += services.size
end
@@ -400,23 +409,18 @@ module Gitlab
end
# rubocop: disable CodeReuse/ActiveRecord
- def merge_requests_usage(time_period)
+ def merge_requests_users(time_period)
query =
Event
.where(target_type: Event::TARGET_TYPES[:merge_request].to_s)
.where(time_period)
- merge_request_users = distinct_count(
+ distinct_count(
query,
:author_id,
- batch_size: 5_000, # Based on query performance, this is the optimal batch size.
- start: User.minimum(:id),
- finish: User.maximum(:id)
+ start: user_minimum_id,
+ finish: user_maximum_id
)
-
- {
- merge_requests_users: merge_request_users
- }
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -428,19 +432,207 @@ module Gitlab
end
end
- def default_time_period
+ def last_28_days_time_period
{ created_at: 28.days.ago..Time.current }
end
+ # Source: https://gitlab.com/gitlab-data/analytics/blob/master/transform/snowflake-dbt/data/ping_metrics_to_stage_mapping_data.csv
+ def usage_activity_by_stage(key = :usage_activity_by_stage, time_period = {})
+ {
+ key => {
+ configure: usage_activity_by_stage_configure(time_period),
+ create: usage_activity_by_stage_create(time_period),
+ manage: usage_activity_by_stage_manage(time_period),
+ monitor: usage_activity_by_stage_monitor(time_period),
+ package: usage_activity_by_stage_package(time_period),
+ plan: usage_activity_by_stage_plan(time_period),
+ release: usage_activity_by_stage_release(time_period),
+ secure: usage_activity_by_stage_secure(time_period),
+ verify: usage_activity_by_stage_verify(time_period)
+ }
+ }
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_configure(time_period)
+ {
+ clusters_applications_cert_managers: cluster_applications_user_distinct_count(::Clusters::Applications::CertManager, time_period),
+ clusters_applications_helm: cluster_applications_user_distinct_count(::Clusters::Applications::Helm, time_period),
+ clusters_applications_ingress: cluster_applications_user_distinct_count(::Clusters::Applications::Ingress, time_period),
+ clusters_applications_knative: cluster_applications_user_distinct_count(::Clusters::Applications::Knative, time_period),
+ clusters_management_project: clusters_user_distinct_count(::Clusters::Cluster.with_management_project, time_period),
+ clusters_disabled: clusters_user_distinct_count(::Clusters::Cluster.disabled, time_period),
+ clusters_enabled: clusters_user_distinct_count(::Clusters::Cluster.enabled, time_period),
+ clusters_platforms_gke: clusters_user_distinct_count(::Clusters::Cluster.gcp_installed.enabled, time_period),
+ clusters_platforms_eks: clusters_user_distinct_count(::Clusters::Cluster.aws_installed.enabled, time_period),
+ clusters_platforms_user: clusters_user_distinct_count(::Clusters::Cluster.user_provided.enabled, time_period),
+ instance_clusters_disabled: clusters_user_distinct_count(::Clusters::Cluster.disabled.instance_type, time_period),
+ instance_clusters_enabled: clusters_user_distinct_count(::Clusters::Cluster.enabled.instance_type, time_period),
+ group_clusters_disabled: clusters_user_distinct_count(::Clusters::Cluster.disabled.group_type, time_period),
+ group_clusters_enabled: clusters_user_distinct_count(::Clusters::Cluster.enabled.group_type, time_period),
+ project_clusters_disabled: clusters_user_distinct_count(::Clusters::Cluster.disabled.project_type, time_period),
+ project_clusters_enabled: clusters_user_distinct_count(::Clusters::Cluster.enabled.project_type, time_period)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_create(time_period)
+ {
+ deploy_keys: distinct_count(::DeployKey.where(time_period), :user_id),
+ keys: distinct_count(::Key.regular_keys.where(time_period), :user_id),
+ merge_requests: distinct_count(::MergeRequest.where(time_period), :author_id),
+ projects_with_disable_overriding_approvers_per_merge_request: count(::Project.where(time_period.merge(disable_overriding_approvers_per_merge_request: true))),
+ projects_without_disable_overriding_approvers_per_merge_request: count(::Project.where(time_period.merge(disable_overriding_approvers_per_merge_request: [false, nil]))),
+ remote_mirrors: distinct_count(::Project.with_remote_mirrors.where(time_period), :creator_id),
+ snippets: distinct_count(::Snippet.where(time_period), :author_id)
+ }.tap do |h|
+ if time_period.present?
+ h[:merge_requests_users] = merge_requests_users(time_period)
+ h.merge!(action_monthly_active_users(time_period))
+ end
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Omitted because no user, creator or author associated: `campaigns_imported_from_github`, `ldap_group_links`
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_manage(time_period)
+ {
+ events: distinct_count(::Event.where(time_period), :author_id),
+ groups: distinct_count(::GroupMember.where(time_period), :user_id),
+ users_created: count(::User.where(time_period), start: user_minimum_id, finish: user_maximum_id),
+ omniauth_providers: filtered_omniauth_provider_names.reject { |name| name == 'group_saml' }
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_monitor(time_period)
+ {
+ clusters: distinct_count(::Clusters::Cluster.where(time_period), :user_id),
+ clusters_applications_prometheus: cluster_applications_user_distinct_count(::Clusters::Applications::Prometheus, time_period),
+ operations_dashboard_default_dashboard: count(::User.active.with_dashboard('operations').where(time_period),
+ start: user_minimum_id,
+ finish: user_maximum_id)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def usage_activity_by_stage_package(time_period)
+ {}
+ end
+
+ # Omitted because no user, creator or author associated: `boards`, `labels`, `milestones`, `uploads`
+ # Omitted because too expensive: `epics_deepest_relationship_level`
+ # Omitted because of encrypted properties: `projects_jira_cloud_active`, `projects_jira_server_active`
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_plan(time_period)
+ {
+ issues: distinct_count(::Issue.where(time_period), :author_id),
+ notes: distinct_count(::Note.where(time_period), :author_id),
+ projects: distinct_count(::Project.where(time_period), :creator_id),
+ todos: distinct_count(::Todo.where(time_period), :author_id)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Omitted because no user, creator or author associated: `environments`, `feature_flags`, `in_review_folder`, `pages_domains`
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_release(time_period)
+ {
+ deployments: distinct_count(::Deployment.where(time_period), :user_id),
+ failed_deployments: distinct_count(::Deployment.failed.where(time_period), :user_id),
+ releases: distinct_count(::Release.where(time_period), :author_id),
+ successful_deployments: distinct_count(::Deployment.success.where(time_period), :user_id)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # Omitted because no user, creator or author associated: `ci_runners`
+ # rubocop: disable CodeReuse/ActiveRecord
+ def usage_activity_by_stage_verify(time_period)
+ {
+ ci_builds: distinct_count(::Ci::Build.where(time_period), :user_id),
+ ci_external_pipelines: distinct_count(::Ci::Pipeline.external.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_internal_pipelines: distinct_count(::Ci::Pipeline.internal.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_pipeline_config_auto_devops: distinct_count(::Ci::Pipeline.auto_devops_source.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_pipeline_config_repository: distinct_count(::Ci::Pipeline.repository_source.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_pipeline_schedules: distinct_count(::Ci::PipelineSchedule.where(time_period), :owner_id),
+ ci_pipelines: distinct_count(::Ci::Pipeline.where(time_period), :user_id, start: user_minimum_id, finish: user_maximum_id),
+ ci_triggers: distinct_count(::Ci::Trigger.where(time_period), :owner_id),
+ clusters_applications_runner: cluster_applications_user_distinct_count(::Clusters::Applications::Runner, time_period)
+ }
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+      # Currently too complicated to get reliable counts for these stats:
+ # container_scanning_jobs, dast_jobs, dependency_scanning_jobs, license_management_jobs, sast_jobs, secret_detection_jobs
+ # Once https://gitlab.com/gitlab-org/gitlab/merge_requests/17568 is merged, this might be doable
+ def usage_activity_by_stage_secure(time_period)
+ {}
+ end
+
+ def analytics_unique_visits_data
+ results = ::Gitlab::Analytics::UniqueVisits::TARGET_IDS.each_with_object({}) do |target_id, hash|
+ hash[target_id] = redis_usage_data { unique_visit_service.weekly_unique_visits_for_target(target_id) }
+ end
+ results['analytics_unique_visits_for_any_target'] = redis_usage_data { unique_visit_service.weekly_unique_visits_for_any_target }
+
+ { analytics_unique_visits: results }
+ end
+
+ def action_monthly_active_users(time_period)
+ return {} unless Feature.enabled?(Gitlab::UsageDataCounters::TrackUniqueActions::FEATURE_FLAG)
+
+ counter = Gitlab::UsageDataCounters::TrackUniqueActions
+
+ project_count = redis_usage_data do
+ counter.count_unique_events(
+ event_action: Gitlab::UsageDataCounters::TrackUniqueActions::PUSH_ACTION,
+ date_from: time_period[:created_at].first,
+ date_to: time_period[:created_at].last
+ )
+ end
+
+ design_count = redis_usage_data do
+ counter.count_unique_events(
+ event_action: Gitlab::UsageDataCounters::TrackUniqueActions::DESIGN_ACTION,
+ date_from: time_period[:created_at].first,
+ date_to: time_period[:created_at].last
+ )
+ end
+
+ wiki_count = redis_usage_data do
+ counter.count_unique_events(
+ event_action: Gitlab::UsageDataCounters::TrackUniqueActions::WIKI_ACTION,
+ date_from: time_period[:created_at].first,
+ date_to: time_period[:created_at].last
+ )
+ end
+
+ {
+ action_monthly_active_users_project_repo: project_count,
+ action_monthly_active_users_design_management: design_count,
+ action_monthly_active_users_wiki_repo: wiki_count
+ }
+ end
+
private
+ def unique_visit_service
+ strong_memoize(:unique_visit_service) do
+ ::Gitlab::Analytics::UniqueVisits.new
+ end
+ end
+
def total_alert_issues
# Remove prometheus table queries once they are deprecated
# To be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/217407.
[
- count(Issue.with_alert_management_alerts),
- count(::Issue.with_self_managed_prometheus_alert_events),
- count(::Issue.with_prometheus_alert_events)
+ count(Issue.with_alert_management_alerts, start: issue_minimum_id, finish: issue_maximum_id),
+ count(::Issue.with_self_managed_prometheus_alert_events, start: issue_minimum_id, finish: issue_maximum_id),
+ count(::Issue.with_prometheus_alert_events, start: issue_minimum_id, finish: issue_maximum_id)
].reduce(:+)
end
@@ -456,9 +648,66 @@ module Gitlab
end
end
- def clear_memoized_limits
+ def issue_minimum_id
+ strong_memoize(:issue_minimum_id) do
+ ::Issue.minimum(:id)
+ end
+ end
+
+ def issue_maximum_id
+ strong_memoize(:issue_maximum_id) do
+ ::Issue.maximum(:id)
+ end
+ end
+
+ def deployment_minimum_id
+ strong_memoize(:deployment_minimum_id) do
+ ::Deployment.minimum(:id)
+ end
+ end
+
+ def deployment_maximum_id
+ strong_memoize(:deployment_maximum_id) do
+ ::Deployment.maximum(:id)
+ end
+ end
+
+ def clear_memoized
+ clear_memoization(:issue_minimum_id)
+ clear_memoization(:issue_maximum_id)
clear_memoization(:user_minimum_id)
clear_memoization(:user_maximum_id)
+ clear_memoization(:unique_visit_service)
+ clear_memoization(:deployment_minimum_id)
+ clear_memoization(:deployment_maximum_id)
+ clear_memoization(:approval_merge_request_rule_minimum_id)
+ clear_memoization(:approval_merge_request_rule_maximum_id)
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def cluster_applications_user_distinct_count(applications, time_period)
+ distinct_count(applications.where(time_period).available.joins(:cluster), 'clusters.user_id')
+ end
+
+ def clusters_user_distinct_count(clusters, time_period)
+ distinct_count(clusters.where(time_period), :user_id)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def omniauth_provider_names
+ ::Gitlab.config.omniauth.providers.map(&:name)
+ end
+
+ # LDAP provider names are set by customers and could include
+ # sensitive info (server names, etc.). LDAP providers normally
+ # don't appear among the omniauth providers, but we filter them out
+ # to ensure no internal details leak via the usage ping.
+ def filtered_omniauth_provider_names
+ omniauth_provider_names.reject { |name| name.starts_with?('ldap') }
+ end
+
+ def deployment_count(relation)
+ count relation, start: deployment_minimum_id, finish: deployment_maximum_id
end
end
end
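The issue and deployment minimum/maximum id helpers above exist so the batched counters can be given explicit start:/finish: bounds once per payload instead of recomputing MIN(id) and MAX(id) for every metric. As a rough sketch only (the real batching lives in Gitlab::Utils::UsageData, and the batch size below is invented), a bounded batched count can look like this:

    # Illustrative sketch, not the actual implementation: walk the id range in
    # fixed-size slices and sum per-slice COUNTs so no single query scans the
    # whole table at once.
    def sketch_batched_count(relation, start:, finish:, batch_size: 10_000)
      return 0 if start.nil? || finish.nil?

      (start..finish).step(batch_size).sum do |lower|
        relation.where(id: lower...(lower + batch_size)).count
      end
    end

    # e.g. sketch_batched_count(::Issue.with_alert_management_alerts,
    #                           start: issue_minimum_id, finish: issue_maximum_id)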
diff --git a/lib/gitlab/usage_data/topology.rb b/lib/gitlab/usage_data/topology.rb
new file mode 100644
index 00000000000..4bca2cb07e4
--- /dev/null
+++ b/lib/gitlab/usage_data/topology.rb
@@ -0,0 +1,258 @@
+# frozen_string_literal: true
+
+module Gitlab
+ class UsageData
+ class Topology
+ include Gitlab::Utils::UsageData
+
+ JOB_TO_SERVICE_NAME = {
+ 'gitlab-rails' => 'web',
+ 'gitlab-sidekiq' => 'sidekiq',
+ 'gitlab-workhorse' => 'workhorse',
+ 'redis' => 'redis',
+ 'postgres' => 'postgres',
+ 'gitaly' => 'gitaly',
+ 'prometheus' => 'prometheus',
+ 'node' => 'node-exporter',
+ 'registry' => 'registry'
+ }.freeze
+
+ CollectionFailure = Struct.new(:query, :error) do
+ def to_h
+ { query => error }
+ end
+ end
+
+ def topology_usage_data
+ @failures = []
+ @instances = Set[]
+ topology_data, duration = measure_duration { topology_fetch_all_data }
+ {
+ topology: topology_data
+ .merge(duration_s: duration)
+ .merge(failures: @failures.map(&:to_h))
+ }
+ end
+
+ private
+
+ def topology_fetch_all_data
+ with_prometheus_client(fallback: {}) do |client|
+ {
+ application_requests_per_hour: topology_app_requests_per_hour(client),
+ nodes: topology_node_data(client)
+ }.compact
+ end
+ rescue => e
+ @failures << CollectionFailure.new('other', e.class.to_s)
+
+ {}
+ end
+
+ def topology_app_requests_per_hour(client)
+ result = query_safely('gitlab_usage_ping:ops:rate5m', 'app_requests', fallback: nil) do |query|
+ client.query(one_week_average(query)).first
+ end
+
+ return unless result
+
+ # the metric is recorded as a per-second rate
+ (result['value'].last.to_f * 1.hour).to_i
+ end
+
+ def topology_node_data(client)
+ # node-level data
+ by_instance_mem = topology_node_memory(client)
+ by_instance_cpus = topology_node_cpus(client)
+ by_instance_uname_info = topology_node_uname_info(client)
+ # service-level data
+ by_instance_by_job_by_type_memory = topology_all_service_memory(client)
+ by_instance_by_job_process_count = topology_all_service_process_count(client)
+ by_instance_by_job_server_types = topology_all_service_server_types(client)
+
+ @instances.map do |instance|
+ {
+ node_memory_total_bytes: by_instance_mem[instance],
+ node_cpus: by_instance_cpus[instance],
+ node_uname_info: by_instance_uname_info[instance],
+ node_services:
+ topology_node_services(
+ instance, by_instance_by_job_process_count, by_instance_by_job_by_type_memory, by_instance_by_job_server_types
+ )
+ }.compact
+ end
+ end
+
+ def topology_node_memory(client)
+ query_safely('gitlab_usage_ping:node_memory_total_bytes:avg', 'node_memory', fallback: {}) do |query|
+ aggregate_by_instance(client, one_week_average(query))
+ end
+ end
+
+ def topology_node_cpus(client)
+ query_safely('gitlab_usage_ping:node_cpus:count', 'node_cpus', fallback: {}) do |query|
+ aggregate_by_instance(client, one_week_average(query))
+ end
+ end
+
+ def topology_node_uname_info(client)
+ node_uname_info = query_safely('node_uname_info', 'node_uname_info', fallback: []) do |query|
+ client.query(query)
+ end
+
+ map_instance_labels(node_uname_info, %w(machine sysname release))
+ end
+
+ def topology_all_service_memory(client)
+ {
+ rss: topology_service_memory_rss(client),
+ uss: topology_service_memory_uss(client),
+ pss: topology_service_memory_pss(client)
+ }
+ end
+
+ def topology_service_memory_rss(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_process_resident_memory_bytes:avg', 'service_rss', fallback: {}
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
+ end
+
+ def topology_service_memory_uss(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_process_unique_memory_bytes:avg', 'service_uss', fallback: {}
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
+ end
+
+ def topology_service_memory_pss(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_process_proportional_memory_bytes:avg', 'service_pss', fallback: {}
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
+ end
+
+ def topology_all_service_process_count(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_process:count', 'service_process_count', fallback: {}
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
+ end
+
+ def topology_all_service_server_types(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_app_server_workers:sum', 'service_workers', fallback: {}
+ ) { |query| aggregate_by_labels(client, query) }
+ end
+
+ def query_safely(query, query_name, fallback:)
+ result = yield query
+
+ return result if result.present?
+
+ @failures << CollectionFailure.new(query_name, 'empty_result')
+ fallback
+ rescue => e
+ @failures << CollectionFailure.new(query_name, e.class.to_s)
+ fallback
+ end
+
+ def topology_node_services(instance, all_process_counts, all_process_memory, all_server_types)
+ # returns all node service data grouped by service name as the key
+ instance_service_data =
+ topology_instance_service_process_count(instance, all_process_counts)
+ .deep_merge(topology_instance_service_memory(instance, all_process_memory))
+ .deep_merge(topology_instance_service_server_types(instance, all_server_types))
+
+ # map to a list of hashes where service names become values instead, and remove
+ # unknown services, since they might not be ours
+ instance_service_data.each_with_object([]) do |entry, list|
+ service, service_metrics = entry
+ gitlab_service = JOB_TO_SERVICE_NAME[service.to_s]
+ next unless gitlab_service
+
+ list << { name: gitlab_service }.merge(service_metrics)
+ end
+ end
+
+ def topology_instance_service_process_count(instance, all_instance_data)
+ topology_data_for_instance(instance, all_instance_data).to_h do |metric, count|
+ [metric['job'], { process_count: count }]
+ end
+ end
+
+ # Given a hash mapping memory set types to Prometheus response data, returns a hash
+ # mapping instance/node names to services and their respective memory use in bytes
+ def topology_instance_service_memory(instance, instance_data_by_type)
+ result = {}
+ instance_data_by_type.each do |memory_type, instance_data|
+ topology_data_for_instance(instance, instance_data).each do |metric, memory_bytes|
+ job = metric['job']
+ key = "process_memory_#{memory_type}".to_sym
+
+ result[job] ||= {}
+ result[job][key] ||= memory_bytes
+ end
+ end
+
+ result
+ end
+
+ def topology_instance_service_server_types(instance, all_instance_data)
+ topology_data_for_instance(instance, all_instance_data).to_h do |metric, _value|
+ [metric['job'], { server: metric['server'] }]
+ end
+ end
+
+ def topology_data_for_instance(instance, all_instance_data)
+ all_instance_data.filter { |metric, _value| metric['instance'] == instance }
+ end
+
+ def normalize_instance_label(instance)
+ normalize_localhost_address(drop_port_number(instance))
+ end
+
+ def normalize_localhost_address(instance)
+ ip_addr = IPAddr.new(instance)
+ is_local_ip = ip_addr.loopback? || ip_addr.to_i.zero?
+
+ is_local_ip ? 'localhost' : instance
+ rescue IPAddr::InvalidAddressError
+ # This most likely means it was a host name, not an IP address
+ instance
+ end
+
+ def drop_port_number(instance)
+ instance.gsub(/:\d+$/, '')
+ end
+
+ def normalize_and_track_instance(instance)
+ normalize_instance_label(instance).tap do |normalized_instance|
+ @instances << normalized_instance
+ end
+ end
+
+ def one_week_average(query)
+ "avg_over_time (#{query}[1w])"
+ end
+
+ def aggregate_by_instance(client, query)
+ client.aggregate(query) { |metric| normalize_and_track_instance(metric['instance']) }
+ end
+
+ # Will retain the full label set as a composite key that values are mapped to
+ def aggregate_by_labels(client, query)
+ client.aggregate(query) do |metric|
+ metric['instance'] = normalize_and_track_instance(metric['instance'])
+ metric
+ end
+ end
+
+ # Given a query result vector, maps each instance to a hash of the target labels' key/value pairs.
+ # @return [Hash] mapping each instance to a hash of target label key/value pairs, or an empty hash if the input vector is empty
+ def map_instance_labels(query_result_vector, target_labels)
+ query_result_vector.to_h do |result|
+ key = normalize_and_track_instance(result['metric']['instance'])
+ value = result['metric'].slice(*target_labels).symbolize_keys
+ [key, value]
+ end
+ end
+ end
+ end
+end
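For orientation, the payload assembled by Topology#topology_usage_data has roughly the shape below; the keys come from the code above, while every value is invented for illustration:

    # Hypothetical example payload; all numbers and label values are made up.
    {
      topology: {
        application_requests_per_hour: 36_000,
        nodes: [
          {
            node_memory_total_bytes: 33_269_903_360,
            node_cpus: 16,
            node_uname_info: { machine: 'x86_64', sysname: 'Linux', release: '4.19.76' },
            node_services: [
              { name: 'web', process_count: 10, process_memory_rss: 2_500_000_000, server: 'puma' },
              { name: 'sidekiq', process_count: 1, process_memory_rss: 1_100_000_000 }
            ]
          }
        ],
        duration_s: 1.23,
        failures: [{ 'node_cpus' => 'empty_result' }]
      }
    }

A failed or empty query never aborts collection: query_safely records the failure under its query name and substitutes the per-query fallback, so a partial payload still ships together with the failures list.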
diff --git a/lib/gitlab/usage_data_concerns/topology.rb b/lib/gitlab/usage_data_concerns/topology.rb
deleted file mode 100644
index 6e1d29f2a17..00000000000
--- a/lib/gitlab/usage_data_concerns/topology.rb
+++ /dev/null
@@ -1,137 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module UsageDataConcerns
- module Topology
- include Gitlab::Utils::UsageData
-
- JOB_TO_SERVICE_NAME = {
- 'gitlab-rails' => 'web',
- 'gitlab-sidekiq' => 'sidekiq',
- 'gitlab-workhorse' => 'workhorse',
- 'redis' => 'redis',
- 'postgres' => 'postgres',
- 'gitaly' => 'gitaly',
- 'prometheus' => 'prometheus',
- 'node' => 'node-exporter'
- }.freeze
-
- def topology_usage_data
- topology_data, duration = measure_duration do
- alt_usage_data(fallback: {}) do
- {
- nodes: topology_node_data
- }.compact
- end
- end
- { topology: topology_data.merge(duration_s: duration) }
- end
-
- private
-
- def topology_node_data
- with_prometheus_client do |client|
- # node-level data
- by_instance_mem = topology_node_memory(client)
- by_instance_cpus = topology_node_cpus(client)
- # service-level data
- by_instance_by_job_by_metric_memory = topology_all_service_memory(client)
- by_instance_by_job_process_count = topology_all_service_process_count(client)
-
- instances = Set.new(by_instance_mem.keys + by_instance_cpus.keys)
- instances.map do |instance|
- {
- node_memory_total_bytes: by_instance_mem[instance],
- node_cpus: by_instance_cpus[instance],
- node_services:
- topology_node_services(instance, by_instance_by_job_process_count, by_instance_by_job_by_metric_memory)
- }.compact
- end
- end
- end
-
- def topology_node_memory(client)
- aggregate_single(client, 'avg (node_memory_MemTotal_bytes) by (instance)')
- end
-
- def topology_node_cpus(client)
- aggregate_single(client, 'count (node_cpu_seconds_total{mode="idle"}) by (instance)')
- end
-
- def topology_all_service_memory(client)
- aggregate_many(
- client,
- 'avg ({__name__ =~ "(ruby_){0,1}process_(resident|unique|proportional)_memory_bytes", job != "gitlab_exporter_process"}) by (instance, job, __name__)'
- )
- end
-
- def topology_all_service_process_count(client)
- aggregate_many(client, 'count ({__name__ =~ "(ruby_){0,1}process_start_time_seconds", job != "gitlab_exporter_process"}) by (instance, job)')
- end
-
- def topology_node_services(instance, all_process_counts, all_process_memory)
- # returns all node service data grouped by service name as the key
- instance_service_data =
- topology_instance_service_process_count(instance, all_process_counts)
- .deep_merge(topology_instance_service_memory(instance, all_process_memory))
-
- # map to list of hashes where service names become values instead, and remove
- # unknown services, since they might not be ours
- instance_service_data.each_with_object([]) do |entry, list|
- service, service_metrics = entry
- gitlab_service = JOB_TO_SERVICE_NAME[service.to_s]
- next unless gitlab_service
-
- list << { name: gitlab_service }.merge(service_metrics)
- end
- end
-
- def topology_instance_service_process_count(instance, all_instance_data)
- topology_data_for_instance(instance, all_instance_data).to_h do |metric, count|
- [metric['job'], { process_count: count }]
- end
- end
-
- def topology_instance_service_memory(instance, all_instance_data)
- topology_data_for_instance(instance, all_instance_data).each_with_object({}) do |entry, hash|
- metric, memory = entry
- job = metric['job']
- key =
- case metric['__name__']
- when match_process_memory_metric_for_type('resident') then :process_memory_rss
- when match_process_memory_metric_for_type('unique') then :process_memory_uss
- when match_process_memory_metric_for_type('proportional') then :process_memory_pss
- end
-
- hash[job] ||= {}
- hash[job][key] ||= memory
- end
- end
-
- def match_process_memory_metric_for_type(type)
- /(ruby_){0,1}process_#{type}_memory_bytes/
- end
-
- def topology_data_for_instance(instance, all_instance_data)
- all_instance_data.filter { |metric, _value| metric['instance'] == instance }
- end
-
- def drop_port(instance)
- instance.gsub(/:.+$/, '')
- end
-
- # Will retain a single `instance` key that values are mapped to
- def aggregate_single(client, query)
- client.aggregate(query) { |metric| drop_port(metric['instance']) }
- end
-
- # Will retain a composite key that values are mapped to
- def aggregate_many(client, query)
- client.aggregate(query) do |metric|
- metric['instance'] = drop_port(metric['instance'])
- metric
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/usage_data_counters/track_unique_actions.rb b/lib/gitlab/usage_data_counters/track_unique_actions.rb
new file mode 100644
index 00000000000..9fb5a29748e
--- /dev/null
+++ b/lib/gitlab/usage_data_counters/track_unique_actions.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UsageDataCounters
+ module TrackUniqueActions
+ KEY_EXPIRY_LENGTH = 29.days
+ FEATURE_FLAG = :track_unique_actions
+
+ WIKI_ACTION = :wiki_action
+ DESIGN_ACTION = :design_action
+ PUSH_ACTION = :project_action
+
+ ACTION_TRANSFORMATIONS = HashWithIndifferentAccess.new({
+ wiki: {
+ created: WIKI_ACTION,
+ updated: WIKI_ACTION,
+ destroyed: WIKI_ACTION
+ },
+ design: {
+ created: DESIGN_ACTION,
+ updated: DESIGN_ACTION,
+ destroyed: DESIGN_ACTION
+ },
+ project: {
+ pushed: PUSH_ACTION
+ }
+ }).freeze
+
+ class << self
+ def track_action(event_action:, event_target:, author_id:, time: Time.zone.now)
+ return unless Gitlab::CurrentSettings.usage_ping_enabled
+ return unless Feature.enabled?(FEATURE_FLAG)
+ return unless valid_target?(event_target)
+ return unless valid_action?(event_action)
+
+ transformed_target = transform_target(event_target)
+ transformed_action = transform_action(event_action, transformed_target)
+
+ add_event(transformed_action, author_id, time)
+ end
+
+ def count_unique_events(event_action:, date_from:, date_to:)
+ keys = (date_from.to_date..date_to.to_date).map { |date| key(event_action, date) }
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.pfcount(*keys)
+ end
+ end
+
+ private
+
+ def transform_action(event_action, event_target)
+ ACTION_TRANSFORMATIONS.dig(event_target, event_action) || event_action
+ end
+
+ def transform_target(event_target)
+ Event::TARGET_TYPES.key(event_target)
+ end
+
+ def valid_target?(target)
+ Event::TARGET_TYPES.value?(target)
+ end
+
+ def valid_action?(action)
+ Event.actions.key?(action)
+ end
+
+ def key(event_action, date)
+ year_day = date.strftime('%G-%j')
+ "#{year_day}-{#{event_action}}"
+ end
+
+ def add_event(event_action, author_id, date)
+ target_key = key(event_action, date)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.multi do |multi|
+ multi.pfadd(target_key, author_id)
+ multi.expire(target_key, KEY_EXPIRY_LENGTH)
+ end
+ end
+ end
+ end
+ end
+ end
+end
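track_action writes one Redis HyperLogLog per transformed action and day (the key method yields strings such as "2020-202-{project_action}"), and count_unique_events merges the daily keys with PFCOUNT. A minimal usage sketch, mirroring the call made by action_monthly_active_users in usage_data.rb:

    # Hypothetical call: approximate number of distinct authors who pushed
    # during the last 7 days, read from the daily HyperLogLog keys.
    Gitlab::UsageDataCounters::TrackUniqueActions.count_unique_events(
      event_action: Gitlab::UsageDataCounters::TrackUniqueActions::PUSH_ACTION,
      date_from: 7.days.ago.to_date,
      date_to: Date.current
    )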
diff --git a/lib/gitlab/user_access.rb b/lib/gitlab/user_access.rb
index 5e0a4faeba8..1551548d9b4 100644
--- a/lib/gitlab/user_access.rb
+++ b/lib/gitlab/user_access.rb
@@ -92,12 +92,6 @@ module Gitlab
end
end
- def can_read_project?
- return false unless can_access_git?
-
- user.can?(:read_project, project)
- end
-
private
def permission_cache
diff --git a/lib/gitlab/utils.rb b/lib/gitlab/utils.rb
index e80cc51dc3b..8f5c1eda456 100644
--- a/lib/gitlab/utils.rb
+++ b/lib/gitlab/utils.rb
@@ -56,7 +56,7 @@ module Gitlab
# * Maximum length is 63 bytes
# * First/Last Character is not a hyphen
def slugify(str)
- return str.downcase
+ str.downcase
.gsub(/[^a-z0-9]/, '-')[0..62]
.gsub(/(\A-+|-+\z)/, '')
end
@@ -178,5 +178,15 @@ module Gitlab
.group_by(&:first)
.transform_values { |kvs| kvs.map(&:last) }
end
+
+ # This sort is stable (see https://en.wikipedia.org/wiki/Sorting_algorithm#Stability),
+ # unlike the bare Ruby sort_by method. Using sort_by alone leads to
+ # instability across different platforms (e.g., x86_64-linux and x86_64-darwin18),
+ # which in turn produces different orderings of equal elements on those platforms.
+ # This method uses a list item's original index position to break ties.
+ def stable_sort_by(list)
+ list.sort_by.with_index { |x, idx| [yield(x), idx] }
+ end
end
end
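A minimal usage sketch for stable_sort_by; the sample data is invented, and Gitlab::Utils is assumed to be callable as a module function here, as it is elsewhere in this file:

    # Entries that share a sort key keep their original relative order.
    releases = [
      { tag: 'v1.1', name: 'first'  },
      { tag: 'v1.0', name: 'older'  },
      { tag: 'v1.1', name: 'second' }
    ]

    Gitlab::Utils.stable_sort_by(releases) { |release| release[:tag] }
    # => the 'v1.0' entry first, then the two 'v1.1' entries with 'first'
    #    still ahead of 'second'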
diff --git a/lib/gitlab/utils/markdown.rb b/lib/gitlab/utils/markdown.rb
new file mode 100644
index 00000000000..82c4a0e3b23
--- /dev/null
+++ b/lib/gitlab/utils/markdown.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Utils
+ module Markdown
+ PUNCTUATION_REGEXP = /[^\p{Word}\- ]/u.freeze
+
+ def string_to_anchor(string)
+ string
+ .strip
+ .downcase
+ .gsub(PUNCTUATION_REGEXP, '') # remove punctuation
+ .tr(' ', '-') # replace spaces with dash
+ .squeeze('-') # replace multiple dashes with one
+ .gsub(/\A(\d+)\z/, 'anchor-\1') # digits-only hrefs conflict with issue refs
+ end
+ end
+ end
+end
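A few illustrative inputs and the anchors they produce once a class includes Gitlab::Utils::Markdown; the examples are invented and simply follow the transformation steps above:

    include Gitlab::Utils::Markdown

    string_to_anchor('Installing GitLab!')  # => "installing-gitlab"
    string_to_anchor('  A -- B  ')          # => "a-b"
    string_to_anchor('123')                 # => "anchor-123", avoiding a clash with issue references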
diff --git a/lib/gitlab/utils/usage_data.rb b/lib/gitlab/utils/usage_data.rb
index afc4e000977..625e1076a54 100644
--- a/lib/gitlab/utils/usage_data.rb
+++ b/lib/gitlab/utils/usage_data.rb
@@ -77,11 +77,11 @@ module Gitlab
end
end
- def with_prometheus_client
- if Gitlab::Prometheus::Internal.prometheus_enabled?
- prometheus_address = Gitlab::Prometheus::Internal.uri
- yield Gitlab::PrometheusClient.new(prometheus_address, allow_local_requests: true)
- end
+ def with_prometheus_client(fallback: nil)
+ return fallback unless Gitlab::Prometheus::Internal.prometheus_enabled?
+
+ prometheus_address = Gitlab::Prometheus::Internal.uri
+ yield Gitlab::PrometheusClient.new(prometheus_address, allow_local_requests: true)
end
def measure_duration
@@ -92,6 +92,10 @@ module Gitlab
[result, duration]
end
+ def with_finished_at(key, &block)
+ yield.merge(key => Time.now)
+ end
+
private
def redis_usage_counter
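With the fallback: keyword a caller receives a well-typed empty value when Prometheus is not configured, instead of nil. A minimal sketch of a call site (the query string is arbitrary):

    # Returns {} rather than nil when Prometheus is disabled on this instance.
    with_prometheus_client(fallback: {}) do |client|
      { up_targets: client.query('count(up)') }
    end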
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index c91d1b05440..6d935bb8828 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -216,8 +216,8 @@ module Gitlab
def gitaly_server_hash(repository)
{
- address: Gitlab::GitalyClient.address(repository.container.repository_storage),
- token: Gitlab::GitalyClient.token(repository.container.repository_storage),
+ address: Gitlab::GitalyClient.address(repository.shard),
+ token: Gitlab::GitalyClient.token(repository.shard),
features: Feature::Gitaly.server_feature_flags
}
end