Diffstat (limited to 'scripts')
-rw-r--r--  scripts/allowed_warnings.txt | 14
-rw-r--r--  scripts/api/base.rb | 28
-rwxr-xr-x  scripts/api/cancel_pipeline.rb | 14
-rw-r--r--  scripts/api/commit_merge_requests.rb | 19
-rw-r--r--  scripts/api/create_issue.rb | 24
-rw-r--r--  scripts/api/create_issue_discussion.rb | 24
-rw-r--r--  scripts/api/create_merge_request_discussion.rb | 33
-rw-r--r--  scripts/api/find_issues.rb | 24
-rwxr-xr-x  scripts/api/get_job_id.rb | 19
-rw-r--r--  scripts/api/get_package_and_test_job.rb | 49
-rw-r--r--  scripts/api/pipeline_failed_jobs.rb | 21
-rw-r--r--  scripts/api/update_issue.rb | 29
-rwxr-xr-x  scripts/build_assets_image | 2
-rwxr-xr-x  scripts/construct-release-environments-versions.rb | 37
-rwxr-xr-x  scripts/create-pipeline-failure-incident.rb | 272
-rw-r--r--  scripts/database/schema_validator.rb | 34
-rwxr-xr-x  scripts/db_tasks | 3
-rwxr-xr-x  scripts/decomposition/generate-loose-foreign-key | 16
-rwxr-xr-x  scripts/failed_tests.rb | 4
-rwxr-xr-x  scripts/frontend/download_fixtures.sh | 56
-rw-r--r--  scripts/frontend/extract_gettext_all.js | 10
-rwxr-xr-x  scripts/frontend/po_to_json.js | 208
-rw-r--r--  scripts/frontend/startup_css/constants.js | 4
-rwxr-xr-x  scripts/generate-e2e-pipeline | 43
-rwxr-xr-x  scripts/generate-failed-package-and-test-mr-message.rb | 81
-rwxr-xr-x  scripts/generate-failed-pipeline-slack-message.rb | 181
-rwxr-xr-x  scripts/generate-rspec-foss-impact-pipeline | 66
-rwxr-xr-x  scripts/generate_rspec_pipeline.rb | 205
-rwxr-xr-x  scripts/gitaly-test-build | 7
-rwxr-xr-x  scripts/gitaly-test-spawn | 2
-rw-r--r--  scripts/gitlab_component_helpers.sh | 151
-rwxr-xr-x  scripts/lint-docs-blueprints.rb | 4
-rwxr-xr-x  scripts/lint-rugged | 3
-rwxr-xr-x  scripts/packages/automated_cleanup.rb | 4
-rw-r--r--  scripts/packages/helpers.sh | 59
-rwxr-xr-x  scripts/pipeline/create_test_failure_issues.rb | 263
-rwxr-xr-x  scripts/pipeline_test_report_builder.rb | 10
-rw-r--r--  scripts/prepare_build.sh | 15
-rwxr-xr-x  scripts/review_apps/automated_cleanup.rb | 68
-rw-r--r--  scripts/review_apps/base-config.yaml | 98
-rwxr-xr-x  scripts/review_apps/k8s-resources-count-checks.sh | 9
-rwxr-xr-x  scripts/review_apps/review-apps.sh | 116
-rw-r--r--  scripts/rspec_helpers.sh | 52
-rwxr-xr-x  scripts/setup-test-env | 59
-rwxr-xr-x  scripts/trigger-build.rb | 1
-rw-r--r--  scripts/utils.sh | 51
-rwxr-xr-x  scripts/validate_schema_changes | 7
-rwxr-xr-x  scripts/verify-tff-mapping | 55
48 files changed, 1639 insertions, 915 deletions
diff --git a/scripts/allowed_warnings.txt b/scripts/allowed_warnings.txt
index 19bd5d51a20..cc7d14c1d3c 100644
--- a/scripts/allowed_warnings.txt
+++ b/scripts/allowed_warnings.txt
@@ -13,3 +13,17 @@ Type application/netcdf is already registered as a variant of application/netcdf
# This warning is emitted by scripts/static-analysis.
\*\*\*\* .+ had the following warning\(s\):
+
+# Ruby 3 extracts net-protocol into a separate gem, while Ruby 2 has it built-in.
+# This can be removed when support for Ruby 2 is dropped.
+2\.7\.0\/gems\/net-protocol-0\.1\.3\/lib\/net\/protocol\.rb:208: warning: already initialized constant Net::BufferedIO::BUFSIZE
+ruby\/2\.7\.0\/net\/protocol\.rb:206: warning: previous definition of BUFSIZE was here
+2\.7\.0\/gems\/net-protocol-0\.1\.3\/lib\/net\/protocol\.rb:504: warning: already initialized constant Net::NetPrivate::Socket
+ruby\/2\.7\.0\/net\/protocol\.rb:503: warning: previous definition of Socket was here
+2\.7\.0\/gems\/net-protocol-0\.1\.3\/lib\/net\/protocol\.rb:68: warning: already initialized constant Net::ProtocRetryError
+ruby\/2\.7\.0\/net\/protocol\.rb:66: warning: previous definition of ProtocRetryError was here
+
+# Ruby 3 does not emit warnings for pattern matching, and since it's working
+# fine in both Ruby 2 and Ruby 3, it's unlikely to change again.
+# This can be removed when support for Ruby 2 is dropped.
+warning: Pattern matching is experimental, and the behavior may change in future versions of Ruby!
diff --git a/scripts/api/base.rb b/scripts/api/base.rb
new file mode 100644
index 00000000000..972b461a09a
--- /dev/null
+++ b/scripts/api/base.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'gitlab'
+require_relative 'default_options'
+
+class Base
+ def initialize(options)
+ @project = options.fetch(:project)
+
+ # If api_token is nil, it's set to '' to allow unauthenticated requests (for forks).
+ api_token = options[:api_token] || ''
+
+ warn "No API token given." if api_token.empty?
+
+ @client = Gitlab.client(
+ endpoint: options.fetch(:endpoint, API::DEFAULT_OPTIONS[:endpoint]),
+ private_token: api_token
+ )
+ end
+
+ def execute
+ raise NotImplementedError
+ end
+
+ private
+
+ attr_reader :project, :client
+end
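For reference, the refactored scripts below all follow one pattern: call `super` to set `@project` and `@client`, pull out the script-specific options, and implement `execute`. A minimal sketch of a new consumer (the `ListPipelineJobs` class is hypothetical; `pipeline_jobs` is the gitlab gem's endpoint for listing a pipeline's jobs):

    # frozen_string_literal: true

    require_relative 'base'

    class ListPipelineJobs < Base
      def initialize(options)
        super # sets @project and @client from the shared options
        @pipeline_id = options.delete(:pipeline_id)
      end

      def execute
        client.pipeline_jobs(project, pipeline_id)
      end

      private

      attr_reader :pipeline_id
    end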
diff --git a/scripts/api/cancel_pipeline.rb b/scripts/api/cancel_pipeline.rb
index 2667cfb9733..5069527368b 100755
--- a/scripts/api/cancel_pipeline.rb
+++ b/scripts/api/cancel_pipeline.rb
@@ -1,19 +1,13 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
-require 'gitlab'
require 'optparse'
-require_relative 'default_options'
+require_relative 'base'
-class CancelPipeline
+class CancelPipeline < Base
def initialize(options)
- @project = options.delete(:project)
+ super
@pipeline_id = options.delete(:pipeline_id)
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: options.delete(:api_token)
- )
end
def execute
@@ -22,7 +16,7 @@ class CancelPipeline
private
- attr_reader :project, :pipeline_id, :client
+ attr_reader :pipeline_id
end
if $PROGRAM_NAME == __FILE__
diff --git a/scripts/api/commit_merge_requests.rb b/scripts/api/commit_merge_requests.rb
index 3cf8dc87497..523d2e769f0 100644
--- a/scripts/api/commit_merge_requests.rb
+++ b/scripts/api/commit_merge_requests.rb
@@ -1,22 +1,11 @@
# frozen_string_literal: true
-require 'gitlab'
-require_relative 'default_options'
+require_relative 'base'
-class CommitMergeRequests
+class CommitMergeRequests < Base
def initialize(options)
- @project = options.fetch(:project)
+ super
@sha = options.fetch(:sha)
-
- # If api_token is nil, it's set to '' to allow unauthenticated requests (for forks).
- api_token = options.fetch(:api_token, '')
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.fetch(:endpoint, API::DEFAULT_OPTIONS[:endpoint]),
- private_token: api_token
- )
end
def execute
@@ -25,5 +14,5 @@ class CommitMergeRequests
private
- attr_reader :project, :sha, :client
+ attr_reader :sha
end
diff --git a/scripts/api/create_issue.rb b/scripts/api/create_issue.rb
index 2117c285771..1c385ce41f2 100644
--- a/scripts/api/create_issue.rb
+++ b/scripts/api/create_issue.rb
@@ -1,29 +1,9 @@
# frozen_string_literal: true
-require 'gitlab'
-require_relative 'default_options'
-
-class CreateIssue
- def initialize(options)
- @project = options.fetch(:project)
-
- # Force the token to be a string so that if api_token is nil, it's set to '',
- # allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
- end
+require_relative 'base'
+class CreateIssue < Base
def execute(issue_data)
client.create_issue(project, issue_data.delete(:title), issue_data)
end
-
- private
-
- attr_reader :project, :client
end
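Call sites stay the same as before the refactor; a hedged example with placeholder values (the label and token names are illustrative, the `title` key is required because `execute` deletes it from `issue_data`):

    require_relative 'api/create_issue'

    CreateIssue.new(project: 'some-group/some-project', api_token: ENV['SOME_TOKEN'])
      .execute(title: 'Broken master', description: 'Details here', labels: ['master:broken'])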
diff --git a/scripts/api/create_issue_discussion.rb b/scripts/api/create_issue_discussion.rb
index 74a9f3ae378..6471a5c2579 100644
--- a/scripts/api/create_issue_discussion.rb
+++ b/scripts/api/create_issue_discussion.rb
@@ -1,32 +1,12 @@
# frozen_string_literal: true
-require 'gitlab'
-require_relative 'default_options'
-
-class CreateIssueDiscussion
- def initialize(options)
- @project = options.fetch(:project)
-
- # Force the token to be a string so that if api_token is nil, it's set to '',
- # allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
- end
+require_relative 'base'
+class CreateIssueDiscussion < Base
def execute(discussion_data)
client.post(
"/projects/#{client.url_encode project}/issues/#{discussion_data.delete(:issue_iid)}/discussions",
body: discussion_data
)
end
-
- private
-
- attr_reader :project, :client
end
diff --git a/scripts/api/create_merge_request_discussion.rb b/scripts/api/create_merge_request_discussion.rb
new file mode 100644
index 00000000000..2b380d2e216
--- /dev/null
+++ b/scripts/api/create_merge_request_discussion.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'gitlab'
+require_relative 'default_options'
+
+class CreateMergeRequestDiscussion
+ def initialize(options)
+ @merge_request = options.fetch(:merge_request)
+ @project = options.fetch(:project)
+
+ # If api_token is nil, it's set to '' to allow unauthenticated requests (for forks).
+ api_token = options.fetch(:api_token, '')
+
+ warn "No API token given." if api_token.empty?
+
+ @client = Gitlab.client(
+ endpoint: options.fetch(:endpoint, API::DEFAULT_OPTIONS[:endpoint]),
+ private_token: api_token
+ )
+ end
+
+ def execute(content)
+ client.create_merge_request_discussion(
+ project,
+ merge_request.fetch('iid'),
+ body: content
+ )
+ end
+
+ private
+
+ attr_reader :merge_request, :client, :project
+end
diff --git a/scripts/api/find_issues.rb b/scripts/api/find_issues.rb
index a1c37030319..f74f815fba9 100644
--- a/scripts/api/find_issues.rb
+++ b/scripts/api/find_issues.rb
@@ -1,29 +1,9 @@
# frozen_string_literal: true
-require 'gitlab'
-require_relative 'default_options'
-
-class FindIssues
- def initialize(options)
- @project = options.fetch(:project)
-
- # Force the token to be a string so that if api_token is nil, it's set to '',
- # allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
- end
+require_relative 'base'
+class FindIssues < Base
def execute(search_data)
client.issues(project, search_data)
end
-
- private
-
- attr_reader :project, :client
end
diff --git a/scripts/api/get_job_id.rb b/scripts/api/get_job_id.rb
index 12535106a4c..babe8f5dee0 100755
--- a/scripts/api/get_job_id.rb
+++ b/scripts/api/get_job_id.rb
@@ -1,11 +1,10 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
-require 'gitlab'
require 'optparse'
-require_relative 'default_options'
+require_relative 'base'
-class JobFinder
+class JobFinder < Base
DEFAULT_OPTIONS = API::DEFAULT_OPTIONS.merge(
pipeline_query: {}.freeze,
job_query: {}.freeze
@@ -13,22 +12,12 @@ class JobFinder
MAX_PIPELINES_TO_ITERATE = 20
def initialize(options)
- @project = options.delete(:project)
+ super
@pipeline_query = options.delete(:pipeline_query) || DEFAULT_OPTIONS[:pipeline_query]
@job_query = options.delete(:job_query) || DEFAULT_OPTIONS[:job_query]
@pipeline_id = options.delete(:pipeline_id)
@job_name = options.delete(:job_name)
@artifact_path = options.delete(:artifact_path)
-
- # Force the token to be a string so that if api_token is nil, it's set to '', allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
end
def execute
@@ -37,7 +26,7 @@ class JobFinder
private
- attr_reader :project, :pipeline_query, :job_query, :pipeline_id, :job_name, :artifact_path, :client
+ attr_reader :pipeline_query, :job_query, :pipeline_id, :job_name, :artifact_path
def find_job_with_artifact
return if artifact_path.nil?
diff --git a/scripts/api/get_package_and_test_job.rb b/scripts/api/get_package_and_test_job.rb
new file mode 100644
index 00000000000..e9430c5f107
--- /dev/null
+++ b/scripts/api/get_package_and_test_job.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'gitlab'
+require_relative 'default_options'
+
+class GetPackageAndTestJob
+ FAILED_STATUS = [
+ 'failed',
+ 'passed with warnings',
+ 'canceled'
+ ].freeze
+
+ def initialize(options)
+ @project = options.fetch(:project)
+ @pipeline_id = options.fetch(:pipeline_id)
+
+ # If api_token is nil, it's set to '' to allow unauthenticated requests (for forks).
+ api_token = options.fetch(:api_token, '')
+
+ warn "No API token given." if api_token.empty?
+
+ @client = Gitlab.client(
+ endpoint: options.fetch(:endpoint, API::DEFAULT_OPTIONS[:endpoint]),
+ private_token: api_token
+ )
+ end
+
+ def execute
+ package_and_test_bridge = client
+ .pipeline_bridges(project, pipeline_id, per_page: 100)
+ .auto_paginate
+ .find { |job| job.name.include?('package-and-test-ee') }
+
+ return if package_and_test_bridge&.downstream_pipeline.nil?
+
+ package_and_test_pipeline = client
+ .pipeline(project, package_and_test_bridge.downstream_pipeline.id)
+
+ return if package_and_test_pipeline.nil?
+
+ status = package_and_test_pipeline.detailed_status
+
+ return package_and_test_pipeline if FAILED_STATUS.include?(status&.label)
+ end
+
+ private
+
+ attr_reader :project, :pipeline_id, :client
+end
diff --git a/scripts/api/pipeline_failed_jobs.rb b/scripts/api/pipeline_failed_jobs.rb
index df9a7e76dcd..9012d48994f 100644
--- a/scripts/api/pipeline_failed_jobs.rb
+++ b/scripts/api/pipeline_failed_jobs.rb
@@ -1,25 +1,12 @@
# frozen_string_literal: true
-require 'gitlab'
+require_relative 'base'
-require_relative 'default_options'
-
-class PipelineFailedJobs
+class PipelineFailedJobs < Base
def initialize(options)
- @project = options.delete(:project)
+ super
@pipeline_id = options.delete(:pipeline_id)
@exclude_allowed_to_fail_jobs = options.delete(:exclude_allowed_to_fail_jobs)
-
- # Force the token to be a string so that if api_token is nil, it's set to '',
- # allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
end
def execute
@@ -43,5 +30,5 @@ class PipelineFailedJobs
private
- attr_reader :project, :pipeline_id, :exclude_allowed_to_fail_jobs, :client
+ attr_reader :pipeline_id, :exclude_allowed_to_fail_jobs
end
diff --git a/scripts/api/update_issue.rb b/scripts/api/update_issue.rb
new file mode 100644
index 00000000000..ce296ebc358
--- /dev/null
+++ b/scripts/api/update_issue.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'gitlab'
+require_relative 'default_options'
+
+class UpdateIssue
+ def initialize(options)
+ @project = options.fetch(:project)
+
+ # Force the token to be a string so that if api_token is nil, it's set to '',
+ # allowing unauthenticated requests (for forks).
+ api_token = options.delete(:api_token).to_s
+
+ warn "No API token given." if api_token.empty?
+
+ @client = Gitlab.client(
+ endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
+ private_token: api_token
+ )
+ end
+
+ def execute(issue_iid, issue_data)
+ client.edit_issue(project, issue_iid, issue_data)
+ end
+
+ private
+
+ attr_reader :project, :client
+end
diff --git a/scripts/build_assets_image b/scripts/build_assets_image
index ee8623c826e..00f21e5dede 100755
--- a/scripts/build_assets_image
+++ b/scripts/build_assets_image
@@ -29,7 +29,7 @@ fi
ASSETS_IMAGE_NAME="gitlab-assets-ce"
# `dev.gitlab-org` still has gitlab-ee.
-if [ "${CI_PROJECT_NAME}" = "gitlab" ] || [ "${CI_PROJECT_NAME}" = "gitlab-ee" ]; then
+if ([ "${CI_PROJECT_NAME}" = "gitlab" ] && [ "${FOSS_ONLY}" != "1" ]) || ([ "${CI_PROJECT_NAME}" = "gitlab-ee" ] && [ "${FOSS_ONLY}" != "1" ]); then
ASSETS_IMAGE_NAME="gitlab-assets-ee"
fi
diff --git a/scripts/construct-release-environments-versions.rb b/scripts/construct-release-environments-versions.rb
new file mode 100755
index 00000000000..1af45923731
--- /dev/null
+++ b/scripts/construct-release-environments-versions.rb
@@ -0,0 +1,37 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'json'
+
+class ReleaseEnvironmentsModel
+ COMPONENTS = %w[gitaly registry kas mailroom pages gitlab shell].freeze
+
+ # Generates a JSON object with a key for every component; each value is the
+ # environment combined with the short SHA.
+ # Example:
+ # {
+ # "gitaly": "15-10-stable-c7c5131c",
+ # "registry": "15-10-stable-c7c5131c",
+ # "kas": "15-10-stable-c7c5131c",
+ # "mailroom": "15-10-stable-c7c5131c",
+ # "pages": "15-10-stable-c7c5131c",
+ # "gitlab": "15-10-stable-c7c5131c",
+ # "shell": "15-10-stable-c7c5131c"
+ # }
+ def generate_json(environment)
+ output_json = {}
+ COMPONENTS.each do |component|
+ output_json[component.to_s] = "#{environment}-#{ENV['CI_COMMIT_SHORT_SHA']}"
+ end
+ JSON.generate(output_json)
+ end
+end
+
+# Outputs, in `dotenv` format, the ENVIRONMENT and VERSIONS to pass to release environments, e.g.
+# ENVIRONMENT=15-10-stable
+# VERSIONS={"gitaly":"15-10-stable-c7c5131c","registry":"15-10-stable-c7c5131c","kas":"15-10-stable-c7c5131c", ...
+if $PROGRAM_NAME == __FILE__
+ environment = ENV['CI_COMMIT_REF_SLUG'].sub("-ee", "")
+ puts "ENVIRONMENT=#{environment}"
+ puts "VERSIONS=#{ReleaseEnvironmentsModel.new.generate_json(environment)}"
+end
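For example, assuming `CI_COMMIT_REF_SLUG=15-10-stable-ee` and `CI_COMMIT_SHORT_SHA=c7c5131c`, running the script prints:

    ENVIRONMENT=15-10-stable
    VERSIONS={"gitaly":"15-10-stable-c7c5131c","registry":"15-10-stable-c7c5131c",...}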
diff --git a/scripts/create-pipeline-failure-incident.rb b/scripts/create-pipeline-failure-incident.rb
deleted file mode 100755
index bd57abf3740..00000000000
--- a/scripts/create-pipeline-failure-incident.rb
+++ /dev/null
@@ -1,272 +0,0 @@
-#!/usr/bin/env ruby
-
-# frozen_string_literal: true
-
-require 'optparse'
-require 'json'
-
-require_relative 'api/commit_merge_requests'
-require_relative 'api/create_issue'
-require_relative 'api/create_issue_discussion'
-require_relative 'api/pipeline_failed_jobs'
-
-class CreatePipelineFailureIncident
- DEFAULT_OPTIONS = {
- project: nil,
- incident_json_file: 'incident.json'
- }.freeze
- DEFAULT_LABELS = ['Engineering Productivity', 'master-broken::undetermined'].freeze
-
- def initialize(options)
- @project = options.delete(:project)
- @api_token = options.delete(:api_token)
- end
-
- def execute
- payload = {
- issue_type: 'incident',
- title: title,
- description: description,
- labels: incident_labels
- }
-
- payload[:assignee_ids] = assignee_ids if stable_branch_incident?
-
- CreateIssue.new(project: project, api_token: api_token).execute(payload).tap do |incident|
- CreateIssueDiscussion.new(project: project, api_token: api_token)
- .execute(issue_iid: incident.iid, body: "## Root Cause Analysis")
- CreateIssueDiscussion.new(project: project, api_token: api_token)
- .execute(issue_iid: incident.iid, body: "## Investigation Steps")
- end
- end
-
- private
-
- attr_reader :project, :api_token
-
- def stable_branch_incident?
- ENV['CI_COMMIT_REF_NAME'] =~ /^[\d-]+-stable(-ee)?$/
- end
-
- def failed_jobs
- @failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.merge(exclude_allowed_to_fail_jobs: true)).execute
- end
-
- def merge_request
- @merge_request ||= CommitMergeRequests.new(
- API::DEFAULT_OPTIONS.merge(sha: ENV['CI_COMMIT_SHA'])
- ).execute.first
- end
-
- def now
- @now ||= Time.now.utc
- end
-
- def title
- @title ||= begin
- full_title = "#{now.strftime('%A %F %R UTC')} - `#{ENV['CI_PROJECT_PATH']}` " \
- "broken `#{ENV['CI_COMMIT_REF_NAME']}` with #{failed_jobs.map(&:name).join(', ')}"
-
- if full_title.size >= 255
- "#{full_title[...252]}..." # max title length is 255, and we add an elipsis
- else
- full_title
- end
- end
- end
-
- def description
- return broken_stable_description_content if stable_branch_incident?
-
- broken_master_description_content
- end
-
- def broken_master_description_content
- <<~MARKDOWN
- ## #{project_link} pipeline #{pipeline_link} failed
-
- **Branch: #{branch_link}**
-
- **Commit: #{commit_link}**
-
- **Triggered by** #{triggered_by_link} • **Source:** #{source} • **Duration:** #{pipeline_duration} minutes
-
- **Failed jobs (#{failed_jobs.size}):**
-
- #{failed_jobs_list}
-
- ### General guidelines
-
- Follow the [Broken `master` handbook guide](https://about.gitlab.com/handbook/engineering/workflow/#broken-master).
-
- ### Investigation
-
- **Be sure to fill the `Timeline` for this incident.**
-
- 1. If the failure is new, and looks like a potential flaky failure, you can retry the failing job.
- Make sure to mention the retry in the `Timeline` and leave a link to the retried job.
- 1. If the failure looks like a broken `master`, communicate the broken `master` in Slack using the "Broadcast Master Broken" workflow:
- - Click the Shortcut lightning bolt icon in the `#master-broken` channel and select "Broadcast Master Broken".
- - Click "Continue the broadcast" after the automated message in `#master-broken`.
-
- ### Pre-resolution
-
- If you believe that there's an easy resolution by either:
-
- - Reverting a particular merge request.
- - Making a quick fix (for example, one line or a few similar simple changes in a few lines).
- You can create a merge request, assign to any available maintainer, and ping people that were involved/related to the introduction of the failure.
- Additionally, a message can be posted in `#backend_maintainers` or `#frontend_maintainers` to get a maintainer take a look at the fix ASAP.
-
- In both cases, make sure to add the ~"pipeline:expedite" label, and `master:broken` or `master:foss-broken` label, to speed up the `master`-fixing pipelines.
-
- ### Resolution
-
- Follow [the Resolution steps from the handbook](https://about.gitlab.com/handbook/engineering/workflow/#responsibilities-of-the-resolution-dri).
- MARKDOWN
- end
-
- def broken_stable_description_content
- <<~MARKDOWN
- ## #{project_link} pipeline #{pipeline_link} failed
-
- **Branch: #{branch_link}**
-
- **Commit: #{commit_link}**
-
- **Merge Request: #{merge_request_link}**
-
- **Triggered by** #{triggered_by_link} • **Source:** #{source} • **Duration:** #{pipeline_duration} minutes
-
- **Failed jobs (#{failed_jobs.size}):**
-
- #{failed_jobs_list}
-
- ### General guidelines
-
- A broken stable branch prevents patch releases from being built.
- Fixing the pipeline is a priority to prevent any delays in releases.
-
- The process in the [Broken `master` handbook guide](https://about.gitlab.com/handbook/engineering/workflow/#broken-master) can be referenced since much of that process also applies here.
-
- ### Investigation
-
- **Be sure to fill the `Timeline` for this incident.**
-
- 1. If the failure is new, and looks like a potential flaky failure, you can retry the failing job.
- Make sure to mention the retry in the `Timeline` and leave a link to the retried job.
- 1. Search for similar master-broken issues in https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues
- 1. If one exists, ask the DRI of the master-broken issue to cherry-pick any resulting merge requests into the stable branch
-
- @gitlab-org/release/managers if the merge request author or maintainer is not available, this can be escalated using the dev-on-call process in the [#dev-escalation slack channel](https://gitlab.slack.com/archives/CLKLMSUR4).
-
- ### Pre-resolution
-
- If you believe that there's an easy resolution by either:
-
- - Reverting a particular merge request.
- - Making a quick fix (for example, one line or a few similar simple changes in a few lines).
- You can create a merge request, assign to any available maintainer, and ping people that were involved/related to the introduction of the failure.
- Additionally, a message can be posted in `#backend_maintainers` or `#frontend_maintainers` to get a maintainer take a look at the fix ASAP.
- - Cherry picking a change that was used to fix a similar master-broken issue.
-
- ### Resolution
-
- Add a comment to this issue describing how this incident could have been prevented earlier in the Merge Request pipeline (rather than the merge commit pipeline).
-
- MARKDOWN
- end
-
- def incident_labels
- return ['release-blocker'] if stable_branch_incident?
-
- master_broken_label =
- if ENV['CI_PROJECT_NAME'] == 'gitlab-foss'
- 'master:foss-broken'
- else
- 'master:broken'
- end
-
- DEFAULT_LABELS.dup << master_broken_label
- end
-
- def assignee_ids
- ids = [ENV['GITLAB_USER_ID'].to_i]
- ids << merge_request['author']['id'].to_i if merge_request
- ids
- end
-
- def pipeline_link
- "[##{ENV['CI_PIPELINE_ID']}](#{ENV['CI_PIPELINE_URL']})"
- end
-
- def branch_link
- "[`#{ENV['CI_COMMIT_REF_NAME']}`](#{ENV['CI_PROJECT_URL']}/-/commits/#{ENV['CI_COMMIT_REF_NAME']})"
- end
-
- def pipeline_duration
- ((Time.now - Time.parse(ENV['CI_PIPELINE_CREATED_AT'])) / 60.to_f).round(2)
- end
-
- def commit_link
- "[#{ENV['CI_COMMIT_TITLE']}](#{ENV['CI_PROJECT_URL']}/-/commit/#{ENV['CI_COMMIT_SHA']})"
- end
-
- def merge_request_link
- return 'N/A' unless merge_request
-
- "[#{merge_request['title']}](#{merge_request['web_url']})"
- end
-
- def source
- "`#{ENV['CI_PIPELINE_SOURCE']}`"
- end
-
- def project_link
- "[#{ENV['CI_PROJECT_PATH']}](#{ENV['CI_PROJECT_URL']})"
- end
-
- def triggered_by_link
- "[#{ENV['GITLAB_USER_NAME']}](#{ENV['CI_SERVER_URL']}/#{ENV['GITLAB_USER_LOGIN']})"
- end
-
- def failed_jobs_list_for_title
- failed_jobs.map(&:name).join(', ')
- end
-
- def failed_jobs_list
- failed_jobs.map { |job| "- [#{job.name}](#{job.web_url})" }.join("\n")
- end
-end
-
-if $PROGRAM_NAME == __FILE__
- options = CreatePipelineFailureIncident::DEFAULT_OPTIONS.dup
-
- OptionParser.new do |opts|
- opts.on("-p", "--project PROJECT", String, "Project where to create the incident (defaults to "\
- "`#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:project]}`)") do |value|
- options[:project] = value
- end
-
- opts.on("-f", "--incident-json-file file_path", String, "Path to a file where to save the incident JSON data "\
- "(defaults to `#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:incident_json_file]}`)") do |value|
- options[:incident_json_file] = value
- end
-
- opts.on("-t", "--api-token API_TOKEN", String, "A valid Project token with the `Reporter` role and `api` scope "\
- "to create the incident") do |value|
- options[:api_token] = value
- end
-
- opts.on("-h", "--help", "Prints this help") do
- puts opts
- exit
- end
- end.parse!
-
- incident_json_file = options.delete(:incident_json_file)
-
- CreatePipelineFailureIncident.new(options).execute.tap do |incident|
- File.write(incident_json_file, JSON.pretty_generate(incident.to_h)) if incident_json_file
- end
-end
diff --git a/scripts/database/schema_validator.rb b/scripts/database/schema_validator.rb
new file mode 100644
index 00000000000..11a53faa945
--- /dev/null
+++ b/scripts/database/schema_validator.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require_relative '../migration_schema_validator'
+
+class SchemaValidator < MigrationSchemaValidator
+ ALLOW_SCHEMA_CHANGES = 'ALLOW_SCHEMA_CHANGES'
+ COMMIT_MESSAGE_SKIP_TAG = 'skip-db-structure-check'
+
+ def validate!
+ return if should_skip?
+
+ return if schema_changes.empty?
+
+ die "#{FILENAME} was changed, and no migrations were added:\n#{schema_changes}" if committed_migrations.empty?
+ end
+
+ private
+
+ def schema_changes
+ @schema_changes ||= run("git diff #{diff_target} HEAD -- #{FILENAME}")
+ end
+
+ def should_skip?
+ skip_env_present? || skip_commit_present?
+ end
+
+ def skip_env_present?
+ !ENV[ALLOW_SCHEMA_CHANGES].to_s.empty?
+ end
+
+ def skip_commit_present?
+ run("git show -s --format=%B -n 1").to_s.include?(COMMIT_MESSAGE_SKIP_TAG)
+ end
+end
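A sketch of how this validator is likely driven (the diffstat lists a `scripts/validate_schema_changes` entry point; the zero-argument constructor and the meaning of `FILENAME` as the schema dump are assumptions based on the class as shown):

    #!/usr/bin/env ruby
    # frozen_string_literal: true

    require_relative 'database/schema_validator'

    # Dies when the schema file changed without an accompanying migration,
    # unless ALLOW_SCHEMA_CHANGES is set or the commit message contains
    # 'skip-db-structure-check'.
    SchemaValidator.new.validate!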
diff --git a/scripts/db_tasks b/scripts/db_tasks
index 36040877abf..645e46e02d9 100755
--- a/scripts/db_tasks
+++ b/scripts/db_tasks
@@ -5,6 +5,7 @@ require 'yaml'
rails_env = ENV.fetch('RAILS_ENV')
database_config = YAML.load_file(File.join(File.expand_path('..', __dir__), 'config', 'database.yml'))[rails_env]
+database_config.reject! { |_k, v| v["database_tasks"] == false }
task = ARGV.shift
raise ArgumentError, 'You need to pass a task name!' unless task
@@ -14,4 +15,4 @@ cmd = ['bundle', 'exec', 'rake', task, *ARGV]
puts "Running: `#{cmd.join(' ')}`"
-system(*cmd)
+exit 1 unless system(*cmd)
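The new `reject!` line drops any database entry that opts out of schema tasks: for example, a `ci:` section that reuses the `main:` database typically sets `database_tasks: false` and is now skipped. The `exit 1 unless system(*cmd)` change makes the script propagate a failing rake task's status to CI instead of always exiting 0.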
diff --git a/scripts/decomposition/generate-loose-foreign-key b/scripts/decomposition/generate-loose-foreign-key
index ad7d6e32aa0..fbffebb6086 100755
--- a/scripts/decomposition/generate-loose-foreign-key
+++ b/scripts/decomposition/generate-loose-foreign-key
@@ -110,15 +110,12 @@ def add_definition_to_yaml(definition)
content = YAML.load_file(Rails.root.join('config/gitlab_loose_foreign_keys.yml'))
table_definitions = content[definition.from_table]
- # insert new entry at random place to avoid conflicts
+ # insert new entry in alphabetic order
unless table_definitions
table_definitions = []
- insert_idx = rand(content.count+1)
- # insert at a given index in ordered hash
- content = content.to_a
- content.insert(insert_idx, [definition.from_table, table_definitions])
- content = content.to_h
+ content[definition.from_table] = table_definitions
+ content = content.sort.to_h
end
on_delete =
@@ -171,9 +168,8 @@ def generate_migration(definition)
return unless foreign_key_exists?(:#{definition.from_table}, :#{definition.to_table}, name: "#{definition.name}")
with_lock_retries do
- execute('LOCK #{definition.to_table}, #{definition.from_table} IN ACCESS EXCLUSIVE MODE') if transaction_open?
-
- remove_foreign_key_if_exists(:#{definition.from_table}, :#{definition.to_table}, name: "#{definition.name}")
+ remove_foreign_key_if_exists(:#{definition.from_table}, :#{definition.to_table},
+ name: "#{definition.name}", reverse_lock_order: true)
end
end
@@ -217,7 +213,7 @@ def add_test_to_specs(definition)
puts "Adding test to #{spec_path}..."
spec_test = <<-EOF.strip_heredoc.indent(2)
- context 'loose foreign key on #{definition.from_table}.#{definition.column}' do
+ context 'with loose foreign key on #{definition.from_table}.#{definition.column}' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:#{definition.to_table.singularize}) }
let!(:model) { create(:#{definition.from_table.singularize}, #{definition.column.delete_suffix("_id").singularize}: parent) }
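With this change, a new table entry is inserted into config/gitlab_loose_foreign_keys.yml in alphabetical order instead of at a random index. For context, entries in that file have this shape (illustrative values):

    ci_builds:
    - table: users
      column: user_id
      on_delete: async_nullify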
diff --git a/scripts/failed_tests.rb b/scripts/failed_tests.rb
index 786d3c24c74..0ba454894b7 100755
--- a/scripts/failed_tests.rb
+++ b/scripts/failed_tests.rb
@@ -12,8 +12,8 @@ class FailedTests
previous_tests_report_path: 'test_results/previous/test_reports.json',
output_directory: 'tmp/previous_failed_tests/',
format: :oneline,
- rspec_pg_regex: /rspec .+ pg12( .+)?/,
- rspec_ee_pg_regex: /rspec-ee .+ pg12( .+)?/
+ rspec_pg_regex: /rspec .+ pg13( .+)?/,
+ rspec_ee_pg_regex: /rspec-ee .+ pg13( .+)?/
}.freeze
def initialize(options)
diff --git a/scripts/frontend/download_fixtures.sh b/scripts/frontend/download_fixtures.sh
new file mode 100755
index 00000000000..47a57401bb9
--- /dev/null
+++ b/scripts/frontend/download_fixtures.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+
+#
+# Downloads the most recent frontend fixtures for the current commit, walking up the
+# commit parent chain for at most max-commits commits (defaults to 50).
+#
+
+source scripts/packages/helpers.sh
+
+print_help() {
+ echo "Usage: scripts/frontend/download_fixtures.sh [--branch <branch-name>] [--max-commits <number>]"
+ echo
+ echo "Looks for a frontend fixture package in the package registry for commits on a local branch."
+ echo
+ echo "If --branch isn't specified, the script will use the current branch as a commit reference."
+ echo "If --max-commits isn't specified, the default is 50 commits."
+
+ return
+}
+
+branch="HEAD"
+max_commits_count=50
+
+while [ $# -gt 0 ]; do
+ case "$1" in
+ --branch)
+ shift
+ branch="$1"
+ ;;
+ --max-commits)
+ shift
+ max_commits_count="$1"
+ ;;
+ *)
+ print_help
+ exit
+ ;;
+ esac
+ shift
+done
+
+for commit_sha in $(git rev-list ${branch} --max-count="${max_commits_count}"); do
+ API_PACKAGES_BASE_URL=https://gitlab.com/api/v4/projects/278964/packages/generic
+ FIXTURES_PACKAGE="fixtures-${commit_sha}.tar.gz"
+ FIXTURES_PACKAGE_URL="${API_PACKAGES_BASE_URL}/fixtures/${commit_sha}/${FIXTURES_PACKAGE}"
+
+ echo "Looking for frontend fixtures for commit ${commit_sha}..."
+
+ if ! archive_doesnt_exist "${FIXTURES_PACKAGE_URL}" > /dev/null 2>&1; then
+ echo "We have found frontend fixtures at ${FIXTURES_PACKAGE_URL}!"
+
+ read_curl_package "${FIXTURES_PACKAGE_URL}" | extract_package
+
+ break
+ fi
+done
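Example invocations, matching the usage text printed by print_help:

    scripts/frontend/download_fixtures.sh
    scripts/frontend/download_fixtures.sh --branch some-branch-name
    scripts/frontend/download_fixtures.sh --max-commits 100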
diff --git a/scripts/frontend/extract_gettext_all.js b/scripts/frontend/extract_gettext_all.js
index 922aa85241f..0cd6ab99a3a 100644
--- a/scripts/frontend/extract_gettext_all.js
+++ b/scripts/frontend/extract_gettext_all.js
@@ -19,7 +19,7 @@ extractor.addMessageTransformFunction(ensureSingleLine);
const jsParser = extractor.createJsParser([
// Place all the possible expressions to extract here:
- JsExtractors.callExpression('__', {
+ JsExtractors.callExpression(['__', 's__'], {
arguments: {
text: 0,
},
@@ -30,15 +30,13 @@ const jsParser = extractor.createJsParser([
textPlural: 1,
},
}),
- JsExtractors.callExpression('s__', {
- arguments: {
- text: 0,
- },
- }),
]);
const vueParser = decorateJSParserWithVueSupport(jsParser, {
vue2TemplateCompiler,
+ // All of our expressions contain `__`,
+ // so we can safely skip parsing files that don't contain it.
+ guard: '__',
});
function printJson() {
diff --git a/scripts/frontend/po_to_json.js b/scripts/frontend/po_to_json.js
new file mode 100755
index 00000000000..fba68a61814
--- /dev/null
+++ b/scripts/frontend/po_to_json.js
@@ -0,0 +1,208 @@
+#!/usr/bin/env node
+
+const fs = require('fs/promises');
+const path = require('path');
+
+async function isDir(dirPath) {
+ if (!dirPath) {
+ return false;
+ }
+ try {
+ const stat = await fs.stat(dirPath);
+ return stat.isDirectory();
+ } catch (e) {
+ return false;
+ }
+}
+
+/**
+ * This is the main function which starts multiple workers
+ * in order to speed up the po file => app.js
+ * locale conversions
+ */
+async function main({ localeRoot, outputDir } = {}) {
+ if (!(await isDir(localeRoot))) {
+ throw new Error(`Provided localeRoot: '${localeRoot}' doesn't seem to be a folder`);
+ }
+
+ if (!(await isDir(outputDir))) {
+ throw new Error(`Provided outputDir '${outputDir}' doesn't seem to be a folder`);
+ }
+
+ // eslint-disable-next-line global-require
+ const glob = require('glob');
+ // eslint-disable-next-line global-require
+ const { Worker } = require('jest-worker');
+
+ const locales = glob.sync('*/*.po', { cwd: localeRoot });
+
+ const worker = new Worker(__filename, {
+ exposedMethods: ['convertPoFileForLocale'],
+ silent: false,
+ enableWorkerThreads: true,
+ });
+ worker.getStdout().pipe(process.stdout);
+ worker.getStderr().pipe(process.stderr);
+
+ await Promise.all(
+ locales.map((localeFile) => {
+ const locale = path.dirname(localeFile);
+ return worker.convertPoFileForLocale({
+ locale,
+ localeFile: path.join(localeRoot, localeFile),
+ resultDir: path.join(outputDir, locale),
+ });
+ }),
+ );
+
+ await worker.end();
+
+ console.log('Done converting all the po files');
+}
+
+/**
+ * This is the conversion logic for: po => JS object for jed
+ */
+function convertPoToJed(data, locale) {
+ // eslint-disable-next-line global-require
+ const { parse } = require('gettext-parser/lib/poparser');
+ const DEFAULT_CONTEXT = '';
+
+ /**
+ * TODO: This replacer might be unnecessary _or_ even cause bugs
+ * due to potential unnecessary double escaping.
+ * But for now it is here to ensure that the old and new output
+ * are equivalent.
+ * @param str
+ * @returns {string}
+ */
+ function escapeMsgid(str) {
+ return `${str}`.replace(/([\\"])/g, '\\$1');
+ }
+
+ /**
+ * TODO: This replacer might be unnecessary _or_ even cause bugs
+ * due to potential unnecessary double escaping.
+ * But for now it is here to ensure that the old and new output
+ * are equivalent.
+ *
+ * NOTE: The replacements of `\n` and `\t` need to be iterated on,
+ * because in the cases where we see those chars, they:
+ * - could likely be trimmed, because they do nothing
+ * - seem to be escaped in a way that is broken anyhow
+ * @param str
+ * @returns {string}
+ */
+ function escapeMsgstr(str) {
+ return `${str}`.replace(/[\t\n"\\]/g, (match) => {
+ if (match === '\n') {
+ return '\\n';
+ }
+ if (match === '\t') {
+ return '\\t';
+ }
+ return `\\${match}`;
+ });
+ }
+
+ const { headers = {}, translations: parsed } = parse(data);
+
+ const translations = Object.values(parsed[DEFAULT_CONTEXT] ?? {}).reduce((acc, entry) => {
+ const { msgid, msgstr } = entry;
+
+ /* TODO: If a msgid has no translation, we can just drop the whole key,
+ as jed will just fall back to the key.
+ We are not doing that yet, because we want to ensure that
+ the results of the new and old ways of generating the files match.
+ if (msgstr.every((x) => x === '')) {
+ return acc;
+ }
+ */
+
+ acc[escapeMsgid(msgid)] = msgstr.map(escapeMsgstr);
+
+ return acc;
+ }, {});
+
+ // Do not bother if the file has no actual translations
+ if (!Object.keys(translations).length) {
+ return { jed: null };
+ }
+
+ if (headers['Plural-Forms']) {
+ headers.plural_forms = headers['Plural-Forms'];
+ }
+
+ // Format required for jed: http://messageformat.github.io/Jed/
+ const jed = {
+ domain: 'app',
+ locale_data: {
+ app: {
+ ...translations,
+ // Ensure that the header data which is attached to a message with id ""
+ // is not accidentally overwritten by an empty externalized string
+ '': {
+ ...headers,
+ domain: 'app',
+ lang: locale,
+ },
+ },
+ },
+ };
+
+ return { jed };
+}
+
+/**
+ * This is the function which the workers actually execute
+ * 1. It reads the po
+ * 2. converts it with convertPoToJed
+ * 3. writes the file to
+ */
+async function convertPoFileForLocale({ locale, localeFile, resultDir }) {
+ const poContent = await fs.readFile(localeFile);
+
+ const { jed } = await convertPoToJed(poContent, locale);
+
+ if (jed === null) {
+ console.log(`${locale}: No translations. Skipping creation of app.js`);
+ return;
+ }
+
+ await fs.mkdir(resultDir, { recursive: true });
+
+ await fs.writeFile(
+ path.join(resultDir, 'app.js'),
+ `window.translations = ${JSON.stringify(jed)}`,
+ 'utf8',
+ );
+ console.log(`Created app.js in ${resultDir}`);
+}
+
+/*
+ Start the main thread only if we are not part of a worker
+ */
+if (!process.env.JEST_WORKER_ID) {
+ // eslint-disable-next-line global-require
+ const argumentsParser = require('commander');
+
+ const args = argumentsParser
+ .option('-l, --locale-root <locale_root>', 'Extract messages from subfolders in this directory')
+ .option('-o, --output-dir <output_dir>', 'Write app.js files into subfolders in this directory')
+ .parse(process.argv);
+
+ main(args).catch((e) => {
+ console.warn(`Something went wrong: ${e.message}`);
+ console.warn(args.printHelp());
+ process.exitCode = 1;
+ });
+}
+
+/*
+ Expose the function for workers
+ */
+module.exports = {
+ main,
+ convertPoToJed,
+ convertPoFileForLocale,
+};
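An invocation sketch based on the options defined above (the flag names come from the commander setup; the specific source and destination directories used in CI are assumptions, not part of this diff):

    node scripts/frontend/po_to_json.js --locale-root locale --output-dir app/assets/javascripts/locale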
diff --git a/scripts/frontend/startup_css/constants.js b/scripts/frontend/startup_css/constants.js
index 5143c04dc37..bf9774daea5 100644
--- a/scripts/frontend/startup_css/constants.js
+++ b/scripts/frontend/startup_css/constants.js
@@ -51,12 +51,14 @@ const createMainOutput = ({ outFile, cssKeys, type }) => ({
htmlPaths: [
path.join(FIXTURES_ROOT, `startup_css/project-${type}.html`),
path.join(FIXTURES_ROOT, `startup_css/project-${type}-signed-out.html`),
- path.join(FIXTURES_ROOT, `startup_css/project-${type}-search-ff-off.html`),
+ path.join(FIXTURES_ROOT, `startup_css/project-${type}-super-sidebar.html`),
],
cssKeys,
purgeOptions: {
safelist: {
standard: [
+ 'page-with-super-sidebar',
+ 'page-with-super-sidebar-collapsed',
'page-with-icon-sidebar',
'sidebar-collapsed-desktop',
// We want to include the root dropdown-menu style since it should be hidden by default
diff --git a/scripts/generate-e2e-pipeline b/scripts/generate-e2e-pipeline
index c612a700f90..3f30fb86ccc 100755
--- a/scripts/generate-e2e-pipeline
+++ b/scripts/generate-e2e-pipeline
@@ -5,44 +5,61 @@ set -e
# Script to generate e2e test child pipeline
# This is required because environment variables that are generated dynamically are not picked up by rules in child pipelines
+source "$(dirname "$0")/utils.sh"
source $ENV_FILE
-echo "Generating child pipeline yml definitions for review-app and package-and-test child pipelines"
+echoinfo "Generating child pipeline yml definitions for e2e test pipelines child pipelines"
+
+declare -A qa_pipelines
+
+# key/value pairs for qa pipeline yml definitions
+qa_pipelines["package-and-test-pipeline.yml"]="package-and-test/main.gitlab-ci.yml"
+qa_pipelines["package-and-test-nightly-pipeline.yml"]="package-and-test-nightly/main.gitlab-ci.yml"
+qa_pipelines["review-app-pipeline.yml"]="review-apps/main.gitlab-ci.yml"
+qa_pipelines["test-on-gdk-pipeline.yml"]="test-on-gdk/main.gitlab-ci.yml"
if [ "$QA_SKIP_ALL_TESTS" == "true" ]; then
skip_pipeline=".gitlab/ci/_skip.yml"
- echo "Using ${skip_pipeline} due to QA_SKIP_ALL_TESTS set to 'true'"
- cp $skip_pipeline "$OMNIBUS_PIPELINE_YML"
- cp $skip_pipeline "$REVIEW_PIPELINE_YML"
+ echoinfo "Using ${skip_pipeline} for all e2e test pipelines due to QA_SKIP_ALL_TESTS set to 'true'"
+ for key in "${!qa_pipelines[@]}"; do
+ cp $skip_pipeline "$key"
+ done
+
exit
fi
# set custom cache key to override default cache in pipeline-common because we use bundle to install gitlab-qa gem
qa_cache_key="qa-e2e-ruby-${RUBY_VERSION}-$(md5sum qa/Gemfile.lock | awk '{ print $1 }')"
+# These variables are used across all qa child pipelines.
+# They allow features like skipping all tests or selective test execution to work in every child pipeline.
variables=$(cat <<YML
variables:
- COLORIZED_LOGS: "true"
GIT_DEPTH: "20"
GIT_STRATEGY: "clone" # 'GIT_STRATEGY: clone' optimizes the pack-objects cache hit ratio
GIT_SUBMODULE_STRATEGY: "none"
GITLAB_QA_CACHE_KEY: "$qa_cache_key"
- GITLAB_VERSION: "$(cat VERSION)"
+ GITLAB_SEMVER_VERSION: "$(cat VERSION)"
+ SKIP_OMNIBUS_TRIGGER: "false"
QA_EXPORT_TEST_METRICS: "${QA_EXPORT_TEST_METRICS:-true}"
QA_FEATURE_FLAGS: "${QA_FEATURE_FLAGS}"
QA_FRAMEWORK_CHANGES: "${QA_FRAMEWORK_CHANGES:-false}"
QA_RUN_ALL_TESTS: "${QA_RUN_ALL_TESTS:-false}"
+ QA_RUN_ALL_E2E_LABEL: "${QA_RUN_ALL_E2E_LABEL:-false}"
QA_SAVE_TEST_METRICS: "${QA_SAVE_TEST_METRICS:-false}"
QA_SUITES: "$QA_SUITES"
QA_TESTS: "$QA_TESTS"
YML
)
-echo "Using .gitlab/ci/review-apps/main.gitlab-ci.yml and .gitlab/ci/package-and-test/main.gitlab-ci.yml"
-cp .gitlab/ci/review-apps/main.gitlab-ci.yml "$REVIEW_PIPELINE_YML"
-echo "$variables" >>"$REVIEW_PIPELINE_YML"
-cp .gitlab/ci/package-and-test/main.gitlab-ci.yml "$OMNIBUS_PIPELINE_YML"
-echo "$variables" >>"$OMNIBUS_PIPELINE_YML"
+echo "***Saving generated qa pipeline files***"
+for key in "${!qa_pipelines[@]}"; do
+ echo "Generating $key"
+
+ cp ".gitlab/ci/${qa_pipelines[$key]}" "$key"
+
+ echo >>"$key" # add empty line so it's easier to read if debugging
+ echo "$variables" >>"$key"
+done
-echo "Successfully generated review-app and package-and-test pipeline with following variables section:"
-echo "$variables"
+echoinfo "Successfully generated qa pipeline files"
diff --git a/scripts/generate-failed-package-and-test-mr-message.rb b/scripts/generate-failed-package-and-test-mr-message.rb
new file mode 100755
index 00000000000..c57f132d563
--- /dev/null
+++ b/scripts/generate-failed-package-and-test-mr-message.rb
@@ -0,0 +1,81 @@
+#!/usr/bin/env ruby
+
+# frozen_string_literal: true
+
+require 'optparse'
+require 'json'
+
+require_relative 'api/create_merge_request_discussion'
+require_relative 'api/commit_merge_requests'
+require_relative 'api/get_package_and_test_job'
+
+class GenerateFailedPackageAndTestMrMessage
+ DEFAULT_OPTIONS = {
+ project: nil
+ }.freeze
+
+ def initialize(options)
+ @project = options.fetch(:project)
+ end
+
+ def execute
+ return unless failed_package_and_test_pipeline
+
+ add_discussion_to_mr
+ end
+
+ private
+
+ attr_reader :project
+
+ def add_discussion_to_mr
+ CreateMergeRequestDiscussion.new(
+ API::DEFAULT_OPTIONS.merge(merge_request: merge_request)
+ ).execute(content)
+ end
+
+ def failed_package_and_test_pipeline
+ @failed_package_and_test_pipeline ||= GetPackageAndTestJob.new(API::DEFAULT_OPTIONS).execute
+ end
+
+ def merge_request
+ @merge_request ||= CommitMergeRequests.new(
+ API::DEFAULT_OPTIONS.merge(sha: ENV['CI_MERGE_REQUEST_SOURCE_BRANCH_SHA'])
+ ).execute.first
+ end
+
+ def content
+ <<~MARKDOWN
+ :warning: @#{author_username} The `e2e:package-and-test-ee` job has failed.
+
+ - `e2e:package-and-test-ee` pipeline: #{failed_package_and_test_pipeline['web_url']}
+
+ The `e2e:package-and-test-ee` pipeline is allowed to fail due to its [flakiness](#{package_and_test_link}). Failures should be
+ investigated to guarantee this backport complies with the Quality standards.
+
+ Ping your team's associated Software Engineer in Test (SET) to confirm the failures are unrelated to the merge request.
+ If there's no SET assigned, ask for assistance on the `#quality` Slack channel.
+ MARKDOWN
+ end
+
+ def author_username
+ merge_request['author']['username'] if merge_request
+ end
+
+ def package_and_test_link
+ "https://about.gitlab.com/handbook/engineering/quality/quality-engineering/test-metrics-dashboards/#package-and-test"
+ end
+end
+
+if $PROGRAM_NAME == __FILE__
+ options = GenerateFailedPackageAndTestMrMessage::DEFAULT_OPTIONS.dup
+
+ OptionParser.new do |opts|
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!
+
+ GenerateFailedPackageAndTestMrMessage.new(options).execute
+end
diff --git a/scripts/generate-failed-pipeline-slack-message.rb b/scripts/generate-failed-pipeline-slack-message.rb
deleted file mode 100755
index eefdebd5db5..00000000000
--- a/scripts/generate-failed-pipeline-slack-message.rb
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/usr/bin/env ruby
-
-# frozen_string_literal: true
-
-require 'optparse'
-require 'json'
-
-require_relative 'api/pipeline_failed_jobs'
-
-class GenerateFailedPipelineSlackMessage
- DEFAULT_OPTIONS = {
- failed_pipeline_slack_message_file: 'failed_pipeline_slack_message.json',
- incident_json_file: 'incident.json'
- }.freeze
-
- def initialize(options)
- @incident_json_file = options.delete(:incident_json_file)
- end
-
- def execute
- {
- channel: ENV['SLACK_CHANNEL'],
- username: "Failed pipeline reporter",
- icon_emoji: ":boom:",
- text: "*#{title}*",
- blocks: [
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*#{title}*"
- },
- accessory: {
- type: "button",
- text: {
- type: "plain_text",
- text: incident_button_text
- },
- url: incident_button_link
- }
- },
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*Branch*: #{branch_link}"
- }
- },
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*Commit*: #{commit_link}"
- }
- },
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*Triggered by* #{triggered_by_link} • *Source:* #{source} • *Duration:* #{pipeline_duration} minutes"
- }
- },
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*Failed jobs (#{failed_jobs.size}):* #{failed_jobs_list}"
- }
- }
- ]
- }
- end
-
- private
-
- attr_reader :incident_json_file
-
- def failed_jobs
- @failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)).execute
- end
-
- def title
- "#{project_link} pipeline #{pipeline_link} failed"
- end
-
- def incident_exist?
- return @incident_exist if defined?(@incident_exist)
-
- @incident_exist = File.exist?(incident_json_file)
- end
-
- def incident
- return unless incident_exist?
-
- @incident ||= JSON.parse(File.read(incident_json_file))
- end
-
- def incident_button_text
- if incident_exist?
- "View incident ##{incident['iid']}"
- else
- 'Create incident'
- end
- end
-
- def incident_button_link
- if incident_exist?
- incident['web_url']
- else
- "#{ENV['CI_SERVER_URL']}/#{ENV['BROKEN_BRANCH_INCIDENTS_PROJECT']}/-/issues/new?" \
- "issuable_template=incident&issue%5Bissue_type%5D=incident"
- end
- end
-
- def pipeline_link
- "<#{ENV['CI_PIPELINE_URL']}|##{ENV['CI_PIPELINE_ID']}>"
- end
-
- def branch_link
- "<#{ENV['CI_PROJECT_URL']}/-/commits/#{ENV['CI_COMMIT_REF_NAME']}|`#{ENV['CI_COMMIT_REF_NAME']}`>"
- end
-
- def pipeline_duration
- ((Time.now - Time.parse(ENV['CI_PIPELINE_CREATED_AT'])) / 60.to_f).round(2)
- end
-
- def commit_link
- "<#{ENV['CI_PROJECT_URL']}/-/commit/#{ENV['CI_COMMIT_SHA']}|#{ENV['CI_COMMIT_TITLE']}>"
- end
-
- def source
- "`#{ENV['CI_PIPELINE_SOURCE']}#{schedule_type}`"
- end
-
- def schedule_type
- ENV['CI_PIPELINE_SOURCE'] == 'schedule' ? ": #{ENV['SCHEDULE_TYPE']}" : ''
- end
-
- def project_link
- "<#{ENV['CI_PROJECT_URL']}|#{ENV['CI_PROJECT_PATH']}>"
- end
-
- def triggered_by_link
- "<#{ENV['CI_SERVER_URL']}/#{ENV['GITLAB_USER_LOGIN']}|#{ENV['GITLAB_USER_NAME']}>"
- end
-
- def failed_jobs_list
- failed_jobs.map { |job| "<#{job.web_url}|#{job.name}>" }.join(', ')
- end
-end
-
-if $PROGRAM_NAME == __FILE__
- options = GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS.dup
-
- OptionParser.new do |opts|
- opts.on("-i", "--incident-json-file file_path", String, "Path to a file where the incident JSON data "\
- "can be found (defaults to "\
- "`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:incident_json_file]}`)") do |value|
- options[:incident_json_file] = value
- end
-
- opts.on("-f", "--failed-pipeline-slack-message-file file_path", String, "Path to a file where to save the Slack "\
- "message (defaults to "\
- "`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:failed_pipeline_slack_message_file]}`)") do |value|
- options[:failed_pipeline_slack_message_file] = value
- end
-
- opts.on("-h", "--help", "Prints this help") do
- puts opts
- exit
- end
- end.parse!
-
- failed_pipeline_slack_message_file = options.delete(:failed_pipeline_slack_message_file)
-
- GenerateFailedPipelineSlackMessage.new(options).execute.tap do |message_payload|
- if failed_pipeline_slack_message_file
- File.write(failed_pipeline_slack_message_file, JSON.pretty_generate(message_payload))
- end
- end
-end
diff --git a/scripts/generate-rspec-foss-impact-pipeline b/scripts/generate-rspec-foss-impact-pipeline
deleted file mode 100755
index 3277f38ebe1..00000000000
--- a/scripts/generate-rspec-foss-impact-pipeline
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# Script to generate `rspec foss-impact` test child pipeline with dynamically parallelized jobs.
-
-source scripts/utils.sh
-
-rspec_matching_tests_foss_path="${1}"
-pipeline_yml="${2}"
-
-test_file_count=$(wc -w "${rspec_matching_tests_foss_path}" | awk '{ print $1 }')
-echoinfo "test_file_count: ${test_file_count}"
-
-if [[ "${test_file_count}" -eq 0 ]]; then
- skip_pipeline=".gitlab/ci/_skip.yml"
-
- echo "Using ${skip_pipeline} due to no impacted FOSS rspec tests to run"
- cp $skip_pipeline "$pipeline_yml"
- exit
-fi
-
-# As of 2022-09-01:
-# $ find spec -type f | wc -l
-# 12825
-# and
-# $ find ee/spec -type f | wc -l
-# 5610
-# which gives a total of 18435 test files (`number_of_tests_in_total_in_the_test_suite`).
-#
-# Total time to run all tests (based on https://gitlab-org.gitlab.io/rspec_profiling_stats/) is 170183 seconds (`duration_of_the_test_suite_in_seconds`).
-#
-# This gives an approximate 170183 / 18435 = 9.2 seconds per test file (`average_test_file_duration_in_seconds`).
-#
-# If we want each test job to finish in 10 minutes, given we have 3 minutes of setup (`setup_duration_in_seconds`), then we need to give 7 minutes of testing to each test node (`optimal_test_runtime_duration_in_seconds`).
-# (7 * 60) / 9.2 = 45.6
-#
-# So if we'd want to run the full test suites in 10 minutes (`optimal_test_job_duration_in_seconds`), we'd need to run at max 45 test file per nodes (`optimal_test_file_count_per_node`).
-number_of_tests_in_total_in_the_test_suite=18435
-duration_of_the_test_suite_in_seconds=170183
-optimal_test_job_duration_in_seconds=600 # 10 minutes
-setup_duration_in_seconds=180 # 3 minutes
-
-optimal_test_runtime_duration_in_seconds=$(( optimal_test_job_duration_in_seconds - setup_duration_in_seconds ))
-echoinfo "optimal_test_runtime_duration_in_seconds: ${optimal_test_runtime_duration_in_seconds}"
-
-average_test_file_duration_in_seconds=$(( duration_of_the_test_suite_in_seconds / number_of_tests_in_total_in_the_test_suite ))
-echoinfo "average_test_file_duration_in_seconds: ${average_test_file_duration_in_seconds}"
-
-optimal_test_file_count_per_node=$(( optimal_test_runtime_duration_in_seconds / average_test_file_duration_in_seconds ))
-echoinfo "optimal_test_file_count_per_node: ${optimal_test_file_count_per_node}"
-
-node_count=$(( test_file_count / optimal_test_file_count_per_node ))
-echoinfo "node_count: ${node_count}"
-
-echoinfo "Optimal node count for 'rspec foss-impact' jobs is ${node_count}."
-
-MAX_NODES_COUNT=50 # Maximum parallelization allowed by GitLab
-if [[ "${node_count}" -gt "${MAX_NODES_COUNT}" ]]; then
- echoinfo "We don't want to parallelize 'rspec foss-impact' to more than ${MAX_NODES_COUNT} jobs for now! Decreasing the parallelization to ${MAX_NODES_COUNT}."
- node_count=${MAX_NODES_COUNT}
-fi
-
-ruby -rerb -e "puts ERB.new(File.read('.gitlab/ci/rails/rspec-foss-impact.gitlab-ci.yml.erb')).result_with_hash(parallel_value: ${node_count})" > "${pipeline_yml}"
-
-echosuccess "Generated ${pipeline_yml} pipeline with following content:"
-cat "${pipeline_yml}"
diff --git a/scripts/generate_rspec_pipeline.rb b/scripts/generate_rspec_pipeline.rb
new file mode 100755
index 00000000000..292b3d85b20
--- /dev/null
+++ b/scripts/generate_rspec_pipeline.rb
@@ -0,0 +1,205 @@
+#!/usr/bin/env ruby
+
+# frozen_string_literal: true
+
+require 'optparse'
+require 'json'
+require 'fileutils'
+require 'erb'
+require_relative '../tooling/quality/test_level'
+
+# Class to generate RSpec test child pipeline with dynamically parallelized jobs.
+class GenerateRspecPipeline
+ SKIP_PIPELINE_YML_FILE = ".gitlab/ci/_skip.yml"
+ TEST_LEVELS = %i[migration background_migration unit integration system].freeze
+ MAX_NODES_COUNT = 50 # Maximum parallelization allowed by GitLab
+
+ OPTIMAL_TEST_JOB_DURATION_IN_SECONDS = 600 # 10 MINUTES
+ SETUP_DURATION_IN_SECONDS = 180.0 # 3 MINUTES
+ OPTIMAL_TEST_RUNTIME_DURATION_IN_SECONDS = OPTIMAL_TEST_JOB_DURATION_IN_SECONDS - SETUP_DURATION_IN_SECONDS
+
+ # As of 2022-09-01:
+ # $ find spec -type f | wc -l
+ # 12825
+ # and
+ # $ find ee/spec -type f | wc -l
+ # 5610
+ # which gives a total of 18435 test files (`NUMBER_OF_TESTS_IN_TOTAL_IN_THE_TEST_SUITE`).
+ #
+ # Total time to run all tests (based on https://gitlab-org.gitlab.io/rspec_profiling_stats/)
+ # is 170183 seconds (`DURATION_OF_THE_TEST_SUITE_IN_SECONDS`).
+ #
+ # This gives an approximate 170183 / 18435 = 9.2 seconds per test file
+ # (`DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS`).
+ #
+ # If we want each test job to finish in 10 minutes, given we have 3 minutes of setup (`SETUP_DURATION_IN_SECONDS`),
+ # then we need to give 7 minutes of testing to each test node (`OPTIMAL_TEST_RUNTIME_DURATION_IN_SECONDS`).
+ # (7 * 60) / 9.2 = 45.6
+ #
+ # So if we'd want to run the full test suites in 10 minutes (`OPTIMAL_TEST_JOB_DURATION_IN_SECONDS`),
+ # we'd need to run at most 45 test files per node (`#optimal_test_file_count_per_node_per_test_level`).
+ NUMBER_OF_TESTS_IN_TOTAL_IN_THE_TEST_SUITE = 18_435
+ DURATION_OF_THE_TEST_SUITE_IN_SECONDS = 170_183
+ DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS =
+ DURATION_OF_THE_TEST_SUITE_IN_SECONDS / NUMBER_OF_TESTS_IN_TOTAL_IN_THE_TEST_SUITE
+
+ # pipeline_template_path: A YAML pipeline configuration template to generate the final pipeline config from
+ # rspec_files_path: A file containing RSpec files to run, separated by a space
+ # knapsack_report_path: A file containing a Knapsack report
+ # test_suite_prefix: An optional test suite folder prefix (e.g. `ee/` or `jh/`)
+ # generated_pipeline_path: An optional filename where to write the pipeline config (defaults to
+ # `"#{pipeline_template_path}.yml"`)
+ def initialize(
+ pipeline_template_path:, rspec_files_path: nil, knapsack_report_path: nil, test_suite_prefix: nil,
+ generated_pipeline_path: nil)
+ @pipeline_template_path = pipeline_template_path.to_s
+ @rspec_files_path = rspec_files_path.to_s
+ @knapsack_report_path = knapsack_report_path.to_s
+ @test_suite_prefix = test_suite_prefix
+ @generated_pipeline_path = generated_pipeline_path || "#{pipeline_template_path}.yml"
+
+ raise ArgumentError unless File.exist?(@pipeline_template_path)
+ end
+
+ def generate!
+ if all_rspec_files.empty?
+ info "Using #{SKIP_PIPELINE_YML_FILE} due to no RSpec files to run"
+ FileUtils.cp(SKIP_PIPELINE_YML_FILE, generated_pipeline_path)
+ return
+ end
+
+ info "pipeline_template_path: #{pipeline_template_path}"
+ info "generated_pipeline_path: #{generated_pipeline_path}"
+
+ File.open(generated_pipeline_path, 'w') do |handle|
+ pipeline_yaml = ERB.new(File.read(pipeline_template_path)).result_with_hash(**erb_binding)
+ handle.write(pipeline_yaml.squeeze("\n").strip)
+ end
+ end
+
+ private
+
+ attr_reader :pipeline_template_path, :rspec_files_path, :knapsack_report_path, :test_suite_prefix,
+ :generated_pipeline_path
+
+ def info(text)
+ $stdout.puts "[#{self.class.name}] #{text}"
+ end
+
+ def all_rspec_files
+ @all_rspec_files ||= File.exist?(rspec_files_path) ? File.read(rspec_files_path).split(' ') : []
+ end
+
+ def erb_binding
+ {
+ rspec_files_per_test_level: rspec_files_per_test_level,
+ test_suite_prefix: test_suite_prefix
+ }
+ end
+
+ def rspec_files_per_test_level
+ @rspec_files_per_test_level ||= begin
+ all_remaining_rspec_files = all_rspec_files.dup
+ TEST_LEVELS.each_with_object(Hash.new { |h, k| h[k] = {} }) do |test_level, memo| # rubocop:disable Rails/IndexWith
+ memo[test_level][:files] = all_remaining_rspec_files
+ .grep(test_level_service.regexp(test_level, true))
+ .tap { |files| files.each { |file| all_remaining_rspec_files.delete(file) } }
+ memo[test_level][:parallelization] = optimal_nodes_count(test_level, memo[test_level][:files])
+ end
+ end
+ end
+
+ def optimal_nodes_count(test_level, rspec_files)
+ nodes_count = (rspec_files.size / optimal_test_file_count_per_node_per_test_level(test_level)).ceil
+ info "Optimal node count for #{rspec_files.size} #{test_level} RSpec files is #{nodes_count}."
+
+ if nodes_count > MAX_NODES_COUNT
+ info "We don't want to parallelize to more than #{MAX_NODES_COUNT} jobs for now! " \
+ "Decreasing the parallelization to #{MAX_NODES_COUNT}."
+
+ MAX_NODES_COUNT
+ else
+ nodes_count
+ end
+ end
+
+ def optimal_test_file_count_per_node_per_test_level(test_level)
+ [
+ (OPTIMAL_TEST_RUNTIME_DURATION_IN_SECONDS / average_test_file_duration_in_seconds_per_test_level[test_level]),
+ 1
+ ].max
+ end
+
+ def average_test_file_duration_in_seconds_per_test_level
+ @average_test_file_duration_in_seconds_per_test_level ||=
+ if knapsack_report.any?
+ remaining_knapsack_report = knapsack_report.dup
+ TEST_LEVELS.each_with_object({}) do |test_level, memo|
+ matching_data_per_test_level = remaining_knapsack_report
+ .select { |test_file, _| test_file.match?(test_level_service.regexp(test_level, true)) }
+ .tap { |test_data| test_data.each { |file, _| remaining_knapsack_report.delete(file) } }
+
+ memo[test_level] =
+ if matching_data_per_test_level.empty?
+ DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS
+ else
+ matching_data_per_test_level.values.sum / matching_data_per_test_level.keys.size
+ end
+ end
+ else
+ TEST_LEVELS.each_with_object({}) do |test_level, memo| # rubocop:disable Rails/IndexWith
+ memo[test_level] = DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS
+ end
+ end
+ end
+
+ def knapsack_report
+ @knapsack_report ||=
+ begin
+ File.exist?(knapsack_report_path) ? JSON.parse(File.read(knapsack_report_path)) : {}
+ rescue JSON::ParserError => e
+ info "[ERROR] Knapsack report at #{knapsack_report_path} couldn't be parsed! Error:\n#{e}"
+ {}
+ end
+ end
+
+ def test_level_service
+ @test_level_service ||= Quality::TestLevel.new(test_suite_prefix)
+ end
+end
+
+if $PROGRAM_NAME == __FILE__
+ options = {}
+
+ OptionParser.new do |opts|
+ opts.on("-f", "--rspec-files-path path", String, "Path to a file containing RSpec files to run, " \
+ "separated by a space") do |value|
+ options[:rspec_files_path] = value
+ end
+
+ opts.on("-t", "--pipeline-template-path PATH", String, "Path to a YAML pipeline configuration template to " \
+ "generate the final pipeline config from") do |value|
+ options[:pipeline_template_path] = value
+ end
+
+ opts.on("-k", "--knapsack-report-path path", String, "Path to a Knapsack report") do |value|
+ options[:knapsack_report_path] = value
+ end
+
+ opts.on("-p", "--test-suite-prefix test_suite_prefix", String, "Test suite folder prefix") do |value|
+ options[:test_suite_prefix] = value
+ end
+
+ opts.on("-o", "--generated-pipeline-path generated_pipeline_path", String, "Path where to write the pipeline " \
+ "config") do |value|
+ options[:generated_pipeline_path] = value
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!
+
+ GenerateRspecPipeline.new(**options).generate!
+end
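To make the sizing arithmetic described in the constants above concrete, here is a minimal standalone Ruby sketch of the same computation; `files_count` is a hypothetical input, and the constants are copied from the script (with float division, so the 9.2-second average survives):

# Sketch of the per-level parallelization arithmetic (assumed inputs).
OPTIMAL_TEST_JOB_DURATION_IN_SECONDS = 600    # 10 minutes per job
SETUP_DURATION_IN_SECONDS = 180.0             # 3 minutes of setup
runtime = OPTIMAL_TEST_JOB_DURATION_IN_SECONDS - SETUP_DURATION_IN_SECONDS # 420.0

average_file_duration = 170_183.0 / 18_435    # ~9.23 seconds per test file
files_per_node = [runtime / average_file_duration, 1].max # ~45.5 files per node

files_count = 1_000 # hypothetical number of spec files at one test level
nodes_count = [(files_count / files_per_node).ceil, 50].min # capped at MAX_NODES_COUNT
puts nodes_count # => 22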
diff --git a/scripts/gitaly-test-build b/scripts/gitaly-test-build
index adc9b56ca4f..6901593009a 100755
--- a/scripts/gitaly-test-build
+++ b/scripts/gitaly-test-build
@@ -6,8 +6,8 @@ require 'fileutils'
require_relative '../spec/support/helpers/gitaly_setup'
# This script assumes tmp/tests/gitaly already contains the correct
-# Gitaly version. We just have to compile it and run its 'bundle
-# install'. We have this separate script for that to avoid bundle
+# Gitaly version. We just have to compile it.
+# We have this separate script for that to avoid bundle
# poisoning in CI. This script should only be run in CI.
class GitalyTestBuild
include GitalySetup
@@ -16,14 +16,11 @@ class GitalyTestBuild
# If we have the binaries from the cache, we can skip building them again
if File.exist?(tmp_tests_gitaly_bin_dir)
GitalySetup::LOGGER.debug "Gitaly binary already built. Skip building...\n"
- # We still need to install the gems in that case
- install_gitaly_gems
else
abort 'gitaly build failed' unless build_gitaly
end
ensure_gitlab_shell_secret!
- check_gitaly_config!
# Starting gitaly further validates its configuration
gitaly_pid = start_gitaly
diff --git a/scripts/gitaly-test-spawn b/scripts/gitaly-test-spawn
index b9c78b88555..475c7715bdd 100755
--- a/scripts/gitaly-test-spawn
+++ b/scripts/gitaly-test-spawn
@@ -9,8 +9,6 @@ class GitalyTestSpawn
include GitalySetup
def run
- install_gitaly_gems
-
# Run Praefect migrations
setup_praefect
diff --git a/scripts/gitlab_component_helpers.sh b/scripts/gitlab_component_helpers.sh
index c46dbb57a58..d7e5519f057 100644
--- a/scripts/gitlab_component_helpers.sh
+++ b/scripts/gitlab_component_helpers.sh
@@ -2,6 +2,9 @@
set -euo pipefail
+# Generic helper functions for archives/packages
+source scripts/packages/helpers.sh
+
export CURL_TOKEN_HEADER="${CURL_TOKEN_HEADER:-"JOB-TOKEN"}"
export GITLAB_COM_CANONICAL_PROJECT_ID="278964" # https://gitlab.com/gitlab-org/gitlab
@@ -51,56 +54,9 @@ export GITLAB_ASSETS_HASH="${GITLAB_ASSETS_HASH:-"NO_HASH"}"
export GITLAB_ASSETS_PACKAGE="assets-${NODE_ENV}-${GITLAB_EDITION}-${GITLAB_ASSETS_HASH}-${GITLAB_ASSETS_PACKAGE_VERSION}.tar.gz"
export GITLAB_ASSETS_PACKAGE_URL="${API_PACKAGES_BASE_URL}/assets/${NODE_ENV}-${GITLAB_EDITION}-${GITLAB_ASSETS_HASH}/${GITLAB_ASSETS_PACKAGE}"
-# Generic helper functions
-function archive_doesnt_exist() {
- local package_url="${1}"
-
- status=$(curl -I --silent --retry 3 --output /dev/null -w "%{http_code}" "${package_url}")
-
- [[ "${status}" != "200" ]]
-}
-
-function create_package() {
- local archive_filename="${1}"
- local paths_to_archive="${2}"
- local tar_working_folder="${3:-.}"
-
- echoinfo "Running 'tar -czvf ${archive_filename} -C ${tar_working_folder} ${paths_to_archive}'"
- tar -czf ${archive_filename} -C ${tar_working_folder} ${paths_to_archive}
- du -h ${archive_filename}
-}
-
-function upload_package() {
- local archive_filename="${1}"
- local package_url="${2}"
- local token_header="${CURL_TOKEN_HEADER}"
- local token="${CI_JOB_TOKEN}"
-
- if [[ "${UPLOAD_PACKAGE_FLAG}" = "false" ]]; then
- echoerr "The archive ${archive_filename} isn't supposed to be uploaded for this instance (${CI_SERVER_HOST}) & project (${CI_PROJECT_PATH})!"
- exit 1
- fi
-
- echoinfo "Uploading ${archive_filename} to ${package_url} ..."
- curl --fail --silent --retry 3 --header "${token_header}: ${token}" --upload-file "${archive_filename}" "${package_url}"
-}
-
-function read_curl_package() {
- local package_url="${1}"
-
- echoinfo "Downloading from ${package_url} ..."
-
- curl --fail --silent --retry 3 "${package_url}"
-}
-
-function extract_package() {
- local tar_working_folder="${1:-.}"
- mkdir -p "${tar_working_folder}"
-
- echoinfo "Extracting archive to ${tar_working_folder}"
-
- tar -xz -C ${tar_working_folder} < /dev/stdin
-}
+# Fixtures constants
+export FIXTURES_PATH="tmp/tests/frontend/**/*"
+export REUSE_FRONTEND_FIXTURES_ENABLED="${REUSE_FRONTEND_FIXTURES_ENABLED:-"true"}"
# Workhorse functions
function gitlab_workhorse_archive_doesnt_exist() {
@@ -147,3 +103,98 @@ function create_gitlab_assets_package() {
function upload_gitlab_assets_package() {
upload_package "${GITLAB_ASSETS_PACKAGE}" "${GITLAB_ASSETS_PACKAGE_URL}"
}
+
+# Fixtures functions
+function check_fixtures_download() {
+ if [[ "${REUSE_FRONTEND_FIXTURES_ENABLED}" != "true" ]]; then
+ echoinfo "INFO: Reusing frontend fixtures is disabled due to REUSE_FRONTEND_FIXTURES_ENABLED=${REUSE_FRONTEND_FIXTURES_ENABLED}."
+ return 1
+ fi
+
+ if [[ "${CI_PROJECT_NAME}" != "gitlab" ]] || [[ "${CI_JOB_NAME}" =~ "foss" ]]; then
+ echoinfo "INFO: Reusing frontend fixtures is only supported in EE."
+ return 1
+ fi
+
+ if [[ -z "${CI_MERGE_REQUEST_IID:-}" ]]; then
+ return 1
+ else
+ if tooling/bin/find_only_allowed_files_changes && ! fixtures_archive_doesnt_exist; then
+ return 0
+ else
+ return 1
+ fi
+ fi
+}
+
+function check_fixtures_reuse() {
+ if [[ "${REUSE_FRONTEND_FIXTURES_ENABLED}" != "true" ]]; then
+ echoinfo "INFO: Reusing frontend fixtures is disabled due to REUSE_FRONTEND_FIXTURES_ENABLED=${REUSE_FRONTEND_FIXTURES_ENABLED}."
+ rm -rf "tmp/tests/frontend";
+ return 1
+ fi
+
+ if [[ "${CI_PROJECT_NAME}" != "gitlab" ]] || [[ "${CI_JOB_NAME}" =~ "foss" ]]; then
+ echoinfo "INFO: Reusing frontend fixtures is only supported in EE."
+ rm -rf "tmp/tests/frontend";
+ return 1
+ fi
+
+ if [[ -d "tmp/tests/frontend" ]]; then
+ # Remove tmp/tests/frontend/ except on the first parallelized job so that dependent
+ # jobs don't download the exact same artifact multiple times.
+ if [[ -n "${CI_NODE_INDEX:-}" ]] && [[ "${CI_NODE_INDEX:-}" -ne 1 ]]; then
+ echoinfo "INFO: Removing 'tmp/tests/frontend' as we're on node ${CI_NODE_INDEX:-}. Dependent jobs will use the artifacts from the first parallelized job.";
+ rm -rf "tmp/tests/frontend";
+ fi
+ return 0
+ else
+ echoinfo "INFO: 'tmp/tests/frontend' does not exist.";
+ return 1
+ fi
+}
+
+function create_fixtures_package() {
+ create_package "${FIXTURES_PACKAGE}" "${FIXTURES_PATH}"
+}
+
+function download_and_extract_fixtures() {
+ read_curl_package "${FIXTURES_PACKAGE_URL}" | extract_package
+}
+
+function export_fixtures_package_variables() {
+ export FIXTURES_PACKAGE="fixtures-${FIXTURES_SHA}.tar.gz"
+ export FIXTURES_PACKAGE_URL="${API_PACKAGES_BASE_URL}/fixtures/${FIXTURES_SHA}/${FIXTURES_PACKAGE}"
+}
+
+function export_fixtures_sha_for_download() {
+ export FIXTURES_SHA="${CI_MERGE_REQUEST_TARGET_BRANCH_SHA:-${CI_MERGE_REQUEST_DIFF_BASE_SHA:-$CI_COMMIT_SHA}}"
+ export_fixtures_package_variables
+}
+
+function export_fixtures_sha_for_upload() {
+ export FIXTURES_SHA="${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA:-$CI_COMMIT_SHA}"
+ export_fixtures_package_variables
+}
+
+function fixtures_archive_doesnt_exist() {
+ echoinfo "Checking if the package is available at ${FIXTURES_PACKAGE_URL} ..."
+
+ archive_doesnt_exist "${FIXTURES_PACKAGE_URL}"
+}
+
+function fixtures_directory_exists() {
+ local fixtures_directory="tmp/tests/frontend/"
+
+ if [[ -d "${fixtures_directory}" ]]; then
+ echo "${fixtures_directory} directory exists"
+ return 0
+ else
+ echo "${fixtures_directory} directory does not exist"
+ return 1
+ fi
+}
+
+function upload_fixtures_package() {
+ upload_package "${FIXTURES_PACKAGE}" "${FIXTURES_PACKAGE_URL}"
+}
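The two `export_fixtures_sha_for_*` functions above encode the interesting decision: which commit's fixtures archive a pipeline reads from versus writes to. A rough Ruby rendering of the same precedence chains (the CI variable names are real; note that bash's `${VAR:-fallback}` also treats an empty string as missing, which Ruby's `||` does not):

# Download: prefer the MR target branch SHA, then the diff base, then the commit itself.
download_sha = ENV['CI_MERGE_REQUEST_TARGET_BRANCH_SHA'] ||
  ENV['CI_MERGE_REQUEST_DIFF_BASE_SHA'] ||
  ENV.fetch('CI_COMMIT_SHA') # raises outside CI, where none of these are set

# Upload: key the archive to the source branch SHA so later pipelines can find it.
upload_sha = ENV['CI_MERGE_REQUEST_SOURCE_BRANCH_SHA'] || ENV.fetch('CI_COMMIT_SHA')

package = "fixtures-#{download_sha}.tar.gz"
url = "#{ENV.fetch('API_PACKAGES_BASE_URL')}/fixtures/#{download_sha}/#{package}"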
diff --git a/scripts/lint-docs-blueprints.rb b/scripts/lint-docs-blueprints.rb
index 35e0013cb34..d0a0a6a05de 100755
--- a/scripts/lint-docs-blueprints.rb
+++ b/scripts/lint-docs-blueprints.rb
@@ -22,7 +22,7 @@ def extract_front_matter(path)
end
class BlueprintFrontMatter
- STATUSES = %w[proposed accepted ongoing implemented rejected]
+ STATUSES = %w[proposed accepted ongoing implemented postponed rejected]
attr_reader :errors
@@ -32,6 +32,8 @@ class BlueprintFrontMatter
end
def validate
+ return if @metadata['redirect_to']
+
validate_status
validate_authors
validate_creation_date
diff --git a/scripts/lint-rugged b/scripts/lint-rugged
index ae5eddbe3b4..73708b52772 100755
--- a/scripts/lint-rugged
+++ b/scripts/lint-rugged
@@ -5,9 +5,6 @@ ALLOWED = [
# https://gitlab.com/gitlab-org/gitaly/issues/760
'lib/elasticsearch/git/repository.rb',
- # Needed to handle repositories that are not in any storage
- 'lib/gitlab/bare_repository_import/repository.rb',
-
# Needed to avoid using the git binary to validate a branch name
'lib/gitlab/git_ref_validator.rb',
diff --git a/scripts/packages/automated_cleanup.rb b/scripts/packages/automated_cleanup.rb
index 2b5a0011079..8d9ba1e4a34 100755
--- a/scripts/packages/automated_cleanup.rb
+++ b/scripts/packages/automated_cleanup.rb
@@ -123,4 +123,8 @@ if $PROGRAM_NAME == __FILE__
timed('"assets" packages cleanup') do
automated_cleanup.perform_gitlab_package_cleanup!(package_name: 'assets', days_for_delete: 7)
end
+
+ timed('"fixtures" packages cleanup') do
+ automated_cleanup.perform_gitlab_package_cleanup!(package_name: 'fixtures', days_for_delete: 14)
+ end
end
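The new cleanup call reuses the script's `timed` helper, which is defined elsewhere in this script and not shown in the hunk. A minimal sketch of what such a block-timing helper typically looks like (an assumption; the real implementation may differ):

def timed(task)
  start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  yield
  duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start
  puts "#{task} finished in #{duration.round(2)} seconds."
end

timed('"fixtures" packages cleanup') do
  # cleanup work would run here
end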
diff --git a/scripts/packages/helpers.sh b/scripts/packages/helpers.sh
new file mode 100644
index 00000000000..2917338aeb8
--- /dev/null
+++ b/scripts/packages/helpers.sh
@@ -0,0 +1,59 @@
+#!/usr/bin/env bash
+
+source scripts/utils.sh
+
+function archive_doesnt_exist() {
+ local package_url="${1}"
+
+ status=$(curl -I --silent --retry 3 --output /dev/null -w "%{http_code}" "${package_url}")
+
+ if [[ "${status}" = "200" ]]; then
+ echoinfo "The archive was found. The server returned status ${status}."
+ return 1
+ else
+ echoinfo "The archive was not found. The server returned status ${status}."
+ return 0
+ fi
+}
+
+function create_package() {
+ local archive_filename="${1}"
+ local paths_to_archive="${2}"
+ local tar_working_folder="${3:-.}"
+
+ echoinfo "Running 'tar -czvf ${archive_filename} -C ${tar_working_folder} ${paths_to_archive}'"
+ tar -czf ${archive_filename} -C ${tar_working_folder} ${paths_to_archive}
+ du -h ${archive_filename}
+}
+
+function upload_package() {
+ local archive_filename="${1}"
+ local package_url="${2}"
+ local token_header="${CURL_TOKEN_HEADER}"
+ local token="${CI_JOB_TOKEN}"
+
+ if [[ "${UPLOAD_PACKAGE_FLAG}" = "false" ]]; then
+ echoerr "The archive ${archive_filename} isn't supposed to be uploaded for this instance (${CI_SERVER_HOST}) & project (${CI_PROJECT_PATH})!"
+ exit 1
+ fi
+
+ echoinfo "Uploading ${archive_filename} to ${package_url} ..."
+ curl --fail --silent --retry 3 --header "${token_header}: ${token}" --upload-file "${archive_filename}" "${package_url}"
+}
+
+function read_curl_package() {
+ local package_url="${1}"
+
+ echoinfo "Downloading from ${package_url} ..."
+
+ curl --fail --silent --retry 3 "${package_url}"
+}
+
+function extract_package() {
+ local tar_working_folder="${1:-.}"
+ mkdir -p "${tar_working_folder}"
+
+ echoinfo "Extracting archive to ${tar_working_folder}"
+
+ tar -xz -C "${tar_working_folder}" < /dev/stdin
+}
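`archive_doesnt_exist` reads inverted on purpose: it returns 0 ("success") when the HEAD request does not come back 200, so callers can write `if archive_doesnt_exist "$url"; then ...`. For comparison, a rough Ruby equivalent using only the standard library (the URL in the usage line is hypothetical):

require 'net/http'
require 'uri'

# Returns true when the package is NOT available, mirroring the shell helper.
def archive_doesnt_exist?(package_url)
  uri = URI.parse(package_url)
  response = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
    http.head(uri.request_uri)
  end
  response.code != '200'
end

archive_doesnt_exist?('https://gitlab.example.com/packages/fixtures/fixtures.tar.gz') # hypothetical URL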
diff --git a/scripts/pipeline/create_test_failure_issues.rb b/scripts/pipeline/create_test_failure_issues.rb
new file mode 100755
index 00000000000..e4bcabb6223
--- /dev/null
+++ b/scripts/pipeline/create_test_failure_issues.rb
@@ -0,0 +1,263 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'optparse'
+require 'json'
+require 'fileutils'
+require 'digest'
+require 'httparty'
+
+require_relative '../api/create_issue'
+require_relative '../api/find_issues'
+require_relative '../api/update_issue'
+
+class CreateTestFailureIssues
+ DEFAULT_OPTIONS = {
+ project: nil,
+ tests_report_file: 'tests_report.json',
+ issue_json_folder: 'tmp/issues/'
+ }.freeze
+
+ def initialize(options)
+ @options = options
+ end
+
+ def execute
+ puts "[CreateTestFailureIssues] No failed tests!" if failed_tests.empty?
+
+ failed_tests.each_with_object([]) do |failed_test, existing_issues|
+ CreateTestFailureIssue.new(options.dup).upsert(failed_test, existing_issues).tap do |issue|
+ existing_issues << issue
+ FileUtils.mkdir_p(options[:issue_json_folder]) # ensure the output folder exists
+ File.write(File.join(options[:issue_json_folder], "issue-#{issue.iid}.json"), JSON.pretty_generate(issue.to_h))
+ end
+ end
+ end
+
+ private
+
+ attr_reader :options
+
+ def failed_tests
+ @failed_tests ||=
+ if File.exist?(options[:tests_report_file])
+ JSON.parse(File.read(options[:tests_report_file]))
+ else
+ puts "[CreateTestFailureIssues] #{options[:tests_report_file]} doesn't exist!"
+ []
+ end
+ end
+end
+
+class CreateTestFailureIssue
+ MAX_TITLE_LENGTH = 255
+ WWW_GITLAB_COM_SITE = 'https://about.gitlab.com'
+ WWW_GITLAB_COM_GROUPS_JSON = "#{WWW_GITLAB_COM_SITE}/groups.json".freeze
+ WWW_GITLAB_COM_CATEGORIES_JSON = "#{WWW_GITLAB_COM_SITE}/categories.json".freeze
+ FEATURE_CATEGORY_METADATA_REGEX = /(?<=feature_category: :)\w+/
+ DEFAULT_LABELS = ['type::maintenance', 'test'].freeze
+
+ def self.server_host
+ @server_host ||= ENV.fetch('CI_SERVER_HOST', 'gitlab.com')
+ end
+
+ def self.project_path
+ @project_path ||= ENV.fetch('CI_PROJECT_PATH', 'gitlab-org/gitlab')
+ end
+
+ def self.file_base_url
+ @file_base_url ||= "https://#{server_host}/#{project_path}/-/blob/master/"
+ end
+
+ def self.report_item_regex
+ @report_item_regex ||= %r{^1\. \d{4}-\d{2}-\d{2}: https://#{server_host}/#{project_path}/-/jobs/.+$}
+ end
+
+ def initialize(options)
+ @project = options.delete(:project)
+ @api_token = options.delete(:api_token)
+ end
+
+ def upsert(failed_test, existing_issues = [])
+ existing_issue = find(failed_test, existing_issues)
+
+ if existing_issue
+ update_reports(existing_issue, failed_test)
+ existing_issue
+ else
+ create(failed_test)
+ end
+ end
+
+ private
+
+ attr_reader :project, :api_token
+
+ def find(failed_test, existing_issues = [])
+ test_hash = failed_test_hash(failed_test)
+ issue_from_existing_issues = existing_issues.find { |issue| issue.title.include?(test_hash) }
+ issue_from_issue_tracker = FindIssues
+ .new(project: project, api_token: api_token)
+ .execute(state: :opened, search: test_hash, in: :title, per_page: 1)
+ .first
+
+ existing_issue = issue_from_existing_issues || issue_from_issue_tracker
+
+ return unless existing_issue
+
+ puts "[CreateTestFailureIssue] Found issue '#{existing_issue.title}': #{existing_issue.web_url}!"
+
+ existing_issue
+ end
+
+ def update_reports(existing_issue, failed_test)
+ # We count the number of existing reports.
+ reports_count = existing_issue.description
+ .scan(self.class.report_item_regex)
+ .size + 1
+
+ # We include the number of reports in the header, for visibility.
+ issue_description = existing_issue.description.sub(/^### Reports.*$/, "### Reports (#{reports_count})")
+
+ # We add the current failure to the list of reports.
+ issue_description = "#{issue_description}\n#{report_list_item(failed_test)}"
+
+ UpdateIssue
+ .new(project: project, api_token: api_token)
+ .execute(
+ existing_issue.iid,
+ description: issue_description,
+ weight: reports_count
+ )
+ puts "[CreateTestFailureIssue] Added a report in '#{existing_issue.title}': #{existing_issue.web_url}!"
+ end
+
+ def create(failed_test)
+ payload = {
+ title: failed_test_issue_title(failed_test),
+ description: failed_test_issue_description(failed_test),
+ labels: failed_test_issue_labels(failed_test),
+ weight: 1
+ }
+
+ CreateIssue.new(project: project, api_token: api_token).execute(payload).tap do |issue|
+ puts "[CreateTestFailureIssue] Created issue '#{issue.title}': #{issue.web_url}!"
+ end
+ end
+
+ def failed_test_hash(failed_test)
+ Digest::SHA256.hexdigest(failed_test['file'] + failed_test['name'])[0...12]
+ end
+
+ def failed_test_issue_title(failed_test)
+ title = "#{failed_test['file']} [test-hash:#{failed_test_hash(failed_test)}]"
+
+ raise "Title is too long!" if title.size > MAX_TITLE_LENGTH
+
+ title
+ end
+
+ def test_file_link(failed_test)
+ "[`#{failed_test['file']}`](#{self.class.file_base_url}#{failed_test['file']})"
+ end
+
+ def report_list_item(failed_test)
+ "1. #{Time.new.utc.strftime('%F')}: #{failed_test['job_url']} (#{ENV['CI_PIPELINE_URL']})"
+ end
+
+ def failed_test_issue_description(failed_test)
+ <<~DESCRIPTION
+ ### Test description
+
+ `#{search_safe(failed_test['name'])}`
+
+ ### Test file path
+
+ #{test_file_link(failed_test)}
+
+ <!-- Don't add anything after the report list since it's updated automatically -->
+ ### Reports (1)
+
+ #{report_list_item(failed_test)}
+ DESCRIPTION
+ end
+
+ def failed_test_issue_labels(failed_test)
+ labels = DEFAULT_LABELS + category_and_group_labels_for_test_file(failed_test['file'])
+
+ # Prefix the labels so we don't spam people subscribed to the actual labels.
+ labels.map { |label| "wip-#{label}" }
+ end
+
+ def category_and_group_labels_for_test_file(test_file)
+ feature_categories = File.open(File.expand_path(File.join('..', '..', test_file), __dir__))
+ .read
+ .scan(FEATURE_CATEGORY_METADATA_REGEX)
+
+ category_labels = feature_categories.filter_map { |category| categories_mapping.dig(category, 'label') }.uniq
+
+ groups = feature_categories.filter_map { |category| categories_mapping.dig(category, 'group') }
+ group_labels = groups.map { |group| groups_mapping.dig(group, 'label') }.uniq
+
+ (category_labels + [group_labels.first]).compact
+ end
+
+ def categories_mapping
+ @categories_mapping ||= self.class.fetch_json(WWW_GITLAB_COM_CATEGORIES_JSON)
+ end
+
+ def groups_mapping
+ @groups_mapping ||= self.class.fetch_json(WWW_GITLAB_COM_GROUPS_JSON)
+ end
+
+ def search_safe(value)
+ value.delete('"')
+ end
+
+ def self.fetch_json(json_url)
+ json = with_retries { HTTParty.get(json_url, format: :plain) } # rubocop:disable Gitlab/HTTParty
+ JSON.parse(json)
+ end
+
+ def self.with_retries(attempts: 3)
+ yield
+ rescue Errno::ECONNRESET, OpenSSL::SSL::SSLError, Net::OpenTimeout
+ retry if (attempts -= 1) > 0
+ raise
+ end
+ private_class_method :with_retries
+end
+
+if $PROGRAM_NAME == __FILE__
+ options = CreateTestFailureIssues::DEFAULT_OPTIONS.dup
+
+ OptionParser.new do |opts|
+ opts.on("-p", "--project PROJECT", String,
+ "Project where to create the issue (defaults to " \
+ "`#{CreateTestFailureIssues::DEFAULT_OPTIONS[:project]}`)") do |value|
+ options[:project] = value
+ end
+
+ opts.on("-r", "--tests-report-file file_path", String,
+ "Path to a JSON file which contains the current pipeline's tests report (defaults to " \
+ "`#{CreateTestFailureIssues::DEFAULT_OPTIONS[:tests_report_file]}`)"
+ ) do |value|
+ options[:tests_report_file] = value
+ end
+
+ opts.on("-f", "--issues-json-folder file_path", String,
+ "Path to a folder where to save the issues JSON data (defaults to " \
+ "`#{CreateTestFailureIssues::DEFAULT_OPTIONS[:issue_json_folder]}`)") do |value|
+ options[:issue_json_folder] = value
+ end
+
+ opts.on("-t", "--api-token API_TOKEN", String,
+ "A valid Project token with the `Reporter` role and `api` scope to create the issue") do |value|
+ options[:api_token] = value
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!
+
+ CreateTestFailureIssues.new(options).execute
+end
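The deduplication scheme above is worth spelling out: each failing test gets a stable 12-character hash derived from its file path and name, embedded in the issue title, so both the in-memory list and the issue-tracker search can match it later. A self-contained sketch (the failed-test data is an example, not a real report):

require 'digest'

failed_test = { 'file' => 'spec/models/user_spec.rb', 'name' => 'User#admin? returns true' }

test_hash = Digest::SHA256.hexdigest(failed_test['file'] + failed_test['name'])[0...12]
title = "#{failed_test['file']} [test-hash:#{test_hash}]"

# Later runs search open issues for the same 12-character hash in the title,
# then bump the "### Reports (N)" header and the issue weight instead of filing a duplicate.
title.include?(test_hash) # => true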
diff --git a/scripts/pipeline_test_report_builder.rb b/scripts/pipeline_test_report_builder.rb
index 6f69a5c692f..c84acf2fd94 100755
--- a/scripts/pipeline_test_report_builder.rb
+++ b/scripts/pipeline_test_report_builder.rb
@@ -19,7 +19,8 @@ require_relative 'api/default_options'
# Push into expected format for failed tests
class PipelineTestReportBuilder
DEFAULT_OPTIONS = {
- target_project: Host::DEFAULT_OPTIONS[:target_project],
+ target_project: Host::DEFAULT_OPTIONS[:target_project] || API::DEFAULT_OPTIONS[:project],
+ current_pipeline_id: API::DEFAULT_OPTIONS[:pipeline_id],
mr_iid: Host::DEFAULT_OPTIONS[:mr_iid],
api_endpoint: API::DEFAULT_OPTIONS[:endpoint],
output_file_path: 'test_results/test_reports.json',
@@ -28,6 +29,7 @@ class PipelineTestReportBuilder
def initialize(options)
@target_project = options.delete(:target_project)
+ @current_pipeline_id = options.delete(:current_pipeline_id)
@mr_iid = options.delete(:mr_iid)
@api_endpoint = options.delete(:api_endpoint).to_s
@output_file_path = options.delete(:output_file_path).to_s
@@ -47,7 +49,7 @@ class PipelineTestReportBuilder
end
def latest_pipeline
- pipelines_sorted_descending[0]
+ fetch("#{target_project_api_base_url}/pipelines/#{current_pipeline_id}")
end
def previous_pipeline
@@ -58,6 +60,8 @@ class PipelineTestReportBuilder
private
+ attr_reader :target_project, :current_pipeline_id, :mr_iid, :api_endpoint, :output_file_path, :pipeline_index
+
def pipeline
@pipeline ||=
case pipeline_index
@@ -76,8 +80,6 @@ class PipelineTestReportBuilder
pipelines_for_mr.sort_by { |a| -a['id'] }
end
- attr_reader :target_project, :mr_iid, :api_endpoint, :output_file_path, :pipeline_index
-
def pipeline_project_api_base_url(pipeline)
"#{api_endpoint}/projects/#{pipeline['project_id']}"
end
diff --git a/scripts/prepare_build.sh b/scripts/prepare_build.sh
index 500e61ab76a..53674e9cb90 100644
--- a/scripts/prepare_build.sh
+++ b/scripts/prepare_build.sh
@@ -14,7 +14,13 @@ if [ "$DECOMPOSED_DB" == "true" ]; then
echo "Using decomposed database config (config/database.yml.decomposed-postgresql)"
cp config/database.yml.decomposed-postgresql config/database.yml
else
+ echo "Using decomposed database config (config/database.yml.postgresql)"
cp config/database.yml.postgresql config/database.yml
+
+ if [ "$CI_CONNECTION_DB" != "true" ]; then
+ echo "Disabling ci connection in config/database.yml"
+ sed -i "/ci:$/, /geo:$/ {s|^|#|;s|# geo:| geo:|;}" config/database.yml
+ fi
fi
# Set up Geo database if the job name matches `rspec-ee` or `geo`.
@@ -26,6 +32,15 @@ else
sed -i '/geo:/,/^$/d' config/database.yml
fi
+# Set up Embedding database if the job name matches `rspec-ee`
+# Since Embedding is an EE feature, we shouldn't set it up for non-EE tests.
+if [[ "${CI_JOB_NAME}" =~ "rspec-ee" ]]; then
+ echoinfo "Embedding DB will be set up."
+else
+ echoinfo "Embedding DB won't be set up."
+ sed -i '/embedding:/,/^$/d' config/database.yml
+fi
+
# Set user to a non-superuser to ensure we test permissions
sed -i 's/username: root/username: gitlab/g' config/database.yml
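The sed range expression above is terse: `/ci:$/,/geo:$/` selects every line from the `ci:` key through the `geo:` key, comments them all out, then un-comments the `geo:` line itself. A Ruby sketch of the same transformation, assuming the keys sit on their own lines as in `config/database.yml` (a sketch, not the script's mechanism):

# Comment out the ci: block, leaving the geo: key untouched.
lines = File.readlines('config/database.yml')
in_ci_block = false
lines.map! do |line|
  in_ci_block = true if line =~ /^\s*ci:$/
  in_ci_block = false if line =~ /^\s*geo:$/
  in_ci_block ? "##{line}" : line
end
File.write('config/database.yml', lines.join)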
diff --git a/scripts/review_apps/automated_cleanup.rb b/scripts/review_apps/automated_cleanup.rb
index 7e606b74de9..154a73462bb 100755
--- a/scripts/review_apps/automated_cleanup.rb
+++ b/scripts/review_apps/automated_cleanup.rb
@@ -24,6 +24,25 @@ module ReviewApps
].freeze
ENVIRONMENTS_NOT_FOUND_THRESHOLD = 3
+ def self.parse_args(argv)
+ options = {
+ dry_run: false
+ }
+
+ OptionParser.new do |opts|
+ opts.on("-d BOOLEAN", "--dry-run BOOLEAN", String, "Whether to perform a dry-run or not.") do |value|
+ options[:dry_run] = true if value == 'true'
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!(argv)
+
+ options
+ end
+
# $GITLAB_PROJECT_REVIEW_APP_CLEANUP_API_TOKEN => `Automated Review App Cleanup` project token
def initialize(
project_path: ENV['CI_PROJECT_PATH'],
@@ -36,8 +55,6 @@ module ReviewApps
@api_endpoint = api_endpoint
@dry_run = options[:dry_run]
@environments_not_found_count = 0
-
- puts "Dry-run mode." if dry_run
end
def gitlab
@@ -52,19 +69,16 @@ module ReviewApps
end
end
- def review_apps_namespace
- 'review-apps'
- end
-
def helm
@helm ||= Tooling::Helm3Client.new
end
def kubernetes
- @kubernetes ||= Tooling::KubernetesClient.new(namespace: review_apps_namespace)
+ @kubernetes ||= Tooling::KubernetesClient.new
end
def perform_gitlab_environment_cleanup!(days_for_delete:)
+ puts "Dry-run mode." if dry_run
puts "Checking for Review Apps not updated in the last #{days_for_delete} days..."
checked_environments = []
@@ -106,6 +120,7 @@ module ReviewApps
end
def perform_gitlab_docs_environment_cleanup!(days_for_stop:, days_for_delete:)
+ puts "Dry-run mode." if dry_run
puts "Checking for Docs Review Apps not updated in the last #{days_for_stop} days..."
checked_environments = []
@@ -140,6 +155,7 @@ module ReviewApps
end
def perform_helm_releases_cleanup!(days:)
+ puts "Dry-run mode." if dry_run
puts "Checking for Helm releases that are failed or not updated in the last #{days} days..."
threshold = threshold_time(days: days)
@@ -162,13 +178,9 @@ module ReviewApps
end
def perform_stale_namespace_cleanup!(days:)
- kubernetes_client = Tooling::KubernetesClient.new(namespace: nil)
-
- kubernetes_client.cleanup_review_app_namespaces(created_before: threshold_time(days: days), wait: false) unless dry_run
- end
+ puts "Dry-run mode." if dry_run
- def perform_stale_pvc_cleanup!(days:)
- kubernetes.cleanup_by_created_at(resource_type: 'pvc', created_before: threshold_time(days: days), wait: false) unless dry_run
+ kubernetes.cleanup_namespaces_by_created_at(created_before: threshold_time(days: days)) unless dry_run
end
private
@@ -242,7 +254,7 @@ module ReviewApps
releases_names = releases.map(&:name)
unless dry_run
helm.delete(release_name: releases_names)
- kubernetes.cleanup_by_release(release_name: releases_names, wait: false)
+ kubernetes.delete_namespaces(releases_names)
end
rescue Tooling::Helm3Client::CommandFailedError => ex
@@ -256,7 +268,11 @@ module ReviewApps
end
def threshold_time(days:)
- Time.now - days * 24 * 3600
+ days_integer = days.to_i
+
+ raise "days should be an integer between 1 and 365 inclusive! Got #{days_integer}" unless days_integer.between?(1, 365)
+
+ Time.now - days_integer * 24 * 3600
end
def ignore_exception?(exception_message, exceptions_ignored)
@@ -276,21 +292,7 @@ def timed(task)
end
if $PROGRAM_NAME == __FILE__
- options = {
- dry_run: false
- }
-
- OptionParser.new do |opts|
- opts.on("-d", "--dry-run", "Whether to perform a dry-run or not.") do |value|
- options[:dry_run] = true
- end
-
- opts.on("-h", "--help", "Prints this help") do
- puts opts
- exit
- end
- end.parse!
-
+ options = ReviewApps::AutomatedCleanup.parse_args(ARGV)
automated_cleanup = ReviewApps::AutomatedCleanup.new(options: options)
timed('Docs Review Apps cleanup') do
@@ -314,10 +316,4 @@ if $PROGRAM_NAME == __FILE__
timed('Stale Namespace cleanup') do
automated_cleanup.perform_stale_namespace_cleanup!(days: 3)
end
-
- puts
-
- timed('Stale PVC cleanup') do
- automated_cleanup.perform_stale_pvc_cleanup!(days: 30)
- end
end
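Extracting the option handling into `parse_args` makes it testable in isolation. Expected behavior, based on the parser above (a usage sketch):

options = ReviewApps::AutomatedCleanup.parse_args(%w[--dry-run true])
options[:dry_run] # => true

options = ReviewApps::AutomatedCleanup.parse_args(%w[--dry-run false])
options[:dry_run] # => false (anything other than the string 'true' keeps the default)

options = ReviewApps::AutomatedCleanup.parse_args([])
options[:dry_run] # => false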
diff --git a/scripts/review_apps/base-config.yaml b/scripts/review_apps/base-config.yaml
index 1d062a76191..9b77ff80d42 100644
--- a/scripts/review_apps/base-config.yaml
+++ b/scripts/review_apps/base-config.yaml
@@ -1,9 +1,18 @@
safe-to-evict: &safe-to-evict
cluster-autoscaler.kubernetes.io/safe-to-evict: "true"
+# We disabled the upgrade checks, as they were giving too many false positives
+#
+# See https://gitlab.com/gitlab-org/quality/engineering-productivity/review-apps-broken-incidents/-/issues/33
+upgradeCheck:
+ enabled: false
+
global:
appConfig:
enableUsagePing: false
+ extraEnv:
+ GITLAB_LICENSE_MODE: test
+ CUSTOMER_PORTAL_URL: https://customers.staging.gitlab.com
image:
pullPolicy: Always
ingress:
@@ -23,10 +32,17 @@ gitlab:
gitaly:
resources:
requests:
- cpu: 1200m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22gitaly%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.100vCPU (setting request accordingly). Note that this is a guesstimate based on the chart.
+ #
+ # The maximum CPU usage was 0.196vCPU (setting limit accordingly)
+ cpu: 150m
memory: 600Mi
limits:
- cpu: 1800m
+ cpu: 300m
memory: 1000Mi
persistence:
size: 10Gi
@@ -42,10 +58,17 @@ gitlab:
gitlab-shell:
resources:
requests:
- cpu: 500m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22gitlab-shell%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.01vCPU (setting request accordingly). Note that this is a guesstimate based on the chart.
+ #
+ # The maximum CPU usage was 0.127vCPU (setting limit accordingly)
+ cpu: 10m
memory: 100Mi
limits:
- cpu: 750m
+ cpu: 150m
memory: 150Mi
minReplicas: 1
maxReplicas: 1
@@ -69,7 +92,7 @@ gitlab:
cpu: 400m
memory: 920Mi
limits:
- cpu: 800m
+ cpu: 1000m
memory: 1380Mi
sidekiq:
@@ -87,7 +110,14 @@ gitlab:
toolbox:
resources:
requests:
- cpu: 300m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22toolbox%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.100vCPU. Note that this is a guesstimate based on the chart.
+ #
+ # The maximum CPU usage was 0.250vCPU (setting limit accordingly)
+ cpu: 150m
memory: 1927Mi
limits:
cpu: 450m
@@ -124,9 +154,17 @@ gitlab:
gitlab-runner:
resources:
requests:
- cpu: 675m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3Dmonitoring.regex.full_match(%5C%22.*gitlab-runner$%5C%22)%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.01vCPU. Note that this is a guesstimate based on the chart.
+ #
+ # The maximum CPU usage was 0.015vCPU (setting limit accordingly)
+ cpu: 10m
memory: 100Mi
limits:
+ # In case somebody would like to use runners in review apps, we set the limit higher than the request
cpu: 1015m
memory: 150Mi
nodeSelector:
@@ -153,10 +191,17 @@ nginx-ingress:
ssl-ciphers: ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4
resources:
requests:
- cpu: 300m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22controller%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.02vCPU. Note that this is a guesstimate based on the chart.
+ #
+ # The maximum CPU usage was 0.07vCPU (setting limit accordingly)
+ cpu: 10m
memory: 450Mi
limits:
- cpu: 600m
+ cpu: 20m
memory: 675Mi
service:
enableHttp: false
@@ -182,11 +227,18 @@ postgresql:
enabled: false
resources:
requests:
- cpu: 600m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3Dmonitoring.regex.full_match(%5C%22.*-postgresql$%5C%22)%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.150vCPU. Note that this is a guesstimate based on the chart.
+ #
+ # The maximum CPU usage was 0.420vCPU (setting limit accordingly)
+ cpu: 150m
memory: 1000Mi
limits:
- cpu: 1300m
- memory: 1600Mi
+ cpu: 1000m
+ memory: 1800Mi
master:
nodeSelector:
preemptible: "false"
@@ -201,10 +253,17 @@ redis:
enabled: false
resources:
requests:
- cpu: 100m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22redis%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.03vCPU. Note that this is a guesstimate based on the chart.
+ #
+ # The maximum CPU usage was 0.500vCPU (setting limit accordingly)
+ cpu: 10m
memory: 60Mi
limits:
- cpu: 200m
+ cpu: 500m
memory: 130Mi
master:
nodeSelector:
@@ -217,11 +276,18 @@ registry:
minReplicas: 1
maxReplicas: 1
resources:
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22registry%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.0005vCPU. Note that this is a guesstimate based on the chart.
+ #
+ # The maximum CPU usage was 0.003vCPU (setting limit accordingly)
requests:
- cpu: 100m
+ cpu: 10m
memory: 30Mi
limits:
- cpu: 200m
+ cpu: 50m
memory: 45Mi
nodeSelector:
preemptible: "true"
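All of the requests and limits above use Kubernetes millicore notation: `150m` is 0.15 vCPU, which is what the monitoring-based comments convert from. A two-line Ruby helper makes the conversion explicit (sample values taken from the gitaly block above):

def millicores(vcpu)
  "#{(vcpu * 1000).round}m"
end

millicores(0.100) # => "100m" (gitaly average; the request is set a bit above, at 150m)
millicores(0.196) # => "196m" (gitaly peak; the limit is set above it, at 300m)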
diff --git a/scripts/review_apps/k8s-resources-count-checks.sh b/scripts/review_apps/k8s-resources-count-checks.sh
index b63fa043065..7ce98f8d164 100755
--- a/scripts/review_apps/k8s-resources-count-checks.sh
+++ b/scripts/review_apps/k8s-resources-count-checks.sh
@@ -15,6 +15,9 @@ function k8s_resource_count() {
SERVICES_COUNT_THRESHOLD=3000
REVIEW_APPS_COUNT_THRESHOLD=200
+# One review app currently deploys 4 PVCs
+PVCS_COUNT_THRESHOLD=$((REVIEW_APPS_COUNT_THRESHOLD * 4))
+
exit_with_error=false
# In the current GKE cluster configuration, we should never go higher than 4096 services per cluster.
@@ -36,6 +39,12 @@ if [ "$(echo $(($namespaces_count - $review_apps_count)) | sed 's/-//')" -gt 30
exit_with_error=true
fi
+pvcs_count=$(kubectl get pvc -A --no-headers | wc -l | xargs)
+if [ "${pvcs_count}" -gt "${PVCS_COUNT_THRESHOLD}" ]; then
+ >&2 echo "❌ [ERROR] PVCs are above ${PVCS_COUNT_THRESHOLD} (currently at ${pvcs_count})"
+ exit_with_error=true
+fi
+
if [ "${exit_with_error}" = true ] ; then
exit 1
fi
diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh
index 98ad3112202..6ebb0f61a04 100755
--- a/scripts/review_apps/review-apps.sh
+++ b/scripts/review_apps/review-apps.sh
@@ -128,8 +128,22 @@ function disable_sign_ups() {
true
fi
- # Create the root token + Disable sign-ups
- local disable_signup_rb="token = User.find_by_username('root').personal_access_tokens.create(scopes: [:api], name: 'Token to disable sign-ups'); token.set_token('${REVIEW_APPS_ROOT_TOKEN}'); begin; token.save!; rescue(ActiveRecord::RecordNotUnique); end; Gitlab::CurrentSettings.current_application_settings.update!(signup_enabled: false)"
+# Create the root token and disable sign-ups
+#
+# We use a heredoc flattened into a one-liner because we need to pass a single line of Ruby to a Kubernetes container via kubectl.
+read -r -d '' multiline_ruby_code <<RUBY
+user = User.find_by_username('root');
+(puts 'Error: Could not find root user. Check that the database was properly seeded'; exit(1)) unless user;
+token = user.personal_access_tokens.create(scopes: [:api], name: 'Token to disable sign-ups');
+token.set_token('${REVIEW_APPS_ROOT_TOKEN}');
+begin;
+token.save!;
+rescue(ActiveRecord::RecordNotUnique);
+end;
+Gitlab::CurrentSettings.current_application_settings.update!(signup_enabled: false);
+RUBY
+
+ local disable_signup_rb=$(echo "${multiline_ruby_code}" | tr '\n' ' ')
if (retry_exponential "run_task \"${disable_signup_rb}\""); then
echoinfo "Sign-ups have been disabled successfully."
else
@@ -194,14 +208,14 @@ function create_application_secret() {
echoinfo "The 'shared-gitlab-initial-root-password' secret already exists in the ${namespace} namespace."
fi
- if [ -z "${REVIEW_APPS_EE_LICENSE_FILE}" ]; then echo "License not found" && return; fi
+ if [ -z "${QA_EE_LICENSE}" ]; then echo "License not found" && return; fi
gitlab_license_shared_secret=$(kubectl get secret --namespace "${namespace}" --no-headers -o=custom-columns=NAME:.metadata.name shared-gitlab-license 2> /dev/null | tail -n 1)
if [[ "${gitlab_license_shared_secret}" == "" ]]; then
echoinfo "Creating the 'shared-gitlab-license' secret in the "${namespace}" namespace..." true
kubectl create secret generic --namespace "${namespace}" \
"shared-gitlab-license" \
- --from-file=license="${REVIEW_APPS_EE_LICENSE_FILE}" \
+ --from-literal=license="${QA_EE_LICENSE}" \
--dry-run=client -o json | kubectl apply -f -
else
echoinfo "The 'shared-gitlab-license' secret already exists in the ${namespace} namespace."
@@ -273,11 +287,63 @@ function deploy() {
retry "create_application_secret"
cat > review_apps.values.yml <<EOF
+ ci:
+ branch: "${CI_COMMIT_REF_NAME}"
+ commit:
+ sha: "${CI_COMMIT_SHORT_SHA}"
+ job:
+ url: "${CI_JOB_URL}"
+ pipeline:
+ url: "${CI_PIPELINE_URL}"
+
gitlab:
+ gitaly:
+ image:
+ repository: "${gitlab_gitaly_image_repository}"
+ tag: "${gitaly_image_tag}"
+ gitlab-shell:
+ image:
+ repository: "${gitlab_shell_image_repository}"
+ tag: "v${GITLAB_SHELL_VERSION}"
+ migrations:
+ image:
+ repository: "${gitlab_toolbox_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
+ sidekiq:
+ annotations:
+ commit: "${CI_COMMIT_SHORT_SHA}"
+ image:
+ repository: "${gitlab_sidekiq_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
+ toolbox:
+ image:
+ repository: "${gitlab_toolbox_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
webservice:
+ annotations:
+ commit: "${CI_COMMIT_SHORT_SHA}"
extraEnv:
REVIEW_APPS_ENABLED: "true"
REVIEW_APPS_MERGE_REQUEST_IID: "${CI_MERGE_REQUEST_IID}"
+ image:
+ repository: "${gitlab_webservice_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
+ workhorse:
+ image: "${gitlab_workhorse_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
+
+ global:
+ hosts:
+ domain: "${REVIEW_APPS_DOMAIN}"
+ hostSuffix: "${HOST_SUFFIX}"
+ appConfig:
+ sentry:
+ dsn: "${REVIEW_APPS_SENTRY_DSN}"
+ # Boolean fields should be left without quotes
+ enabled: ${sentry_enabled}
+ environment: "review"
+
+ releaseOverride: "${release}"
EOF
HELM_CMD=$(cat << EOF
@@ -286,38 +352,11 @@ HELM_CMD=$(cat << EOF
--create-namespace \
--install \
--wait \
- -f review_apps.values.yml \
- --timeout "${HELM_INSTALL_TIMEOUT:-20m}" \
- --set ci.branch="${CI_COMMIT_REF_NAME}" \
- --set ci.commit.sha="${CI_COMMIT_SHORT_SHA}" \
- --set ci.job.url="${CI_JOB_URL}" \
- --set ci.pipeline.url="${CI_PIPELINE_URL}" \
- --set releaseOverride="${release}" \
- --set global.hosts.hostSuffix="${HOST_SUFFIX}" \
- --set global.hosts.domain="${REVIEW_APPS_DOMAIN}" \
- --set global.appConfig.sentry.enabled="${sentry_enabled}" \
- --set global.appConfig.sentry.dsn="${REVIEW_APPS_SENTRY_DSN}" \
- --set global.appConfig.sentry.environment="review" \
- --set gitlab.migrations.image.repository="${gitlab_toolbox_image_repository}" \
- --set gitlab.migrations.image.tag="${CI_COMMIT_SHA}" \
- --set gitlab.gitaly.image.repository="${gitlab_gitaly_image_repository}" \
- --set gitlab.gitaly.image.tag="${gitaly_image_tag}" \
- --set gitlab.gitlab-shell.image.repository="${gitlab_shell_image_repository}" \
- --set gitlab.gitlab-shell.image.tag="v${GITLAB_SHELL_VERSION}" \
- --set gitlab.sidekiq.annotations.commit="${CI_COMMIT_SHORT_SHA}" \
- --set gitlab.sidekiq.image.repository="${gitlab_sidekiq_image_repository}" \
- --set gitlab.sidekiq.image.tag="${CI_COMMIT_SHA}" \
- --set gitlab.webservice.annotations.commit="${CI_COMMIT_SHORT_SHA}" \
- --set gitlab.webservice.image.repository="${gitlab_webservice_image_repository}" \
- --set gitlab.webservice.image.tag="${CI_COMMIT_SHA}" \
- --set gitlab.webservice.workhorse.image="${gitlab_workhorse_image_repository}" \
- --set gitlab.webservice.workhorse.tag="${CI_COMMIT_SHA}" \
- --set gitlab.toolbox.image.repository="${gitlab_toolbox_image_repository}" \
- --set gitlab.toolbox.image.tag="${CI_COMMIT_SHA}"
+ --timeout "${HELM_INSTALL_TIMEOUT:-20m}"
EOF
)
-if [ -n "${REVIEW_APPS_EE_LICENSE_FILE}" ]; then
+if [ -n "${QA_EE_LICENSE}" ]; then
HELM_CMD=$(cat << EOF
${HELM_CMD} \
--set global.gitlab.license.secret="shared-gitlab-license"
@@ -325,19 +364,28 @@ EOF
)
fi
+# Important: the `-f` flags are order-sensitive and should not be reordered: Helm gives later values files precedence over earlier ones.
+#
+# The `base_config_file` contains the default values for the chart, and the
+# `review_apps.values.yml` contains the overrides we want to apply specifically
+# for this review app deployment.
HELM_CMD=$(cat << EOF
${HELM_CMD} \
--version="${CI_PIPELINE_ID}-${CI_JOB_ID}" \
-f "${base_config_file}" \
+ -f review_apps.values.yml \
-v "${HELM_LOG_VERBOSITY:-1}" \
"${release}" "gitlab-${GITLAB_HELM_CHART_REF}"
EOF
)
# Pretty-print the command for display
- echoinfo "Deploying with:"
+ echoinfo "Deploying with helm command:"
echo "${HELM_CMD}" | sed 's/ /\n\t/g'
+ echoinfo "Content of review_apps.values.yml:"
+ cat review_apps.values.yml
+
retry "eval \"${HELM_CMD}\""
}
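Re-indented for readability, the Ruby flattened into `disable_signup_rb` above corresponds to the following (a sketch: it assumes a GitLab Rails context, and substitutes an ENV lookup for the `${REVIEW_APPS_ROOT_TOKEN}` value that the shell interpolates into the heredoc):

user = User.find_by_username('root')
unless user
  puts 'Error: Could not find root user. Check that the database was properly seeded'
  exit(1)
end
token = user.personal_access_tokens.create(scopes: [:api], name: 'Token to disable sign-ups')
token.set_token(ENV['REVIEW_APPS_ROOT_TOKEN'])
begin
  token.save!
rescue ActiveRecord::RecordNotUnique
  # The token already exists from a previous run; nothing to do.
end
Gitlab::CurrentSettings.current_application_settings.update!(signup_enabled: false)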
diff --git a/scripts/rspec_helpers.sh b/scripts/rspec_helpers.sh
index de735e03db0..4e73bf48021 100644
--- a/scripts/rspec_helpers.sh
+++ b/scripts/rspec_helpers.sh
@@ -12,6 +12,11 @@ function retrieve_tests_metadata() {
curl --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FLAKY_RSPEC_SUITE_REPORT_PATH}" ||
echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
fi
+
+ if [[ ! -f "${RSPEC_FAST_QUARANTINE_LOCAL_PATH}" ]]; then
+ curl --location -o "${RSPEC_FAST_QUARANTINE_LOCAL_PATH}" "https://gitlab-org.gitlab.io/quality/engineering-productivity/fast-quarantine/${RSPEC_FAST_QUARANTINE_LOCAL_PATH}" ||
+ echo "" > "${RSPEC_FAST_QUARANTINE_LOCAL_PATH}"
+ fi
}
function update_tests_metadata() {
@@ -121,22 +126,25 @@ function rspec_db_library_code() {
}
function debug_rspec_variables() {
- echoinfo "SKIP_FLAKY_TESTS_AUTOMATICALLY: ${SKIP_FLAKY_TESTS_AUTOMATICALLY}"
- echoinfo "RETRY_FAILED_TESTS_IN_NEW_PROCESS: ${RETRY_FAILED_TESTS_IN_NEW_PROCESS}"
+ echoinfo "SKIP_FLAKY_TESTS_AUTOMATICALLY: ${SKIP_FLAKY_TESTS_AUTOMATICALLY:-}"
+ echoinfo "RETRY_FAILED_TESTS_IN_NEW_PROCESS: ${RETRY_FAILED_TESTS_IN_NEW_PROCESS:-}"
- echoinfo "KNAPSACK_GENERATE_REPORT: ${KNAPSACK_GENERATE_REPORT}"
- echoinfo "FLAKY_RSPEC_GENERATE_REPORT: ${FLAKY_RSPEC_GENERATE_REPORT}"
+ echoinfo "KNAPSACK_GENERATE_REPORT: ${KNAPSACK_GENERATE_REPORT:-}"
+ echoinfo "FLAKY_RSPEC_GENERATE_REPORT: ${FLAKY_RSPEC_GENERATE_REPORT:-}"
- echoinfo "KNAPSACK_TEST_FILE_PATTERN: ${KNAPSACK_TEST_FILE_PATTERN}"
- echoinfo "KNAPSACK_LOG_LEVEL: ${KNAPSACK_LOG_LEVEL}"
- echoinfo "KNAPSACK_REPORT_PATH: ${KNAPSACK_REPORT_PATH}"
+ echoinfo "KNAPSACK_TEST_FILE_PATTERN: ${KNAPSACK_TEST_FILE_PATTERN:-}"
+ echoinfo "KNAPSACK_LOG_LEVEL: ${KNAPSACK_LOG_LEVEL:-}"
+ echoinfo "KNAPSACK_REPORT_PATH: ${KNAPSACK_REPORT_PATH:-}"
- echoinfo "FLAKY_RSPEC_SUITE_REPORT_PATH: ${FLAKY_RSPEC_SUITE_REPORT_PATH}"
- echoinfo "FLAKY_RSPEC_REPORT_PATH: ${FLAKY_RSPEC_REPORT_PATH}"
- echoinfo "NEW_FLAKY_RSPEC_REPORT_PATH: ${NEW_FLAKY_RSPEC_REPORT_PATH}"
- echoinfo "SKIPPED_FLAKY_TESTS_REPORT_PATH: ${SKIPPED_FLAKY_TESTS_REPORT_PATH}"
+ echoinfo "FLAKY_RSPEC_SUITE_REPORT_PATH: ${FLAKY_RSPEC_SUITE_REPORT_PATH:-}"
+ echoinfo "FLAKY_RSPEC_REPORT_PATH: ${FLAKY_RSPEC_REPORT_PATH:-}"
+ echoinfo "NEW_FLAKY_RSPEC_REPORT_PATH: ${NEW_FLAKY_RSPEC_REPORT_PATH:-}"
+ echoinfo "SKIPPED_TESTS_REPORT_PATH: ${SKIPPED_TESTS_REPORT_PATH:-}"
- echoinfo "CRYSTALBALL: ${CRYSTALBALL}"
+ echoinfo "CRYSTALBALL: ${CRYSTALBALL:-}"
+
+ echoinfo "RSPEC_TESTS_MAPPING_ENABLED: ${RSPEC_TESTS_MAPPING_ENABLED:-}"
+ echoinfo "RSPEC_TESTS_FILTER_FILE: ${RSPEC_TESTS_FILTER_FILE:-}"
}
function handle_retry_rspec_in_new_process() {
@@ -163,8 +171,9 @@ function rspec_paralellized_job() {
read -ra job_name <<< "${CI_JOB_NAME}"
local test_tool="${job_name[0]}"
local test_level="${job_name[1]}"
- local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg12 1/24' would become 'rspec_unit_pg12_1_24'
- local rspec_opts="${1}"
+ local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg13 1/24' would become 'rspec_unit_pg13_1_24'
+ local rspec_opts="${1:-}"
+ local rspec_tests_mapping_enabled="${RSPEC_TESTS_MAPPING_ENABLED:-}"
local spec_folder_prefixes=""
local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/"
local knapsack_folder_path="$(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}")/"
@@ -193,15 +202,10 @@ function rspec_paralellized_job() {
cp "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "${KNAPSACK_REPORT_PATH}"
- export KNAPSACK_TEST_FILE_PATTERN="spec/{,**/}*_spec.rb"
-
- if [[ "${test_level}" != "foss-impact" ]]; then
- export KNAPSACK_TEST_FILE_PATTERN=$(ruby -r./tooling/quality/test_level.rb -e "puts Quality::TestLevel.new(${spec_folder_prefixes}).pattern(:${test_level})")
- fi
-
+ export KNAPSACK_TEST_FILE_PATTERN=$(ruby -r./tooling/quality/test_level.rb -e "puts Quality::TestLevel.new(${spec_folder_prefixes}).pattern(:${test_level})")
export FLAKY_RSPEC_REPORT_PATH="${rspec_flaky_folder_path}all_${report_name}_report.json"
export NEW_FLAKY_RSPEC_REPORT_PATH="${rspec_flaky_folder_path}new_${report_name}_report.json"
- export SKIPPED_FLAKY_TESTS_REPORT_PATH="${rspec_flaky_folder_path}skipped_flaky_tests_${report_name}_report.txt"
+ export SKIPPED_TESTS_REPORT_PATH="rspec/skipped_tests_${report_name}.txt"
if [[ -d "ee/" ]]; then
export KNAPSACK_GENERATE_REPORT="true"
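With the `foss-impact` special case removed, `KNAPSACK_TEST_FILE_PATTERN` is always derived from the job's test level. A hedged way to inspect the resulting glob locally, assuming the repository's `tooling/quality/test_level.rb` (whose constructor takes optional spec folder prefixes):

    ruby -r./tooling/quality/test_level.rb -e 'puts Quality::TestLevel.new.pattern(:unit)'
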
@@ -218,7 +222,7 @@ function rspec_paralellized_job() {
debug_rspec_variables
- if [[ -n "${RSPEC_TESTS_MAPPING_ENABLED}" ]]; then
+ if [[ -n "${rspec_tests_mapping_enabled}" ]]; then
tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" --filter "${RSPEC_TESTS_FILTER_FILE}" || rspec_run_status=$?
else
tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" || rspec_run_status=$?
@@ -240,7 +244,7 @@ function retry_failed_rspec_examples() {
# Keep track of the tests that are retried, later consolidated in a single file by the `rspec:flaky-tests-report` job
local failed_examples=$(grep " failed" ${RSPEC_LAST_RUN_RESULTS_FILE})
- local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg12 1/24' would become 'rspec_unit_pg12_1_24'
+ local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg13 1/24' would become 'rspec_unit_pg13_1_24'
local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/"
export RETRIED_TESTS_REPORT_PATH="${rspec_flaky_folder_path}retried_tests_${report_name}_report.txt"
@@ -404,7 +408,7 @@ function generate_flaky_tests_reports() {
mkdir -p ${rspec_flaky_folder_path}
- find ${rspec_flaky_folder_path} -type f -name 'skipped_flaky_tests_*_report.txt' -exec cat {} + >> "${SKIPPED_FLAKY_TESTS_REPORT_PATH}"
+ find ${rspec_flaky_folder_path} -type f -name 'skipped_tests_*.txt' -exec cat {} + >> "${SKIPPED_TESTS_REPORT_PATH}"
find ${rspec_flaky_folder_path} -type f -name 'retried_tests_*_report.txt' -exec cat {} + >> "${RETRIED_TESTS_REPORT_PATH}"
cleanup_individual_job_reports
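Because the per-job reports were renamed (`skipped_flaky_tests_*_report.txt` becomes `skipped_tests_*.txt`), the consolidation glob changes with them. The `-exec cat {} +` form batches every match into as few `cat` invocations as possible, unlike `-exec cat {} \;`, which forks once per file. An illustrative sketch with hypothetical paths:

    # concatenate all per-job skipped-test reports into one file
    find rspec/flaky/ -type f -name 'skipped_tests_*.txt' -exec cat {} + > skipped_tests_all.txt
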
diff --git a/scripts/setup-test-env b/scripts/setup-test-env
index 97762e1cafa..ae00b569ce3 100755
--- a/scripts/setup-test-env
+++ b/scripts/setup-test-env
@@ -4,66 +4,49 @@
require_relative '../config/bundler_setup'
+require_relative '../spec/rails_autoload'
+
require 'request_store'
require 'rake'
-require 'active_support/dependencies'
-require 'active_support/dependencies/autoload'
-require 'active_support/core_ext/numeric'
-require 'active_support/string_inquirer'
+require 'active_support/all'
ENV['SKIP_RAILS_ENV_IN_RAKE'] = 'true'
-module Rails
- extend self
-
- def root
- Pathname.new(File.expand_path('..', __dir__))
- end
-
- def env
- @_env ||= ActiveSupport::StringInquirer.new(ENV["RAILS_ENV"] || ENV["RACK_ENV"] || "test")
- end
-end
-
-ActiveSupport::Dependencies.autoload_paths << 'lib'
-
load File.expand_path('../lib/tasks/gitlab/helpers.rake', __dir__)
load File.expand_path('../lib/tasks/gitlab/gitaly.rake', __dir__)
-# Required for config/0_inject_enterprise_edition_module.rb, lib/gitlab/access.rb
-require_dependency File.expand_path('../lib/gitlab', __dir__)
-
-require_dependency File.expand_path('../config/initializers/0_inject_enterprise_edition_module', __dir__)
-
-# Require for lib/gitlab/gitaly_client/storage_settings.rb and config/initializers/1_settings.rb
-require 'active_support/hash_with_indifferent_access'
-
-# Required for lib/gitlab/visibility_level.rb and lib/gitlab/safe_request_store.rb
-require 'active_support/concern'
-require 'active_support/core_ext/module/delegation'
-
# Required for lib/system_check/helpers.rb
-require_dependency File.expand_path('../lib/gitlab/task_helpers', __dir__)
+require_relative '../lib/gitlab/task_helpers'
# Required for lib/tasks/gitlab/helpers.rake
-require_dependency File.expand_path('../lib/system_check/helpers', __dir__)
+require_relative '../lib/system_check/helpers'
# Required for config/initializers/1_settings.rb
require 'omniauth'
require 'omniauth-github'
require 'etc'
-require_dependency File.expand_path('../lib/gitlab/access', __dir__)
-require_dependency File.expand_path('../lib/gitlab/utils', __dir__)
+require_relative '../lib/gitlab/access'
+require_relative '../lib/gitlab/utils'
+
+unless defined?(License)
+ # This is needed to allow use of `Gitlab::ImportSources.values` in `1_settings.rb`.
+ # See ee/lib/ee/gitlab/import_sources.rb
+ class License
+ def self.database
+ Struct.new(:cached_table_exists?).new(false)
+ end
+ end
+end
-require_dependency File.expand_path('../config/initializers/1_settings', __dir__)
+require_relative '../config/initializers/1_settings'
Gitlab.ee do
load File.expand_path('../ee/lib/tasks/gitlab/indexer.rake', __dir__)
- require_dependency File.expand_path('../ee/lib/gitlab/elastic/indexer', __dir__)
- require_dependency File.expand_path('../lib/gitlab/utils/override', __dir__)
+ require_relative '../ee/lib/gitlab/elastic/indexer'
+ require_relative '../lib/gitlab/utils/override'
end
-require_dependency File.expand_path('../spec/support/helpers/test_env', __dir__)
+require_relative '../spec/support/helpers/test_env'
TestEnv.init
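The rewrite replaces the hand-rolled `Rails` shim and the piecemeal `require_dependency` calls with `spec/rails_autoload`, `active_support/all`, and plain `require_relative`, which resolves paths against the requiring file rather than the autoload paths. The script remains an executable entry point and is still invoked directly, e.g.:

    scripts/setup-test-env
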
diff --git a/scripts/trigger-build.rb b/scripts/trigger-build.rb
index 69eea7488fb..c7c09557ff9 100755
--- a/scripts/trigger-build.rb
+++ b/scripts/trigger-build.rb
@@ -278,6 +278,7 @@ module Trigger
def extra_variables
{
"BRANCH_#{project_slug.upcase}" => ENV['CI_COMMIT_REF_NAME'],
+ "MERGE_REQUEST_IID_#{project_slug.upcase}" => ENV['CI_MERGE_REQUEST_IID'],
"REVIEW_SLUG" => review_slug
}
end
diff --git a/scripts/utils.sh b/scripts/utils.sh
index 55005d0abff..b41bc18deff 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -1,10 +1,19 @@
function retry() {
+ retry_times_sleep 2 3 "$@"
+}
+
+function retry_times_sleep() {
+ number_of_retries="$1"
+ shift
+ sleep_seconds="$1"
+ shift
+
if eval "$@"; then
return 0
fi
- for i in 2 1; do
- sleep 3s
+ for i in $(seq "${number_of_retries}" -1 1); do
+ sleep "$sleep_seconds"s
echo "[$(date '+%H:%M:%S')] Retrying $i..."
if eval "$@"; then
return 0
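`retry` keeps its historical behavior (two retries, three seconds apart, so up to three attempts in total) as a thin wrapper, while `retry_times_sleep` lets callers pick the schedule. A usage sketch with a hypothetical health-check command:

    retry "curl --silent --fail http://gdk.test:3000/-/readiness"                   # 2 retries, 3s apart
    retry_times_sleep 10 5 "curl --silent --fail http://gdk.test:3000/-/readiness"  # 10 retries, 5s apart
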
@@ -32,6 +41,7 @@ function retry_exponential() {
return 0
fi
done
+
return 1
}
@@ -53,6 +63,19 @@ function test_url() {
fi
}
+function section_start () {
+ local section_title="${1}"
+ local section_description="${2:-$section_title}"
+
+ echo -e "section_start:`date +%s`:${section_title}[collapsed=true]\r\e[0K${section_description}"
+}
+
+function section_end () {
+ local section_title="${1}"
+
+ echo -e "section_end:`date +%s`:${section_title}\r\e[0K"
+}
+
function bundle_install_script() {
local extra_install_args="${1}"
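`section_start` and `section_end` wrap GitLab CI's collapsible-section log markers (the `section_start:<unix-timestamp>:<name>[collapsed=true]` protocol plus the `\r\e[0K` erase sequence), so callers no longer repeat the escape codes by hand. Typical usage, mirroring `bundle_install_script` below; `do_the_slow_thing` is a stand-in:

    section_start "my-step" "Doing the slow thing"   # collapsed in the job log by default
    do_the_slow_thing
    section_end "my-step"
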
@@ -62,11 +85,11 @@ function bundle_install_script() {
exit 1;
fi;
- echo -e "section_start:`date +%s`:bundle-install[collapsed=true]\r\e[0KInstalling gems"
+ section_start "bundle-install" "Installing gems"
gem --version
bundle --version
- gem install bundler --no-document --conservative --version 2.3.15
+ gem install bundler --no-document --conservative --version 2.4.11
test -d jh && bundle config set --local gemfile 'jh/Gemfile'
bundle config set path "$(pwd)/vendor"
bundle config set clean 'true'
@@ -83,23 +106,23 @@ function bundle_install_script() {
run_timed_command "bundle pristine pg"
fi
- echo -e "section_end:`date +%s`:bundle-install\r\e[0K"
+ section_end "bundle-install"
}
function yarn_install_script() {
- echo -e "section_start:`date +%s`:yarn-install[collapsed=true]\r\e[0KInstalling Yarn packages"
+ section_start "yarn-install" "Installing Yarn packages"
retry yarn install --frozen-lockfile
- echo -e "section_end:`date +%s`:yarn-install\r\e[0K"
+ section_end "yarn-install"
}
function assets_compile_script() {
- echo -e "section_start:`date +%s`:assets-compile[collapsed=true]\r\e[0KCompiling frontend assets"
+ section_start "assets-compile" "Compiling frontend assets"
bin/rake gitlab:assets:compile
- echo -e "section_end:`date +%s`:assets-compile\r\e[0K"
+ section_end "assets-compile"
}
function setup_db_user_only() {
@@ -111,9 +134,13 @@ function setup_db_praefect() {
}
function setup_db() {
- run_timed_command "setup_db_user_only"
+ section_start "setup-db" "Setting up DBs"
+
+ setup_db_user_only
run_timed_command_with_metric "bundle exec rake db:drop db:create db:schema:load db:migrate gitlab:db:lock_writes" "setup_db"
- run_timed_command "setup_db_praefect"
+ setup_db_praefect
+
+ section_end "setup-db"
}
function install_gitlab_gem() {
@@ -126,7 +153,7 @@ function install_tff_gem() {
}
function install_activesupport_gem() {
- run_timed_command "gem install activesupport --no-document --version 6.1.7.1"
+ run_timed_command "gem install activesupport --no-document --version 6.1.7.2"
}
function install_junit_merge_gem() {
diff --git a/scripts/validate_schema_changes b/scripts/validate_schema_changes
new file mode 100755
index 00000000000..a6a01a060ce
--- /dev/null
+++ b/scripts/validate_schema_changes
@@ -0,0 +1,7 @@
+#!/usr/bin/env ruby
+
+# frozen_string_literal: true
+
+require_relative './database/schema_validator'
+
+SchemaValidator.new.validate!
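The new entry point only wires together the `SchemaValidator` added in `scripts/database/schema_validator.rb` earlier in this diff. Since the file is executable (mode 100755), a CI job can call it directly:

    scripts/validate_schema_changes
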
diff --git a/scripts/verify-tff-mapping b/scripts/verify-tff-mapping
index 872f8dae86e..86ab7548b19 100755
--- a/scripts/verify-tff-mapping
+++ b/scripts/verify-tff-mapping
@@ -2,6 +2,7 @@
# frozen_string_literal: true
require 'set'
+require 'test_file_finder'
# These tests run a sanity check on the mapping file `tests.yml`
# used with the `test_file_finder` gem (`tff`) to identify matching test files.
@@ -35,6 +36,12 @@ tests = [
},
{
+ explanation: 'FOSS lib should map to respective spec',
+ source: 'lib/gitaly/server.rb',
+ expected: ['spec/lib/gitaly/server_spec.rb']
+ },
+
+ {
explanation: 'https://gitlab.com/gitlab-org/gitlab/-/issues/368628',
source: 'lib/gitlab/usage_data_counters/foo.rb',
expected: ['spec/lib/gitlab/usage_data_spec.rb']
@@ -53,15 +60,15 @@ tests = [
},
{
- explanation: 'FOSS lib should map to respective spec',
- source: 'lib/gitaly/server.rb',
- expected: ['spec/lib/gitaly/server_spec.rb']
+ explanation: 'Tooling should map to respective spec',
+ source: 'tooling/danger/specs/project_factory_suggestion.rb',
+ expected: ['spec/tooling/danger/specs/project_factory_suggestion_spec.rb']
},
{
- explanation: 'Tooling should map to respective spec',
- source: 'tooling/lib/tooling/helm3_client.rb',
- expected: ['spec/tooling/lib/tooling/helm3_client_spec.rb']
+ explanation: 'Map RuboCop related files to respective specs',
+ source: 'rubocop/cop/gettext/static_identifier.rb',
+ expected: ['spec/rubocop/cop/gettext/static_identifier_spec.rb']
},
{
@@ -81,13 +88,13 @@ tests = [
source: 'db/migrate/20221014034338_populate_releases_access_level_from_repository.rb',
expected: ['spec/migrations/populate_releases_access_level_from_repository_spec.rb']
},
-
+ # rubocop:disable Layout/LineLength
{
explanation: 'Migration should map to its timestamped spec',
- source: 'db/post_migrate/20210915022415_cleanup_bigint_conversion_for_ci_builds.rb',
- expected: ['spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb']
+ source: 'db/post_migrate/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table.rb',
+ expected: ['spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb']
},
-
+ # rubocop:enable Layout/LineLength
{
explanation: 'FOSS views should map to respective spec',
source: 'app/views/admin/dashboard/index.html.haml',
@@ -196,14 +203,31 @@ tests = [
explanation: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/287#note_1192008962',
source: 'ee/lib/ee/gitlab/usage_data_counters/known_events/common.yml',
expected: ['ee/spec/config/metrics/every_metric_definition_spec.rb']
+ },
+ {
+ explanation: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/146',
+ source: 'config/feature_categories.yml',
+ expected: ['spec/db/docs_spec.rb', 'ee/spec/lib/ee/gitlab/database/docs/docs_spec.rb']
+ },
+
+ {
+ explanation: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/1360',
+ source: 'vendor/project_templates/gitbook.tar.gz',
+ expected: ['spec/lib/gitlab/project_template_spec.rb']
+ },
+
+ {
+ explanation: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/1683#note_1385966977',
+ source: 'app/finders/members_finder.rb',
+ expected: ['spec/finders/members_finder_spec.rb', 'spec/graphql/types/project_member_relation_enum_spec.rb']
}
]
class MappingTest
- def initialize(explanation:, source:, expected:, mapping: 'tests.yml')
+ def initialize(explanation:, source:, expected:, strategy:)
@explanation = explanation
@source = source
- @mapping = mapping
+ @strategy = strategy
@expected_set = Set.new(expected)
@actual_set = Set.new(actual)
end
@@ -225,11 +249,14 @@ class MappingTest
attr_reader :explanation, :source, :expected_set, :actual_set, :mapping
def actual
- `tff -f #{mapping} #{source}`.split(' ')
+ tff = TestFileFinder::FileFinder.new(paths: [source])
+ tff.use @strategy
+ tff.test_files
end
end
-results = tests.map { |test| MappingTest.new(**test) }
+strategy = TestFileFinder::MappingStrategies::PatternMatching.load('tests.yml')
+results = tests.map { |test| MappingTest.new(strategy: strategy, **test) }
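`actual` now drives the `test_file_finder` gem in-process, reusing a single `PatternMatching` strategy loaded from `tests.yml` before the tests run, instead of shelling out once per test case. For contrast, the removed implementation spawned the CLI for every mapping test:

    tff -f tests.yml lib/gitaly/server.rb
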
failed_tests = results.select(&:failed?)
if failed_tests.any?