summaryrefslogtreecommitdiff
path: root/scripts
diff options
context:
space:
mode:
Diffstat (limited to 'scripts')
-rw-r--r--scripts/api/base.rb28
-rwxr-xr-xscripts/api/cancel_pipeline.rb14
-rw-r--r--scripts/api/commit_merge_requests.rb19
-rw-r--r--scripts/api/create_issue.rb24
-rw-r--r--scripts/api/create_issue_discussion.rb24
-rw-r--r--scripts/api/find_issues.rb24
-rwxr-xr-xscripts/api/get_job_id.rb19
-rw-r--r--scripts/api/pipeline_failed_jobs.rb21
-rw-r--r--scripts/api/update_issue.rb29
-rw-r--r--scripts/database/schema_validator.rb34
-rwxr-xr-xscripts/db_tasks3
-rwxr-xr-xscripts/decomposition/generate-loose-foreign-key11
-rw-r--r--scripts/frontend/startup_css/constants.js3
-rwxr-xr-xscripts/generate-e2e-pipeline3
-rwxr-xr-xscripts/generate-rspec-foss-impact-pipeline66
-rwxr-xr-xscripts/generate_rspec_pipeline.rb176
-rw-r--r--scripts/gitlab_component_helpers.sh31
-rwxr-xr-xscripts/lint-docs-blueprints.rb4
-rwxr-xr-xscripts/pipeline/create_test_failure_issues.rb224
-rwxr-xr-xscripts/pipeline_test_report_builder.rb10
-rw-r--r--scripts/prepare_build.sh6
-rwxr-xr-xscripts/review_apps/automated_cleanup.rb14
-rwxr-xr-xscripts/setup-test-env59
-rwxr-xr-xscripts/trigger-build.rb1
-rw-r--r--scripts/utils.sh14
-rwxr-xr-xscripts/validate_schema_changes7
26 files changed, 622 insertions, 246 deletions
diff --git a/scripts/api/base.rb b/scripts/api/base.rb
new file mode 100644
index 00000000000..972b461a09a
--- /dev/null
+++ b/scripts/api/base.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'gitlab'
+require_relative 'default_options'
+
+class Base
+ def initialize(options)
+ @project = options.fetch(:project)
+
+ # If api_token is nil, it's set to '' to allow unauthenticated requests (for forks).
+ api_token = options[:api_token] || ''
+
+ warn "No API token given." if api_token.empty?
+
+ @client = Gitlab.client(
+ endpoint: options.fetch(:endpoint, API::DEFAULT_OPTIONS[:endpoint]),
+ private_token: api_token
+ )
+ end
+
+ def execute
+ raise NotImplementedError
+ end
+
+ private
+
+ attr_reader :project, :client
+end
diff --git a/scripts/api/cancel_pipeline.rb b/scripts/api/cancel_pipeline.rb
index 2667cfb9733..5069527368b 100755
--- a/scripts/api/cancel_pipeline.rb
+++ b/scripts/api/cancel_pipeline.rb
@@ -1,19 +1,13 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
-require 'gitlab'
require 'optparse'
-require_relative 'default_options'
+require_relative 'base'
-class CancelPipeline
+class CancelPipeline < Base
def initialize(options)
- @project = options.delete(:project)
+ super
@pipeline_id = options.delete(:pipeline_id)
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: options.delete(:api_token)
- )
end
def execute
@@ -22,7 +16,7 @@ class CancelPipeline
private
- attr_reader :project, :pipeline_id, :client
+ attr_reader :pipeline_id
end
if $PROGRAM_NAME == __FILE__
diff --git a/scripts/api/commit_merge_requests.rb b/scripts/api/commit_merge_requests.rb
index 3cf8dc87497..523d2e769f0 100644
--- a/scripts/api/commit_merge_requests.rb
+++ b/scripts/api/commit_merge_requests.rb
@@ -1,22 +1,11 @@
# frozen_string_literal: true
-require 'gitlab'
-require_relative 'default_options'
+require_relative 'base'
-class CommitMergeRequests
+class CommitMergeRequests < Base
def initialize(options)
- @project = options.fetch(:project)
+ super
@sha = options.fetch(:sha)
-
- # If api_token is nil, it's set to '' to allow unauthenticated requests (for forks).
- api_token = options.fetch(:api_token, '')
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.fetch(:endpoint, API::DEFAULT_OPTIONS[:endpoint]),
- private_token: api_token
- )
end
def execute
@@ -25,5 +14,5 @@ class CommitMergeRequests
private
- attr_reader :project, :sha, :client
+ attr_reader :sha
end
diff --git a/scripts/api/create_issue.rb b/scripts/api/create_issue.rb
index 2117c285771..1c385ce41f2 100644
--- a/scripts/api/create_issue.rb
+++ b/scripts/api/create_issue.rb
@@ -1,29 +1,9 @@
# frozen_string_literal: true
-require 'gitlab'
-require_relative 'default_options'
-
-class CreateIssue
- def initialize(options)
- @project = options.fetch(:project)
-
- # Force the token to be a string so that if api_token is nil, it's set to '',
- # allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
- end
+require_relative 'base'
+class CreateIssue < Base
def execute(issue_data)
client.create_issue(project, issue_data.delete(:title), issue_data)
end
-
- private
-
- attr_reader :project, :client
end
diff --git a/scripts/api/create_issue_discussion.rb b/scripts/api/create_issue_discussion.rb
index 74a9f3ae378..6471a5c2579 100644
--- a/scripts/api/create_issue_discussion.rb
+++ b/scripts/api/create_issue_discussion.rb
@@ -1,32 +1,12 @@
# frozen_string_literal: true
-require 'gitlab'
-require_relative 'default_options'
-
-class CreateIssueDiscussion
- def initialize(options)
- @project = options.fetch(:project)
-
- # Force the token to be a string so that if api_token is nil, it's set to '',
- # allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
- end
+require_relative 'base'
+class CreateIssueDiscussion < Base
def execute(discussion_data)
client.post(
"/projects/#{client.url_encode project}/issues/#{discussion_data.delete(:issue_iid)}/discussions",
body: discussion_data
)
end
-
- private
-
- attr_reader :project, :client
end
diff --git a/scripts/api/find_issues.rb b/scripts/api/find_issues.rb
index a1c37030319..f74f815fba9 100644
--- a/scripts/api/find_issues.rb
+++ b/scripts/api/find_issues.rb
@@ -1,29 +1,9 @@
# frozen_string_literal: true
-require 'gitlab'
-require_relative 'default_options'
-
-class FindIssues
- def initialize(options)
- @project = options.fetch(:project)
-
- # Force the token to be a string so that if api_token is nil, it's set to '',
- # allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
- end
+require_relative 'base'
+class FindIssues < Base
def execute(search_data)
client.issues(project, search_data)
end
-
- private
-
- attr_reader :project, :client
end
diff --git a/scripts/api/get_job_id.rb b/scripts/api/get_job_id.rb
index 12535106a4c..babe8f5dee0 100755
--- a/scripts/api/get_job_id.rb
+++ b/scripts/api/get_job_id.rb
@@ -1,11 +1,10 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
-require 'gitlab'
require 'optparse'
-require_relative 'default_options'
+require_relative 'base'
-class JobFinder
+class JobFinder < Base
DEFAULT_OPTIONS = API::DEFAULT_OPTIONS.merge(
pipeline_query: {}.freeze,
job_query: {}.freeze
@@ -13,22 +12,12 @@ class JobFinder
MAX_PIPELINES_TO_ITERATE = 20
def initialize(options)
- @project = options.delete(:project)
+ super
@pipeline_query = options.delete(:pipeline_query) || DEFAULT_OPTIONS[:pipeline_query]
@job_query = options.delete(:job_query) || DEFAULT_OPTIONS[:job_query]
@pipeline_id = options.delete(:pipeline_id)
@job_name = options.delete(:job_name)
@artifact_path = options.delete(:artifact_path)
-
- # Force the token to be a string so that if api_token is nil, it's set to '', allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
end
def execute
@@ -37,7 +26,7 @@ class JobFinder
private
- attr_reader :project, :pipeline_query, :job_query, :pipeline_id, :job_name, :artifact_path, :client
+ attr_reader :pipeline_query, :job_query, :pipeline_id, :job_name, :artifact_path
def find_job_with_artifact
return if artifact_path.nil?
diff --git a/scripts/api/pipeline_failed_jobs.rb b/scripts/api/pipeline_failed_jobs.rb
index df9a7e76dcd..9012d48994f 100644
--- a/scripts/api/pipeline_failed_jobs.rb
+++ b/scripts/api/pipeline_failed_jobs.rb
@@ -1,25 +1,12 @@
# frozen_string_literal: true
-require 'gitlab'
+require_relative 'base'
-require_relative 'default_options'
-
-class PipelineFailedJobs
+class PipelineFailedJobs < Base
def initialize(options)
- @project = options.delete(:project)
+ super
@pipeline_id = options.delete(:pipeline_id)
@exclude_allowed_to_fail_jobs = options.delete(:exclude_allowed_to_fail_jobs)
-
- # Force the token to be a string so that if api_token is nil, it's set to '',
- # allowing unauthenticated requests (for forks).
- api_token = options.delete(:api_token).to_s
-
- warn "No API token given." if api_token.empty?
-
- @client = Gitlab.client(
- endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
- private_token: api_token
- )
end
def execute
@@ -43,5 +30,5 @@ class PipelineFailedJobs
private
- attr_reader :project, :pipeline_id, :exclude_allowed_to_fail_jobs, :client
+ attr_reader :pipeline_id, :exclude_allowed_to_fail_jobs
end
diff --git a/scripts/api/update_issue.rb b/scripts/api/update_issue.rb
new file mode 100644
index 00000000000..ce296ebc358
--- /dev/null
+++ b/scripts/api/update_issue.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'gitlab'
+require_relative 'default_options'
+
+class UpdateIssue
+ def initialize(options)
+ @project = options.fetch(:project)
+
+ # Force the token to be a string so that if api_token is nil, it's set to '',
+ # allowing unauthenticated requests (for forks).
+ api_token = options.delete(:api_token).to_s
+
+ warn "No API token given." if api_token.empty?
+
+ @client = Gitlab.client(
+ endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
+ private_token: api_token
+ )
+ end
+
+ def execute(issue_iid, issue_data)
+ client.edit_issue(project, issue_iid, issue_data)
+ end
+
+ private
+
+ attr_reader :project, :client
+end
diff --git a/scripts/database/schema_validator.rb b/scripts/database/schema_validator.rb
new file mode 100644
index 00000000000..11a53faa945
--- /dev/null
+++ b/scripts/database/schema_validator.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require_relative '../migration_schema_validator'
+
+class SchemaValidator < MigrationSchemaValidator
+ ALLOW_SCHEMA_CHANGES = 'ALLOW_SCHEMA_CHANGES'
+ COMMIT_MESSAGE_SKIP_TAG = 'skip-db-structure-check'
+
+ def validate!
+ return if should_skip?
+
+ return if schema_changes.empty?
+
+ die "#{FILENAME} was changed, and no migrations were added:\n#{schema_changes}" if committed_migrations.empty?
+ end
+
+ private
+
+ def schema_changes
+ @schema_changes ||= run("git diff #{diff_target} HEAD -- #{FILENAME}")
+ end
+
+ def should_skip?
+ skip_env_present? || skip_commit_present?
+ end
+
+ def skip_env_present?
+ !ENV[ALLOW_SCHEMA_CHANGES].to_s.empty?
+ end
+
+ def skip_commit_present?
+ run("git show -s --format=%B -n 1").to_s.include?(COMMIT_MESSAGE_SKIP_TAG)
+ end
+end
diff --git a/scripts/db_tasks b/scripts/db_tasks
index 36040877abf..645e46e02d9 100755
--- a/scripts/db_tasks
+++ b/scripts/db_tasks
@@ -5,6 +5,7 @@ require 'yaml'
rails_env = ENV.fetch('RAILS_ENV')
database_config = YAML.load_file(File.join(File.expand_path('..', __dir__), 'config', 'database.yml'))[rails_env]
+database_config.reject! { |_k, v| v["database_tasks"] == false }
task = ARGV.shift
raise ArgumentError, 'You need to pass a task name!' unless task
@@ -14,4 +15,4 @@ cmd = ['bundle', 'exec', 'rake', task, *ARGV]
puts "Running: `#{cmd.join(' ')}`"
-system(*cmd)
+exit 1 unless system(*cmd)
diff --git a/scripts/decomposition/generate-loose-foreign-key b/scripts/decomposition/generate-loose-foreign-key
index ad7d6e32aa0..1ea1728732b 100755
--- a/scripts/decomposition/generate-loose-foreign-key
+++ b/scripts/decomposition/generate-loose-foreign-key
@@ -110,15 +110,12 @@ def add_definition_to_yaml(definition)
content = YAML.load_file(Rails.root.join('config/gitlab_loose_foreign_keys.yml'))
table_definitions = content[definition.from_table]
- # insert new entry at random place to avoid conflicts
+ # insert new entry in alphabetical order
unless table_definitions
table_definitions = []
- insert_idx = rand(content.count+1)
- # insert at a given index in ordered hash
- content = content.to_a
- content.insert(insert_idx, [definition.from_table, table_definitions])
- content = content.to_h
+ content[definition.from_table] = table_definitions
+ content = content.sort.to_h
end
on_delete =
@@ -217,7 +214,7 @@ def add_test_to_specs(definition)
puts "Adding test to #{spec_path}..."
spec_test = <<-EOF.strip_heredoc.indent(2)
- context 'loose foreign key on #{definition.from_table}.#{definition.column}' do
+ context 'with loose foreign key on #{definition.from_table}.#{definition.column}' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:#{definition.to_table.singularize}) }
let!(:model) { create(:#{definition.from_table.singularize}, #{definition.column.delete_suffix("_id").singularize}: parent) }
diff --git a/scripts/frontend/startup_css/constants.js b/scripts/frontend/startup_css/constants.js
index 5143c04dc37..e6ca4472fe3 100644
--- a/scripts/frontend/startup_css/constants.js
+++ b/scripts/frontend/startup_css/constants.js
@@ -52,11 +52,14 @@ const createMainOutput = ({ outFile, cssKeys, type }) => ({
path.join(FIXTURES_ROOT, `startup_css/project-${type}.html`),
path.join(FIXTURES_ROOT, `startup_css/project-${type}-signed-out.html`),
path.join(FIXTURES_ROOT, `startup_css/project-${type}-search-ff-off.html`),
+ path.join(FIXTURES_ROOT, `startup_css/project-${type}-super-sidebar.html`),
],
cssKeys,
purgeOptions: {
safelist: {
standard: [
+ 'page-with-super-sidebar',
+ 'page-with-super-sidebar-collapsed',
'page-with-icon-sidebar',
'sidebar-collapsed-desktop',
// We want to include the root dropdown-menu style since it should be hidden by default
diff --git a/scripts/generate-e2e-pipeline b/scripts/generate-e2e-pipeline
index c612a700f90..8ca6771bf1f 100755
--- a/scripts/generate-e2e-pipeline
+++ b/scripts/generate-e2e-pipeline
@@ -27,7 +27,7 @@ variables:
GIT_STRATEGY: "clone" # 'GIT_STRATEGY: clone' optimizes the pack-objects cache hit ratio
GIT_SUBMODULE_STRATEGY: "none"
GITLAB_QA_CACHE_KEY: "$qa_cache_key"
- GITLAB_VERSION: "$(cat VERSION)"
+ GITLAB_SEMVER_VERSION: "$(cat VERSION)"
QA_EXPORT_TEST_METRICS: "${QA_EXPORT_TEST_METRICS:-true}"
QA_FEATURE_FLAGS: "${QA_FEATURE_FLAGS}"
QA_FRAMEWORK_CHANGES: "${QA_FRAMEWORK_CHANGES:-false}"
@@ -41,6 +41,7 @@ YML
echo "Using .gitlab/ci/review-apps/main.gitlab-ci.yml and .gitlab/ci/package-and-test/main.gitlab-ci.yml"
cp .gitlab/ci/review-apps/main.gitlab-ci.yml "$REVIEW_PIPELINE_YML"
echo "$variables" >>"$REVIEW_PIPELINE_YML"
+
cp .gitlab/ci/package-and-test/main.gitlab-ci.yml "$OMNIBUS_PIPELINE_YML"
echo "$variables" >>"$OMNIBUS_PIPELINE_YML"
diff --git a/scripts/generate-rspec-foss-impact-pipeline b/scripts/generate-rspec-foss-impact-pipeline
deleted file mode 100755
index 3277f38ebe1..00000000000
--- a/scripts/generate-rspec-foss-impact-pipeline
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# Script to generate `rspec foss-impact` test child pipeline with dynamically parallelized jobs.
-
-source scripts/utils.sh
-
-rspec_matching_tests_foss_path="${1}"
-pipeline_yml="${2}"
-
-test_file_count=$(wc -w "${rspec_matching_tests_foss_path}" | awk '{ print $1 }')
-echoinfo "test_file_count: ${test_file_count}"
-
-if [[ "${test_file_count}" -eq 0 ]]; then
- skip_pipeline=".gitlab/ci/_skip.yml"
-
- echo "Using ${skip_pipeline} due to no impacted FOSS rspec tests to run"
- cp $skip_pipeline "$pipeline_yml"
- exit
-fi
-
-# As of 2022-09-01:
-# $ find spec -type f | wc -l
-# 12825
-# and
-# $ find ee/spec -type f | wc -l
-# 5610
-# which gives a total of 18435 test files (`number_of_tests_in_total_in_the_test_suite`).
-#
-# Total time to run all tests (based on https://gitlab-org.gitlab.io/rspec_profiling_stats/) is 170183 seconds (`duration_of_the_test_suite_in_seconds`).
-#
-# This gives an approximate 170183 / 18435 = 9.2 seconds per test file (`average_test_file_duration_in_seconds`).
-#
-# If we want each test job to finish in 10 minutes, given we have 3 minutes of setup (`setup_duration_in_seconds`), then we need to give 7 minutes of testing to each test node (`optimal_test_runtime_duration_in_seconds`).
-# (7 * 60) / 9.2 = 45.6
-#
-# So if we'd want to run the full test suites in 10 minutes (`optimal_test_job_duration_in_seconds`), we'd need to run at max 45 test file per nodes (`optimal_test_file_count_per_node`).
-number_of_tests_in_total_in_the_test_suite=18435
-duration_of_the_test_suite_in_seconds=170183
-optimal_test_job_duration_in_seconds=600 # 10 minutes
-setup_duration_in_seconds=180 # 3 minutes
-
-optimal_test_runtime_duration_in_seconds=$(( optimal_test_job_duration_in_seconds - setup_duration_in_seconds ))
-echoinfo "optimal_test_runtime_duration_in_seconds: ${optimal_test_runtime_duration_in_seconds}"
-
-average_test_file_duration_in_seconds=$(( duration_of_the_test_suite_in_seconds / number_of_tests_in_total_in_the_test_suite ))
-echoinfo "average_test_file_duration_in_seconds: ${average_test_file_duration_in_seconds}"
-
-optimal_test_file_count_per_node=$(( optimal_test_runtime_duration_in_seconds / average_test_file_duration_in_seconds ))
-echoinfo "optimal_test_file_count_per_node: ${optimal_test_file_count_per_node}"
-
-node_count=$(( test_file_count / optimal_test_file_count_per_node ))
-echoinfo "node_count: ${node_count}"
-
-echoinfo "Optimal node count for 'rspec foss-impact' jobs is ${node_count}."
-
-MAX_NODES_COUNT=50 # Maximum parallelization allowed by GitLab
-if [[ "${node_count}" -gt "${MAX_NODES_COUNT}" ]]; then
- echoinfo "We don't want to parallelize 'rspec foss-impact' to more than ${MAX_NODES_COUNT} jobs for now! Decreasing the parallelization to ${MAX_NODES_COUNT}."
- node_count=${MAX_NODES_COUNT}
-fi
-
-ruby -rerb -e "puts ERB.new(File.read('.gitlab/ci/rails/rspec-foss-impact.gitlab-ci.yml.erb')).result_with_hash(parallel_value: ${node_count})" > "${pipeline_yml}"
-
-echosuccess "Generated ${pipeline_yml} pipeline with following content:"
-cat "${pipeline_yml}"
diff --git a/scripts/generate_rspec_pipeline.rb b/scripts/generate_rspec_pipeline.rb
new file mode 100755
index 00000000000..e226acc0430
--- /dev/null
+++ b/scripts/generate_rspec_pipeline.rb
@@ -0,0 +1,176 @@
+#!/usr/bin/env ruby
+
+# frozen_string_literal: true
+
+require 'optparse'
+require 'json'
+require 'fileutils'
+require 'erb'
+require_relative '../tooling/quality/test_level'
+
+# Class to generate RSpec test child pipeline with dynamically parallelized jobs.
+class GenerateRspecPipeline
+ SKIP_PIPELINE_YML_FILE = ".gitlab/ci/_skip.yml"
+ TEST_LEVELS = %i[migration background_migration unit integration system].freeze
+ MAX_NODES_COUNT = 50 # Maximum parallelization allowed by GitLab
+
+ OPTIMAL_TEST_JOB_DURATION_IN_SECONDS = 600 # 10 MINUTES
+ SETUP_DURATION_IN_SECONDS = 180.0 # 3 MINUTES
+ OPTIMAL_TEST_RUNTIME_DURATION_IN_SECONDS = OPTIMAL_TEST_JOB_DURATION_IN_SECONDS - SETUP_DURATION_IN_SECONDS
+
+ # As of 2022-09-01:
+ # $ find spec -type f | wc -l
+ # 12825
+ # and
+ # $ find ee/spec -type f | wc -l
+ # 5610
+ # which gives a total of 18435 test files (`NUMBER_OF_TESTS_IN_TOTAL_IN_THE_TEST_SUITE`).
+ #
+ # Total time to run all tests (based on https://gitlab-org.gitlab.io/rspec_profiling_stats/)
+ # is 170183 seconds (`DURATION_OF_THE_TEST_SUITE_IN_SECONDS`).
+ #
+ # This gives an approximate 170183 / 18435 = 9.2 seconds per test file
+ # (`DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS`).
+ #
+ # If we want each test job to finish in 10 minutes, given we have 3 minutes of setup (`SETUP_DURATION_IN_SECONDS`),
+ # then we need to give 7 minutes of testing to each test node (`OPTIMAL_TEST_RUNTIME_DURATION_IN_SECONDS`).
+ # (7 * 60) / 9.2 = 45.6
+ #
+ # So if we'd want to run the full test suites in 10 minutes (`OPTIMAL_TEST_JOB_DURATION_IN_SECONDS`),
+ # we'd need to run at most 45 test files per node (`#optimal_test_file_count_per_node_per_test_level`).
+ NUMBER_OF_TESTS_IN_TOTAL_IN_THE_TEST_SUITE = 18_435
+ DURATION_OF_THE_TEST_SUITE_IN_SECONDS = 170_183
+ DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS =
+ DURATION_OF_THE_TEST_SUITE_IN_SECONDS / NUMBER_OF_TESTS_IN_TOTAL_IN_THE_TEST_SUITE
+
+ # rspec_files_path: A file containing RSpec files to run, separated by a space
+ # pipeline_template_path: A YAML pipeline configuration template to generate the final pipeline config from
+ def initialize(pipeline_template_path:, rspec_files_path: nil, knapsack_report_path: nil)
+ @pipeline_template_path = pipeline_template_path.to_s
+ @rspec_files_path = rspec_files_path.to_s
+ @knapsack_report_path = knapsack_report_path.to_s
+
+ raise ArgumentError unless File.exist?(@pipeline_template_path)
+ end
+
+ def generate!
+ if all_rspec_files.empty?
+ info "Using #{SKIP_PIPELINE_YML_FILE} due to no RSpec files to run"
+ FileUtils.cp(SKIP_PIPELINE_YML_FILE, pipeline_filename)
+ return
+ end
+
+ File.open(pipeline_filename, 'w') do |handle|
+ pipeline_yaml = ERB.new(File.read(pipeline_template_path)).result_with_hash(**erb_binding)
+ handle.write(pipeline_yaml.squeeze("\n").strip)
+ end
+ end
+
+ private
+
+ attr_reader :pipeline_template_path, :rspec_files_path, :knapsack_report_path
+
+ def info(text)
+ $stdout.puts "[#{self.class.name}] #{text}"
+ end
+
+ def all_rspec_files
+ @all_rspec_files ||= File.exist?(rspec_files_path) ? File.read(rspec_files_path).split(' ') : []
+ end
+
+ def pipeline_filename
+ @pipeline_filename ||= "#{pipeline_template_path}.yml"
+ end
+
+ def erb_binding
+ { rspec_files_per_test_level: rspec_files_per_test_level }
+ end
+
+ def rspec_files_per_test_level
+ @rspec_files_per_test_level ||= begin
+ all_remaining_rspec_files = all_rspec_files.dup
+ TEST_LEVELS.each_with_object(Hash.new { |h, k| h[k] = {} }) do |test_level, memo| # rubocop:disable Rails/IndexWith
+ memo[test_level][:files] = all_remaining_rspec_files
+ .grep(Quality::TestLevel.new.regexp(test_level))
+ .tap { |files| files.each { |file| all_remaining_rspec_files.delete(file) } }
+ memo[test_level][:parallelization] = optimal_nodes_count(test_level, memo[test_level][:files])
+ end
+ end
+ end
+
+ def optimal_nodes_count(test_level, rspec_files)
+ nodes_count = (rspec_files.size / optimal_test_file_count_per_node_per_test_level(test_level)).ceil
+ info "Optimal node count for #{rspec_files.size} #{test_level} RSpec files is #{nodes_count}."
+
+ if nodes_count > MAX_NODES_COUNT
+ info "We don't want to parallelize to more than #{MAX_NODES_COUNT} jobs for now! " \
+ "Decreasing the parallelization to #{MAX_NODES_COUNT}."
+
+ MAX_NODES_COUNT
+ else
+ nodes_count
+ end
+ end
+
+ def optimal_test_file_count_per_node_per_test_level(test_level)
+ [
+ (OPTIMAL_TEST_RUNTIME_DURATION_IN_SECONDS / average_test_file_duration_in_seconds_per_test_level[test_level]),
+ 1
+ ].max
+ end
+
+ def average_test_file_duration_in_seconds_per_test_level
+ @optimal_test_file_count_per_node_per_test_level ||=
+ if knapsack_report.any?
+ remaining_knapsack_report = knapsack_report.dup
+ TEST_LEVELS.each_with_object({}) do |test_level, memo|
+ matching_data_per_test_level = remaining_knapsack_report
+ .select { |test_file, _| test_file.match?(Quality::TestLevel.new.regexp(test_level)) }
+ .tap { |test_data| test_data.each { |file, _| remaining_knapsack_report.delete(file) } }
+ memo[test_level] =
+ matching_data_per_test_level.values.sum / matching_data_per_test_level.keys.size
+ end
+ else
+ TEST_LEVELS.each_with_object({}) do |test_level, memo| # rubocop:disable Rails/IndexWith
+ memo[test_level] = DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS
+ end
+ end
+ end
+
+ def knapsack_report
+ @knapsack_report ||=
+ begin
+ File.exist?(knapsack_report_path) ? JSON.parse(File.read(knapsack_report_path)) : {}
+ rescue JSON::ParserError => e
+ info "[ERROR] Knapsack report at #{knapsack_report_path} couldn't be parsed! Error:\n#{e}"
+ {}
+ end
+ end
+end
+
+if $PROGRAM_NAME == __FILE__
+ options = {}
+
+ OptionParser.new do |opts|
+ opts.on("-f", "--rspec-files-path path", String, "Path to a file containing RSpec files to run, " \
+ "separated by a space") do |value|
+ options[:rspec_files_path] = value
+ end
+
+ opts.on("-t", "--pipeline-template-path PATH", String, "Path to a YAML pipeline configuration template to " \
+ "generate the final pipeline config from") do |value|
+ options[:pipeline_template_path] = value
+ end
+
+ opts.on("-k", "--knapsack-report-path path", String, "Path to a Knapsack report") do |value|
+ options[:knapsack_report_path] = value
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!
+
+ GenerateRspecPipeline.new(**options).generate!
+end
diff --git a/scripts/gitlab_component_helpers.sh b/scripts/gitlab_component_helpers.sh
index c46dbb57a58..309e339de01 100644
--- a/scripts/gitlab_component_helpers.sh
+++ b/scripts/gitlab_component_helpers.sh
@@ -51,6 +51,24 @@ export GITLAB_ASSETS_HASH="${GITLAB_ASSETS_HASH:-"NO_HASH"}"
export GITLAB_ASSETS_PACKAGE="assets-${NODE_ENV}-${GITLAB_EDITION}-${GITLAB_ASSETS_HASH}-${GITLAB_ASSETS_PACKAGE_VERSION}.tar.gz"
export GITLAB_ASSETS_PACKAGE_URL="${API_PACKAGES_BASE_URL}/assets/${NODE_ENV}-${GITLAB_EDITION}-${GITLAB_ASSETS_HASH}/${GITLAB_ASSETS_PACKAGE}"
+# Fixtures constants
+
+# Export the SHA variable for updating/downloading fixture packages, using the following order of precedence:
+# 1. If MERGE_BASE_SHA is defined, use its value.
+# 2. If CI_MERGE_REQUEST_SOURCE_BRANCH_SHA is defined, use its value for merge request pipelines.
+# 3. Otherwise, use the value of CI_COMMIT_SHA for default branch pipelines or merge requests with detached pipelines.
+if [ -n "${MERGE_BASE_SHA:-}" ]; then
+ export FIXTURES_SHA="${MERGE_BASE_SHA}"
+elif [ -n "${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA:-}" ]; then
+ export FIXTURES_SHA="${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}"
+else
+ export FIXTURES_SHA="${CI_COMMIT_SHA}"
+fi
+
+export FIXTURES_PATH="tmp/tests/frontend/**/*"
+export FIXTURES_PACKAGE="fixtures-${FIXTURES_SHA}.tar.gz"
+export FIXTURES_PACKAGE_URL="${API_PACKAGES_BASE_URL}/fixtures/${FIXTURES_SHA}/${FIXTURES_PACKAGE}"
+
# Generic helper functions
function archive_doesnt_exist() {
local package_url="${1}"
@@ -147,3 +165,16 @@ function create_gitlab_assets_package() {
function upload_gitlab_assets_package() {
upload_package "${GITLAB_ASSETS_PACKAGE}" "${GITLAB_ASSETS_PACKAGE_URL}"
}
+
+# Fixtures functions
+function fixtures_archive_doesnt_exist() {
+ archive_doesnt_exist "${FIXTURES_PACKAGE_URL}"
+}
+
+function create_fixtures_package() {
+ create_package "${FIXTURES_PACKAGE}" "${FIXTURES_PATH}"
+}
+
+function upload_fixtures_package() {
+ upload_package "${FIXTURES_PACKAGE}" "${FIXTURES_PACKAGE_URL}"
+}
diff --git a/scripts/lint-docs-blueprints.rb b/scripts/lint-docs-blueprints.rb
index 35e0013cb34..d0a0a6a05de 100755
--- a/scripts/lint-docs-blueprints.rb
+++ b/scripts/lint-docs-blueprints.rb
@@ -22,7 +22,7 @@ def extract_front_matter(path)
end
class BlueprintFrontMatter
- STATUSES = %w[proposed accepted ongoing implemented rejected]
+ STATUSES = %w[proposed accepted ongoing implemented postponed rejected]
attr_reader :errors
@@ -32,6 +32,8 @@ class BlueprintFrontMatter
end
def validate
+ return if @metadata['redirect_to']
+
validate_status
validate_authors
validate_creation_date
diff --git a/scripts/pipeline/create_test_failure_issues.rb b/scripts/pipeline/create_test_failure_issues.rb
new file mode 100755
index 00000000000..6312d392760
--- /dev/null
+++ b/scripts/pipeline/create_test_failure_issues.rb
@@ -0,0 +1,224 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'optparse'
+require 'json'
+require 'httparty'
+
+require_relative '../api/create_issue'
+require_relative '../api/find_issues'
+require_relative '../api/update_issue'
+
+class CreateTestFailureIssues
+ DEFAULT_OPTIONS = {
+ project: nil,
+ tests_report_file: 'tests_report.json',
+ issue_json_folder: 'tmp/issues/'
+ }.freeze
+
+ def initialize(options)
+ @options = options
+ end
+
+ def execute
+ puts "[CreateTestFailureIssues] No failed tests!" if failed_tests.empty?
+
+ failed_tests.each_with_object([]) do |failed_test, existing_issues|
+ CreateTestFailureIssue.new(options.dup).comment_or_create(failed_test, existing_issues).tap do |issue|
+ existing_issues << issue
+ File.write(File.join(options[:issue_json_folder], "issue-#{issue.iid}.json"), JSON.pretty_generate(issue.to_h))
+ end
+ end
+ end
+
+ private
+
+ attr_reader :options
+
+ def failed_tests
+ @failed_tests ||=
+ if File.exist?(options[:tests_report_file])
+ JSON.parse(File.read(options[:tests_report_file]))
+ else
+ puts "[CreateTestFailureIssues] #{options[:tests_report_file]} doesn't exist!"
+ []
+ end
+ end
+end
+
+class CreateTestFailureIssue
+ MAX_TITLE_LENGTH = 255
+ WWW_GITLAB_COM_SITE = 'https://about.gitlab.com'
+ WWW_GITLAB_COM_GROUPS_JSON = "#{WWW_GITLAB_COM_SITE}/groups.json".freeze
+ WWW_GITLAB_COM_CATEGORIES_JSON = "#{WWW_GITLAB_COM_SITE}/categories.json".freeze
+ FEATURE_CATEGORY_METADATA_REGEX = /(?<=feature_category: :)\w+/
+ DEFAULT_LABELS = ['type::maintenance', 'failure::flaky-test'].freeze
+
+ def initialize(options)
+ @project = options.delete(:project)
+ @api_token = options.delete(:api_token)
+ end
+
+ def comment_or_create(failed_test, existing_issues = [])
+ existing_issue = find(failed_test, existing_issues)
+
+ if existing_issue
+ update_reports(existing_issue, failed_test)
+ existing_issue
+ else
+ create(failed_test)
+ end
+ end
+
+ def find(failed_test, existing_issues = [])
+ failed_test_issue_title = failed_test_issue_title(failed_test)
+ issue_from_existing_issues = existing_issues.find { |issue| issue.title == failed_test_issue_title }
+ issue_from_issue_tracker = FindIssues
+ .new(project: project, api_token: api_token)
+ .execute(state: 'opened', search: failed_test_issue_title)
+ .first
+
+ existing_issue = issue_from_existing_issues || issue_from_issue_tracker
+
+ return unless existing_issue
+
+ puts "[CreateTestFailureIssue] Found issue '#{existing_issue.title}': #{existing_issue.web_url}!"
+
+ existing_issue
+ end
+
+ def update_reports(existing_issue, failed_test)
+ new_issue_description = "#{existing_issue.description}\n- #{failed_test['job_url']} (#{ENV['CI_PIPELINE_URL']})"
+ UpdateIssue
+ .new(project: project, api_token: api_token)
+ .execute(existing_issue.iid, description: new_issue_description)
+ puts "[CreateTestFailureIssue] Added a report in '#{existing_issue.title}': #{existing_issue.web_url}!"
+ end
+
+ def create(failed_test)
+ payload = {
+ title: failed_test_issue_title(failed_test),
+ description: failed_test_issue_description(failed_test),
+ labels: failed_test_issue_labels(failed_test)
+ }
+
+ CreateIssue.new(project: project, api_token: api_token).execute(payload).tap do |issue|
+ puts "[CreateTestFailureIssue] Created issue '#{issue.title}': #{issue.web_url}!"
+ end
+ end
+
+ private
+
+ attr_reader :project, :api_token
+
+ def failed_test_id(failed_test)
+ Digest::SHA256.hexdigest(search_safe(failed_test['name']))[0...12]
+ end
+
+ def failed_test_issue_title(failed_test)
+ title = "#{failed_test['file']} - ID: #{failed_test_id(failed_test)}"
+
+ raise "Title is too long!" if title.size > MAX_TITLE_LENGTH
+
+ title
+ end
+
+ def failed_test_issue_description(failed_test)
+ <<~DESCRIPTION
+ ### Full description
+
+ `#{search_safe(failed_test['name'])}`
+
+ ### File path
+
+ `#{failed_test['file']}`
+
+ <!-- Don't add anything after the report list since it's updated automatically -->
+ ### Reports
+
+ - #{failed_test['job_url']} (#{ENV['CI_PIPELINE_URL']})
+ DESCRIPTION
+ end
+
+ def failed_test_issue_labels(failed_test)
+ labels = DEFAULT_LABELS + category_and_group_labels_for_test_file(failed_test['file'])
+
+ # make sure we don't spam people who are subscribed to the actual labels
+ labels.map { |label| "wip-#{label}" }
+ end
+
+ def category_and_group_labels_for_test_file(test_file)
+ feature_categories = File.open(File.expand_path(File.join('..', '..', test_file), __dir__))
+ .read
+ .scan(FEATURE_CATEGORY_METADATA_REGEX)
+
+ category_labels = feature_categories.filter_map { |category| categories_mapping.dig(category, 'label') }.uniq
+
+ groups = feature_categories.filter_map { |category| categories_mapping.dig(category, 'group') }
+ group_labels = groups.map { |group| groups_mapping.dig(group, 'label') }.uniq
+
+ (category_labels + [group_labels.first]).compact
+ end
+
+ def categories_mapping
+ @categories_mapping ||= self.class.fetch_json(WWW_GITLAB_COM_CATEGORIES_JSON)
+ end
+
+ def groups_mapping
+ @groups_mapping ||= self.class.fetch_json(WWW_GITLAB_COM_GROUPS_JSON)
+ end
+
+ def search_safe(value)
+ value.delete('"')
+ end
+
+ def self.fetch_json(json_url)
+ json = with_retries { HTTParty.get(json_url, format: :plain) } # rubocop:disable Gitlab/HTTParty
+ JSON.parse(json)
+ end
+
+ def self.with_retries(attempts: 3)
+ yield
+ rescue Errno::ECONNRESET, OpenSSL::SSL::SSLError, Net::OpenTimeout
+ retry if (attempts -= 1) > 0
+ raise
+ end
+ private_class_method :with_retries
+end
+
+if $PROGRAM_NAME == __FILE__
+ options = CreateTestFailureIssues::DEFAULT_OPTIONS.dup
+
+ OptionParser.new do |opts|
+ opts.on("-p", "--project PROJECT", String,
+ "Project where to create the issue (defaults to " \
+ "`#{CreateTestFailureIssues::DEFAULT_OPTIONS[:project]}`)") do |value|
+ options[:project] = value
+ end
+
+ opts.on("-r", "--tests-report-file file_path", String,
+ "Path to a JSON file which contains the current pipeline's tests report (defaults to " \
+ "`#{CreateTestFailureIssues::DEFAULT_OPTIONS[:tests_report_file]}`)"
+ ) do |value|
+ options[:tests_report_file] = value
+ end
+
+ opts.on("-f", "--issues-json-folder file_path", String,
+ "Path to a folder where to save the issues JSON data (defaults to " \
+ "`#{CreateTestFailureIssues::DEFAULT_OPTIONS[:issue_json_folder]}`)") do |value|
+ options[:issue_json_folder] = value
+ end
+
+ opts.on("-t", "--api-token API_TOKEN", String,
+ "A valid Project token with the `Reporter` role and `api` scope to create the issue") do |value|
+ options[:api_token] = value
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!
+
+ CreateTestFailureIssues.new(options).execute
+end
diff --git a/scripts/pipeline_test_report_builder.rb b/scripts/pipeline_test_report_builder.rb
index 6f69a5c692f..c84acf2fd94 100755
--- a/scripts/pipeline_test_report_builder.rb
+++ b/scripts/pipeline_test_report_builder.rb
@@ -19,7 +19,8 @@ require_relative 'api/default_options'
# Push into expected format for failed tests
class PipelineTestReportBuilder
DEFAULT_OPTIONS = {
- target_project: Host::DEFAULT_OPTIONS[:target_project],
+ target_project: Host::DEFAULT_OPTIONS[:target_project] || API::DEFAULT_OPTIONS[:project],
+ current_pipeline_id: API::DEFAULT_OPTIONS[:pipeline_id],
mr_iid: Host::DEFAULT_OPTIONS[:mr_iid],
api_endpoint: API::DEFAULT_OPTIONS[:endpoint],
output_file_path: 'test_results/test_reports.json',
@@ -28,6 +29,7 @@ class PipelineTestReportBuilder
def initialize(options)
@target_project = options.delete(:target_project)
+ @current_pipeline_id = options.delete(:current_pipeline_id)
@mr_iid = options.delete(:mr_iid)
@api_endpoint = options.delete(:api_endpoint).to_s
@output_file_path = options.delete(:output_file_path).to_s
@@ -47,7 +49,7 @@ class PipelineTestReportBuilder
end
def latest_pipeline
- pipelines_sorted_descending[0]
+ fetch("#{target_project_api_base_url}/pipelines/#{current_pipeline_id}")
end
def previous_pipeline
@@ -58,6 +60,8 @@ class PipelineTestReportBuilder
private
+ attr_reader :target_project, :current_pipeline_id, :mr_iid, :api_endpoint, :output_file_path, :pipeline_index
+
def pipeline
@pipeline ||=
case pipeline_index
@@ -76,8 +80,6 @@ class PipelineTestReportBuilder
pipelines_for_mr.sort_by { |a| -a['id'] }
end
- attr_reader :target_project, :mr_iid, :api_endpoint, :output_file_path, :pipeline_index
-
def pipeline_project_api_base_url(pipeline)
"#{api_endpoint}/projects/#{pipeline['project_id']}"
end
diff --git a/scripts/prepare_build.sh b/scripts/prepare_build.sh
index 500e61ab76a..ca3dd0eec57 100644
--- a/scripts/prepare_build.sh
+++ b/scripts/prepare_build.sh
@@ -14,7 +14,13 @@ if [ "$DECOMPOSED_DB" == "true" ]; then
echo "Using decomposed database config (config/database.yml.decomposed-postgresql)"
cp config/database.yml.decomposed-postgresql config/database.yml
else
+ echo "Using single database config (config/database.yml.postgresql)"
cp config/database.yml.postgresql config/database.yml
+
+ if [ "$CI_CONNECTION_DB" == "true" ]; then
+ echo "Enabling ci connection (database_tasks: false) in config/database.yml"
+ sed -i '/ci:/,/geo:/''s/^ # / /g' config/database.yml
+ fi
fi
# Set up Geo database if the job name matches `rspec-ee` or `geo`.
diff --git a/scripts/review_apps/automated_cleanup.rb b/scripts/review_apps/automated_cleanup.rb
index 7e606b74de9..5fff7f4ff88 100755
--- a/scripts/review_apps/automated_cleanup.rb
+++ b/scripts/review_apps/automated_cleanup.rb
@@ -36,8 +36,6 @@ module ReviewApps
@api_endpoint = api_endpoint
@dry_run = options[:dry_run]
@environments_not_found_count = 0
-
- puts "Dry-run mode." if dry_run
end
def gitlab
@@ -65,6 +63,7 @@ module ReviewApps
end
def perform_gitlab_environment_cleanup!(days_for_delete:)
+ puts "Dry-run mode." if dry_run
puts "Checking for Review Apps not updated in the last #{days_for_delete} days..."
checked_environments = []
@@ -106,6 +105,7 @@ module ReviewApps
end
def perform_gitlab_docs_environment_cleanup!(days_for_stop:, days_for_delete:)
+ puts "Dry-run mode." if dry_run
puts "Checking for Docs Review Apps not updated in the last #{days_for_stop} days..."
checked_environments = []
@@ -140,6 +140,7 @@ module ReviewApps
end
def perform_helm_releases_cleanup!(days:)
+ puts "Dry-run mode." if dry_run
puts "Checking for Helm releases that are failed or not updated in the last #{days} days..."
threshold = threshold_time(days: days)
@@ -162,12 +163,14 @@ module ReviewApps
end
def perform_stale_namespace_cleanup!(days:)
+ puts "Dry-run mode." if dry_run
kubernetes_client = Tooling::KubernetesClient.new(namespace: nil)
kubernetes_client.cleanup_review_app_namespaces(created_before: threshold_time(days: days), wait: false) unless dry_run
end
def perform_stale_pvc_cleanup!(days:)
+ puts "Dry-run mode." if dry_run
kubernetes.cleanup_by_created_at(resource_type: 'pvc', created_before: threshold_time(days: days), wait: false) unless dry_run
end
@@ -243,6 +246,7 @@ module ReviewApps
unless dry_run
helm.delete(release_name: releases_names)
kubernetes.cleanup_by_release(release_name: releases_names, wait: false)
+ kubernetes.delete_namespaces_by_exact_names(resource_names: releases_names, wait: false)
end
rescue Tooling::Helm3Client::CommandFailedError => ex
@@ -256,7 +260,11 @@ module ReviewApps
end
def threshold_time(days:)
- Time.now - days * 24 * 3600
+ days_integer = days.to_i
+
+ raise "days should be an integer between 1 and 365 inclusive! Got #{days_integer}" unless days_integer.between?(1, 365)
+
+ Time.now - days_integer * 24 * 3600
end
def ignore_exception?(exception_message, exceptions_ignored)
diff --git a/scripts/setup-test-env b/scripts/setup-test-env
index 97762e1cafa..ae00b569ce3 100755
--- a/scripts/setup-test-env
+++ b/scripts/setup-test-env
@@ -4,66 +4,49 @@
require_relative '../config/bundler_setup'
+require_relative '../spec/rails_autoload'
+
require 'request_store'
require 'rake'
-require 'active_support/dependencies'
-require 'active_support/dependencies/autoload'
-require 'active_support/core_ext/numeric'
-require 'active_support/string_inquirer'
+require 'active_support/all'
ENV['SKIP_RAILS_ENV_IN_RAKE'] = 'true'
-module Rails
- extend self
-
- def root
- Pathname.new(File.expand_path('..', __dir__))
- end
-
- def env
- @_env ||= ActiveSupport::StringInquirer.new(ENV["RAILS_ENV"] || ENV["RACK_ENV"] || "test")
- end
-end
-
-ActiveSupport::Dependencies.autoload_paths << 'lib'
-
load File.expand_path('../lib/tasks/gitlab/helpers.rake', __dir__)
load File.expand_path('../lib/tasks/gitlab/gitaly.rake', __dir__)
-# Required for config/0_inject_enterprise_edition_module.rb, lib/gitlab/access.rb
-require_dependency File.expand_path('../lib/gitlab', __dir__)
-
-require_dependency File.expand_path('../config/initializers/0_inject_enterprise_edition_module', __dir__)
-
-# Require for lib/gitlab/gitaly_client/storage_settings.rb and config/initializers/1_settings.rb
-require 'active_support/hash_with_indifferent_access'
-
-# Required for lib/gitlab/visibility_level.rb and lib/gitlab/safe_request_store.rb
-require 'active_support/concern'
-require 'active_support/core_ext/module/delegation'
-
# Required for lib/system_check/helpers.rb
-require_dependency File.expand_path('../lib/gitlab/task_helpers', __dir__)
+require_relative '../lib/gitlab/task_helpers'
# Required for lib/tasks/gitlab/helpers.rake
-require_dependency File.expand_path('../lib/system_check/helpers', __dir__)
+require_relative '../lib/system_check/helpers'
# Required for config/initializers/1_settings.rb
require 'omniauth'
require 'omniauth-github'
require 'etc'
-require_dependency File.expand_path('../lib/gitlab/access', __dir__)
-require_dependency File.expand_path('../lib/gitlab/utils', __dir__)
+require_relative '../lib/gitlab/access'
+require_relative '../lib/gitlab/utils'
+
+unless defined?(License)
+ # This is needed to allow use of `Gitlab::ImportSources.values` in `1_settings.rb`.
+ # See ee/lib/ee/gitlab/import_sources.rb
+ class License
+ def self.database
+ Struct.new(:cached_table_exists?).new(false)
+ end
+ end
+end
-require_dependency File.expand_path('../config/initializers/1_settings', __dir__)
+require_relative '../config/initializers/1_settings'
Gitlab.ee do
load File.expand_path('../ee/lib/tasks/gitlab/indexer.rake', __dir__)
- require_dependency File.expand_path('../ee/lib/gitlab/elastic/indexer', __dir__)
- require_dependency File.expand_path('../lib/gitlab/utils/override', __dir__)
+ require_relative '../ee/lib/gitlab/elastic/indexer'
+ require_relative '../lib/gitlab/utils/override'
end
-require_dependency File.expand_path('../spec/support/helpers/test_env', __dir__)
+require_relative '../spec/support/helpers/test_env'
TestEnv.init
diff --git a/scripts/trigger-build.rb b/scripts/trigger-build.rb
index 69eea7488fb..c7c09557ff9 100755
--- a/scripts/trigger-build.rb
+++ b/scripts/trigger-build.rb
@@ -278,6 +278,7 @@ module Trigger
def extra_variables
{
"BRANCH_#{project_slug.upcase}" => ENV['CI_COMMIT_REF_NAME'],
+ "MERGE_REQUEST_IID_#{project_slug.upcase}" => ENV['CI_MERGE_REQUEST_IID'],
"REVIEW_SLUG" => review_slug
}
end
diff --git a/scripts/utils.sh b/scripts/utils.sh
index 55005d0abff..df8a5825dab 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -1,10 +1,19 @@
function retry() {
+ retry_times_sleep 2 3 "$@"
+}
+
+function retry_times_sleep() {
+ number_of_retries="$1"
+ shift
+ sleep_seconds="$1"
+ shift
+
if eval "$@"; then
return 0
fi
- for i in 2 1; do
- sleep 3s
+ for i in $(seq "${number_of_retries}" -1 1); do
+ sleep "$sleep_seconds"s
echo "[$(date '+%H:%M:%S')] Retrying $i..."
if eval "$@"; then
return 0
@@ -32,6 +41,7 @@ function retry_exponential() {
return 0
fi
done
+
return 1
}
diff --git a/scripts/validate_schema_changes b/scripts/validate_schema_changes
new file mode 100755
index 00000000000..a6a01a060ce
--- /dev/null
+++ b/scripts/validate_schema_changes
@@ -0,0 +1,7 @@
+#!/usr/bin/env ruby
+
+# frozen_string_literal: true
+
+require_relative './database/schema_validator'
+
+SchemaValidator.new.validate!