author     GitLab Bot <gitlab-bot@gitlab.com>  2023-04-20 11:43:17 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2023-04-20 11:43:17 +0000
commit     dfc94207fec2d84314b1a5410cface22e8b369bd (patch)
tree       c54022f61ced104305889a64de080998a0dc773b /scripts
parent     b874efeff674f6bf0355d5d242ecf81c6f7155df (diff)
download   gitlab-ce-dfc94207fec2d84314b1a5410cface22e8b369bd.tar.gz
Add latest changes from gitlab-org/gitlab@15-11-stable-ee (tag: v15.11.0-rc42)
Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/build_assets_image                         |   2
-rwxr-xr-x  scripts/create-pipeline-failure-incident.rb        | 272
-rwxr-xr-x  scripts/decomposition/generate-loose-foreign-key   |   5
-rwxr-xr-x  scripts/failed_tests.rb                            |   4
-rw-r--r--  scripts/frontend/extract_gettext_all.js            |  10
-rwxr-xr-x  scripts/frontend/po_to_json.js                     | 208
-rw-r--r--  scripts/frontend/startup_css/constants.js          |   1
-rwxr-xr-x  scripts/generate-e2e-pipeline                      |   3
-rwxr-xr-x  scripts/generate-failed-pipeline-slack-message.rb  | 181
-rwxr-xr-x  scripts/generate_rspec_pipeline.rb                 |  55
-rwxr-xr-x  scripts/gitaly-test-build                          |   7
-rwxr-xr-x  scripts/gitaly-test-spawn                          |   2
-rw-r--r--  scripts/gitlab_component_helpers.sh                |  82
-rwxr-xr-x  scripts/packages/automated_cleanup.rb              |   4
-rwxr-xr-x  scripts/pipeline/create_test_failure_issues.rb     |  81
-rwxr-xr-x  scripts/review_apps/automated_cleanup.rb           |  58
-rw-r--r--  scripts/review_apps/base-config.yaml               |  87
-rwxr-xr-x  scripts/review_apps/k8s-resources-count-checks.sh  |   9
-rwxr-xr-x  scripts/review_apps/review-apps.sh                 |  92
-rw-r--r--  scripts/rspec_helpers.sh                           |  15
-rw-r--r--  scripts/utils.sh                                   |   2
-rwxr-xr-x  scripts/verify-tff-mapping                         |  19
22 files changed, 582 insertions, 617 deletions
diff --git a/scripts/build_assets_image b/scripts/build_assets_image
index ee8623c826e..00f21e5dede 100755
--- a/scripts/build_assets_image
+++ b/scripts/build_assets_image
@@ -29,7 +29,7 @@ fi
ASSETS_IMAGE_NAME="gitlab-assets-ce"
# `dev.gitlab-org` still has gitlab-ee.
-if [ "${CI_PROJECT_NAME}" = "gitlab" ] || [ "${CI_PROJECT_NAME}" = "gitlab-ee" ]; then
+if ([ "${CI_PROJECT_NAME}" = "gitlab" ] && [ "${FOSS_ONLY}" != "1" ]) || ([ "${CI_PROJECT_NAME}" = "gitlab-ee" ] && [ "${FOSS_ONLY}" != "1" ]); then
ASSETS_IMAGE_NAME="gitlab-assets-ee"
fi
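The two halves of the new condition differ only in the project name, so the `FOSS_ONLY` check can be factored out. A logically equivalent shell sketch (an illustration, not part of the patch):

    # Build the EE assets image for either project name, unless a FOSS-only
    # build was requested via FOSS_ONLY=1.
    if [ "${FOSS_ONLY}" != "1" ] && { [ "${CI_PROJECT_NAME}" = "gitlab" ] || [ "${CI_PROJECT_NAME}" = "gitlab-ee" ]; }; then
      ASSETS_IMAGE_NAME="gitlab-assets-ee"
    fi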
diff --git a/scripts/create-pipeline-failure-incident.rb b/scripts/create-pipeline-failure-incident.rb
deleted file mode 100755
index bd57abf3740..00000000000
--- a/scripts/create-pipeline-failure-incident.rb
+++ /dev/null
@@ -1,272 +0,0 @@
-#!/usr/bin/env ruby
-
-# frozen_string_literal: true
-
-require 'optparse'
-require 'json'
-
-require_relative 'api/commit_merge_requests'
-require_relative 'api/create_issue'
-require_relative 'api/create_issue_discussion'
-require_relative 'api/pipeline_failed_jobs'
-
-class CreatePipelineFailureIncident
- DEFAULT_OPTIONS = {
- project: nil,
- incident_json_file: 'incident.json'
- }.freeze
- DEFAULT_LABELS = ['Engineering Productivity', 'master-broken::undetermined'].freeze
-
- def initialize(options)
- @project = options.delete(:project)
- @api_token = options.delete(:api_token)
- end
-
- def execute
- payload = {
- issue_type: 'incident',
- title: title,
- description: description,
- labels: incident_labels
- }
-
- payload[:assignee_ids] = assignee_ids if stable_branch_incident?
-
- CreateIssue.new(project: project, api_token: api_token).execute(payload).tap do |incident|
- CreateIssueDiscussion.new(project: project, api_token: api_token)
- .execute(issue_iid: incident.iid, body: "## Root Cause Analysis")
- CreateIssueDiscussion.new(project: project, api_token: api_token)
- .execute(issue_iid: incident.iid, body: "## Investigation Steps")
- end
- end
-
- private
-
- attr_reader :project, :api_token
-
- def stable_branch_incident?
- ENV['CI_COMMIT_REF_NAME'] =~ /^[\d-]+-stable(-ee)?$/
- end
-
- def failed_jobs
- @failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.merge(exclude_allowed_to_fail_jobs: true)).execute
- end
-
- def merge_request
- @merge_request ||= CommitMergeRequests.new(
- API::DEFAULT_OPTIONS.merge(sha: ENV['CI_COMMIT_SHA'])
- ).execute.first
- end
-
- def now
- @now ||= Time.now.utc
- end
-
- def title
- @title ||= begin
- full_title = "#{now.strftime('%A %F %R UTC')} - `#{ENV['CI_PROJECT_PATH']}` " \
- "broken `#{ENV['CI_COMMIT_REF_NAME']}` with #{failed_jobs.map(&:name).join(', ')}"
-
- if full_title.size >= 255
- "#{full_title[...252]}..." # max title length is 255, and we add an elipsis
- else
- full_title
- end
- end
- end
-
- def description
- return broken_stable_description_content if stable_branch_incident?
-
- broken_master_description_content
- end
-
- def broken_master_description_content
- <<~MARKDOWN
- ## #{project_link} pipeline #{pipeline_link} failed
-
- **Branch: #{branch_link}**
-
- **Commit: #{commit_link}**
-
- **Triggered by** #{triggered_by_link} • **Source:** #{source} • **Duration:** #{pipeline_duration} minutes
-
- **Failed jobs (#{failed_jobs.size}):**
-
- #{failed_jobs_list}
-
- ### General guidelines
-
- Follow the [Broken `master` handbook guide](https://about.gitlab.com/handbook/engineering/workflow/#broken-master).
-
- ### Investigation
-
- **Be sure to fill the `Timeline` for this incident.**
-
- 1. If the failure is new, and looks like a potential flaky failure, you can retry the failing job.
- Make sure to mention the retry in the `Timeline` and leave a link to the retried job.
- 1. If the failure looks like a broken `master`, communicate the broken `master` in Slack using the "Broadcast Master Broken" workflow:
- - Click the Shortcut lightning bolt icon in the `#master-broken` channel and select "Broadcast Master Broken".
- - Click "Continue the broadcast" after the automated message in `#master-broken`.
-
- ### Pre-resolution
-
- If you believe that there's an easy resolution by either:
-
- - Reverting a particular merge request.
- - Making a quick fix (for example, one line or a few similar simple changes in a few lines).
- You can create a merge request, assign to any available maintainer, and ping people that were involved/related to the introduction of the failure.
- Additionally, a message can be posted in `#backend_maintainers` or `#frontend_maintainers` to get a maintainer to take a look at the fix ASAP.
-
- In both cases, make sure to add the ~"pipeline:expedite" label, and `master:broken` or `master:foss-broken` label, to speed up the `master`-fixing pipelines.
-
- ### Resolution
-
- Follow [the Resolution steps from the handbook](https://about.gitlab.com/handbook/engineering/workflow/#responsibilities-of-the-resolution-dri).
- MARKDOWN
- end
-
- def broken_stable_description_content
- <<~MARKDOWN
- ## #{project_link} pipeline #{pipeline_link} failed
-
- **Branch: #{branch_link}**
-
- **Commit: #{commit_link}**
-
- **Merge Request: #{merge_request_link}**
-
- **Triggered by** #{triggered_by_link} • **Source:** #{source} • **Duration:** #{pipeline_duration} minutes
-
- **Failed jobs (#{failed_jobs.size}):**
-
- #{failed_jobs_list}
-
- ### General guidelines
-
- A broken stable branch prevents patch releases from being built.
- Fixing the pipeline is a priority to prevent any delays in releases.
-
- The process in the [Broken `master` handbook guide](https://about.gitlab.com/handbook/engineering/workflow/#broken-master) can be referenced since much of that process also applies here.
-
- ### Investigation
-
- **Be sure to fill the `Timeline` for this incident.**
-
- 1. If the failure is new, and looks like a potential flaky failure, you can retry the failing job.
- Make sure to mention the retry in the `Timeline` and leave a link to the retried job.
- 1. Search for similar master-broken issues in https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues
- 1. If one exists, ask the DRI of the master-broken issue to cherry-pick any resulting merge requests into the stable branch
-
- @gitlab-org/release/managers if the merge request author or maintainer is not available, this can be escalated using the dev-on-call process in the [#dev-escalation slack channel](https://gitlab.slack.com/archives/CLKLMSUR4).
-
- ### Pre-resolution
-
- If you believe that there's an easy resolution by either:
-
- - Reverting a particular merge request.
- - Making a quick fix (for example, one line or a few similar simple changes in a few lines).
- You can create a merge request, assign to any available maintainer, and ping people that were involved/related to the introduction of the failure.
- Additionally, a message can be posted in `#backend_maintainers` or `#frontend_maintainers` to get a maintainer to take a look at the fix ASAP.
- - Cherry picking a change that was used to fix a similar master-broken issue.
-
- ### Resolution
-
- Add a comment to this issue describing how this incident could have been prevented earlier in the Merge Request pipeline (rather than the merge commit pipeline).
-
- MARKDOWN
- end
-
- def incident_labels
- return ['release-blocker'] if stable_branch_incident?
-
- master_broken_label =
- if ENV['CI_PROJECT_NAME'] == 'gitlab-foss'
- 'master:foss-broken'
- else
- 'master:broken'
- end
-
- DEFAULT_LABELS.dup << master_broken_label
- end
-
- def assignee_ids
- ids = [ENV['GITLAB_USER_ID'].to_i]
- ids << merge_request['author']['id'].to_i if merge_request
- ids
- end
-
- def pipeline_link
- "[##{ENV['CI_PIPELINE_ID']}](#{ENV['CI_PIPELINE_URL']})"
- end
-
- def branch_link
- "[`#{ENV['CI_COMMIT_REF_NAME']}`](#{ENV['CI_PROJECT_URL']}/-/commits/#{ENV['CI_COMMIT_REF_NAME']})"
- end
-
- def pipeline_duration
- ((Time.now - Time.parse(ENV['CI_PIPELINE_CREATED_AT'])) / 60.to_f).round(2)
- end
-
- def commit_link
- "[#{ENV['CI_COMMIT_TITLE']}](#{ENV['CI_PROJECT_URL']}/-/commit/#{ENV['CI_COMMIT_SHA']})"
- end
-
- def merge_request_link
- return 'N/A' unless merge_request
-
- "[#{merge_request['title']}](#{merge_request['web_url']})"
- end
-
- def source
- "`#{ENV['CI_PIPELINE_SOURCE']}`"
- end
-
- def project_link
- "[#{ENV['CI_PROJECT_PATH']}](#{ENV['CI_PROJECT_URL']})"
- end
-
- def triggered_by_link
- "[#{ENV['GITLAB_USER_NAME']}](#{ENV['CI_SERVER_URL']}/#{ENV['GITLAB_USER_LOGIN']})"
- end
-
- def failed_jobs_list_for_title
- failed_jobs.map(&:name).join(', ')
- end
-
- def failed_jobs_list
- failed_jobs.map { |job| "- [#{job.name}](#{job.web_url})" }.join("\n")
- end
-end
-
-if $PROGRAM_NAME == __FILE__
- options = CreatePipelineFailureIncident::DEFAULT_OPTIONS.dup
-
- OptionParser.new do |opts|
- opts.on("-p", "--project PROJECT", String, "Project where to create the incident (defaults to "\
- "`#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:project]}`)") do |value|
- options[:project] = value
- end
-
- opts.on("-f", "--incident-json-file file_path", String, "Path to a file where to save the incident JSON data "\
- "(defaults to `#{CreatePipelineFailureIncident::DEFAULT_OPTIONS[:incident_json_file]}`)") do |value|
- options[:incident_json_file] = value
- end
-
- opts.on("-t", "--api-token API_TOKEN", String, "A valid Project token with the `Reporter` role and `api` scope "\
- "to create the incident") do |value|
- options[:api_token] = value
- end
-
- opts.on("-h", "--help", "Prints this help") do
- puts opts
- exit
- end
- end.parse!
-
- incident_json_file = options.delete(:incident_json_file)
-
- CreatePipelineFailureIncident.new(options).execute.tap do |incident|
- File.write(incident_json_file, JSON.pretty_generate(incident.to_h)) if incident_json_file
- end
-end
diff --git a/scripts/decomposition/generate-loose-foreign-key b/scripts/decomposition/generate-loose-foreign-key
index 1ea1728732b..fbffebb6086 100755
--- a/scripts/decomposition/generate-loose-foreign-key
+++ b/scripts/decomposition/generate-loose-foreign-key
@@ -168,9 +168,8 @@ def generate_migration(definition)
return unless foreign_key_exists?(:#{definition.from_table}, :#{definition.to_table}, name: "#{definition.name}")
with_lock_retries do
- execute('LOCK #{definition.to_table}, #{definition.from_table} IN ACCESS EXCLUSIVE MODE') if transaction_open?
-
- remove_foreign_key_if_exists(:#{definition.from_table}, :#{definition.to_table}, name: "#{definition.name}")
+ remove_foreign_key_if_exists(:#{definition.from_table}, :#{definition.to_table},
+ name: "#{definition.name}", reverse_lock_order: true)
end
end
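The explicit `LOCK ... IN ACCESS EXCLUSIVE MODE` statement is gone; `reverse_lock_order: true` now asks the migration helper to take the two table locks in reversed order itself. For a hypothetical `builds` -> `projects` foreign key named `fk_builds_project_id`, the template would now generate a Ruby block like:

    with_lock_retries do
      remove_foreign_key_if_exists(:builds, :projects,
        name: "fk_builds_project_id", reverse_lock_order: true)
    end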
diff --git a/scripts/failed_tests.rb b/scripts/failed_tests.rb
index 786d3c24c74..0ba454894b7 100755
--- a/scripts/failed_tests.rb
+++ b/scripts/failed_tests.rb
@@ -12,8 +12,8 @@ class FailedTests
previous_tests_report_path: 'test_results/previous/test_reports.json',
output_directory: 'tmp/previous_failed_tests/',
format: :oneline,
- rspec_pg_regex: /rspec .+ pg12( .+)?/,
- rspec_ee_pg_regex: /rspec-ee .+ pg12( .+)?/
+ rspec_pg_regex: /rspec .+ pg13( .+)?/,
+ rspec_ee_pg_regex: /rspec-ee .+ pg13( .+)?/
}.freeze
def initialize(options)
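The defaults now track CI jobs running against PostgreSQL 13 instead of 12. A quick Ruby sketch of what the new pattern does and does not match (job names are illustrative):

    rspec_pg_regex = /rspec .+ pg13( .+)?/

    rspec_pg_regex.match?('rspec unit pg13 1/28')  # => true
    rspec_pg_regex.match?('rspec migration pg13')  # => true
    rspec_pg_regex.match?('rspec unit pg12 1/28')  # => false (the pre-change default)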
diff --git a/scripts/frontend/extract_gettext_all.js b/scripts/frontend/extract_gettext_all.js
index 922aa85241f..0cd6ab99a3a 100644
--- a/scripts/frontend/extract_gettext_all.js
+++ b/scripts/frontend/extract_gettext_all.js
@@ -19,7 +19,7 @@ extractor.addMessageTransformFunction(ensureSingleLine);
const jsParser = extractor.createJsParser([
// Place all the possible expressions to extract here:
- JsExtractors.callExpression('__', {
+ JsExtractors.callExpression(['__', 's__'], {
arguments: {
text: 0,
},
@@ -30,15 +30,13 @@ const jsParser = extractor.createJsParser([
textPlural: 1,
},
}),
- JsExtractors.callExpression('s__', {
- arguments: {
- text: 0,
- },
- }),
]);
const vueParser = decorateJSParserWithVueSupport(jsParser, {
vue2TemplateCompiler,
+ // All of our expressions contain `__`.
+  // So we can safely skip parsing files that do _not_ contain it.
+ guard: '__',
});
function printJson() {
diff --git a/scripts/frontend/po_to_json.js b/scripts/frontend/po_to_json.js
new file mode 100755
index 00000000000..fba68a61814
--- /dev/null
+++ b/scripts/frontend/po_to_json.js
@@ -0,0 +1,208 @@
+#!/usr/bin/env node
+
+const fs = require('fs/promises');
+const path = require('path');
+
+async function isDir(dirPath) {
+ if (!dirPath) {
+ return false;
+ }
+ try {
+ const stat = await fs.stat(dirPath);
+ return stat.isDirectory();
+ } catch (e) {
+ return false;
+ }
+}
+
+/**
+ * This is the main function which starts multiple workers
+ * in order to speed up the po file => app.js
+ * locale conversions
+ */
+async function main({ localeRoot, outputDir } = {}) {
+ if (!(await isDir(localeRoot))) {
+ throw new Error(`Provided localeRoot: '${localeRoot}' doesn't seem to be a folder`);
+ }
+
+ if (!(await isDir(outputDir))) {
+ throw new Error(`Provided outputDir '${outputDir}' doesn't seem to be a folder`);
+ }
+
+ // eslint-disable-next-line global-require
+ const glob = require('glob');
+ // eslint-disable-next-line global-require
+ const { Worker } = require('jest-worker');
+
+ const locales = glob.sync('*/*.po', { cwd: localeRoot });
+
+ const worker = new Worker(__filename, {
+ exposedMethods: ['convertPoFileForLocale'],
+ silent: false,
+ enableWorkerThreads: true,
+ });
+ worker.getStdout().pipe(process.stdout);
+ worker.getStderr().pipe(process.stderr);
+
+ await Promise.all(
+ locales.map((localeFile) => {
+ const locale = path.dirname(localeFile);
+ return worker.convertPoFileForLocale({
+ locale,
+ localeFile: path.join(localeRoot, localeFile),
+ resultDir: path.join(outputDir, locale),
+ });
+ }),
+ );
+
+ await worker.end();
+
+ console.log('Done converting all the po files');
+}
+
+/**
+ * This is the conversion logic for: po => JS object for jed
+ */
+function convertPoToJed(data, locale) {
+ // eslint-disable-next-line global-require
+ const { parse } = require('gettext-parser/lib/poparser');
+ const DEFAULT_CONTEXT = '';
+
+ /**
+ * TODO: This replacer might be unnecessary _or_ even cause bugs
+ * due to potential unnecessary double escaping.
+ * But for now it is here to ensure that the old and new output
+ * are equivalent.
+ * @param str
+ * @returns {string}
+ */
+ function escapeMsgid(str) {
+ return `${str}`.replace(/([\\"])/g, '\\$1');
+ }
+
+ /**
+ * TODO: This replacer might be unnecessary _or_ even cause bugs
+ * due to potential unnecessary double escaping.
+ * But for now it is here to ensure that the old and new output
+ * are equivalent.
+ *
+ * NOTE: The replacements of `\n` and `\t` need to be iterated on,
+ * because in the cases where we see those chars, they:
+ * - likely need or could be trimmed, because they do nothing
+ * - seem to be escaped in a way that is broken anyhow
+ * @param str
+ * @returns {string}
+ */
+ function escapeMsgstr(str) {
+ return `${str}`.replace(/[\t\n"\\]/g, (match) => {
+ if (match === '\n') {
+ return '\\n';
+ }
+ if (match === '\t') {
+ return '\\t';
+ }
+ return `\\${match}`;
+ });
+ }
+
+ const { headers = {}, translations: parsed } = parse(data);
+
+ const translations = Object.values(parsed[DEFAULT_CONTEXT] ?? {}).reduce((acc, entry) => {
+ const { msgid, msgstr } = entry;
+
+ /* TODO: If a msgid has no translation, we can just drop the whole key,
+ as jed will just fallback to the keys
+ We are not doing that yet, because we do want to ensure that
+ the results of the new and old way of generating the files matches.
+ if (msgstr.every((x) => x === '')) {
+ return acc;
+ }
+ */
+
+ acc[escapeMsgid(msgid)] = msgstr.map(escapeMsgstr);
+
+ return acc;
+ }, {});
+
+ // Do not bother if the file has no actual translations
+ if (!Object.keys(translations).length) {
+ return { jed: null };
+ }
+
+ if (headers['Plural-Forms']) {
+ headers.plural_forms = headers['Plural-Forms'];
+ }
+
+ // Format required for jed: http://messageformat.github.io/Jed/
+ const jed = {
+ domain: 'app',
+ locale_data: {
+ app: {
+ ...translations,
+ // Ensure that the header data which is attached to a message with id ""
+ // is not accidentally overwritten by an empty externalized string
+ '': {
+ ...headers,
+ domain: 'app',
+ lang: locale,
+ },
+ },
+ },
+ };
+
+ return { jed };
+}
+
+/**
+ * This is the function which the workers actually execute:
+ * 1. It reads the po file
+ * 2. converts it with convertPoToJed
+ * 3. writes the result to an app.js file in resultDir
+ */
+async function convertPoFileForLocale({ locale, localeFile, resultDir }) {
+ const poContent = await fs.readFile(localeFile);
+
+ const { jed } = await convertPoToJed(poContent, locale);
+
+ if (jed === null) {
+ console.log(`${locale}: No translations. Skipping creation of app.js`);
+ return;
+ }
+
+ await fs.mkdir(resultDir, { recursive: true });
+
+ await fs.writeFile(
+ path.join(resultDir, 'app.js'),
+ `window.translations = ${JSON.stringify(jed)}`,
+ 'utf8',
+ );
+ console.log(`Created app.js in ${resultDir}`);
+}
+
+/*
+ Start the main thread only if we are not part of a worker
+ */
+if (!process.env.JEST_WORKER_ID) {
+ // eslint-disable-next-line global-require
+ const argumentsParser = require('commander');
+
+ const args = argumentsParser
+ .option('-l, --locale-root <locale_root>', 'Extract messages from subfolders in this directory')
+ .option('-o, --output-dir <output_dir>', 'Write app.js files into subfolders in this directory')
+ .parse(process.argv);
+
+ main(args).catch((e) => {
+ console.warn(`Something went wrong: ${e.message}`);
+ console.warn(args.printHelp());
+ process.exitCode = 1;
+ });
+}
+
+/*
+ Expose the function for workers
+ */
+module.exports = {
+ main,
+ convertPoToJed,
+ convertPoFileForLocale,
+};
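A minimal JavaScript sketch of the jed structure `convertPoToJed` returns, assuming a toy po snippet (locale and strings are made up):

    const { convertPoToJed } = require('./po_to_json');

    const po = ['msgid "Hello"', 'msgstr "Hallo"'].join('\n');

    const { jed } = convertPoToJed(po, 'de');
    // jed => {
    //   domain: 'app',
    //   locale_data: {
    //     app: {
    //       Hello: ['Hallo'],
    //       '': { domain: 'app', lang: 'de' }, // header entry, kept last on purpose
    //     },
    //   },
    // }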
diff --git a/scripts/frontend/startup_css/constants.js b/scripts/frontend/startup_css/constants.js
index e6ca4472fe3..bf9774daea5 100644
--- a/scripts/frontend/startup_css/constants.js
+++ b/scripts/frontend/startup_css/constants.js
@@ -51,7 +51,6 @@ const createMainOutput = ({ outFile, cssKeys, type }) => ({
htmlPaths: [
path.join(FIXTURES_ROOT, `startup_css/project-${type}.html`),
path.join(FIXTURES_ROOT, `startup_css/project-${type}-signed-out.html`),
- path.join(FIXTURES_ROOT, `startup_css/project-${type}-search-ff-off.html`),
path.join(FIXTURES_ROOT, `startup_css/project-${type}-super-sidebar.html`),
],
cssKeys,
diff --git a/scripts/generate-e2e-pipeline b/scripts/generate-e2e-pipeline
index 8ca6771bf1f..c0d17443ba9 100755
--- a/scripts/generate-e2e-pipeline
+++ b/scripts/generate-e2e-pipeline
@@ -22,16 +22,17 @@ fi
qa_cache_key="qa-e2e-ruby-${RUBY_VERSION}-$(md5sum qa/Gemfile.lock | awk '{ print $1 }')"
variables=$(cat <<YML
variables:
- COLORIZED_LOGS: "true"
GIT_DEPTH: "20"
GIT_STRATEGY: "clone" # 'GIT_STRATEGY: clone' optimizes the pack-objects cache hit ratio
GIT_SUBMODULE_STRATEGY: "none"
GITLAB_QA_CACHE_KEY: "$qa_cache_key"
GITLAB_SEMVER_VERSION: "$(cat VERSION)"
+ SKIP_OMNIBUS_TRIGGER: "false"
QA_EXPORT_TEST_METRICS: "${QA_EXPORT_TEST_METRICS:-true}"
QA_FEATURE_FLAGS: "${QA_FEATURE_FLAGS}"
QA_FRAMEWORK_CHANGES: "${QA_FRAMEWORK_CHANGES:-false}"
QA_RUN_ALL_TESTS: "${QA_RUN_ALL_TESTS:-false}"
+ QA_RUN_ALL_E2E_LABEL: "${QA_RUN_ALL_E2E_LABEL:-false}"
QA_SAVE_TEST_METRICS: "${QA_SAVE_TEST_METRICS:-false}"
QA_SUITES: "$QA_SUITES"
QA_TESTS: "$QA_TESTS"
diff --git a/scripts/generate-failed-pipeline-slack-message.rb b/scripts/generate-failed-pipeline-slack-message.rb
deleted file mode 100755
index eefdebd5db5..00000000000
--- a/scripts/generate-failed-pipeline-slack-message.rb
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/usr/bin/env ruby
-
-# frozen_string_literal: true
-
-require 'optparse'
-require 'json'
-
-require_relative 'api/pipeline_failed_jobs'
-
-class GenerateFailedPipelineSlackMessage
- DEFAULT_OPTIONS = {
- failed_pipeline_slack_message_file: 'failed_pipeline_slack_message.json',
- incident_json_file: 'incident.json'
- }.freeze
-
- def initialize(options)
- @incident_json_file = options.delete(:incident_json_file)
- end
-
- def execute
- {
- channel: ENV['SLACK_CHANNEL'],
- username: "Failed pipeline reporter",
- icon_emoji: ":boom:",
- text: "*#{title}*",
- blocks: [
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*#{title}*"
- },
- accessory: {
- type: "button",
- text: {
- type: "plain_text",
- text: incident_button_text
- },
- url: incident_button_link
- }
- },
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*Branch*: #{branch_link}"
- }
- },
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*Commit*: #{commit_link}"
- }
- },
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*Triggered by* #{triggered_by_link} • *Source:* #{source} • *Duration:* #{pipeline_duration} minutes"
- }
- },
- {
- type: "section",
- text: {
- type: "mrkdwn",
- text: "*Failed jobs (#{failed_jobs.size}):* #{failed_jobs_list}"
- }
- }
- ]
- }
- end
-
- private
-
- attr_reader :incident_json_file
-
- def failed_jobs
- @failed_jobs ||= PipelineFailedJobs.new(API::DEFAULT_OPTIONS.dup.merge(exclude_allowed_to_fail_jobs: true)).execute
- end
-
- def title
- "#{project_link} pipeline #{pipeline_link} failed"
- end
-
- def incident_exist?
- return @incident_exist if defined?(@incident_exist)
-
- @incident_exist = File.exist?(incident_json_file)
- end
-
- def incident
- return unless incident_exist?
-
- @incident ||= JSON.parse(File.read(incident_json_file))
- end
-
- def incident_button_text
- if incident_exist?
- "View incident ##{incident['iid']}"
- else
- 'Create incident'
- end
- end
-
- def incident_button_link
- if incident_exist?
- incident['web_url']
- else
- "#{ENV['CI_SERVER_URL']}/#{ENV['BROKEN_BRANCH_INCIDENTS_PROJECT']}/-/issues/new?" \
- "issuable_template=incident&issue%5Bissue_type%5D=incident"
- end
- end
-
- def pipeline_link
- "<#{ENV['CI_PIPELINE_URL']}|##{ENV['CI_PIPELINE_ID']}>"
- end
-
- def branch_link
- "<#{ENV['CI_PROJECT_URL']}/-/commits/#{ENV['CI_COMMIT_REF_NAME']}|`#{ENV['CI_COMMIT_REF_NAME']}`>"
- end
-
- def pipeline_duration
- ((Time.now - Time.parse(ENV['CI_PIPELINE_CREATED_AT'])) / 60.to_f).round(2)
- end
-
- def commit_link
- "<#{ENV['CI_PROJECT_URL']}/-/commit/#{ENV['CI_COMMIT_SHA']}|#{ENV['CI_COMMIT_TITLE']}>"
- end
-
- def source
- "`#{ENV['CI_PIPELINE_SOURCE']}#{schedule_type}`"
- end
-
- def schedule_type
- ENV['CI_PIPELINE_SOURCE'] == 'schedule' ? ": #{ENV['SCHEDULE_TYPE']}" : ''
- end
-
- def project_link
- "<#{ENV['CI_PROJECT_URL']}|#{ENV['CI_PROJECT_PATH']}>"
- end
-
- def triggered_by_link
- "<#{ENV['CI_SERVER_URL']}/#{ENV['GITLAB_USER_LOGIN']}|#{ENV['GITLAB_USER_NAME']}>"
- end
-
- def failed_jobs_list
- failed_jobs.map { |job| "<#{job.web_url}|#{job.name}>" }.join(', ')
- end
-end
-
-if $PROGRAM_NAME == __FILE__
- options = GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS.dup
-
- OptionParser.new do |opts|
- opts.on("-i", "--incident-json-file file_path", String, "Path to a file where the incident JSON data "\
- "can be found (defaults to "\
- "`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:incident_json_file]}`)") do |value|
- options[:incident_json_file] = value
- end
-
- opts.on("-f", "--failed-pipeline-slack-message-file file_path", String, "Path to a file where to save the Slack "\
- "message (defaults to "\
- "`#{GenerateFailedPipelineSlackMessage::DEFAULT_OPTIONS[:failed_pipeline_slack_message_file]}`)") do |value|
- options[:failed_pipeline_slack_message_file] = value
- end
-
- opts.on("-h", "--help", "Prints this help") do
- puts opts
- exit
- end
- end.parse!
-
- failed_pipeline_slack_message_file = options.delete(:failed_pipeline_slack_message_file)
-
- GenerateFailedPipelineSlackMessage.new(options).execute.tap do |message_payload|
- if failed_pipeline_slack_message_file
- File.write(failed_pipeline_slack_message_file, JSON.pretty_generate(message_payload))
- end
- end
-end
diff --git a/scripts/generate_rspec_pipeline.rb b/scripts/generate_rspec_pipeline.rb
index e226acc0430..292b3d85b20 100755
--- a/scripts/generate_rspec_pipeline.rb
+++ b/scripts/generate_rspec_pipeline.rb
@@ -43,12 +43,20 @@ class GenerateRspecPipeline
DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS =
DURATION_OF_THE_TEST_SUITE_IN_SECONDS / NUMBER_OF_TESTS_IN_TOTAL_IN_THE_TEST_SUITE
- # rspec_files_path: A file containing RSpec files to run, separated by a space
# pipeline_template_path: A YAML pipeline configuration template to generate the final pipeline config from
- def initialize(pipeline_template_path:, rspec_files_path: nil, knapsack_report_path: nil)
+ # rspec_files_path: A file containing RSpec files to run, separated by a space
+ # knapsack_report_path: A file containing a Knapsack report
+ # test_suite_prefix: An optional test suite folder prefix (e.g. `ee/` or `jh/`)
+ # generated_pipeline_path: An optional filename where to write the pipeline config (defaults to
+ # `"#{pipeline_template_path}.yml"`)
+ def initialize(
+ pipeline_template_path:, rspec_files_path: nil, knapsack_report_path: nil, test_suite_prefix: nil,
+ generated_pipeline_path: nil)
@pipeline_template_path = pipeline_template_path.to_s
@rspec_files_path = rspec_files_path.to_s
@knapsack_report_path = knapsack_report_path.to_s
+ @test_suite_prefix = test_suite_prefix
+ @generated_pipeline_path = generated_pipeline_path || "#{pipeline_template_path}.yml"
raise ArgumentError unless File.exist?(@pipeline_template_path)
end
@@ -56,11 +64,14 @@ class GenerateRspecPipeline
def generate!
if all_rspec_files.empty?
info "Using #{SKIP_PIPELINE_YML_FILE} due to no RSpec files to run"
- FileUtils.cp(SKIP_PIPELINE_YML_FILE, pipeline_filename)
+ FileUtils.cp(SKIP_PIPELINE_YML_FILE, generated_pipeline_path)
return
end
- File.open(pipeline_filename, 'w') do |handle|
+ info "pipeline_template_path: #{pipeline_template_path}"
+ info "generated_pipeline_path: #{generated_pipeline_path}"
+
+ File.open(generated_pipeline_path, 'w') do |handle|
pipeline_yaml = ERB.new(File.read(pipeline_template_path)).result_with_hash(**erb_binding)
handle.write(pipeline_yaml.squeeze("\n").strip)
end
@@ -68,7 +79,8 @@ class GenerateRspecPipeline
private
- attr_reader :pipeline_template_path, :rspec_files_path, :knapsack_report_path
+ attr_reader :pipeline_template_path, :rspec_files_path, :knapsack_report_path, :test_suite_prefix,
+ :generated_pipeline_path
def info(text)
$stdout.puts "[#{self.class.name}] #{text}"
@@ -78,12 +90,11 @@ class GenerateRspecPipeline
@all_rspec_files ||= File.exist?(rspec_files_path) ? File.read(rspec_files_path).split(' ') : []
end
- def pipeline_filename
- @pipeline_filename ||= "#{pipeline_template_path}.yml"
- end
-
def erb_binding
- { rspec_files_per_test_level: rspec_files_per_test_level }
+ {
+ rspec_files_per_test_level: rspec_files_per_test_level,
+ test_suite_prefix: test_suite_prefix
+ }
end
def rspec_files_per_test_level
@@ -91,7 +102,7 @@ class GenerateRspecPipeline
all_remaining_rspec_files = all_rspec_files.dup
TEST_LEVELS.each_with_object(Hash.new { |h, k| h[k] = {} }) do |test_level, memo| # rubocop:disable Rails/IndexWith
memo[test_level][:files] = all_remaining_rspec_files
- .grep(Quality::TestLevel.new.regexp(test_level))
+ .grep(test_level_service.regexp(test_level, true))
.tap { |files| files.each { |file| all_remaining_rspec_files.delete(file) } }
memo[test_level][:parallelization] = optimal_nodes_count(test_level, memo[test_level][:files])
end
@@ -125,10 +136,15 @@ class GenerateRspecPipeline
remaining_knapsack_report = knapsack_report.dup
TEST_LEVELS.each_with_object({}) do |test_level, memo|
matching_data_per_test_level = remaining_knapsack_report
- .select { |test_file, _| test_file.match?(Quality::TestLevel.new.regexp(test_level)) }
+ .select { |test_file, _| test_file.match?(test_level_service.regexp(test_level, true)) }
.tap { |test_data| test_data.each { |file, _| remaining_knapsack_report.delete(file) } }
+
memo[test_level] =
- matching_data_per_test_level.values.sum / matching_data_per_test_level.keys.size
+ if matching_data_per_test_level.empty?
+ DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS
+ else
+ matching_data_per_test_level.values.sum / matching_data_per_test_level.keys.size
+ end
end
else
TEST_LEVELS.each_with_object({}) do |test_level, memo| # rubocop:disable Rails/IndexWith
@@ -146,6 +162,10 @@ class GenerateRspecPipeline
{}
end
end
+
+ def test_level_service
+ @test_level_service ||= Quality::TestLevel.new(test_suite_prefix)
+ end
end
if $PROGRAM_NAME == __FILE__
@@ -166,6 +186,15 @@ if $PROGRAM_NAME == __FILE__
options[:knapsack_report_path] = value
end
+ opts.on("-p", "--test-suite-prefix test_suite_prefix", String, "Test suite folder prefix") do |value|
+ options[:test_suite_prefix] = value
+ end
+
+ opts.on("-o", "--generated-pipeline-path generated_pipeline_path", String, "Path where to write the pipeline " \
+ "config") do |value|
+ options[:generated_pipeline_path] = value
+ end
+
opts.on("-h", "--help", "Prints this help") do
puts opts
exit
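With the two new options, an EE pipeline can be generated from the same template as the FOSS one. A usage sketch with hypothetical paths (only `test_suite_prefix` and `generated_pipeline_path` are new):

    GenerateRspecPipeline.new(
      pipeline_template_path: '.gitlab/ci/rails/rspec.yml.erb',  # hypothetical
      rspec_files_path: 'tmp/matching_tests.txt',                # hypothetical
      knapsack_report_path: 'knapsack/report-master.json',       # hypothetical
      test_suite_prefix: 'ee/',
      generated_pipeline_path: 'tmp/rspec-ee-pipeline.yml'
    ).generate!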
diff --git a/scripts/gitaly-test-build b/scripts/gitaly-test-build
index adc9b56ca4f..6901593009a 100755
--- a/scripts/gitaly-test-build
+++ b/scripts/gitaly-test-build
@@ -6,8 +6,8 @@ require 'fileutils'
require_relative '../spec/support/helpers/gitaly_setup'
# This script assumes tmp/tests/gitaly already contains the correct
-# Gitaly version. We just have to compile it and run its 'bundle
-# install'. We have this separate script for that to avoid bundle
+# Gitaly version. We just have to compile it.
+# We have this separate script for that to avoid bundle
# poisoning in CI. This script should only be run in CI.
class GitalyTestBuild
include GitalySetup
@@ -16,14 +16,11 @@ class GitalyTestBuild
# If we have the binaries from the cache, we can skip building them again
if File.exist?(tmp_tests_gitaly_bin_dir)
GitalySetup::LOGGER.debug "Gitaly binary already built. Skip building...\n"
- # We still need to install the gems in that case
- install_gitaly_gems
else
abort 'gitaly build failed' unless build_gitaly
end
ensure_gitlab_shell_secret!
- check_gitaly_config!
# Starting gitaly further validates its configuration
gitaly_pid = start_gitaly
diff --git a/scripts/gitaly-test-spawn b/scripts/gitaly-test-spawn
index b9c78b88555..475c7715bdd 100755
--- a/scripts/gitaly-test-spawn
+++ b/scripts/gitaly-test-spawn
@@ -9,8 +9,6 @@ class GitalyTestSpawn
include GitalySetup
def run
- install_gitaly_gems
-
# Run Praefect migrations
setup_praefect
diff --git a/scripts/gitlab_component_helpers.sh b/scripts/gitlab_component_helpers.sh
index 309e339de01..301d4fb5d37 100644
--- a/scripts/gitlab_component_helpers.sh
+++ b/scripts/gitlab_component_helpers.sh
@@ -52,22 +52,7 @@ export GITLAB_ASSETS_PACKAGE="assets-${NODE_ENV}-${GITLAB_EDITION}-${GITLAB_ASSE
export GITLAB_ASSETS_PACKAGE_URL="${API_PACKAGES_BASE_URL}/assets/${NODE_ENV}-${GITLAB_EDITION}-${GITLAB_ASSETS_HASH}/${GITLAB_ASSETS_PACKAGE}"
# Fixtures constants
-
-# Export the SHA variable for updating/downloading fixture packages, using the following order of precedence:
-# 1. If MERGE_BASE_SHA is defined, use its value.
-# 2. If CI_MERGE_REQUEST_SOURCE_BRANCH_SHA is defined, use its value for merge request pipelines.
-# 3. Otherwise, use the value of CI_COMMIT_SHA for default branch pipelines or merge requests with detached pipelines.
-if [ -n "${MERGE_BASE_SHA:-}" ]; then
- export FIXTURES_SHA="${MERGE_BASE_SHA}"
-elif [ -n "${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA:-}" ]; then
- export FIXTURES_SHA="${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}"
-else
- export FIXTURES_SHA="${CI_COMMIT_SHA}"
-fi
-
export FIXTURES_PATH="tmp/tests/frontend/**/*"
-export FIXTURES_PACKAGE="fixtures-${FIXTURES_SHA}.tar.gz"
-export FIXTURES_PACKAGE_URL="${API_PACKAGES_BASE_URL}/fixtures/${FIXTURES_SHA}/${FIXTURES_PACKAGE}"
# Generic helper functions
function archive_doesnt_exist() {
@@ -75,7 +60,13 @@ function archive_doesnt_exist() {
status=$(curl -I --silent --retry 3 --output /dev/null -w "%{http_code}" "${package_url}")
- [[ "${status}" != "200" ]]
+ if [[ "${status}" = "200" ]]; then
+ echoinfo "The archive was found. The server returned status ${status}."
+ return 1
+ else
+ echoinfo "The archive was not found. The server returned status ${status}."
+ return 0
+ fi
}
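The logging added here makes the inverted shell semantics explicit: the function returns 0 ("success") precisely when the archive is missing. A usage sketch with the fixtures helpers defined later in this file:

    # Only build and upload a fixtures package when none exists yet.
    if fixtures_archive_doesnt_exist; then
      create_fixtures_package
      upload_fixtures_package
    fi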
function create_package() {
@@ -167,14 +158,69 @@ function upload_gitlab_assets_package() {
}
# Fixtures functions
-function fixtures_archive_doesnt_exist() {
- archive_doesnt_exist "${FIXTURES_PACKAGE_URL}"
+function check_fixtures_download() {
+ if [[ "${REUSE_FRONTEND_FIXTURES_ENABLED:-}" != "true" ]]; then
+ return 1
+ fi
+
+ # Note: Currently, reusing frontend fixtures is only supported in EE.
+ # Other projects will be supported through this issue in the future: https://gitlab.com/gitlab-org/gitlab/-/issues/393615.
+ if [[ "${CI_PROJECT_NAME}" != "gitlab" ]] || [[ "${CI_JOB_NAME}" =~ "foss" ]]; then
+ return 1
+ fi
+
+ if [[ -z "${CI_MERGE_REQUEST_IID:-}" ]]; then
+ return 1
+ else
+ if tooling/bin/find_only_js_changes && ! fixtures_archive_doesnt_exist; then
+ return 0
+ else
+ return 1
+ fi
+ fi
}
function create_fixtures_package() {
create_package "${FIXTURES_PACKAGE}" "${FIXTURES_PATH}"
}
+function download_and_extract_fixtures() {
+ read_curl_package "${FIXTURES_PACKAGE_URL}" | extract_package
+}
+
+function export_fixtures_package_variables() {
+ export FIXTURES_PACKAGE="fixtures-${FIXTURES_SHA}.tar.gz"
+ export FIXTURES_PACKAGE_URL="${API_PACKAGES_BASE_URL}/fixtures/${FIXTURES_SHA}/${FIXTURES_PACKAGE}"
+}
+
+function export_fixtures_sha_for_download() {
+ export FIXTURES_SHA="${CI_MERGE_REQUEST_TARGET_BRANCH_SHA:-${CI_MERGE_REQUEST_DIFF_BASE_SHA:-$CI_COMMIT_SHA}}"
+ export_fixtures_package_variables
+}
+
+function export_fixtures_sha_for_upload() {
+ export FIXTURES_SHA="${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA:-$CI_COMMIT_SHA}"
+ export_fixtures_package_variables
+}
+
+function fixtures_archive_doesnt_exist() {
+ echoinfo "Checking if the package is available at ${FIXTURES_PACKAGE_URL} ..."
+
+ archive_doesnt_exist "${FIXTURES_PACKAGE_URL}"
+}
+
+function fixtures_directory_exists() {
+ local fixtures_directory="tmp/tests/frontend/"
+
+ if [[ -d "${fixtures_directory}" ]]; then
+ echo "${fixtures_directory} directory exists"
+ return 0
+ else
+ echo "${fixtures_directory} directory does not exist"
+ return 1
+ fi
+}
+
function upload_fixtures_package() {
upload_package "${FIXTURES_PACKAGE}" "${FIXTURES_PACKAGE_URL}"
}
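The nested `${VAR:-default}` expansion in `export_fixtures_sha_for_download` picks the first non-empty value: the merge-request target-branch SHA, then the diff-base SHA, then the commit SHA. A sketch of how it resolves (SHAs are made up):

    CI_MERGE_REQUEST_TARGET_BRANCH_SHA=""     # empty, e.g. in a detached pipeline
    CI_MERGE_REQUEST_DIFF_BASE_SHA="abc123"
    CI_COMMIT_SHA="def456"

    FIXTURES_SHA="${CI_MERGE_REQUEST_TARGET_BRANCH_SHA:-${CI_MERGE_REQUEST_DIFF_BASE_SHA:-$CI_COMMIT_SHA}}"
    echo "${FIXTURES_SHA}"   # => abc123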
diff --git a/scripts/packages/automated_cleanup.rb b/scripts/packages/automated_cleanup.rb
index 2b5a0011079..8d9ba1e4a34 100755
--- a/scripts/packages/automated_cleanup.rb
+++ b/scripts/packages/automated_cleanup.rb
@@ -123,4 +123,8 @@ if $PROGRAM_NAME == __FILE__
timed('"assets" packages cleanup') do
automated_cleanup.perform_gitlab_package_cleanup!(package_name: 'assets', days_for_delete: 7)
end
+
+ timed('"fixtures" packages cleanup') do
+ automated_cleanup.perform_gitlab_package_cleanup!(package_name: 'fixtures', days_for_delete: 14)
+ end
end
diff --git a/scripts/pipeline/create_test_failure_issues.rb b/scripts/pipeline/create_test_failure_issues.rb
index 6312d392760..e4bcabb6223 100755
--- a/scripts/pipeline/create_test_failure_issues.rb
+++ b/scripts/pipeline/create_test_failure_issues.rb
@@ -24,7 +24,7 @@ class CreateTestFailureIssues
puts "[CreateTestFailureIssues] No failed tests!" if failed_tests.empty?
failed_tests.each_with_object([]) do |failed_test, existing_issues|
- CreateTestFailureIssue.new(options.dup).comment_or_create(failed_test, existing_issues).tap do |issue|
+ CreateTestFailureIssue.new(options.dup).upsert(failed_test, existing_issues).tap do |issue|
existing_issues << issue
File.write(File.join(options[:issue_json_folder], "issue-#{issue.iid}.json"), JSON.pretty_generate(issue.to_h))
end
@@ -52,14 +52,30 @@ class CreateTestFailureIssue
WWW_GITLAB_COM_GROUPS_JSON = "#{WWW_GITLAB_COM_SITE}/groups.json".freeze
WWW_GITLAB_COM_CATEGORIES_JSON = "#{WWW_GITLAB_COM_SITE}/categories.json".freeze
FEATURE_CATEGORY_METADATA_REGEX = /(?<=feature_category: :)\w+/
- DEFAULT_LABELS = ['type::maintenance', 'failure::flaky-test'].freeze
+ DEFAULT_LABELS = ['type::maintenance', 'test'].freeze
+
+ def self.server_host
+ @server_host ||= ENV.fetch('CI_SERVER_HOST', 'gitlab.com')
+ end
+
+ def self.project_path
+ @project_path ||= ENV.fetch('CI_PROJECT_PATH', 'gitlab-org/gitlab')
+ end
+
+ def self.file_base_url
+ @file_base_url ||= "https://#{server_host}/#{project_path}/-/blob/master/"
+ end
+
+ def self.report_item_regex
+ @report_item_regex ||= %r{^1\. \d{4}-\d{2}-\d{2}: https://#{server_host}/#{project_path}/-/jobs/.+$}
+ end
def initialize(options)
@project = options.delete(:project)
@api_token = options.delete(:api_token)
end
- def comment_or_create(failed_test, existing_issues = [])
+ def upsert(failed_test, existing_issues = [])
existing_issue = find(failed_test, existing_issues)
if existing_issue
@@ -70,12 +86,16 @@ class CreateTestFailureIssue
end
end
+ private
+
+ attr_reader :project, :api_token
+
def find(failed_test, existing_issues = [])
- failed_test_issue_title = failed_test_issue_title(failed_test)
- issue_from_existing_issues = existing_issues.find { |issue| issue.title == failed_test_issue_title }
+ test_hash = failed_test_hash(failed_test)
+ issue_from_existing_issues = existing_issues.find { |issue| issue.title.include?(test_hash) }
issue_from_issue_tracker = FindIssues
.new(project: project, api_token: api_token)
- .execute(state: 'opened', search: failed_test_issue_title)
+ .execute(state: :opened, search: test_hash, in: :title, per_page: 1)
.first
existing_issue = issue_from_existing_issues || issue_from_issue_tracker
@@ -88,10 +108,24 @@ class CreateTestFailureIssue
end
def update_reports(existing_issue, failed_test)
- new_issue_description = "#{existing_issue.description}\n- #{failed_test['job_url']} (#{ENV['CI_PIPELINE_URL']})"
+ # We count the number of existing reports.
+ reports_count = existing_issue.description
+ .scan(self.class.report_item_regex)
+ .size.to_i + 1
+
+ # We include the number of reports in the header, for visibility.
+ issue_description = existing_issue.description.sub(/^### Reports.*$/, "### Reports (#{reports_count})")
+
+ # We add the current failure to the list of reports.
+ issue_description = "#{issue_description}\n#{report_list_item(failed_test)}"
+
UpdateIssue
.new(project: project, api_token: api_token)
- .execute(existing_issue.iid, description: new_issue_description)
+ .execute(
+ existing_issue.iid,
+ description: issue_description,
+ weight: reports_count
+ )
puts "[CreateTestFailureIssue] Added a report in '#{existing_issue.title}': #{existing_issue.web_url}!"
end
@@ -99,7 +133,8 @@ class CreateTestFailureIssue
payload = {
title: failed_test_issue_title(failed_test),
description: failed_test_issue_description(failed_test),
- labels: failed_test_issue_labels(failed_test)
+ labels: failed_test_issue_labels(failed_test),
+ weight: 1
}
CreateIssue.new(project: project, api_token: api_token).execute(payload).tap do |issue|
@@ -107,36 +142,40 @@ class CreateTestFailureIssue
end
end
- private
-
- attr_reader :project, :api_token
-
- def failed_test_id(failed_test)
- Digest::SHA256.hexdigest(search_safe(failed_test['name']))[0...12]
+ def failed_test_hash(failed_test)
+ Digest::SHA256.hexdigest(failed_test['file'] + failed_test['name'])[0...12]
end
def failed_test_issue_title(failed_test)
- title = "#{failed_test['file']} - ID: #{failed_test_id(failed_test)}"
+ title = "#{failed_test['file']} [test-hash:#{failed_test_hash(failed_test)}]"
raise "Title is too long!" if title.size > MAX_TITLE_LENGTH
title
end
+ def test_file_link(failed_test)
+ "[`#{failed_test['file']}`](#{self.class.file_base_url}#{failed_test['file']})"
+ end
+
+ def report_list_item(failed_test)
+ "1. #{Time.new.utc.strftime('%F')}: #{failed_test['job_url']} (#{ENV['CI_PIPELINE_URL']})"
+ end
+
def failed_test_issue_description(failed_test)
<<~DESCRIPTION
- ### Full description
+ ### Test description
`#{search_safe(failed_test['name'])}`
- ### File path
+ ### Test file path
- `#{failed_test['file']}`
+ #{test_file_link(failed_test)}
<!-- Don't add anything after the report list since it's updated automatically -->
- ### Reports
+ ### Reports (1)
- - #{failed_test['job_url']} (#{ENV['CI_PIPELINE_URL']})
+ #{report_list_item(failed_test)}
DESCRIPTION
end
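The report bookkeeping relies on `report_item_regex` recognising exactly the list items that `report_list_item` appends. A Ruby sketch of the counting round trip (host, project, and URLs are made up):

    report_item_regex = %r{^1\. \d{4}-\d{2}-\d{2}: https://gitlab.com/gitlab-org/gitlab/-/jobs/.+$}

    description = <<~MARKDOWN
      ### Reports (2)

      1. 2023-04-18: https://gitlab.com/gitlab-org/gitlab/-/jobs/101 (https://gitlab.com/gitlab-org/gitlab/-/pipelines/11)
      1. 2023-04-19: https://gitlab.com/gitlab-org/gitlab/-/jobs/102 (https://gitlab.com/gitlab-org/gitlab/-/pipelines/12)
    MARKDOWN

    reports_count = description.scan(report_item_regex).size + 1          # => 3
    description.sub(/^### Reports.*$/, "### Reports (#{reports_count})")  # header becomes "### Reports (3)"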
diff --git a/scripts/review_apps/automated_cleanup.rb b/scripts/review_apps/automated_cleanup.rb
index 5fff7f4ff88..154a73462bb 100755
--- a/scripts/review_apps/automated_cleanup.rb
+++ b/scripts/review_apps/automated_cleanup.rb
@@ -24,6 +24,25 @@ module ReviewApps
].freeze
ENVIRONMENTS_NOT_FOUND_THRESHOLD = 3
+ def self.parse_args(argv)
+ options = {
+ dry_run: false
+ }
+
+ OptionParser.new do |opts|
+ opts.on("-d BOOLEAN", "--dry-run BOOLEAN", String, "Whether to perform a dry-run or not.") do |value|
+ options[:dry_run] = true if value == 'true'
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!(argv)
+
+ options
+ end
+
# $GITLAB_PROJECT_REVIEW_APP_CLEANUP_API_TOKEN => `Automated Review App Cleanup` project token
def initialize(
project_path: ENV['CI_PROJECT_PATH'],
@@ -50,16 +69,12 @@ module ReviewApps
end
end
- def review_apps_namespace
- 'review-apps'
- end
-
def helm
@helm ||= Tooling::Helm3Client.new
end
def kubernetes
- @kubernetes ||= Tooling::KubernetesClient.new(namespace: review_apps_namespace)
+ @kubernetes ||= Tooling::KubernetesClient.new
end
def perform_gitlab_environment_cleanup!(days_for_delete:)
@@ -164,14 +179,8 @@ module ReviewApps
def perform_stale_namespace_cleanup!(days:)
puts "Dry-run mode." if dry_run
- kubernetes_client = Tooling::KubernetesClient.new(namespace: nil)
-
- kubernetes_client.cleanup_review_app_namespaces(created_before: threshold_time(days: days), wait: false) unless dry_run
- end
- def perform_stale_pvc_cleanup!(days:)
- puts "Dry-run mode." if dry_run
- kubernetes.cleanup_by_created_at(resource_type: 'pvc', created_before: threshold_time(days: days), wait: false) unless dry_run
+ kubernetes.cleanup_namespaces_by_created_at(created_before: threshold_time(days: days)) unless dry_run
end
private
@@ -245,8 +254,7 @@ module ReviewApps
releases_names = releases.map(&:name)
unless dry_run
helm.delete(release_name: releases_names)
- kubernetes.cleanup_by_release(release_name: releases_names, wait: false)
- kubernetes.delete_namespaces_by_exact_names(resource_names: releases_names, wait: false)
+ kubernetes.delete_namespaces(releases_names)
end
rescue Tooling::Helm3Client::CommandFailedError => ex
@@ -284,21 +292,7 @@ def timed(task)
end
if $PROGRAM_NAME == __FILE__
- options = {
- dry_run: false
- }
-
- OptionParser.new do |opts|
- opts.on("-d", "--dry-run", "Whether to perform a dry-run or not.") do |value|
- options[:dry_run] = true
- end
-
- opts.on("-h", "--help", "Prints this help") do
- puts opts
- exit
- end
- end.parse!
-
+ options = ReviewApps::AutomatedCleanup.parse_args(ARGV)
automated_cleanup = ReviewApps::AutomatedCleanup.new(options: options)
timed('Docs Review Apps cleanup') do
@@ -322,10 +316,4 @@ if $PROGRAM_NAME == __FILE__
timed('Stale Namespace cleanup') do
automated_cleanup.perform_stale_namespace_cleanup!(days: 3)
end
-
- puts
-
- timed('Stale PVC cleanup') do
- automated_cleanup.perform_stale_pvc_cleanup!(days: 30)
- end
end
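Extracting `parse_args` makes the CLI parsing callable with an explicit argv, and therefore unit-testable. Note the changed flag semantics: `--dry-run` now takes a string argument, and only the literal value `true` enables dry-run mode:

    ReviewApps::AutomatedCleanup.parse_args(%w[--dry-run true])   # => { dry_run: true }
    ReviewApps::AutomatedCleanup.parse_args(%w[--dry-run false])  # => { dry_run: false }
    ReviewApps::AutomatedCleanup.parse_args([])                   # => { dry_run: false }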
diff --git a/scripts/review_apps/base-config.yaml b/scripts/review_apps/base-config.yaml
index 1d062a76191..d923a659da0 100644
--- a/scripts/review_apps/base-config.yaml
+++ b/scripts/review_apps/base-config.yaml
@@ -23,10 +23,17 @@ gitlab:
gitaly:
resources:
requests:
- cpu: 1200m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22gitaly%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average is around 0.100vCPU (setting request accordingly)
+ #
+ # The maximum CPU usage was 0.196vCPU (setting limit accordingly)
+ cpu: 150m
memory: 600Mi
limits:
- cpu: 1800m
+ cpu: 300m
memory: 1000Mi
persistence:
size: 10Gi
@@ -42,10 +49,17 @@ gitlab:
gitlab-shell:
resources:
requests:
- cpu: 500m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22gitlab-shell%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average is around 0.01vCPU (setting request accordingly)
+ #
+ # The maximum CPU usage was 0.127vCPU (setting limit accordingly)
+ cpu: 10m
memory: 100Mi
limits:
- cpu: 750m
+ cpu: 150m
memory: 150Mi
minReplicas: 1
maxReplicas: 1
@@ -87,7 +101,14 @@ gitlab:
toolbox:
resources:
requests:
- cpu: 300m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22toolbox%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.100vCPU
+ #
+ # The maximum CPU usage was 0.250vCPU (setting limit accordingly)
+ cpu: 150m
memory: 1927Mi
limits:
cpu: 450m
@@ -124,10 +145,18 @@ gitlab:
gitlab-runner:
resources:
requests:
- cpu: 675m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3Dmonitoring.regex.full_match(%5C%22.*gitlab-runner$%5C%22)%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.01vCPU
+ #
+ # The maximum CPU usage was 0.015vCPU (setting limit accordingly)
+ cpu: 10m
memory: 100Mi
limits:
- cpu: 1015m
+ # In case somebody would like to use runners in review-apps, we set the limit higher than the requests
+ cpu: 400m
memory: 150Mi
nodeSelector:
preemptible: "true"
@@ -153,10 +182,17 @@ nginx-ingress:
ssl-ciphers: ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4
resources:
requests:
- cpu: 300m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22controller%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.02vCPU
+ #
+ # The maximum CPU usage was 0.07vCPU (setting limit accordingly)
+ cpu: 10m
memory: 450Mi
limits:
- cpu: 600m
+ cpu: 20m
memory: 675Mi
service:
enableHttp: false
@@ -182,10 +218,17 @@ postgresql:
enabled: false
resources:
requests:
- cpu: 600m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3Dmonitoring.regex.full_match(%5C%22.*-postgresql$%5C%22)%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.150vCPU
+ #
+ # The maximum CPU usage was 0.420vCPU (setting limit accordingly)
+ cpu: 150m
memory: 1000Mi
limits:
- cpu: 1300m
+ cpu: 1000m
memory: 1600Mi
master:
nodeSelector:
@@ -201,10 +244,17 @@ redis:
enabled: false
resources:
requests:
- cpu: 100m
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22redis%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.03vCPU
+ #
+ # The maximum CPU usage was 0.500vCPU (setting limit accordingly)
+ cpu: 10m
memory: 60Mi
limits:
- cpu: 200m
+ cpu: 500m
memory: 130Mi
master:
nodeSelector:
@@ -217,11 +267,18 @@ registry:
minReplicas: 1
maxReplicas: 1
resources:
+ # Based on https://console.cloud.google.com/monitoring/metrics-explorer;endTime=2023-04-19T08:37:33.183Z;startTime=2023-02-05T09:37:33.182Z?pageState=%7B%22xyChart%22:%7B%22constantLines%22:%5B%5D,%22dataSets%22:%5B%7B%22plotType%22:%22LINE%22,%22targetAxis%22:%22Y1%22,%22timeSeriesFilter%22:%7B%22aggregations%22:%5B%7B%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22groupByFields%22:%5B%5D,%22perSeriesAligner%22:%22ALIGN_RATE%22%7D,%7B%22crossSeriesReducer%22:%22REDUCE_MEAN%22,%22groupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D,%22perSeriesAligner%22:%22ALIGN_MEAN%22%7D%5D,%22apiSource%22:%22DEFAULT_CLOUD%22,%22crossSeriesReducer%22:%22REDUCE_NONE%22,%22filter%22:%22metric.type%3D%5C%22kubernetes.io%2Fcontainer%2Fcpu%2Fcore_usage_time%5C%22%20resource.type%3D%5C%22k8s_container%5C%22%20resource.label.%5C%22container_name%5C%22%3D%5C%22registry%5C%22%22,%22groupByFields%22:%5B%5D,%22minAlignmentPeriod%22:%2260s%22,%22perSeriesAligner%22:%22ALIGN_RATE%22,%22secondaryCrossSeriesReducer%22:%22REDUCE_MEAN%22,%22secondaryGroupByFields%22:%5B%22resource.label.%5C%22namespace_name%5C%22%22%5D%7D%7D%5D,%22options%22:%7B%22mode%22:%22COLOR%22%7D,%22y1Axis%22:%7B%22label%22:%22%22,%22scale%22:%22LINEAR%22%7D%7D%7D&project=gitlab-review-apps
+ #
+ # Data over the past 3 months (2023-02-24 - 2023-04-19)
+ #
+ # The average seems to be around 0.0005vCPU
+ #
+ # The maximum CPU usage was 0.003vCPU (setting limit accordingly)
requests:
- cpu: 100m
+ cpu: 10m
memory: 30Mi
limits:
- cpu: 200m
+ cpu: 50m
memory: 45Mi
nodeSelector:
preemptible: "true"
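
The CPU requests and limits above are Kubernetes quantities in millicores (1000m = 1 vCPU), so an observed average of 0.150 vCPU maps to the 150m request. A minimal sketch of that conversion, using a hypothetical shell helper that is not part of this commit:

    # Convert a decimal vCPU reading (e.g. from the metrics explorer) into a
    # millicore quantity, rounding to the nearest integer. 1 vCPU == 1000m.
    vcpu_to_millicores() {
      awk -v v="$1" 'BEGIN { printf "%dm\n", (v * 1000) + 0.5 }'
    }

    vcpu_to_millicores 0.150   # => 150m (the postgresql request above)
    vcpu_to_millicores 0.420   # => 420m (the observed postgresql maximum)
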
diff --git a/scripts/review_apps/k8s-resources-count-checks.sh b/scripts/review_apps/k8s-resources-count-checks.sh
index b63fa043065..7ce98f8d164 100755
--- a/scripts/review_apps/k8s-resources-count-checks.sh
+++ b/scripts/review_apps/k8s-resources-count-checks.sh
@@ -15,6 +15,9 @@ function k8s_resource_count() {
SERVICES_COUNT_THRESHOLD=3000
REVIEW_APPS_COUNT_THRESHOLD=200
+# One review app currently deploys 4 PVCs
+PVCS_COUNT_THRESHOLD=$((REVIEW_APPS_COUNT_THRESHOLD * 4))
+
exit_with_error=false
# In the current GKE cluster configuration, we should never go higher than 4096 services per cluster.
@@ -36,6 +39,12 @@ if [ "$(echo $(($namespaces_count - $review_apps_count)) | sed 's/-//')" -gt 30
exit_with_error=true
fi
+pvcs_count=$(kubectl get pvc -A | wc -l | xargs)
+if [ "${pvcs_count}" -gt "${PVCS_COUNT_THRESHOLD}" ]; then
+ >&2 echo "❌ [ERROR] PVCs are above ${PVCS_COUNT_THRESHOLD} (currently at ${pvcs_count})"
+ exit_with_error=true
+fi
+
if [ "${exit_with_error}" = true ] ; then
exit 1
fi
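
`kubectl get pvc -A` prints a header row, so the `wc -l` count above runs one high. A header-free sketch of the same check, assuming nothing beyond standard kubectl flags (`--no-headers` is one) and the thresholds defined above:

    # Count PVCs across all namespaces without the header row, then compare
    # against the threshold (200 review apps * 4 PVCs each = 800).
    pvcs_count=$(kubectl get pvc --all-namespaces --no-headers | wc -l | xargs)
    if [ "${pvcs_count}" -gt "$((200 * 4))" ]; then
      >&2 echo "❌ [ERROR] PVCs are above $((200 * 4)) (currently at ${pvcs_count})"
    fi
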
diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh
index 98ad3112202..79571cb1e53 100755
--- a/scripts/review_apps/review-apps.sh
+++ b/scripts/review_apps/review-apps.sh
@@ -273,11 +273,63 @@ function deploy() {
retry "create_application_secret"
cat > review_apps.values.yml <<EOF
+ ci:
+ branch: "${CI_COMMIT_REF_NAME}"
+ commit:
+ sha: "${CI_COMMIT_SHORT_SHA}"
+ job:
+ url: "${CI_JOB_URL}"
+ pipeline:
+ url: "${CI_PIPELINE_URL}"
+
gitlab:
+ gitaly:
+ image:
+ repository: "${gitlab_gitaly_image_repository}"
+ tag: "${gitaly_image_tag}"
+ gitlab-shell:
+ image:
+ repository: "${gitlab_shell_image_repository}"
+ tag: "v${GITLAB_SHELL_VERSION}"
+ migrations:
+ image:
+ repository: "${gitlab_toolbox_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
+ sidekiq:
+ annotations:
+ commit: "${CI_COMMIT_SHORT_SHA}"
+ image:
+ repository: "${gitlab_sidekiq_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
+ toolbox:
+ image:
+ repository: "${gitlab_toolbox_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
webservice:
+ annotations:
+ commit: "${CI_COMMIT_SHORT_SHA}"
extraEnv:
REVIEW_APPS_ENABLED: "true"
REVIEW_APPS_MERGE_REQUEST_IID: "${CI_MERGE_REQUEST_IID}"
+ image:
+ repository: "${gitlab_webservice_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
+ workhorse:
+ image: "${gitlab_workhorse_image_repository}"
+ tag: "${CI_COMMIT_SHA}"
+
+ global:
+ hosts:
+ domain: "${REVIEW_APPS_DOMAIN}"
+ hostSuffix: "${HOST_SUFFIX}"
+ appConfig:
+ sentry:
+ dsn: "${REVIEW_APPS_SENTRY_DSN}"
+ # Boolean fields must be left unquoted so Helm parses them as booleans
+ enabled: ${sentry_enabled}
+ environment: "review"
+
+ releaseOverride: "${release}"
EOF
HELM_CMD=$(cat << EOF
@@ -286,34 +338,7 @@ HELM_CMD=$(cat << EOF
--create-namespace \
--install \
--wait \
- -f review_apps.values.yml \
- --timeout "${HELM_INSTALL_TIMEOUT:-20m}" \
- --set ci.branch="${CI_COMMIT_REF_NAME}" \
- --set ci.commit.sha="${CI_COMMIT_SHORT_SHA}" \
- --set ci.job.url="${CI_JOB_URL}" \
- --set ci.pipeline.url="${CI_PIPELINE_URL}" \
- --set releaseOverride="${release}" \
- --set global.hosts.hostSuffix="${HOST_SUFFIX}" \
- --set global.hosts.domain="${REVIEW_APPS_DOMAIN}" \
- --set global.appConfig.sentry.enabled="${sentry_enabled}" \
- --set global.appConfig.sentry.dsn="${REVIEW_APPS_SENTRY_DSN}" \
- --set global.appConfig.sentry.environment="review" \
- --set gitlab.migrations.image.repository="${gitlab_toolbox_image_repository}" \
- --set gitlab.migrations.image.tag="${CI_COMMIT_SHA}" \
- --set gitlab.gitaly.image.repository="${gitlab_gitaly_image_repository}" \
- --set gitlab.gitaly.image.tag="${gitaly_image_tag}" \
- --set gitlab.gitlab-shell.image.repository="${gitlab_shell_image_repository}" \
- --set gitlab.gitlab-shell.image.tag="v${GITLAB_SHELL_VERSION}" \
- --set gitlab.sidekiq.annotations.commit="${CI_COMMIT_SHORT_SHA}" \
- --set gitlab.sidekiq.image.repository="${gitlab_sidekiq_image_repository}" \
- --set gitlab.sidekiq.image.tag="${CI_COMMIT_SHA}" \
- --set gitlab.webservice.annotations.commit="${CI_COMMIT_SHORT_SHA}" \
- --set gitlab.webservice.image.repository="${gitlab_webservice_image_repository}" \
- --set gitlab.webservice.image.tag="${CI_COMMIT_SHA}" \
- --set gitlab.webservice.workhorse.image="${gitlab_workhorse_image_repository}" \
- --set gitlab.webservice.workhorse.tag="${CI_COMMIT_SHA}" \
- --set gitlab.toolbox.image.repository="${gitlab_toolbox_image_repository}" \
- --set gitlab.toolbox.image.tag="${CI_COMMIT_SHA}"
+ --timeout "${HELM_INSTALL_TIMEOUT:-20m}"
EOF
)
@@ -325,19 +350,28 @@ EOF
)
fi
+# Important: the `-f` flags below are order-sensitive and must not be
+# reordered: with several values files, Helm gives later files precedence.
+#
+# The `base_config_file` contains the chart's default values, and
+# `review_apps.values.yml` contains the overrides specific to this review app deployment.
HELM_CMD=$(cat << EOF
${HELM_CMD} \
--version="${CI_PIPELINE_ID}-${CI_JOB_ID}" \
-f "${base_config_file}" \
+ -f review_apps.values.yml \
-v "${HELM_LOG_VERBOSITY:-1}" \
"${release}" "gitlab-${GITLAB_HELM_CHART_REF}"
EOF
)
# Pretty-print the command for display
- echoinfo "Deploying with:"
+ echoinfo "Deploying with helm command:"
echo "${HELM_CMD}" | sed 's/ /\n\t/g'
+ echoinfo "Content of review_apps.values.yml:"
+ cat review_apps.values.yml
+
retry "eval \"${HELM_CMD}\""
}
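
When Helm is given several `-f` files it merges them in order and later files take precedence, which is why `review_apps.values.yml` must come after `${base_config_file}`. A sketch for sanity-checking the merged result before deploying, reusing the script's own variables and helpers (`helm template` renders client-side without touching the cluster):

    # Render the chart with the same values files, in the same order, to
    # inspect the effective configuration locally.
    helm template "${release}" "gitlab-${GITLAB_HELM_CHART_REF}" \
      -f "${base_config_file}" \
      -f review_apps.values.yml > /tmp/rendered.yaml && echoinfo "values merge OK"
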
diff --git a/scripts/rspec_helpers.sh b/scripts/rspec_helpers.sh
index de735e03db0..c6ec773cf78 100644
--- a/scripts/rspec_helpers.sh
+++ b/scripts/rspec_helpers.sh
@@ -124,8 +124,8 @@ function debug_rspec_variables() {
echoinfo "SKIP_FLAKY_TESTS_AUTOMATICALLY: ${SKIP_FLAKY_TESTS_AUTOMATICALLY}"
echoinfo "RETRY_FAILED_TESTS_IN_NEW_PROCESS: ${RETRY_FAILED_TESTS_IN_NEW_PROCESS}"
- echoinfo "KNAPSACK_GENERATE_REPORT: ${KNAPSACK_GENERATE_REPORT}"
- echoinfo "FLAKY_RSPEC_GENERATE_REPORT: ${FLAKY_RSPEC_GENERATE_REPORT}"
+ echoinfo "KNAPSACK_GENERATE_REPORT: ${KNAPSACK_GENERATE_REPORT:-}"
+ echoinfo "FLAKY_RSPEC_GENERATE_REPORT: ${FLAKY_RSPEC_GENERATE_REPORT:-}"
echoinfo "KNAPSACK_TEST_FILE_PATTERN: ${KNAPSACK_TEST_FILE_PATTERN}"
echoinfo "KNAPSACK_LOG_LEVEL: ${KNAPSACK_LOG_LEVEL}"
@@ -136,7 +136,7 @@ function debug_rspec_variables() {
echoinfo "NEW_FLAKY_RSPEC_REPORT_PATH: ${NEW_FLAKY_RSPEC_REPORT_PATH}"
echoinfo "SKIPPED_FLAKY_TESTS_REPORT_PATH: ${SKIPPED_FLAKY_TESTS_REPORT_PATH}"
- echoinfo "CRYSTALBALL: ${CRYSTALBALL}"
+ echoinfo "CRYSTALBALL: ${CRYSTALBALL:-}"
}
function handle_retry_rspec_in_new_process() {
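
The `${VAR:-}` expansions added above matter when the script runs under `set -u` (nounset): referencing an unset variable aborts the shell, while `${VAR:-}` expands to an empty string. A minimal illustration:

    set -u
    unset KNAPSACK_GENERATE_REPORT
    echo "KNAPSACK_GENERATE_REPORT: ${KNAPSACK_GENERATE_REPORT:-}"  # prints an empty value
    # echo "${KNAPSACK_GENERATE_REPORT}"                            # would abort: unbound variable
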
@@ -163,8 +163,9 @@ function rspec_paralellized_job() {
read -ra job_name <<< "${CI_JOB_NAME}"
local test_tool="${job_name[0]}"
local test_level="${job_name[1]}"
- local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg12 1/24' would become 'rspec_unit_pg12_1_24'
- local rspec_opts="${1}"
+ local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg13 1/24' would become 'rspec_unit_pg13_1_24'
+ local rspec_opts="${1:-}"
+ local rspec_tests_mapping_enabled="${RSPEC_TESTS_MAPPING_ENABLED:-}"
local spec_folder_prefixes=""
local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/"
local knapsack_folder_path="$(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}")/"
@@ -218,7 +219,7 @@ function rspec_paralellized_job() {
debug_rspec_variables
- if [[ -n "${RSPEC_TESTS_MAPPING_ENABLED}" ]]; then
+ if [[ -n "${rspec_tests_mapping_enabled}" ]]; then
tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" --filter "${RSPEC_TESTS_FILTER_FILE}" || rspec_run_status=$?
else
tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" || rspec_run_status=$?
@@ -240,7 +241,7 @@ function retry_failed_rspec_examples() {
# Keep track of the tests that are retried, later consolidated in a single file by the `rspec:flaky-tests-report` job
local failed_examples=$(grep " failed" ${RSPEC_LAST_RUN_RESULTS_FILE})
- local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg12 1/24' would become 'rspec_unit_pg12_1_24'
+ local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg13 1/24' would become 'rspec_unit_pg13_1_24'
local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/"
export RETRIED_TESTS_REPORT_PATH="${rspec_flaky_folder_path}retried_tests_${report_name}_report.txt"
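
The `sed` transform referenced in the comments above rewrites every '/' and space in the CI job name to '_' so the report name is filesystem-safe, and is easy to verify directly:

    echo 'rspec unit pg13 1/24' | sed -E 's|[/ ]|_|g'   # => rspec_unit_pg13_1_24
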
diff --git a/scripts/utils.sh b/scripts/utils.sh
index df8a5825dab..80057842c28 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -76,7 +76,7 @@ function bundle_install_script() {
gem --version
bundle --version
- gem install bundler --no-document --conservative --version 2.3.15
+ gem install bundler --no-document --conservative --version 2.4.11
test -d jh && bundle config set --local gemfile 'jh/Gemfile'
bundle config set path "$(pwd)/vendor"
bundle config set clean 'true'
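
`--conservative` makes RubyGems skip the install when a bundler matching the version requirement is already present, so repeated CI runs don't re-download the gem. A quick demonstration using only standard RubyGems commands:

    gem install bundler --no-document --conservative --version 2.4.11
    gem install bundler --no-document --conservative --version 2.4.11  # no-op: already satisfied
    gem list bundler                                                   # confirms 2.4.11 is present
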
diff --git a/scripts/verify-tff-mapping b/scripts/verify-tff-mapping
index 872f8dae86e..39834ec38bc 100755
--- a/scripts/verify-tff-mapping
+++ b/scripts/verify-tff-mapping
@@ -81,13 +81,13 @@ tests = [
source: 'db/migrate/20221014034338_populate_releases_access_level_from_repository.rb',
expected: ['spec/migrations/populate_releases_access_level_from_repository_spec.rb']
},
-
+ # rubocop:disable Layout/LineLength
{
explanation: 'Migration should map to its timestamped spec',
- source: 'db/post_migrate/20210915022415_cleanup_bigint_conversion_for_ci_builds.rb',
- expected: ['spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb']
+ source: 'db/post_migrate/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table.rb',
+ expected: ['spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb']
},
-
+ # rubocop:enable Layout/LineLength
{
explanation: 'FOSS views should map to respective spec',
source: 'app/views/admin/dashboard/index.html.haml',
@@ -196,6 +196,17 @@ tests = [
explanation: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/287#note_1192008962',
source: 'ee/lib/ee/gitlab/usage_data_counters/known_events/common.yml',
expected: ['ee/spec/config/metrics/every_metric_definition_spec.rb']
+ },
+ {
+ explanation: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/146',
+ source: 'config/feature_categories.yml',
+ expected: ['spec/db/docs_spec.rb', 'ee/spec/lib/ee/gitlab/database/docs/docs_spec.rb']
+ },
+
+ {
+ explanation: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/1360',
+ source: 'vendor/project_templates/gitbook.tar.gz',
+ expected: ['spec/lib/gitlab/project_template_spec.rb']
}
]
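
Each entry above asserts that the test-file-finder mapping resolves `source` to exactly the paths in `expected`. Assuming the `tff` CLI from the `test_file_finder` gem is on the PATH (the bare invocation below is an assumption, not something this commit documents), a single mapping can be spot-checked by hand:

    # Spot-check one mapping; the output should list exactly the expected specs.
    tff config/feature_categories.yml
    # => spec/db/docs_spec.rb
    # => ee/spec/lib/ee/gitlab/database/docs/docs_spec.rb
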