summaryrefslogtreecommitdiff
path: root/scripts
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2022-12-20 14:22:11 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2022-12-20 14:22:11 +0000
commit0c872e02b2c822e3397515ec324051ff540f0cd5 (patch)
treece2fb6ce7030e4dad0f4118d21ab6453e5938cdd /scripts
parentf7e05a6853b12f02911494c4b3fe53d9540d74fc (diff)
downloadgitlab-ce-0c872e02b2c822e3397515ec324051ff540f0cd5.tar.gz
Add latest changes from gitlab-org/gitlab@15-7-stable-eev15.7.0-rc42
Diffstat (limited to 'scripts')
-rw-r--r--scripts/api/create_issue_discussion.rb32
-rwxr-xr-xscripts/api/download_job_artifact.rb94
-rwxr-xr-xscripts/build_assets_image75
-rwxr-xr-xscripts/build_qa_image4
-rwxr-xr-xscripts/check-template-changes105
-rwxr-xr-xscripts/create-pipeline-failure-incident.rb24
-rw-r--r--scripts/lib/gitlab.rb4
-rw-r--r--scripts/lib/glfm/constants.rb21
-rw-r--r--scripts/lib/glfm/render_static_html.rb4
-rw-r--r--scripts/lib/glfm/render_wysiwyg_html_and_json.js1
-rw-r--r--scripts/lib/glfm/shared.rb1
-rw-r--r--scripts/lib/glfm/specification_html_template.erb244
-rw-r--r--scripts/lib/glfm/update_specification.rb118
-rwxr-xr-xscripts/lint-doc-quality.sh24
-rwxr-xr-xscripts/review_apps/automated_cleanup.rb31
-rw-r--r--scripts/review_apps/base-config.yaml6
-rwxr-xr-xscripts/review_apps/gcp-quotas-checks.rb46
-rwxr-xr-xscripts/review_apps/gcp_cleanup.sh160
-rwxr-xr-xscripts/review_apps/k8s-resources-count-checks.sh90
-rwxr-xr-xscripts/review_apps/review-apps.sh50
-rw-r--r--scripts/rspec_helpers.sh127
-rwxr-xr-xscripts/rubocop-max-files-in-cache-check28
-rwxr-xr-xscripts/static-analysis3
-rwxr-xr-xscripts/trigger-build.rb5
-rwxr-xr-xscripts/undercoverage9
-rwxr-xr-xscripts/used-feature-flags3
-rw-r--r--scripts/utils.sh43
-rwxr-xr-xscripts/verify-tff-mapping114
28 files changed, 992 insertions, 474 deletions
diff --git a/scripts/api/create_issue_discussion.rb b/scripts/api/create_issue_discussion.rb
new file mode 100644
index 00000000000..74a9f3ae378
--- /dev/null
+++ b/scripts/api/create_issue_discussion.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'gitlab'
+require_relative 'default_options'
+
+class CreateIssueDiscussion
+ def initialize(options)
+ @project = options.fetch(:project)
+
+ # Force the token to be a string so that if api_token is nil, it's set to '',
+ # allowing unauthenticated requests (for forks).
+ api_token = options.delete(:api_token).to_s
+
+ warn "No API token given." if api_token.empty?
+
+ @client = Gitlab.client(
+ endpoint: options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint],
+ private_token: api_token
+ )
+ end
+
+ def execute(discussion_data)
+ client.post(
+ "/projects/#{client.url_encode project}/issues/#{discussion_data.delete(:issue_iid)}/discussions",
+ body: discussion_data
+ )
+ end
+
+ private
+
+ attr_reader :project, :client
+end
diff --git a/scripts/api/download_job_artifact.rb b/scripts/api/download_job_artifact.rb
deleted file mode 100755
index 394ad8f3a3d..00000000000
--- a/scripts/api/download_job_artifact.rb
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-require 'optparse'
-require 'fileutils'
-require 'uri'
-require 'cgi'
-require 'net/http'
-require_relative 'default_options'
-
-class ArtifactFinder
- def initialize(options)
- @project = options.delete(:project)
- @job_id = options.delete(:job_id)
- @api_token = options.delete(:api_token)
- @endpoint = options.delete(:endpoint) || API::DEFAULT_OPTIONS[:endpoint]
- @artifact_path = options.delete(:artifact_path)
-
- warn "No API token given." unless api_token
- end
-
- def execute
- url = "#{endpoint}/projects/#{CGI.escape(project)}/jobs/#{job_id}/artifacts"
-
- if artifact_path
- FileUtils.mkdir_p(File.dirname(artifact_path))
- url += "/#{artifact_path}"
- end
-
- fetch(url)
- end
-
- private
-
- attr_reader :project, :job_id, :api_token, :endpoint, :artifact_path
-
- def fetch(uri_str, limit = 10)
- raise 'Too many HTTP redirects' if limit == 0
-
- uri = URI(uri_str)
- request = Net::HTTP::Get.new(uri)
- request['Private-Token'] = api_token if api_token
-
- Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
- http.request(request) do |response|
- case response
- when Net::HTTPSuccess then
- File.open(artifact_path || 'artifacts.zip', 'w') do |file|
- response.read_body(&file.method(:write))
- end
- when Net::HTTPRedirection then
- location = response['location']
- warn "Redirected (#{limit - 1} redirections remaining)."
- fetch(location, limit - 1)
- else
- raise "Unexpected response: #{response.value}"
- end
- end
- end
- end
-end
-
-if $PROGRAM_NAME == __FILE__
- options = API::DEFAULT_OPTIONS.dup
-
- OptionParser.new do |opts|
- opts.on("-p", "--project PROJECT", String, "Project where to find the job (defaults to $CI_PROJECT_ID)") do |value|
- options[:project] = value
- end
-
- opts.on("-j", "--job-id JOB_ID", String, "A job ID") do |value|
- options[:job_id] = value
- end
-
- opts.on("-a", "--artifact-path ARTIFACT_PATH", String, "A valid artifact path") do |value|
- options[:artifact_path] = value
- end
-
- opts.on("-t", "--api-token API_TOKEN", String, "A value API token with the `read_api` scope") do |value|
- options[:api_token] = value
- end
-
- opts.on("-E", "--endpoint ENDPOINT", String, "The API endpoint for the API token. (defaults to $CI_API_V4_URL and fallback to https://gitlab.com/api/v4)") do |value|
- options[:endpoint] = value
- end
-
- opts.on("-h", "--help", "Prints this help") do
- puts opts
- exit
- end
- end.parse!
-
- ArtifactFinder.new(options).execute
-end
diff --git a/scripts/build_assets_image b/scripts/build_assets_image
index 8aa6526061a..ee8623c826e 100755
--- a/scripts/build_assets_image
+++ b/scripts/build_assets_image
@@ -1,36 +1,75 @@
+#!/bin/sh
+
+set -e
+
+# This script builds an image that contains assets, that's then used by:
+# - The `CNG` downstream pipelines (triggered from `gitlab-org/gitlab` via the `review-build-cng` job):
+# https://gitlab.com/gitlab-org/gitlab/-/blob/c34e0834b01cd45c1f69a01b5e38dd6bc505f903/.gitlab/ci/review-apps/main.gitlab-ci.yml#L69.
+# - The `omnibus-gitlab` downstream pipelines (triggered from `gitlab-org/gitlab` via the `e2e:package-and-test` job):
+# https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/dfd1ad475868fc84e91ab7b5706aa03e46dc3a86/.gitlab-ci.yml#L130.
+# - The `gitlab-org/charts/gitlab` `master` pipelines via `gitlab-org/build/CNG`,
+# which pull `registry.gitlab.com/gitlab-org/gitlab/gitlab-assets-ee:master`
+# - The `omnibus-gitlab` and CNG `master`/stable-branch pipelines, for both gitlab.com and dev.gitlab.org,
+# which pull `registry.gitlab.com/gitlab-org/gitlab/gitlab-assets-ee:${CI_COMMIT_REF_SLUG}`.
+# - The `omnibus-gitlab` tag pipelines, for both gitlab.com and dev.gitlab.org,
+# which pull `registry.gitlab.com/gitlab-org/gitlab/gitlab-assets-ee:${CI_COMMIT_REF_SLUG}`.
+# - The CNG tag pipelines, for both gitlab.com and dev.gitlab.org,
+# which pull `registry.gitlab.com/gitlab-org/gitlab/gitlab-assets-ee:${CI_COMMIT_REF_NAME}`.
+# - The auto-deploy pipelines, which pull `registry.gitlab.com/gitlab-org/gitlab/gitlab-assets-ee:${CI_COMMIT_SHA}`.
+
+. scripts/utils.sh
+
# Exit early if we don't want to build the image
-if [[ "${BUILD_ASSETS_IMAGE}" != "true" ]]
+if [ "${BUILD_ASSETS_IMAGE}" != "true" ]
then
exit 0
fi
# Generate the image name based on the project this is being run in
ASSETS_IMAGE_NAME="gitlab-assets-ce"
+
# `dev.gitlab-org` still has gitlab-ee.
-if [[ "${CI_PROJECT_NAME}" == "gitlab" ]] || [[ "${CI_PROJECT_NAME}" == "gitlab-ee" ]]
-then
+if [ "${CI_PROJECT_NAME}" = "gitlab" ] || [ "${CI_PROJECT_NAME}" = "gitlab-ee" ]; then
ASSETS_IMAGE_NAME="gitlab-assets-ee"
fi
-ASSETS_IMAGE_PATH=${CI_REGISTRY}/${CI_PROJECT_PATH}/${ASSETS_IMAGE_NAME}
+ASSETS_IMAGE_PATH="${CI_REGISTRY}/${CI_PROJECT_PATH}/${ASSETS_IMAGE_NAME}"
-mkdir -p assets_container.build/public
-cp -r public/assets assets_container.build/public/
-cp Dockerfile.assets assets_container.build/
+# Used in MR pipelines
+COMMIT_ASSETS_HASH_DESTINATION="${ASSETS_IMAGE_PATH}:$(assets_image_tag)"
+# Used by other projects's master pipelines
+COMMIT_REF_SLUG_DESTINATION="${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_SLUG}"
+# Used by auto-deploy pipelines: https://gitlab.com/gitlab-org/release/docs/blob/master/general/deploy/auto-deploy.md
+COMMIT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_SHA}
+# Used for CNG tag pipelines
+COMMIT_REF_NAME_DESTINATION="${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_NAME}"
-COMMIT_REF_SLUG_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_SLUG}
+if skopeo inspect "docker://${COMMIT_ASSETS_HASH_DESTINATION}" > /dev/null; then
+ echosuccess "Image ${COMMIT_ASSETS_HASH_DESTINATION} already exists, no need to rebuild it."
-COMMIT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_SHA}
-COMMIT_REF_NAME_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_NAME}
+ skopeo copy "docker://${COMMIT_ASSETS_HASH_DESTINATION}" "docker://${COMMIT_REF_SLUG_DESTINATION}"
+ skopeo copy "docker://${COMMIT_ASSETS_HASH_DESTINATION}" "docker://${COMMIT_SHA_DESTINATION}"
-DESTINATIONS="--destination=$COMMIT_REF_SLUG_DESTINATION --destination=$COMMIT_SHA_DESTINATION"
+ if [ -n "${CI_COMMIT_TAG}" ]; then
+ skopeo copy "docker://${COMMIT_ASSETS_HASH_DESTINATION}" "docker://${COMMIT_REF_NAME_DESTINATION}"
+ fi
+else
+ echoinfo "Image ${COMMIT_ASSETS_HASH_DESTINATION} doesn't exist, we'll need to build it."
-# Also tag the image with GitLab version, if running on a tag pipeline, so
-# other projects can simply use that instead of computing the slug.
-if [ -n "$CI_COMMIT_TAG" ]; then
- DESTINATIONS="$DESTINATIONS --destination=$COMMIT_REF_NAME_DESTINATION"
-fi
+ DESTINATIONS="--destination=${COMMIT_ASSETS_HASH_DESTINATION} --destination=${COMMIT_REF_SLUG_DESTINATION} --destination=${COMMIT_SHA_DESTINATION}"
-echo "building assets image for destinations: $DESTINATIONS"
+ if [ -n "${CI_COMMIT_TAG}" ]; then
+ DESTINATIONS="$DESTINATIONS --destination=${COMMIT_REF_NAME_DESTINATION}"
+ fi
-/kaniko/executor --context=assets_container.build --dockerfile=assets_container.build/Dockerfile.assets $DESTINATIONS
+ mkdir -p assets_container.build/public
+ cp -r public/assets assets_container.build/public/
+ cp Dockerfile.assets assets_container.build/
+
+ echo "Building assets image for destinations: ${DESTINATIONS}"
+
+ /kaniko/executor \
+ --context="assets_container.build" \
+ --dockerfile="assets_container.build/Dockerfile.assets" \
+ ${DESTINATIONS}
+fi
diff --git a/scripts/build_qa_image b/scripts/build_qa_image
index 477bec29ba7..3728608e32c 100755
--- a/scripts/build_qa_image
+++ b/scripts/build_qa_image
@@ -1,9 +1,11 @@
#!/bin/bash
QA_IMAGE_NAME="gitlab-ee-qa"
+QA_BUILD_TARGET="ee"
if [[ "${CI_PROJECT_NAME}" == "gitlabhq" || "${CI_PROJECT_NAME}" == "gitlab-foss" ]]; then
QA_IMAGE_NAME="gitlab-ce-qa"
+ QA_BUILD_TARGET="foss"
fi
# Tag with commit SHA by default
@@ -36,7 +38,7 @@ docker buildx build \
--build-arg=CHROME_VERSION="${CHROME_VERSION}" \
--build-arg=DOCKER_VERSION="${DOCKER_VERSION}" \
--build-arg=RUBY_VERSION="${RUBY_VERSION}" \
- --build-arg=QA_BUILD_TARGET="${QA_BUILD_TARGET:-qa}" \
+ --build-arg=QA_BUILD_TARGET="${QA_BUILD_TARGET}" \
--file="${CI_PROJECT_DIR}/qa/Dockerfile" \
--push \
${DESTINATIONS} \
diff --git a/scripts/check-template-changes b/scripts/check-template-changes
new file mode 100755
index 00000000000..1a3060fe1bb
--- /dev/null
+++ b/scripts/check-template-changes
@@ -0,0 +1,105 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'tmpdir'
+
+@template = ARGV.first
+
+if @template.nil?
+ puts "Usage: #{__FILE__} <path_to_project_template>"
+ exit 1
+end
+
+@name = File.basename(@template).delete_suffix('.tar.gz')
+@extracted_template_dir = Dir.mktmpdir(@name)
+@master_template_dir = Dir.mktmpdir(@name)
+
+def extract(dest)
+ system('tar', 'xf', @template, '-C', dest, exception: true)
+end
+
+def cleanup
+ FileUtils.rm_rf(@extracted_template_dir)
+ FileUtils.rm_rf(@master_template_dir)
+end
+
+def repo_details
+ Dir.chdir(@extracted_template_dir) do
+ system('git', 'clone', 'project.bundle', @name, exception: true)
+ end
+
+ Dir.chdir(File.join(@extracted_template_dir, @name)) do
+ head_commit = `git cat-file -p HEAD`
+ lines = head_commit.split("\n")
+
+ repository = lines
+ .find { |line| line.start_with?('Template repository: ') }
+ .rpartition(' ').last
+
+ commit_sha = lines
+ .find { |line| line.start_with?('Commit SHA: ') }
+ .rpartition(' ').last
+
+ [repository, commit_sha]
+ end
+end
+
+puts "Extracting template to: #{@extracted_template_dir}"
+
+extract(@extracted_template_dir)
+branch = `git rev-parse --abbrev-ref HEAD`.chomp
+system('git', 'checkout', 'master', exception: true)
+extract(@master_template_dir)
+system('git', 'checkout', branch, exception: true)
+
+puts
+puts '🧐 Comparing new template with master'
+puts
+
+system('git', '--no-pager', 'diff', '--no-index', @master_template_dir, @extracted_template_dir)
+
+puts
+puts '--- end diff ---'
+
+repository, commit_sha = repo_details
+
+puts
+puts "📝 Template is created from #{repository} at commit #{commit_sha}"
+
+unless repository.start_with?('https://gitlab.com/gitlab-org/project-templates/')
+ puts '❌ This template does not have the correct origin'
+ cleanup
+ exit 1
+end
+
+puts '🧐 Verifying that template repo matches remote'
+puts
+
+remote_repo_dir = Dir.mktmpdir(@name)
+
+system('git', 'clone', repository, remote_repo_dir, exception: true)
+
+Dir.chdir(remote_repo_dir) do
+ system('git', 'checkout', commit_sha, exception: true)
+ system('git', '--no-pager', 'show')
+end
+
+extracted_template_repo_dir = File.join(@extracted_template_dir, @name)
+
+FileUtils.rm_rf(File.join(extracted_template_repo_dir, '.git'))
+FileUtils.cp_r(File.join(remote_repo_dir, '.git'), extracted_template_repo_dir)
+
+Dir.chdir(extracted_template_repo_dir) do
+ status = `git status`
+ puts status
+ puts
+
+ if status.include?('nothing to commit, working tree clean')
+ puts "✅ Template is up to date with remote commit #{commit_sha}"
+ else
+ puts '❌ Template is not synced with remote'
+ end
+end
+
+FileUtils.rm_rf(remote_repo_dir)
+cleanup
diff --git a/scripts/create-pipeline-failure-incident.rb b/scripts/create-pipeline-failure-incident.rb
index c38f80699e6..1035a680291 100755
--- a/scripts/create-pipeline-failure-incident.rb
+++ b/scripts/create-pipeline-failure-incident.rb
@@ -7,13 +7,14 @@ require 'json'
require_relative 'api/pipeline_failed_jobs'
require_relative 'api/create_issue'
+require_relative 'api/create_issue_discussion'
class CreatePipelineFailureIncident
DEFAULT_OPTIONS = {
project: nil,
incident_json_file: 'incident.json'
}.freeze
- DEFAULT_LABELS = ['Engineering Productivity', 'master-broken:undetermined'].freeze
+ DEFAULT_LABELS = ['Engineering Productivity', 'master-broken::undetermined'].freeze
def initialize(options)
@project = options.delete(:project)
@@ -28,7 +29,12 @@ class CreatePipelineFailureIncident
labels: incident_labels
}
- CreateIssue.new(project: project, api_token: api_token).execute(payload)
+ CreateIssue.new(project: project, api_token: api_token).execute(payload).tap do |incident|
+ CreateIssueDiscussion.new(project: project, api_token: api_token)
+ .execute(issue_iid: incident.iid, body: "## Root Cause Analysis")
+ CreateIssueDiscussion.new(project: project, api_token: api_token)
+ .execute(issue_iid: incident.iid, body: "## Investigation Steps")
+ end
end
private
@@ -44,8 +50,16 @@ class CreatePipelineFailureIncident
end
def title
- "#{now.strftime('%A %F %R UTC')} - `#{ENV['CI_PROJECT_PATH']}` broken `#{ENV['CI_COMMIT_REF_NAME']}` " \
- "with #{failed_jobs.size} failed jobs"
+ @title ||= begin
+ full_title = "#{now.strftime('%A %F %R UTC')} - `#{ENV['CI_PROJECT_PATH']}` " \
+ "broken `#{ENV['CI_COMMIT_REF_NAME']}` with #{failed_jobs.map(&:name).join(', ')}"
+
+ if full_title.size >= 255
+ "#{full_title[...252]}..." # max title length is 255, and we add an elipsis
+ else
+ full_title
+ end
+ end
end
def description
@@ -85,7 +99,7 @@ class CreatePipelineFailureIncident
You can create a merge request, assign to any available maintainer, and ping people that were involved/related to the introduction of the failure.
Additionally, a message can be posted in `#backend_maintainers` or `#frontend_maintainers` to get a maintainer take a look at the fix ASAP.
- In both cases, make sure to add the ~"pipeline:expedite-master-fixing" label, and `master:broken` or `master:foss-broken` label, to speed up the `master`-fixing pipelines.
+ In both cases, make sure to add the ~"pipeline:expedite" label, and `master:broken` or `master:foss-broken` label, to speed up the `master`-fixing pipelines.
### Resolution
diff --git a/scripts/lib/gitlab.rb b/scripts/lib/gitlab.rb
index 556e2037edf..a7ca6b7c5df 100644
--- a/scripts/lib/gitlab.rb
+++ b/scripts/lib/gitlab.rb
@@ -4,10 +4,10 @@ module Gitlab
module_function
def ee?
- File.exist?(File.expand_path('../../ee/app/models/license.rb', __dir__)) && !%w[true 1].include?(ENV['FOSS_ONLY'].to_s)
+ File.exist?(File.expand_path('../../ee/app/models/license.rb', __dir__)) && !%w[true 1].include?(ENV['FOSS_ONLY'].to_s) # rubocop:disable Rails/NegateInclude
end
def jh?
- ee? && Dir.exist?(File.expand_path('../../jh', __dir__)) && !%w[true 1].include?(ENV['EE_ONLY'].to_s)
+ ee? && Dir.exist?(File.expand_path('../../jh', __dir__)) && !%w[true 1].include?(ENV['EE_ONLY'].to_s) # rubocop:disable Rails/NegateInclude
end
end
diff --git a/scripts/lib/glfm/constants.rb b/scripts/lib/glfm/constants.rb
index c432e5495dd..16ffb12db57 100644
--- a/scripts/lib/glfm/constants.rb
+++ b/scripts/lib/glfm/constants.rb
@@ -4,6 +4,11 @@ require 'pathname'
module Glfm
module Constants
+ # Version and titles for rendering
+ GLFM_SPEC_VERSION = 'alpha'
+ GLFM_SPEC_TXT_TITLE = 'GitLab Flavored Markdown Official Specification'
+ ES_SNAPSHOT_SPEC_TITLE = 'GitLab Flavored Markdown Internal Extensions'
+
# Root dir containing all specification files
specification_path = Pathname.new(File.expand_path("../../../glfm_specification", __dir__))
@@ -25,6 +30,12 @@ module Glfm
GLFM_OUTPUT_SPEC_PATH = specification_path.join('output_spec')
GLFM_SPEC_TXT_PATH = GLFM_OUTPUT_SPEC_PATH.join('spec.txt')
GLFM_SPEC_HTML_PATH = GLFM_OUTPUT_SPEC_PATH.join('spec.html')
+ GLFM_SPEC_TXT_HEADER = <<~MARKDOWN
+ ---
+ title: #{GLFM_SPEC_TXT_TITLE}
+ version: #{GLFM_SPEC_VERSION}
+ ...
+ MARKDOWN
# Example Snapshot (ES) files
ES_OUTPUT_EXAMPLE_SNAPSHOTS_PATH = specification_path.join('output_example_snapshots')
@@ -34,14 +45,14 @@ module Glfm
ES_MARKDOWN_YML_PATH = ES_OUTPUT_EXAMPLE_SNAPSHOTS_PATH.join('markdown.yml')
ES_HTML_YML_PATH = ES_OUTPUT_EXAMPLE_SNAPSHOTS_PATH.join('html.yml')
ES_PROSEMIRROR_JSON_YML_PATH = ES_OUTPUT_EXAMPLE_SNAPSHOTS_PATH.join('prosemirror_json.yml')
-
- # Other constants used for processing files
- GLFM_SPEC_TXT_HEADER = <<~MARKDOWN
+ ES_SNAPSHOT_SPEC_MD_HEADER = <<~MARKDOWN
---
- title: GitLab Flavored Markdown (GLFM) Spec
- version: alpha
+ title: #{ES_SNAPSHOT_SPEC_TITLE}
+ version: #{GLFM_SPEC_VERSION}
...
MARKDOWN
+
+ # Other constants used for processing files
EXAMPLE_BACKTICKS_LENGTH = 32
EXAMPLE_BACKTICKS_STRING = '`' * EXAMPLE_BACKTICKS_LENGTH
EXAMPLE_BEGIN_STRING = "#{EXAMPLE_BACKTICKS_STRING} example"
diff --git a/scripts/lib/glfm/render_static_html.rb b/scripts/lib/glfm/render_static_html.rb
index 6af73cd845d..bf7865c4d95 100644
--- a/scripts/lib/glfm/render_static_html.rb
+++ b/scripts/lib/glfm/render_static_html.rb
@@ -23,8 +23,8 @@ require_relative 'shared'
# the API and obtain the response.
#
# It is intended to be invoked as a helper subprocess from the `update_example_snapshots.rb`
-# script class. It's not intended to be run or used directly. This usage is also reinforced
-# by not naming the file with a `_spec.rb` ending.
+# and `update_specification.rb` script classes. It's not intended to be run or used directly.
+# This usage is also reinforced by not naming the file with a `_spec.rb` ending.
RSpec.describe 'Render Static HTML', :api, type: :request do
include Glfm::Constants
include Glfm::Shared
diff --git a/scripts/lib/glfm/render_wysiwyg_html_and_json.js b/scripts/lib/glfm/render_wysiwyg_html_and_json.js
index 8f94f50d62b..f9c4c417a6f 100644
--- a/scripts/lib/glfm/render_wysiwyg_html_and_json.js
+++ b/scripts/lib/glfm/render_wysiwyg_html_and_json.js
@@ -19,7 +19,6 @@ jest.mock('~/emoji');
// Jest because that is the simplest environment in which to execute the
// relevant Content Editor logic.
//
-//
// This script should be invoked via jest with the a command similar to the following:
// yarn jest --testMatch '**/render_wysiwyg_html_and_json.js' ./scripts/lib/glfm/render_wysiwyg_html_and_json.js
it('serializes html to prosemirror json', async () => {
diff --git a/scripts/lib/glfm/shared.rb b/scripts/lib/glfm/shared.rb
index b529d9ba94f..56cb2f95d6a 100644
--- a/scripts/lib/glfm/shared.rb
+++ b/scripts/lib/glfm/shared.rb
@@ -3,6 +3,7 @@ require 'fileutils'
require 'open3'
require 'active_support/core_ext/hash/keys'
+# This module contains shared methods used by other GLFM scripts and modules.
module Glfm
module Shared
def write_file(file_path, file_content_string)
diff --git a/scripts/lib/glfm/specification_html_template.erb b/scripts/lib/glfm/specification_html_template.erb
new file mode 100644
index 00000000000..f75cb400229
--- /dev/null
+++ b/scripts/lib/glfm/specification_html_template.erb
@@ -0,0 +1,244 @@
+<!DOCTYPE html>
+<!-- NOTE: Styling is based on the CommonMark specification template: -->
+<!-- - https://github.com/commonmark/commonmark-spec/blob/master/tools/make_spec.lua -->
+<!-- - https://github.com/commonmark/commonmark-spec/blob/master/tools/template.html -->
+<!-- -->
+<!-- NOTE: 'TODO:' comments will be followed up as task(s) on this issue: -->
+<!-- - https://gitlab.com/gitlab-org/gitlab/-/issues/361241 -->
+<html lang="en">
+<head>
+ <meta charset="UTF-8">
+ <title><%= title %></title>
+ <style type="text/css">
+ body {
+ font-family: Helvetica, arial, freesans, clean, sans-serif;
+ line-height: 1.4;
+ max-width: 48em;
+ margin: auto;
+ padding: 0 0.5em 4em;
+ color: #333333;
+ background-color: #ffffff;
+ font-size: 13pt;
+ }
+
+ div#TOC ul { list-style: none; }
+ h1 {
+ font-size: 140%;
+ font-weight: bold;
+ border-top: 1px solid gray;
+ padding-top: 0.5em;
+ }
+
+ h2 {
+ font-size: 120%;
+ font-weight: bold;
+ }
+
+ h3 {
+ font-size: 110%;
+ font-weight: bold;
+ }
+
+ h4 {
+ font-size: 100%;
+ font-weight: bold;
+ }
+
+ /* NOTE: "font-weight: bold" was applied to "a.definition" class in original CommonMark */
+ /* template, but in practice it was applied to all anchors */
+ a {
+ font-weight: bold;
+ }
+
+
+ /* TODO: Format whitespace in examples. This will require preprocessing to insert spans around them. */
+ /*span.space { position: relative; }*/
+ /*span.space:after {*/
+ /* content: "·";*/
+ /* position: absolute;*/
+ /* !* create a mark that indicates a space (trick from D. Greenspan) *!*/
+ /* top: 0; bottom: 7px; left: 1px; right: 1px;*/
+ /* color: #aaaaaa;*/
+ /*}*/
+ /*@media print {*/
+ /* a.dingus { display: none; }*/
+ /*}*/
+
+ div.example {
+ overflow: hidden;
+ }
+
+ p {
+ text-align: justify;
+ }
+
+ pre {
+ padding: 0.5em;
+ margin: 0.2em 0 0.5em;
+ font-size: 88%;
+ }
+
+ pre {
+ white-space: pre-wrap; /* css-3 */
+ white-space: -moz-pre-wrap; /* Mozilla, since 1999 */
+ white-space: -o-pre-wrap; /* Opera 7 */
+ word-wrap: break-word; /* Internet Explorer 5.5+ */
+ }
+
+ code {
+ font-family: monospace;
+ background-color: #d3e1e4;
+ }
+
+ pre > code {
+ background-color: transparent;
+ }
+
+ .example {
+ font-size: 0; /* hack to get width: 50% to work on inline-block */
+ padding-bottom: 6pt;
+ }
+
+ .column pre {
+ font-size: 11pt;
+ padding: 2pt 6pt;
+ }
+
+ div.examplenum {
+ font-size: 11pt;
+ text-align: left;
+ margin-bottom: 10px;
+ }
+
+ div.column {
+ display: inline-block;
+ width: 50%;
+ vertical-align: top;
+ }
+
+ div.example > div:nth-child(2) {
+ clear: left;
+ background-color: #d3e1e4;
+ }
+
+ div.example > div:nth-child(3) {
+ clear: right;
+ background-color: #c9cace;
+ }
+
+ @media print {
+ @page {
+ size: auto;
+ margin: 1.2in 1.2in 1.2in 1.2in;
+ }
+
+ body {
+ margin: 0;
+ line-height: 1.2;
+ font-size: 10pt;
+ }
+
+ .column pre {
+ font-size: 9pt;
+ }
+
+ div.examplenum {
+ font-size: 9pt;
+ }
+ }
+ </style>
+ <!-- TODO: Extract this javascript out to a separate file and unit test it -->
+ <script type="text/javascript">
+ /* NOTE: The following code performs many of the pre-processing steps originally handled */
+ /* in https://github.com/commonmark/commonmark-spec/blob/master/tools/make_spec.lua */
+
+ /* Adds a div.example wrapper around each pair of example code blocks. */
+ function addAttributesToExampleWrapperDivs() {
+ const exampleAnchorTags = document.querySelectorAll("a[href^=\"#example-\"]");
+ for (const exampleAnchorTag of exampleAnchorTags) {
+ const examplenumDiv = exampleAnchorTag.parentElement;
+ examplenumDiv.classList.add("examplenum");
+ const exampleDiv = examplenumDiv.parentElement;
+ exampleDiv.classList.add("example");
+ exampleDiv.id = exampleAnchorTag.getAttribute("href").substring(1);
+ }
+ }
+
+ function addColumnClassToMarkdownDivs() {
+ const markdownCodeBlockDivs = document.querySelectorAll("div.markdown-code-block");
+ for (const markdownCodeBlockDiv of markdownCodeBlockDivs) {
+ markdownCodeBlockDiv.classList.add("column");
+ }
+ }
+
+ function addNumbersToHeaders() {
+ const headers = document.querySelectorAll('h1,h2,h3');
+ let h1Index = -1; // NOTE: -1 because we don't assign a number to the title
+ let h2Index = 0;
+ let h3Index = 0;
+ const tocEntries = [];
+ for (const header of headers) {
+ if (h1Index === -1) {
+ h1Index++;
+ continue;
+ }
+
+ const originalHeaderTextContent = header.textContent.trim();
+ const headerAnchor = originalHeaderTextContent.toLowerCase().replaceAll(' ', '-');
+ header.id = headerAnchor;
+ let indent;
+ let headerTextContent;
+ if (header.tagName === 'H1') {
+ h1Index++;
+ h2Index = 0;
+ h3Index = 0;
+ header.textContent = headerTextContent = h1Index + ' ' + originalHeaderTextContent;
+ indent = 0;
+ } else if (header.tagName === 'H2') {
+ h2Index++;
+ h3Index = 0;
+ header.textContent =
+ headerTextContent = h1Index + '.' + h2Index + ' ' + originalHeaderTextContent;
+ indent = 1;
+ } else if (header.tagName === 'H3') {
+ h3Index++;
+ header.textContent = headerTextContent =
+ h1Index + '.' + h2Index + '.' + h3Index + ' ' + originalHeaderTextContent;
+ indent = 2;
+ }
+ tocEntries.push({headerAnchor, headerTextContent, indent});
+ }
+ }
+
+ document.addEventListener("DOMContentLoaded", function(_event) {
+ addAttributesToExampleWrapperDivs();
+ addColumnClassToMarkdownDivs();
+ const tocEntries = addNumbersToHeaders();
+ addToc(tocEntries);
+ });
+
+ /* NOTE: The following code is to support the "Try it" interactive "dingus", which */
+ /* we do not yet support. But it is being left here for comparison context with the */
+ /* original CommonMark template. */
+ // $$(document).ready(function() {
+ // $$("div.example").each(function(e) {
+ // var t = $$(this).find('code.language-markdown').text();
+ // $$(this).find('a.dingus').click(function(f) {
+ // window.open('/dingus/?text=' +
+ // encodeURIComponent(t.replace(/→/g,"\t")));
+ // });
+ // });
+ // $$("code.language-markdown").dblclick(function(e) { window.open('/dingus/?text=' +
+ // encodeURIComponent($$(this).text()));
+ // });
+ // });
+ </script>
+</head>
+<body>
+<h1 class="title"><%= title %></h1>
+<div class="version">Version <%= version %></div>
+
+<%= body %>
+
+</body>
+</html>
diff --git a/scripts/lib/glfm/update_specification.rb b/scripts/lib/glfm/update_specification.rb
index b87005bdb90..ef6f24d5a77 100644
--- a/scripts/lib/glfm/update_specification.rb
+++ b/scripts/lib/glfm/update_specification.rb
@@ -1,4 +1,5 @@
# frozen_string_literal: true
+require 'erb'
require 'fileutils'
require 'open-uri'
require 'pathname'
@@ -29,19 +30,21 @@ module Glfm
# create `output_spec/spec.txt`
glfm_spec_txt_header_lines = GLFM_SPEC_TXT_HEADER.split("\n").map { |line| "#{line}\n" }
official_spec_lines = readlines_from_path!(GLFM_OFFICIAL_SPECIFICATION_MD_PATH)
-
glfm_spec_txt_string = (glfm_spec_txt_header_lines + official_spec_lines).join('')
write_glfm_spec_txt(glfm_spec_txt_string)
# create `output_example_snapshots/snapshot_spec.md`
+ snapshot_spec_md_header_lines = ES_SNAPSHOT_SPEC_MD_HEADER.split("\n").map { |line| "#{line}\n" }
ghfm_spec_example_lines = extract_ghfm_spec_example_lines(ghfm_spec_lines)
official_spec_example_lines =
extract_glfm_spec_example_lines(official_spec_lines, GLFM_OFFICIAL_SPECIFICATION_MD_PATH)
internal_extension_lines = readlines_from_path!(GLFM_INTERNAL_EXTENSIONS_MD_PATH)
+ validate_internal_extensions_md(internal_extension_lines)
internal_extension_example_lines =
extract_glfm_spec_example_lines(internal_extension_lines, GLFM_INTERNAL_EXTENSIONS_MD_PATH)
+
snapshot_spec_md_string = (
- glfm_spec_txt_header_lines +
+ snapshot_spec_md_header_lines +
ghfm_spec_example_lines +
official_spec_example_lines +
["\n"] +
@@ -49,16 +52,37 @@ module Glfm
).join('')
write_snapshot_spec_md(snapshot_spec_md_string)
+ # Some unit tests can skip HTML generation if they don't need it, so they run faster
if skip_spec_html_generation
output("Skipping GLFM spec.html and snapshot_spec.html generation...")
return
end
- # create `output_spec/spec.html` and `output_snapshot_examples/snapshot_spec.html`
- spec_html_string, snapshot_spec_html_string =
- generate_spec_html_files(glfm_spec_txt_string, snapshot_spec_md_string)
- write_spec_html(spec_html_string)
- write_snapshot_spec_html(snapshot_spec_html_string)
+ # Use the backend markdown processing to render un-styled GLFM specification HTML files from the markdown
+ # We strip off the frontmatter headers before rendering.
+ spec_html_unstyled_string, snapshot_spec_html_unstyled_string =
+ generate_spec_html_files(
+ glfm_spec_txt_string.gsub!(GLFM_SPEC_TXT_HEADER, "[TOC]\n\n"),
+ snapshot_spec_md_string.gsub!(ES_SNAPSHOT_SPEC_MD_HEADER, "[TOC]\n\n"),
+ ghfm_spec_example_lines.join('')
+ )
+
+ # Add styling to the rendered HTML files, to make them look like the CommonMark and
+ # GitHub Flavored Markdown HTML-rendered specifications
+ spec_html_styled_string = add_styling_to_specification_html(
+ body: spec_html_unstyled_string,
+ title: GLFM_SPEC_TXT_TITLE,
+ version: GLFM_SPEC_VERSION
+ )
+ snapshot_spec_html_styled_string = add_styling_to_specification_html(
+ body: snapshot_spec_html_unstyled_string,
+ title: ES_SNAPSHOT_SPEC_TITLE,
+ version: GLFM_SPEC_VERSION
+ )
+
+ # Write out the styled HTML GLFM specification HTML files
+ write_spec_html(spec_html_styled_string)
+ write_snapshot_spec_html(snapshot_spec_html_styled_string)
end
private
@@ -156,6 +180,16 @@ module Glfm
spec_lines[(begin_tests_comment_line_index + 1)..(end_tests_comment_index - 1)]
end
+ def validate_internal_extensions_md(internal_extension_lines)
+ first_line = internal_extension_lines[0].strip
+ last_line = internal_extension_lines[-1].strip
+ return unless first_line != BEGIN_TESTS_COMMENT_LINE_TEXT || last_line != END_TESTS_COMMENT_LINE_TEXT
+
+ raise "Error: No content is allowed outside of the " \
+ "'#{BEGIN_TESTS_COMMENT_LINE_TEXT}' and '#{END_TESTS_COMMENT_LINE_TEXT}' comments " \
+ "in '#{GLFM_INTERNAL_EXTENSIONS_MD_PATH}'."
+ end
+
def write_glfm_spec_txt(glfm_spec_txt_string)
output("Writing #{GLFM_SPEC_TXT_PATH}...")
FileUtils.mkdir_p(Pathname.new(GLFM_SPEC_TXT_PATH).dirname)
@@ -168,11 +202,32 @@ module Glfm
write_file(ES_SNAPSHOT_SPEC_MD_PATH, snapshot_spec_md_string)
end
- def generate_spec_html_files(spec_txt_string, snapshot_spec_md_string)
+ def generate_spec_html_files(spec_txt_string, snapshot_spec_md_string, ghfm_spec_examples_string)
output("Generating spec.html and snapshot_spec.html from spec.txt and snapshot_spec.md markdown...")
- spec_txt_string_split_examples = split_examples_into_html_and_md(spec_txt_string)
- snapshot_spec_md_string_split_examples = split_examples_into_html_and_md(snapshot_spec_md_string)
+ # NOTE: spec.txt only contains official GLFM examples, but snapshot_spec.md contains ALL examples, with the
+ # official GLFM examples coming _after_ the GHFM (which contains CommonMark + GHFM) examples, and the
+      # internal extension examples coming last. In the snapshot_spec.md, the CommonMark and GHFM examples come
+      # first, in order for the example numbers to match the numbers in those separate specifications [1]. But, we
+ # also need for the numbering of the official examples in spec.txt to match the numbering of the official
+ # examples in snapshot_spec.md. Here's the ordering:
+ #
+ # spec.txt:
+ # 1. GLFM Official
+ #
+ # snapshot_spec.md:
+ # 1. GHFM (contains CommonMark + GHFM)
+ # 2. GLFM Official
+ # 3. GLFM Internal
+ #
+ # [1] Note that the example numbering in the GLFM spec.html is currently out of sync with its corresponding
+ # spec.txt because its rendering is out of date. This has been reported in the following issue:
+ # https://github.com/github/cmark-gfm/issues/288
+ ghfm_spec_examples_count = ghfm_spec_examples_string.scan(EXAMPLE_BEGIN_STRING).length
+
+ spec_txt_string_split_examples =
+ transform_examples_for_rendering(spec_txt_string, starting_example_number: ghfm_spec_examples_count + 1)
+ snapshot_spec_md_string_split_examples = transform_examples_for_rendering(snapshot_spec_md_string)
input_markdown_yml_string = <<~MARKDOWN
---
@@ -212,15 +267,46 @@ module Glfm
[rendered_html_hash.fetch(:spec_txt), rendered_html_hash.fetch(:snapshot_spec_md)]
end
- def split_examples_into_html_and_md(spec_md_string)
- spec_md_string.gsub(
- /(^#{EXAMPLE_BEGIN_STRING}.*?$(?:.|\n)*?)^\.$(\n(?:.|\n)*?^#{EXAMPLE_END_STRING}$)/mo,
- "\\1#{EXAMPLE_BACKTICKS_STRING}\n\n#{EXAMPLE_BACKTICKS_STRING}\\2"
- )
+ # NOTE: body, title, and version are used by the ERB binding.
+ # noinspection RubyUnusedLocalVariable
+ def add_styling_to_specification_html(body:, title:, version:)
+ # noinspection RubyMismatchedArgumentType
+ ERB.new(File.read(File.expand_path('specification_html_template.erb', __dir__))).result(binding)
+ end
+
+ def transform_examples_for_rendering(spec_md_string, starting_example_number: 1)
+ # This method:
+ # 1. Splits the single example code block which has a period between the markdown and HTML into two code blocks
+ # 2. Adds a wrapper div for use in styling and target for the example number named anchor. This will get the
+ # 'class="example" id="example-n"' attributes applied via javascript (since markdown rendering does not
+ # preserve classes or IDs)
+ # 3. Adds a div which includes the example number named anchor and text. This will get the 'class="examplenum"'
+ # attribute applied via javascript.
+ #
+      # NOTE: Even though they will get stripped during markdown rendering, we will go ahead and add the class and id
+ # attributes here, for easier debugging and comparison to the source markdown.
+ example_replacement_regex = /(^#{EXAMPLE_BEGIN_STRING}.*?$(?:.|\n)*?)^\.$(\n(?:.|\n)*?^#{EXAMPLE_END_STRING}$)/mo
+ example_num = starting_example_number
+ spec_md_string.gsub(example_replacement_regex) do |_example_string|
+ markdown_part = ::Regexp.last_match(1)
+ html_part = ::Regexp.last_match(2)
+ example_anchor_name = "example-#{example_num}"
+ examplenum_div = %(<div class="examplenum"><a href="##{example_anchor_name}">Example #{example_num}</a></div>\n)
+ example_num += 1
+ # NOTE: We need blank lines before the markdown code blocks so they will be rendered properly
+ %(<div class="example" id="#{example_anchor_name}">\n) +
+ "#{examplenum_div}\n" \
+ "#{markdown_part}" \
+ "#{EXAMPLE_BACKTICKS_STRING}" \
+ "\n\n" \
+ "#{EXAMPLE_BACKTICKS_STRING}" \
+ "#{html_part}\n" \
+ '</div>'
+ end
end
def write_spec_html(spec_html_string)
- output("Writing #{GLFM_SPEC_TXT_PATH}...")
+ output("Writing #{GLFM_SPEC_HTML_PATH}...")
FileUtils.mkdir_p(Pathname.new(GLFM_SPEC_HTML_PATH).dirname)
write_file(GLFM_SPEC_HTML_PATH, "#{spec_html_string}\n")
end
diff --git a/scripts/lint-doc-quality.sh b/scripts/lint-doc-quality.sh
new file mode 100755
index 00000000000..9d8409a7c80
--- /dev/null
+++ b/scripts/lint-doc-quality.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+echo '=> Generating code quality artifact...'
+echo
+
+# Generate code quality artifact for Vale warnings only on changed files.
+# Only works on merged results pipelines, so first checks if a merged results CI variable is present.
+# If not present, runs on all files.
+
+if [ -z "${CI_MERGE_REQUEST_TARGET_BRANCH_SHA}" ]
+then
+ MD_DOC_PATH=${MD_DOC_PATH:-doc}
+ echo "Merge request pipeline (detached) detected. Testing all files."
+else
+ MERGE_BASE=$(git merge-base "${CI_MERGE_REQUEST_TARGET_BRANCH_SHA}" "${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}")
+ MD_DOC_PATH=$(git diff --diff-filter=d --name-only "${MERGE_BASE}..${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}" -- 'doc/*.md')
+ if [ -n "${MD_DOC_PATH}" ]
+ then
+ echo -e "Merged results pipeline detected. Testing only the following files: ${MD_DOC_PATH}"
+ fi
+fi
+
+echo "vale --output=doc/.vale/vale-json.tmpl --minAlertLevel warning ${MD_DOC_PATH} > gl-code-quality-report-docs.json"
+vale --output=doc/.vale/vale-json.tmpl --minAlertLevel warning ${MD_DOC_PATH} > gl-code-quality-report-docs.json
diff --git a/scripts/review_apps/automated_cleanup.rb b/scripts/review_apps/automated_cleanup.rb
index f020283de52..2d87c18d7d2 100755
--- a/scripts/review_apps/automated_cleanup.rb
+++ b/scripts/review_apps/automated_cleanup.rb
@@ -22,6 +22,7 @@ module ReviewApps
IGNORED_KUBERNETES_ERRORS = [
'NotFound'
].freeze
+ ENVIRONMENTS_NOT_FOUND_THRESHOLD = 3
# $GITLAB_PROJECT_REVIEW_APP_CLEANUP_API_TOKEN => `Automated Review App Cleanup` project token
def initialize(
@@ -30,10 +31,11 @@ module ReviewApps
api_endpoint: ENV['CI_API_V4_URL'],
options: {}
)
- @project_path = project_path
- @gitlab_token = gitlab_token
- @api_endpoint = api_endpoint
- @dry_run = options[:dry_run]
+ @project_path = project_path
+ @gitlab_token = gitlab_token
+ @api_endpoint = api_endpoint
+ @dry_run = options[:dry_run]
+ @environments_not_found_count = 0
puts "Dry-run mode." if dry_run
end
@@ -91,13 +93,11 @@ module ReviewApps
release = Tooling::Helm3Client::Release.new(environment.slug, 1, deployed_at.to_s, nil, nil, environment.slug)
releases_to_delete << release
end
+ elsif deployed_at >= stop_threshold
+ print_release_state(subject: 'Review App', release_name: environment.slug, release_date: last_deploy, action: 'leaving')
else
- if deployed_at >= stop_threshold
- print_release_state(subject: 'Review App', release_name: environment.slug, release_date: last_deploy, action: 'leaving')
- else
- environment_state = fetch_environment(environment)&.state
- stop_environment(environment, deployment) if environment_state && environment_state != 'stopped'
- end
+ environment_state = fetch_environment(environment)&.state
+ stop_environment(environment, deployment) if environment_state && environment_state != 'stopped'
end
checked_environments << environment.slug
@@ -174,7 +174,7 @@ module ReviewApps
private
- attr_reader :project_path, :gitlab_token, :api_endpoint, :dry_run
+ attr_reader :api_endpoint, :dry_run, :gitlab_token, :project_path
def fetch_environment(environment)
gitlab.environment(project_path, environment.id)
@@ -188,10 +188,17 @@ module ReviewApps
print_release_state(subject: 'Review app', release_name: environment.slug, release_date: release_date, action: 'deleting')
gitlab.delete_environment(project_path, environment.id) unless dry_run
+ rescue Gitlab::Error::NotFound
+ puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) was not found: ignoring it"
+ @environments_not_found_count += 1
+
+ if @environments_not_found_count >= ENVIRONMENTS_NOT_FOUND_THRESHOLD
+ raise "At least #{ENVIRONMENTS_NOT_FOUND_THRESHOLD} environments were missing when we tried to delete them. Please investigate"
+ end
rescue Gitlab::Error::Forbidden
puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) is forbidden: skipping it"
rescue Gitlab::Error::InternalServerError
- puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) 500 error - ignoring it"
+ puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) 500 error: ignoring it"
end
def stop_environment(environment, deployment)
diff --git a/scripts/review_apps/base-config.yaml b/scripts/review_apps/base-config.yaml
index f845dd04e8f..0981aafec22 100644
--- a/scripts/review_apps/base-config.yaml
+++ b/scripts/review_apps/base-config.yaml
@@ -69,8 +69,8 @@ gitlab:
cpu: 400m
memory: 920Mi
limits:
- cpu: 600m
- memory: 1100Mi
+ cpu: 800m
+ memory: 1380Mi
sidekiq:
resources:
@@ -99,7 +99,7 @@ gitlab:
cpu: 746m
memory: 2809Mi
limits:
- cpu: 1119m
+ cpu: 1300m
memory: 4214Mi
minReplicas: 1
maxReplicas: 1
diff --git a/scripts/review_apps/gcp-quotas-checks.rb b/scripts/review_apps/gcp-quotas-checks.rb
new file mode 100755
index 00000000000..187277f87ea
--- /dev/null
+++ b/scripts/review_apps/gcp-quotas-checks.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+# We created this because we could not monitor k8s resource count directly in GCP monitoring (see
+# https://gitlab.com/gitlab-org/quality/engineering-productivity-infrastructure/-/issues/37)
+#
+# If this functionality ever becomes available, please replace this script with GCP monitoring!
+
+require 'json'
+
+class QuotaChecker
+ def initialize
+ @exit_with_error = false
+ end
+
+ def check_quotas(quotas, threshold: 0.8)
+ quotas.each do |quota|
+ print "Checking quota #{quota['metric']}..."
+ quota_percent_usage = quota['usage'].to_f / quota['limit']
+ if quota_percent_usage > threshold
+ puts "❌ #{quota['metric']} is above the #{threshold * 100}% threshold! (current value: #{quota_percent_usage})"
+ @exit_with_error = true
+ else
+ puts "✅"
+ end
+ end
+ end
+
+ def failed?
+ @exit_with_error
+ end
+end
+
+quota_checker = QuotaChecker.new
+
+puts "Checking regional quotas:"
+gcloud_command_output = `gcloud compute regions describe us-central1 --format=json`
+quotas = JSON.parse(gcloud_command_output)['quotas']
+quota_checker.check_quotas(quotas)
+puts
+
+puts "Checking project-wide quotas:"
+gcloud_command_output = `gcloud compute project-info describe --format=json`
+quotas = JSON.parse(gcloud_command_output)['quotas']
+quota_checker.check_quotas(quotas)
+
+exit 1 if quota_checker.failed?
diff --git a/scripts/review_apps/gcp_cleanup.sh b/scripts/review_apps/gcp_cleanup.sh
deleted file mode 100755
index 114ac6f7ec0..00000000000
--- a/scripts/review_apps/gcp_cleanup.sh
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env bash
-
-source scripts/utils.sh
-
-function setup_gcp_dependencies() {
- apt-get update && apt-get install -y jq
-
- gcloud auth activate-service-account --key-file="${REVIEW_APPS_GCP_CREDENTIALS}"
- gcloud config set project "${REVIEW_APPS_GCP_PROJECT}"
-}
-
-# These scripts require the following environment variables:
-# - REVIEW_APPS_GCP_REGION - e.g `us-central1`
-# - KUBE_NAMESPACE - e.g `review-apps`
-
-function delete_firewall_rules() {
- if [[ ${#@} -eq 0 ]]; then
- echoinfo "No firewall rules to be deleted" true
- return
- fi
-
- echoinfo "Deleting firewall rules:" true
- echo "${@}"
-
- if [[ ${DRY_RUN} = 1 ]]; then
- echo "[DRY RUN] gcloud compute firewall-rules delete -q" "${@}"
- else
- gcloud compute firewall-rules delete -q "${@}"
- fi
-}
-
-function delete_forwarding_rules() {
- if [[ ${#@} -eq 0 ]]; then
- echoinfo "No forwarding rules to be deleted" true
- return
- fi
-
- echoinfo "Deleting forwarding rules:" true
- echo "${@}"
-
- if [[ ${DRY_RUN} = 1 ]]; then
- echo "[DRY RUN] gcloud compute forwarding-rules delete -q" "${@}" "--region ${REVIEW_APPS_GCP_REGION}"
- else
- gcloud compute forwarding-rules delete -q "${@}" --region "${REVIEW_APPS_GCP_REGION}"
- fi
-}
-
-function delete_target_pools() {
- if [[ ${#@} -eq 0 ]]; then
- echoinfo "No target pools to be deleted" true
- return
- fi
-
- echoinfo "Deleting target pools:" true
- echo "${@}"
-
- if [[ ${DRY_RUN} = 1 ]]; then
- echo "[DRY RUN] gcloud compute target-pools delete -q" "${@}" "--region ${REVIEW_APPS_GCP_REGION}"
- else
- gcloud compute target-pools delete -q "${@}" --region "${REVIEW_APPS_GCP_REGION}"
- fi
-}
-
-function delete_http_health_checks() {
- if [[ ${#@} -eq 0 ]]; then
- echoinfo "No http health checks to be deleted" true
- return
- fi
-
- echoinfo "Deleting http health checks:" true
- echo "${@}"
-
- if [[ ${DRY_RUN} = 1 ]]; then
- echo "[DRY RUN] gcloud compute http-health-checks delete -q" "${@}"
- else
- gcloud compute http-health-checks delete -q "${@}"
- fi
-}
-
-function get_related_firewall_rules() {
- local forwarding_rule=${1}
-
- gcloud compute firewall-rules list --filter "name~${forwarding_rule}" --format "value(name)"
-}
-
-function get_service_name_in_forwarding_rule() {
- local forwarding_rule=${1}
-
- gcloud compute forwarding-rules describe "${forwarding_rule}" --region "${REVIEW_APPS_GCP_REGION}" --format "value(description)" | jq -r '.["kubernetes.io/service-name"]'
-}
-
-function forwarding_rule_k8s_service_exists() {
- local namespace="${KUBE_NAMESPACE}"
- local namespaced_service_name=$(get_service_name_in_forwarding_rule "$forwarding_rule")
-
- if [[ ! $namespaced_service_name =~ ^"${namespace}" ]]; then
- return 0 # this prevents `review-apps-ee` pipeline from deleting `review-apps-ce` resources and vice versa
- fi
-
- local service_name=$(echo "${namespaced_service_name}" | sed -e "s/${namespace}\///g")
-
- kubectl get svc "${service_name}" -n "${namespace}" >/dev/null 2>&1
- local status=$?
-
- return $status
-}
-
-function gcp_cleanup() {
- if [[ ! $(command -v kubectl) ]]; then
- echoerr "kubectl executable not found"
- return 1
- fi
-
- if [[ -z "${REVIEW_APPS_GCP_REGION}" ]]; then
- echoerr "REVIEW_APPS_GCP_REGION is not set."
- return 1
- fi
-
- if [[ -z "${KUBE_NAMESPACE}" ]]; then
- echoerr "KUBE_NAMESPACE is not set."
- return 1
- fi
-
- if [[ -n "${DRY_RUN}" ]]; then
- echoinfo "Running in DRY_RUN"
- fi
-
- local target_pools_to_delete=()
- local firewall_rules_to_delete=()
- local forwarding_rules_to_delete=()
- local http_health_checks_to_delete=()
-
- for forwarding_rule in $(gcloud compute forwarding-rules list --filter="region:(${REVIEW_APPS_GCP_REGION})" --format "value(name)"); do
- echoinfo "Inspecting forwarding rule ${forwarding_rule}" true
-
- # We perform clean up when there is no more kubernetes service that require the resources.
- # To identify the kubernetes service using the resources,
- # we find the service name indicated in the forwarding rule description, e.g:
- #
- # $ gcloud compute forwarding-rules describe aff68b997da1211e984a042010af0019
- # # ...
- # description: '{"kubernetes.io/service-name":"review-apps-ee/review-winh-eslin-809vqz-nginx-ingress-controller"}'
- # # ...
- if forwarding_rule_k8s_service_exists "${forwarding_rule}"; then
- echoinfo "Skip clean up for ${forwarding_rule}"
- else
- echoinfo "Queuing forwarding rule, firewall rule, target pool and health check for ${forwarding_rule} to be cleaned up"
-
- firewall_rules_to_delete+=($(get_related_firewall_rules "${forwarding_rule}"))
- forwarding_rules_to_delete+=(${forwarding_rule})
- target_pools_to_delete+=(${forwarding_rule})
- http_health_checks_to_delete+=(${forwarding_rule})
- fi
- done
-
- delete_firewall_rules "${firewall_rules_to_delete[@]}"
- delete_forwarding_rules "${forwarding_rules_to_delete[@]}"
- delete_target_pools "${target_pools_to_delete[@]}"
- delete_http_health_checks "${http_health_checks_to_delete[@]}"
-}
diff --git a/scripts/review_apps/k8s-resources-count-checks.sh b/scripts/review_apps/k8s-resources-count-checks.sh
new file mode 100755
index 00000000000..b63fa043065
--- /dev/null
+++ b/scripts/review_apps/k8s-resources-count-checks.sh
@@ -0,0 +1,90 @@
+#!/usr/bin/env bash
+
+# We created this because we could not monitor quotas easily in GCP monitoring (see
+# https://gitlab.com/gitlab-org/quality/engineering-productivity-infrastructure/-/issues/37)
+#
+# If this functionality ever becomes available, please replace this script with GCP monitoring!
+
+function k8s_resource_count() {
+ local resource_name="${1}"
+
+ kubectl get -A "${resource_name}" 2> /dev/null | wc -l | xargs
+}
+
+# ~13 services per review-app - ~230 review apps
+SERVICES_COUNT_THRESHOLD=3000
+REVIEW_APPS_COUNT_THRESHOLD=200
+
+exit_with_error=false
+
+# In the current GKE cluster configuration, we should never go higher than 4096 services per cluster.
+services_count=$(kubectl get services -A | wc -l | xargs)
+if [ "${services_count}" -gt "${SERVICES_COUNT_THRESHOLD}" ]; then
+ >&2 echo "❌ [ERROR] Services are above ${SERVICES_COUNT_THRESHOLD} (currently at ${services_count})"
+ exit_with_error=true
+fi
+
+review_apps_count=$(helm ls -A | wc -l | xargs)
+if [ "${review_apps_count}" -gt "${REVIEW_APPS_COUNT_THRESHOLD}" ]; then
+ >&2 echo "❌ [ERROR] Review apps count are above ${REVIEW_APPS_COUNT_THRESHOLD} (currently at ${review_apps_count})"
+ exit_with_error=true
+fi
+
+namespaces_count=$(kubectl get namespaces -A | wc -l | xargs)
+if [ "$(echo $(($namespaces_count - $review_apps_count)) | sed 's/-//')" -gt 30 ]; then
+ >&2 echo "❌ [ERROR] Difference between namespaces and deployed review-apps is above 30 (${namespaces_count} namespaces and ${review_apps_count} review-apps)"
+ exit_with_error=true
+fi
+
+if [ "${exit_with_error}" = true ] ; then
+ exit 1
+fi
+
+echo -e "\nShow k8s resources count: "
+cat > k8s-resources-count.out <<COMMANDS
+ $(k8s_resource_count backendconfigs.cloud.google.com) backendconfigs.cloud.google.com
+ $(k8s_resource_count capacityrequests.internal.autoscaling.gke.io) capacityrequests.internal.autoscaling.gke.io
+ $(k8s_resource_count capacityrequests.internal.autoscaling.k8s.io) capacityrequests.internal.autoscaling.k8s.io
+ $(k8s_resource_count certificaterequests.cert-manager.io) certificaterequests.cert-manager.io
+ $(k8s_resource_count certificates.cert-manager.io) certificates.cert-manager.io
+ $(k8s_resource_count challenges.acme.cert-manager.io) challenges.acme.cert-manager.io
+ $(k8s_resource_count configmaps) configmaps
+ $(k8s_resource_count containerwatcherstatuses.containerthreatdetection.googleapis.com) containerwatcherstatuses.containerthreatdetection.googleapis.com
+ $(k8s_resource_count controllerrevisions.apps) controllerrevisions.apps
+ $(k8s_resource_count cronjobs.batch) cronjobs.batch
+ $(k8s_resource_count csistoragecapacities.storage.k8s.io) csistoragecapacities.storage.k8s.io
+ $(k8s_resource_count daemonsets.apps) daemonsets.apps
+ $(k8s_resource_count deployments.apps) deployments.apps
+ $(k8s_resource_count endpoints) endpoints
+ $(k8s_resource_count frontendconfigs.networking.gke.io) frontendconfigs.networking.gke.io
+ $(k8s_resource_count horizontalpodautoscalers.autoscaling) horizontalpodautoscalers.autoscaling
+ $(k8s_resource_count ingressclasses) ingressclasses
+ $(k8s_resource_count ingresses.networking.k8s.io) ingresses.networking.k8s.io
+ $(k8s_resource_count issuers.cert-manager.io) issuers.cert-manager.io
+ $(k8s_resource_count jobs.batch) jobs.batch
+ $(k8s_resource_count leases.coordination.k8s.io) leases.coordination.k8s.io
+ $(k8s_resource_count limitranges) limitranges
+ $(k8s_resource_count managedcertificates.networking.gke.io) managedcertificates.networking.gke.io
+ $(k8s_resource_count networkpolicies.networking.k8s.io) networkpolicies.networking.k8s.io
+ $(k8s_resource_count orders.acme.cert-manager.io) orders.acme.cert-manager.io
+ $(k8s_resource_count persistentvolumeclaims) persistentvolumeclaims
+ $(k8s_resource_count poddisruptionbudgets.policy) poddisruptionbudgets.policy
+ $(k8s_resource_count pods) pods
+ $(k8s_resource_count podtemplates) podtemplates
+ $(k8s_resource_count replicasets.apps) replicasets.apps
+ $(k8s_resource_count replicationcontrollers) replicationcontrollers
+ $(k8s_resource_count resourcequotas) resourcequotas
+ $(k8s_resource_count rolebindings.rbac.authorization.k8s.io) rolebindings.rbac.authorization.k8s.io
+ $(k8s_resource_count roles.rbac.authorization.k8s.io) roles.rbac.authorization.k8s.io
+ $(k8s_resource_count scalingpolicies.scalingpolicy.kope.io) scalingpolicies.scalingpolicy.kope.io
+ $(k8s_resource_count secrets) secrets
+ $(k8s_resource_count serviceaccounts) serviceaccounts
+ $(k8s_resource_count serviceattachments.networking.gke.io) serviceattachments.networking.gke.io
+ $(k8s_resource_count servicenetworkendpointgroups.networking.gke.io) servicenetworkendpointgroups.networking.gke.io
+ $(k8s_resource_count services) services
+ $(k8s_resource_count statefulsets.apps) statefulsets.apps
+ $(k8s_resource_count updateinfos.nodemanagement.gke.io) updateinfos.nodemanagement.gke.io
+ $(k8s_resource_count volumesnapshots.snapshot.storage.k8s.io) volumesnapshots.snapshot.storage.k8s.io
+COMMANDS
+
+sort --reverse --numeric-sort < k8s-resources-count.out
diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh
index 5883141a943..e185ed43e38 100755
--- a/scripts/review_apps/review-apps.sh
+++ b/scripts/review_apps/review-apps.sh
@@ -196,9 +196,9 @@ function create_application_secret() {
if [ -z "${REVIEW_APPS_EE_LICENSE_FILE}" ]; then echo "License not found" && return; fi
- gitlab_license_shared_secret=$(kubectl get secret --namespace ${namespace} --no-headers -o=custom-columns=NAME:.metadata.name shared-gitlab-license 2> /dev/null | tail -n 1)
+ gitlab_license_shared_secret=$(kubectl get secret --namespace "${namespace}" --no-headers -o=custom-columns=NAME:.metadata.name shared-gitlab-license 2> /dev/null | tail -n 1)
if [[ "${gitlab_license_shared_secret}" == "" ]]; then
- echoinfo "Creating the 'shared-gitlab-license' secret in the ${namespace} namespace..." true
+ echoinfo "Creating the 'shared-gitlab-license' secret in the "${namespace}" namespace..." true
kubectl create secret generic --namespace "${namespace}" \
"shared-gitlab-license" \
--from-file=license="${REVIEW_APPS_EE_LICENSE_FILE}" \
@@ -262,7 +262,7 @@ function deploy() {
gitlab_workhorse_image_repository="${IMAGE_REPOSITORY}/gitlab-workhorse-ee"
sentry_enabled="false"
- if [ -n ${REVIEW_APPS_SENTRY_DSN} ]; then
+ if [ -n "${REVIEW_APPS_SENTRY_DSN}" ]; then
echo "REVIEW_APPS_SENTRY_DSN detected, enabling Sentry"
sentry_enabled="true"
fi
@@ -342,11 +342,25 @@ EOF
}
function verify_deploy() {
- local namespace="${CI_ENVIRONMENT_SLUG}"
+ local deployed="false"
+
+ mkdir -p curl-logs/
- echoinfo "[$(date '+%H:%M:%S')] Verifying deployment at ${CI_ENVIRONMENT_URL}"
+ for i in {1..60}; do # try for 5 minutes
+ local now=$(date '+%H:%M:%S')
+ echo "[${now}] Verifying deployment at ${CI_ENVIRONMENT_URL}/users/sign_in"
+ log_name="curl-logs/${now}.log"
+ curl --connect-timeout 3 -o "${log_name}" -s "${CI_ENVIRONMENT_URL}/users/sign_in"
+
+ if grep "Remember me" "${log_name}" &> /dev/null; then
+ deployed="true"
+ break
+ fi
+
+ sleep 5
+ done
- if retry "test_url \"${CI_ENVIRONMENT_URL}\""; then
+ if [[ "${deployed}" == "true" ]]; then
echoinfo "[$(date '+%H:%M:%S')] Review app is deployed to ${CI_ENVIRONMENT_URL}"
return 0
else
@@ -358,6 +372,26 @@ function verify_deploy() {
function display_deployment_debug() {
local namespace="${CI_ENVIRONMENT_SLUG}"
- echoinfo "Environment debugging data:"
- kubectl get svc,pods,jobs --namespace "${namespace}"
+ # Install dig to inspect DNS entries
+ #
+ # Silent install: see https://stackoverflow.com/a/52642167/1620195
+ apt-get -qq update && apt-get -qq install -y dnsutils < /dev/null > /dev/null
+
+ echoinfo "[debugging data] Check review-app webservice DNS entry:"
+ dig +short $(echo "${CI_ENVIRONMENT_URL}" | sed 's~http[s]*://~~g')
+
+ echoinfo "[debugging data] Check external IP for nginx-ingress-controller service (should be THE SAME AS the DNS entry IP above):"
+ kubectl -n "${namespace}" get svc "${namespace}-nginx-ingress-controller" -o jsonpath='{.status.loadBalancer.ingress[].ip}'
+
+ echoinfo "[debugging data] k8s resources:"
+ kubectl -n "${namespace}" get pods
+
+ echoinfo "[debugging data] PostgreSQL logs:"
+ kubectl -n "${namespace}" logs -l app=postgresql --all-containers
+
+ echoinfo "[debugging data] DB migrations logs:"
+ kubectl -n "${namespace}" logs -l app=migrations --all-containers
+
+ echoinfo "[debugging data] Webservice logs:"
+ kubectl -n "${namespace}" logs -l app=webservice -c webservice
}
diff --git a/scripts/rspec_helpers.sh b/scripts/rspec_helpers.sh
index 14c5b94e921..923b633fcc9 100644
--- a/scripts/rspec_helpers.sh
+++ b/scripts/rspec_helpers.sh
@@ -3,44 +3,14 @@
function retrieve_tests_metadata() {
mkdir -p $(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}") $(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}") "${RSPEC_PROFILING_FOLDER_PATH}"
- if [[ -n "${RETRIEVE_TESTS_METADATA_FROM_PAGES}" ]]; then
- if [[ ! -f "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ]]; then
- curl --location -o "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ||
- echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
- fi
-
- if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then
- curl --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FLAKY_RSPEC_SUITE_REPORT_PATH}" ||
- echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
- fi
- else
- # ${CI_DEFAULT_BRANCH} might not be master in other forks but we want to
- # always target the canonical project here, so the branch must be hardcoded
- local project_path="gitlab-org/gitlab"
- local artifact_branch="master"
- local username="gitlab-bot"
- local job_name="update-tests-metadata"
- local test_metadata_job_id
-
- # Ruby
- test_metadata_job_id=$(scripts/api/get_job_id.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" -q "status=success" -q "ref=${artifact_branch}" -q "username=${username}" -Q "scope=success" --job-name "${job_name}")
-
- if [[ -n "${test_metadata_job_id}" ]]; then
- echo "test_metadata_job_id: ${test_metadata_job_id}"
-
- if [[ ! -f "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ]]; then
- scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
- fi
-
- if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then
- scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ||
- echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
- fi
- else
- echo "test_metadata_job_id couldn't be found!"
+ if [[ ! -f "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ]]; then
+ curl --location -o "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ||
echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
+ fi
+
+ if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then
+ curl --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FLAKY_RSPEC_SUITE_REPORT_PATH}" ||
echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
- fi
fi
}
@@ -74,31 +44,8 @@ function update_tests_metadata() {
function retrieve_tests_mapping() {
mkdir -p $(dirname "$RSPEC_PACKED_TESTS_MAPPING_PATH")
- if [[ -n "${RETRIEVE_TESTS_METADATA_FROM_PAGES}" ]]; then
- if [[ ! -f "${RSPEC_PACKED_TESTS_MAPPING_PATH}" ]]; then
- (curl --location -o "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz" "https://gitlab-org.gitlab.io/gitlab/${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz" && gzip -d "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz") || echo "{}" > "${RSPEC_PACKED_TESTS_MAPPING_PATH}"
- fi
- else
- # ${CI_DEFAULT_BRANCH} might not be master in other forks but we want to
- # always target the canonical project here, so the branch must be hardcoded
- local project_path="gitlab-org/gitlab"
- local artifact_branch="master"
- local username="gitlab-bot"
- local job_name="update-tests-metadata"
- local test_metadata_with_mapping_job_id
-
- test_metadata_with_mapping_job_id=$(scripts/api/get_job_id.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" -q "status=success" -q "ref=${artifact_branch}" -q "username=${username}" -Q "scope=success" --job-name "${job_name}")
-
- if [[ -n "${test_metadata_with_mapping_job_id}" ]]; then
- echo "test_metadata_with_mapping_job_id: ${test_metadata_with_mapping_job_id}"
-
- if [[ ! -f "${RSPEC_PACKED_TESTS_MAPPING_PATH}" ]]; then
- (scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_with_mapping_job_id}" --artifact-path "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz" && gzip -d "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz") || echo "{}" > "${RSPEC_PACKED_TESTS_MAPPING_PATH}"
- fi
- else
- echo "test_metadata_with_mapping_job_id couldn't be found!"
- echo "{}" > "${RSPEC_PACKED_TESTS_MAPPING_PATH}"
- fi
+ if [[ ! -f "${RSPEC_PACKED_TESTS_MAPPING_PATH}" ]]; then
+ (curl --location -o "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz" "https://gitlab-org.gitlab.io/gitlab/${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz" && gzip -d "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz") || echo "{}" > "${RSPEC_PACKED_TESTS_MAPPING_PATH}"
fi
scripts/unpack-test-mapping "${RSPEC_PACKED_TESTS_MAPPING_PATH}" "${RSPEC_TESTS_MAPPING_PATH}"
@@ -107,31 +54,8 @@ function retrieve_tests_mapping() {
function retrieve_frontend_fixtures_mapping() {
mkdir -p $(dirname "$FRONTEND_FIXTURES_MAPPING_PATH")
- if [[ -n "${RETRIEVE_TESTS_METADATA_FROM_PAGES}" ]]; then
- if [[ ! -f "${FRONTEND_FIXTURES_MAPPING_PATH}" ]]; then
- (curl --location -o "${FRONTEND_FIXTURES_MAPPING_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FRONTEND_FIXTURES_MAPPING_PATH}") || echo "{}" > "${FRONTEND_FIXTURES_MAPPING_PATH}"
- fi
- else
- # ${CI_DEFAULT_BRANCH} might not be master in other forks but we want to
- # always target the canonical project here, so the branch must be hardcoded
- local project_path="gitlab-org/gitlab"
- local artifact_branch="master"
- local username="gitlab-bot"
- local job_name="generate-frontend-fixtures-mapping"
- local test_metadata_with_mapping_job_id
-
- test_metadata_with_mapping_job_id=$(scripts/api/get_job_id.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" -q "ref=${artifact_branch}" -q "username=${username}" -Q "scope=success" --job-name "${job_name}")
-
- if [[ $? -eq 0 ]] && [[ -n "${test_metadata_with_mapping_job_id}" ]]; then
- echo "test_metadata_with_mapping_job_id: ${test_metadata_with_mapping_job_id}"
-
- if [[ ! -f "${FRONTEND_FIXTURES_MAPPING_PATH}" ]]; then
- (scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_with_mapping_job_id}" --artifact-path "${FRONTEND_FIXTURES_MAPPING_PATH}") || echo "{}" > "${FRONTEND_FIXTURES_MAPPING_PATH}"
- fi
- else
- echo "test_metadata_with_mapping_job_id couldn't be found!"
- echo "{}" > "${FRONTEND_FIXTURES_MAPPING_PATH}"
- fi
+ if [[ ! -f "${FRONTEND_FIXTURES_MAPPING_PATH}" ]]; then
+ (curl --location -o "${FRONTEND_FIXTURES_MAPPING_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FRONTEND_FIXTURES_MAPPING_PATH}") || echo "{}" > "${FRONTEND_FIXTURES_MAPPING_PATH}"
fi
}
@@ -324,9 +248,40 @@ function retry_failed_rspec_examples() {
# Merge the JUnit report from retry into the first-try report
junit_merge "${JUNIT_RETRY_FILE}" "${JUNIT_RESULT_FILE}" --update-only
+ if [[ $rspec_run_status -eq 0 ]]; then
+ # The test is flaky because it succeeded after being retried.
+ # Make the pipeline "pass with warnings" if the flaky test is part of this MR.
+ warn_on_successfully_retried_test
+ fi
+
exit $rspec_run_status
}
+# Exit with an allowed_failure exit code if the flaky test was part of the MR that triggered this pipeline
+function warn_on_successfully_retried_test {
+ local changed_files=$(git diff --name-only $CI_MERGE_REQUEST_TARGET_BRANCH_SHA | grep spec)
+ echoinfo "A test was flaky and succeeded after being retried. Checking to see if flaky test is part of this MR..."
+
+ if [[ "$changed_files" == "" ]]; then
+ echoinfo "Flaky test was not part of this MR."
+ return
+ fi
+
+ while read changed_file
+ do
+ # include the root path in the regexp to eliminate false positives
+ changed_file="^\./$changed_file"
+
+ if grep -q "$changed_file" "$RETRIED_TESTS_REPORT_PATH"; then
+ echoinfo "Flaky test '$changed_file' was found in the list of files changed by this MR."
+ echoinfo "Exiting with code $SUCCESSFULLY_RETRIED_TEST_EXIT_CODE."
+ exit $SUCCESSFULLY_RETRIED_TEST_EXIT_CODE
+ fi
+ done <<< "$changed_files"
+
+ echoinfo "Flaky test was not part of this MR."
+}
+
function rspec_rerun_previous_failed_tests() {
local test_file_count_threshold=${RSPEC_PREVIOUS_FAILED_TEST_FILE_COUNT_THRESHOLD:-10}
local matching_tests_file=${1}
diff --git a/scripts/rubocop-max-files-in-cache-check b/scripts/rubocop-max-files-in-cache-check
deleted file mode 100755
index 34caa0e197c..00000000000
--- a/scripts/rubocop-max-files-in-cache-check
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-require_relative '../config/bundler_setup'
-require 'rubocop'
-
-MINIMUM_MAX_FILES_IN_CACHE_MARGIN = 1.05
-RECOMMENDED_MAX_FILES_IN_CACHE_MARGIN = 1.25
-RUBOCOP_LIST_TARGET_FILES_COMMAND = 'bundle exec rubocop --list-target-files | wc -l'
-
-RuboCopMaxFilesInCacheIsTooSmall = Class.new(StandardError)
-
-rubocop_target_files_count = `#{RUBOCOP_LIST_TARGET_FILES_COMMAND}`.strip.to_i
-
-raise Error, "#{RUBOCOP_LIST_TARGET_FILES_COMMAND} failed with status #{$?}!" if rubocop_target_files_count == 0
-
-rubocop_target_files_count = rubocop_target_files_count.to_i
-rubocop_current_max_files_in_cache = RuboCop::ConfigLoader.load_yaml_configuration(File.expand_path('../.rubocop.yml', __dir__)).dig('AllCops', 'MaxFilesInCache').to_i
-minimum_max_files_in_cache = (rubocop_target_files_count * MINIMUM_MAX_FILES_IN_CACHE_MARGIN).round(-3)
-
-# We want AllCops.MaxFilesInCache to be at least 5% above the actual files count at any time to give us enough time to increase it accordingly
-if rubocop_current_max_files_in_cache <= minimum_max_files_in_cache
- recommended_max_files_in_cache = (rubocop_target_files_count * RECOMMENDED_MAX_FILES_IN_CACHE_MARGIN).round(-3)
- raise RuboCopMaxFilesInCacheIsTooSmall, "Current count of RuboCop target file is #{rubocop_target_files_count} but AllCops.MaxFilesInCache is set to #{rubocop_current_max_files_in_cache}. We recommend to increase it to #{recommended_max_files_in_cache}."
-else
- puts "Current count of RuboCop target file is #{rubocop_target_files_count} and AllCops.MaxFilesInCache is set to #{rubocop_current_max_files_in_cache}. All good."
- exit(0)
-end
diff --git a/scripts/static-analysis b/scripts/static-analysis
index c6cf09e056b..9a0057d8f4d 100755
--- a/scripts/static-analysis
+++ b/scripts/static-analysis
@@ -50,7 +50,6 @@ class StaticAnalysis
Task.new(%w[bin/rake gettext:lint], 105),
Task.new(%W[scripts/license-check.sh #{project_path}], 200),
Task.new(%w[bin/rake lint:static_verification], 40),
- Task.new(%w[scripts/rubocop-max-files-in-cache-check], 25),
Task.new(%w[bin/rake config_lint], 10),
Task.new(%w[bin/rake gitlab:sidekiq:all_queues_yml:check], 15),
(Gitlab.ee? ? Task.new(%w[bin/rake gitlab:sidekiq:sidekiq_queues_yml:check], 11) : nil),
@@ -134,7 +133,7 @@ class StaticAnalysis
def warning_count(static_analysis)
static_analysis.warned_results
- .count { |result| !ALLOWED_WARNINGS.include?(result.stderr.strip) }
+ .count { |result| !ALLOWED_WARNINGS.include?(result.stderr.strip) } # rubocop:disable Rails/NegateInclude
end
def tasks_to_run(node_total)
diff --git a/scripts/trigger-build.rb b/scripts/trigger-build.rb
index 897ca9f473e..411e5ed13c6 100755
--- a/scripts/trigger-build.rb
+++ b/scripts/trigger-build.rb
@@ -153,13 +153,15 @@ module Trigger
# Read version files from all components
def version_file_variables
- Dir.glob("*_VERSION").each_with_object({}) do |version_file, params|
+ Dir.glob("*_VERSION").each_with_object({}) do |version_file, params| # rubocop:disable Rails/IndexWith
params[version_file] = version_param_value(version_file)
end
end
end
class CNG < Base
+ ASSETS_HASH = "cached-assets-hash.txt"
+
def variables
# Delete variables that aren't useful when using native triggers.
super.tap do |hash|
@@ -187,7 +189,6 @@ module Trigger
"TRIGGER_BRANCH" => ref,
"GITLAB_VERSION" => ENV['CI_COMMIT_SHA'],
"GITLAB_TAG" => ENV['CI_COMMIT_TAG'], # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
- "GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_SHA'],
"FORCE_RAILS_IMAGE_BUILDS" => 'true',
"CE_PIPELINE" => Trigger.ee? ? nil : "true", # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
"EE_PIPELINE" => Trigger.ee? ? "true" : nil # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
diff --git a/scripts/undercoverage b/scripts/undercoverage
index 86153671d6a..348f421c0d5 100755
--- a/scripts/undercoverage
+++ b/scripts/undercoverage
@@ -21,6 +21,13 @@ end
compare_base = ARGV[0]
compare_base ||= IO.popen(%w(git merge-base origin/master HEAD)) { |p| p.read.chomp }
+coverage_file_path = 'coverage/lcov/gitlab.lcov'
+
+result = if File.exist?(coverage_file_path)
+ Undercover::CLI.run(%W(-c #{compare_base}))
+ else
+ warn "#{coverage_file_path} doesn't exist"
+ 0
+ end
-result = Undercover::CLI.run(%W(-c #{compare_base}))
exit result
diff --git a/scripts/used-feature-flags b/scripts/used-feature-flags
index eb7e85be229..74180d02a91 100755
--- a/scripts/used-feature-flags
+++ b/scripts/used-feature-flags
@@ -114,6 +114,9 @@ if unused_flags.count > 0
puts
puts "If they are really no longer needed REMOVE their .yml definition".red
puts "If they are needed you need to ENSURE that their usage is covered with specs to continue.".red
+ puts "Feature flag usage is detected via Rubocop, which is unable to resolve dynamic feature flag usage,".red.bold
+ puts "interpolated strings however are optimistically matched. For more details consult test suite:".red
+ puts "https://gitlab.com/gitlab-org/gitlab/-/blob/69cb5d36db95881b495966c95655672cfb816f62/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb".red
puts
unused_flags.keys.sort.each do |name|
puts "- #{name}".yellow
diff --git a/scripts/utils.sh b/scripts/utils.sh
index 50ca7f558f6..92f647958fe 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -15,9 +15,11 @@ function retry() {
function test_url() {
local url="${1}"
+ local curl_args="${2}"
local status
+ local cmd="curl ${curl_args} --output /dev/null -L -s -w ''%{http_code}'' \"${url}\""
- status=$(curl --output /dev/null -L -s -w ''%{http_code}'' "${url}")
+ status=$(eval "${cmd}")
if [[ $status == "200" ]]; then
return 0
@@ -105,6 +107,27 @@ function install_junit_merge_gem() {
run_timed_command "gem install junit_merge --no-document --version 0.1.2"
}
+function fail_on_warnings() {
+ local cmd="$*"
+ local warnings
+ warnings="$(mktemp)"
+
+ eval "$cmd 2>$warnings"
+ local ret=$?
+
+ if test -s "$warnings";
+ then
+ echoerr "There were warnings:"
+ cat "$warnings"
+ rm "$warnings"
+ return 1
+ fi
+
+ rm "$warnings"
+
+ return $ret
+}
+
function run_timed_command() {
local cmd="${1}"
local metric_name="${2:-no}"
@@ -203,3 +226,21 @@ function danger_as_local() {
# We need to base SHA to help danger determine the base commit for this shallow clone.
bundle exec danger dry_run --fail-on-errors=true --verbose --base="${CI_MERGE_REQUEST_DIFF_BASE_SHA}" --head="${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA:-$CI_COMMIT_SHA}" --dangerfile="${DANGER_DANGERFILE:-Dangerfile}"
}
+
+# We're inlining this function in `.gitlab/ci/package-and-test/main.gitlab-ci.yml` so make sure to reflect any changes there
+function assets_image_tag() {
+ local cache_assets_hash_file="cached-assets-hash.txt"
+
+ if [[ -n "${CI_COMMIT_TAG}" ]]; then
+ echo -n "${CI_COMMIT_REF_NAME}"
+ elif [[ -f "${cache_assets_hash_file}" ]]; then
+ echo -n "assets-hash-$(cat ${cache_assets_hash_file} | cut -c1-10)"
+ else
+ echo -n "${CI_COMMIT_SHA}"
+ fi
+}
+
+function setup_gcloud() {
+ gcloud auth activate-service-account --key-file="${REVIEW_APPS_GCP_CREDENTIALS}"
+ gcloud config set project "${REVIEW_APPS_GCP_PROJECT}"
+}
diff --git a/scripts/verify-tff-mapping b/scripts/verify-tff-mapping
index b4974f71ebf..302e50bf34f 100755
--- a/scripts/verify-tff-mapping
+++ b/scripts/verify-tff-mapping
@@ -35,6 +35,30 @@ tests = [
},
{
+ explanation: 'EE lib should map to respective spec.',
+ source: 'ee/lib/world.rb',
+ expected: ['ee/spec/lib/world_spec.rb']
+ },
+
+ {
+ explanation: 'https://gitlab.com/gitlab-org/gitlab/-/issues/368628',
+ source: 'lib/gitlab/usage_data_counters/foo.rb',
+ expected: ['spec/lib/gitlab/usage_data_spec.rb']
+ },
+
+ {
+ explanation: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/54#note_1160811638',
+ source: 'lib/gitlab/ci/config/base.rb',
+ expected: ['spec/lib/gitlab/ci/yaml_processor_spec.rb']
+ },
+
+ {
+ explanation: 'https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/54#note_1160811638',
+ source: 'ee/lib/gitlab/ci/config/base.rb',
+ expected: ['spec/lib/gitlab/ci/yaml_processor_spec.rb', 'ee/spec/lib/gitlab/ci/yaml_processor_spec.rb']
+ },
+
+ {
explanation: 'FOSS lib should map to respective spec',
source: 'lib/gitaly/server.rb',
expected: ['spec/lib/gitaly/server_spec.rb']
@@ -47,6 +71,42 @@ tests = [
},
{
+ explanation: 'Initializers should map to respective spec',
+ source: 'config/initializers/action_mailer_hooks.rb',
+ expected: ['spec/initializers/action_mailer_hooks_spec.rb']
+ },
+
+ {
+ explanation: 'DB structure should map to schema spec',
+ source: 'db/structure.sql',
+ expected: ['spec/db/schema_spec.rb']
+ },
+
+ {
+ explanation: 'Migration should map to its non-timestamped spec',
+ source: 'db/migrate/20210818220234_add_default_project_approval_rules_vuln_allowed.rb',
+ expected: ['spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb']
+ },
+
+ {
+ explanation: 'Migration should map to its timestamped spec',
+ source: 'db/post_migrate/20210915022415_cleanup_bigint_conversion_for_ci_builds.rb',
+ expected: ['spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb']
+ },
+
+ {
+ explanation: 'FOSS views should map to respective spec',
+ source: 'app/views/admin/dashboard/index.html.haml',
+ expected: ['spec/views/admin/dashboard/index.html.haml_spec.rb']
+ },
+
+ {
+ explanation: 'EE views should map to respective spec',
+ source: 'ee/app/views/subscriptions/new.html.haml',
+ expected: ['ee/spec/views/subscriptions/new.html.haml_spec.rb']
+ },
+
+ {
explanation: 'FOSS spec code should map to itself',
source: 'spec/models/issue_spec.rb',
expected: ['spec/models/issue_spec.rb']
@@ -77,57 +137,57 @@ tests = [
},
{
- explanation: 'Initializers should map to respective spec',
- source: 'config/initializers/action_mailer_hooks.rb',
- expected: ['spec/initializers/action_mailer_hooks_spec.rb']
+ explanation: 'Whats New should map to its respective spec',
+ source: 'data/whats_new/202101140001_13_08.yml',
+ expected: ['spec/lib/release_highlights/validator_spec.rb']
},
{
- explanation: 'FOSS views should map to respective spec',
- source: 'app/views/admin/dashboard/index.html.haml',
- expected: ['spec/views/admin/dashboard/index.html.haml_spec.rb']
+ explanation: 'The documentation index page is used in this haml_lint spec',
+ source: 'doc/index.md',
+ expected: ['spec/haml_lint/linter/documentation_links_spec.rb']
},
{
- explanation: 'EE views should map to respective spec',
- source: 'ee/app/views/subscriptions/new.html.haml',
- expected: ['ee/spec/views/subscriptions/new.html.haml_spec.rb']
+ explanation: 'Spec for FOSS sidekiq worker',
+ source: 'app/workers/new_worker.rb',
+ expected: ['spec/workers/every_sidekiq_worker_spec.rb']
},
{
- explanation: 'DB structure should map to schema spec',
- source: 'db/structure.sql',
- expected: ['spec/db/schema_spec.rb']
+ explanation: 'Spec for EE sidekiq worker',
+ source: 'ee/app/workers/new_worker.rb',
+ expected: ['spec/workers/every_sidekiq_worker_spec.rb']
},
{
- explanation: 'Migration should map to its non-timestamped spec',
- source: 'db/migrate/20210818220234_add_default_project_approval_rules_vuln_allowed.rb',
- expected: ['spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb']
+ explanation: 'Known events',
+ source: 'lib/gitlab/usage_data_counters/known_events/common.yml',
+ expected: ['spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb', 'spec/lib/gitlab/usage_data_spec.rb']
},
{
- explanation: 'Migration should map to its timestamped spec',
- source: 'db/post_migrate/20210915022415_cleanup_bigint_conversion_for_ci_builds.rb',
- expected: ['spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb']
+ explanation: 'FOSS mailer previews',
+ source: 'app/mailers/previews/foo.rb',
+ expected: ['spec/mailers/previews_spec.rb']
},
{
- explanation: 'Whats New should map to its respective spec',
- source: 'data/whats_new/202101140001_13_08.yml',
- expected: ['spec/lib/release_highlights/validator_spec.rb']
+ explanation: 'EE mailer previews',
+ source: 'ee/app/mailers/previews/foo.rb',
+ expected: ['spec/mailers/previews_spec.rb']
},
{
- explanation: 'Spec for every sidekiq worker',
- source: 'app/workers/new_worker.rb',
- expected: ['spec/workers/every_sidekiq_worker_spec.rb']
+ explanation: 'EE mailer extension previews',
+ source: 'ee/app/mailers/previews/license_mailer_preview.rb',
+ expected: ['spec/mailers/previews_spec.rb']
},
{
- explanation: 'Known events',
- source: 'lib/gitlab/usage_data_counters/known_events/common.yml',
- expected: ['spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb', 'spec/lib/gitlab/usage_data_spec.rb']
+ explanation: 'GLFM spec and config files for CE and EE should map to respective markdown snapshot specs',
+ source: 'glfm_specification/foo',
+ expected: ['spec/requests/api/markdown_snapshot_spec.rb', 'ee/spec/requests/api/markdown_snapshot_spec.rb']
}
]