author     GitLab Bot <gitlab-bot@gitlab.com>  2022-09-19 23:18:09 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-09-19 23:18:09 +0000
commit     6ed4ec3e0b1340f96b7c043ef51d1b33bbe85fde (patch)
tree       dc4d20fe6064752c0bd323187252c77e0a89144b /scripts
parent     9868dae7fc0655bd7ce4a6887d4e6d487690eeed (diff)
download   gitlab-ce-6ed4ec3e0b1340f96b7c043ef51d1b33bbe85fde.tar.gz
Add latest changes from gitlab-org/gitlab@15-4-stable-ee (v15.4.0-rc42)
Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/build_assets_image                    |   8
-rwxr-xr-x  scripts/bundle_size_review                    |  68
-rwxr-xr-x  scripts/checkout-mr-source-sha                |   7
-rwxr-xr-x  scripts/determine-qa-tests                    | 106
-rw-r--r--  scripts/frontend/startup_css/constants.js     |   3
-rwxr-xr-x  scripts/generate-e2e-pipeline                 |  39
-rwxr-xr-x  scripts/glfm/run-snapshot-tests.sh            |   6
-rw-r--r--  scripts/lib/glfm/constants.rb                 |   8
-rw-r--r--  scripts/lib/glfm/parse_examples.rb            |  25
-rw-r--r--  scripts/lib/glfm/render_static_html.rb        | 113
-rw-r--r--  scripts/lib/glfm/shared.rb                    |  34
-rw-r--r--  scripts/lib/glfm/update_example_snapshots.rb  | 218
-rwxr-xr-x  scripts/lint-doc.sh                           |   2
-rwxr-xr-x  scripts/rspec_check_order_dependence          |  76
-rw-r--r--  scripts/rspec_helpers.sh                      |  19
-rwxr-xr-x  scripts/rubocop-parse                         |  73
-rwxr-xr-x  scripts/static-analysis                       |  10
-rwxr-xr-x  scripts/trigger-build.rb                      |  65
-rw-r--r--  scripts/utils.sh                              |   6
-rwxr-xr-x  scripts/verify-tff-mapping                    |  12
20 files changed, 530 insertions, 368 deletions
diff --git a/scripts/build_assets_image b/scripts/build_assets_image
index 60bd9190b74..8aa6526061a 100755
--- a/scripts/build_assets_image
+++ b/scripts/build_assets_image
@@ -19,12 +19,8 @@ cp -r public/assets assets_container.build/public/
cp Dockerfile.assets assets_container.build/
COMMIT_REF_SLUG_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_SLUG}
-# Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA (MR HEAD commit) so that the image is in sync with Omnibus/CNG images.
-# Background: Due to the fact that we cannot retrieve the Merged Commit in the downstream omnibus/CNG pipelines,
-# we're building the Omnibus/CNG images for the MR HEAD commit.
-# In turn, the assets image also needs to be built from the MR HEAD commit, so that everything is build from the same commit.
-# For non-MR commits, we fallback to $CI_COMMIT_SHA.
-COMMIT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA:-$CI_COMMIT_SHA}
+
+COMMIT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_SHA}
COMMIT_REF_NAME_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_NAME}
DESTINATIONS="--destination=$COMMIT_REF_SLUG_DESTINATION --destination=$COMMIT_SHA_DESTINATION"
diff --git a/scripts/bundle_size_review b/scripts/bundle_size_review
new file mode 100755
index 00000000000..5067c3c3f2c
--- /dev/null
+++ b/scripts/bundle_size_review
@@ -0,0 +1,68 @@
+#!/usr/bin/env bash
+set -euo pipefail
+IFS=$'\n\t'
+
+#
+# # How does this work in general?
+#
+# 1. We run webpack in a production-like mode and enable the BundleAnalyzerPlugin
+# 2. The plugin builds an index.html for human consumption _and_ a stats.json
+# 3. This script creates a smaller analysis.json from the gargantuan stats.json
+# 4. In Merge Requests:
+# - compare that smaller analysis.json to the one from the base commit on master
+# - report the comparison results via danger
+
+source scripts/utils.sh
+
+# For now we only want bundle-size-review to run in CI
+# Maybe we could create a "local mode"
+if [[ -z "${CI:-}" ]]; then
+ echo 'Not running in a CI context, skipping bundle analysis'
+ exit "0"
+fi
+
+# Get the _current_ commit sha
+if [[ -z "${CI_MERGE_REQUEST_IID:-}" ]]; then
+ echo 'Not in a merge request, setting COMMIT_SHA to $CI_COMMIT_SHA'
+ COMMIT_SHA="${CI_COMMIT_SHA}"
+else
+ echo 'In a merge request, setting COMMIT_SHA to $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA'
+ COMMIT_SHA="${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}"
+fi
+
+# Create output directory
+mkdir -p bundle-size-review
+
+# Running webpack
+export WEBPACK_REPORT="true"
+run_timed_command "yarn run webpack-prod > bundle-size-review/webpack-output.log"
+
+# Copy results from stats plugin
+cp webpack-report/index.html bundle-size-review/bundle-report.html
+
+# Run comparison in danger
+if [[ -z "${DANGER_GITLAB_API_TOKEN:-}" ]]; then
+ echo 'No Danger token available, skipping bundle analysis'
+ exit "0"
+fi
+
+# TODO: Make this a dependency of GitLab itself after a proper release
+yarn global add https://gitlab.com/gitlab-org/frontend/playground/webpack-memory-metrics.git
+
+# Create smaller analysis.json
+run_timed_command "webpack-entry-point-analyser --from-file ./webpack-report/stats.json --json ./bundle-size-review/analysis.json --sha ${COMMIT_SHA}"
+rm -rf webpack-report
+
+if [[ -z "${CI_MERGE_REQUEST_IID:-}" ]]; then
+ echo 'Not in a merge request, skipping comparison'
+ exit "0"
+fi
+
+# Run comparison
+run_timed_command "webpack-compare-reports --job ${CI_JOB_ID} --to-file ./bundle-size-review/analysis.json --html ./bundle-size-review/comparison.html --markdown ./bundle-size-review/comparison.md"
+
+# Execute danger
+danger_id=$(echo -n "${DANGER_GITLAB_API_TOKEN}" | md5sum | awk '{print $1}' | cut -c5-10)
+run_timed_command "danger --dangerfile=danger/Dangerfile-bundle_size --fail-on-errors=true --verbose --danger_id=bundle-size-review-${danger_id}"
+
+exit "0"
diff --git a/scripts/checkout-mr-source-sha b/scripts/checkout-mr-source-sha
deleted file mode 100755
index 962e3f1348d..00000000000
--- a/scripts/checkout-mr-source-sha
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-if [ -n "$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA" ]; then
- echo "Checking out \$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA ($CI_MERGE_REQUEST_SOURCE_BRANCH_SHA) instead of \$CI_COMMIT_SHA (merge result commit $CI_COMMIT_SHA) so that code is in sync with gitlab images built upstream."
- echo "See https://docs.gitlab.com/ee/development/testing_guide/end_to_end/index.html#with-pipeline-for-merged-results for more details."
- git checkout -f ${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}
-fi
diff --git a/scripts/determine-qa-tests b/scripts/determine-qa-tests
deleted file mode 100755
index b1e9d8e9312..00000000000
--- a/scripts/determine-qa-tests
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-require 'optparse'
-
-# This script returns end-to-end tests or test directories. The returned value is stored in QA_TESTS
-# variable for executing only those tests.
-
-class DetermineQATests # rubocop:disable Gitlab/NamespacedClass
- def initialize(options)
- @changed_files = options.delete(:changed_files)
- @mr_labels = options.delete(:mr_labels) || []
- end
-
- def execute
- # If only e2e test files have changed, run only those tests
- qa_tests = if has_qa_spec_only_changes?
- changed_files
-
- # If only non qa files have changed, use the devops MR label to run the files in the related directory
- # However, if a feature flag file has changed, do not return any specific test/test directory
- elsif has_non_qa_only_changes? && mr_labels.any? && !has_dev_ops_feature_flag_changes?
- devops_stage = devops_stage_from_mr_labels
-
- qa_spec_directories_for_devops_stage(devops_stage) if devops_stage
- end
-
- trim_path(qa_tests).join(' ') if qa_tests
- end
-
- private
-
- attr_reader :changed_files, :mr_labels
-
- # Are the changed files only qa specs?
- #
- # @return [Boolean] whether the changes files are only qa specs
- def has_qa_spec_only_changes?
- changed_files.all? { |file_path| file_path =~ %r{^qa/qa/specs/features/} }
- end
-
- # Are the changed files only outside the qa directory?
- #
- # @return [Boolean] whether the changes files are outside of qa directory
- def has_non_qa_only_changes?
- changed_files.none? { |file_path| file_path =~ %r{^qa/} }
- end
-
- # Are the changed files for development and ops feature flags?
- #
- # @return [Boolean] whether the changes files are for development and ops feature flags
- def has_dev_ops_feature_flag_changes?
- changed_files.any? { |file_path| file_path =~ %r{/feature_flags/(development|ops)/.*\.yml} }
- end
-
- # Remove the leading `qa/` from the file or directory paths
- #
- # @param [Array] paths Array of file or directory paths
- # @return [Array] Array of files or directories with the first occurance of `qa/` removed
- def trim_path(paths)
- paths.map { |path| path.delete_prefix("qa/") }
- end
-
- # Extract devops stage from MR labels
- #
- # @return [String] a devops stage
- def devops_stage_from_mr_labels
- mr_labels.find { |label| label =~ /^devops::/ }&.delete_prefix('devops::')
- end
-
- # Get qa spec directories for devops stage
- #
- # @param [String] devops_stage a devops stage
- # @return [Array] qa spec directories
- def qa_spec_directories_for_devops_stage(devops_stage)
- Dir.glob("qa/qa/specs/**/*/").select { |dir| dir =~ %r{\d+_#{devops_stage}/$} }
- end
-end
-
-if $0 == __FILE__
- options = {}
-
- OptionParser.new do |opts|
- opts.on("-f", "--files CHANGED_FILES_PATH", String,
- "A path to a file containing a list of changed files") do |value|
- changed_files_path = value
- abort("ERROR: The specified changed files path does not exist") unless File.exist?(changed_files_path)
-
- changed_files = File.read(changed_files_path).split(' ')
- abort("ERROR: There are no changed files") if changed_files.empty?
-
- options[:changed_files] = changed_files
- end
-
- opts.on("-l", "--labels MR_LABELS", String, "A comma separated list of MR labels") do |value|
- options[:mr_labels] = Array(value&.split(',')).compact
- end
-
- opts.on("-h", "--help", "Prints this help") do
- puts opts
- exit
- end
- end.parse!
-
- puts DetermineQATests.new(options).execute
-end
diff --git a/scripts/frontend/startup_css/constants.js b/scripts/frontend/startup_css/constants.js
index 10d60657e09..5143c04dc37 100644
--- a/scripts/frontend/startup_css/constants.js
+++ b/scripts/frontend/startup_css/constants.js
@@ -41,6 +41,7 @@ const ROOT_RAILS = IS_EE ? path.join(ROOT, 'ee') : ROOT;
const FIXTURES_FOLDER_NAME = IS_EE ? 'fixtures-ee' : 'fixtures';
const FIXTURES_ROOT = path.join(ROOT, 'tmp/tests/frontend', FIXTURES_FOLDER_NAME);
const PATH_SIGNIN_HTML = path.join(FIXTURES_ROOT, 'startup_css/sign-in.html');
+const PATH_SIGNIN_OLD_HTML = path.join(FIXTURES_ROOT, 'startup_css/sign-in-old.html');
const PATH_ASSETS = path.join(ROOT, 'tmp/startup_css_assets');
const PATH_STARTUP_SCSS = path.join(ROOT_RAILS, 'app/assets/stylesheets/startup');
@@ -80,7 +81,7 @@ const OUTPUTS = [
}),
{
outFile: 'startup-signin',
- htmlPaths: [PATH_SIGNIN_HTML],
+ htmlPaths: [PATH_SIGNIN_HTML, PATH_SIGNIN_OLD_HTML],
cssKeys: [APPLICATION_CSS_PREFIX, UTILITIES_CSS_PREFIX],
purgeOptions: {
safelist: {
diff --git a/scripts/generate-e2e-pipeline b/scripts/generate-e2e-pipeline
new file mode 100755
index 00000000000..f541ae6665c
--- /dev/null
+++ b/scripts/generate-e2e-pipeline
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+set -e
+
+# Script to generate e2e test child pipeline
+# This is required because environment variables that are generated dynamically are not picked up by rules in child pipelines
+
+source $ENV_FILE
+
+echo "Generating child pipeline yml definitions for review-app and package-and-test child pipelines"
+
+if [ "$QA_SKIP_ALL_TESTS" == "true" ]; then
+ skip_pipeline=".gitlab/ci/_skip.yml"
+
+ echo "Using ${skip_pipeline} due to QA_SKIP_ALL_TESTS set to 'true'"
+ cp $skip_pipeline "$OMNIBUS_PIPELINE_YML"
+ cp $skip_pipeline "$REVIEW_PIPELINE_YML"
+ exit
+fi
+
+variables=$(cat <<YML
+variables:
+ GITLAB_VERSION: "$(cat VERSION)"
+ COLORIZED_LOGS: "true"
+ QA_TESTS: "$QA_TESTS"
+ QA_FEATURE_FLAGS: "${QA_FEATURE_FLAGS}"
+ QA_FRAMEWORK_CHANGES: "${QA_FRAMEWORK_CHANGES:-false}"
+ QA_SUITES: "$QA_SUITES"
+YML
+)
+
+echo "Using .gitlab/ci/review-apps/main.gitlab-ci.yml and .gitlab/ci/package-and-test/main.gitlab-ci.yml"
+cp .gitlab/ci/review-apps/main.gitlab-ci.yml "$REVIEW_PIPELINE_YML"
+echo "$variables" >>"$REVIEW_PIPELINE_YML"
+cp .gitlab/ci/package-and-test/main.gitlab-ci.yml "$OMNIBUS_PIPELINE_YML"
+echo "$variables" >>"$OMNIBUS_PIPELINE_YML"
+
+echo "Successfully generated review-app and package-and-test pipeline with following variables section:"
+echo "$variables"
diff --git a/scripts/glfm/run-snapshot-tests.sh b/scripts/glfm/run-snapshot-tests.sh
index 59a7c8f06b0..6a66d8fbd9a 100755
--- a/scripts/glfm/run-snapshot-tests.sh
+++ b/scripts/glfm/run-snapshot-tests.sh
@@ -28,8 +28,12 @@ printf "\n${BBlue}Running frontend 'yarn jest spec/frontend/content_editor/markd
yarn jest spec/frontend/content_editor/markdown_snapshot_spec.js
printf "\n${BBlue}'yarn jest spec/frontend/content_editor/markdown_snapshot_spec.js' passed!${Color_Off}\n\n"
-printf "\n${BBlue}Running backend 'bundle exec rspec spec/requests/api/markdown_snapshot_spec.rb'...${Color_Off}\n\n"
+printf "\n${BBlue}Running CE backend 'bundle exec rspec spec/requests/api/markdown_snapshot_spec.rb'...${Color_Off}\n\n"
bundle exec rspec spec/requests/api/markdown_snapshot_spec.rb
printf "\n${BBlue}'bundle exec rspec spec/requests/api/markdown_snapshot_spec.rb' passed!${Color_Off}\n\n"
+printf "\n${BBlue}Running EE backend 'bundle exec rspec ee/spec/requests/api/markdown_snapshot_spec.rb'...${Color_Off}\n\n"
+bundle exec rspec ee/spec/requests/api/markdown_snapshot_spec.rb
+printf "\n${BBlue}'bundle exec rspec ee/spec/requests/api/markdown_snapshot_spec.rb' passed!${Color_Off}\n\n"
+
printf "\n✅✅✅ ${BGreen}All GLFM snapshot example tests passed successfully!${Color_Off} ✅✅✅\n"
diff --git a/scripts/lib/glfm/constants.rb b/scripts/lib/glfm/constants.rb
index 42977248c0d..e5790bbdd88 100644
--- a/scripts/lib/glfm/constants.rb
+++ b/scripts/lib/glfm/constants.rb
@@ -18,6 +18,9 @@ module Glfm
GLFM_INTRO_TXT_PATH = specification_input_glfm_path.join('glfm_intro.txt')
GLFM_EXAMPLES_TXT_PATH = specification_input_glfm_path.join('glfm_canonical_examples.txt')
GLFM_EXAMPLE_STATUS_YML_PATH = specification_input_glfm_path.join('glfm_example_status.yml')
+ GLFM_EXAMPLE_METADATA_YML_PATH =
+ specification_input_glfm_path.join('glfm_example_metadata.yml')
+ GLFM_EXAMPLE_NORMALIZATIONS_YML_PATH = specification_input_glfm_path.join('glfm_example_normalizations.yml')
GLFM_SPEC_TXT_PATH = specification_path.join('output/spec.txt')
# Example Snapshot (ES) files
@@ -28,15 +31,16 @@ module Glfm
ES_PROSEMIRROR_JSON_YML_PATH = File.join(es_fixtures_path, 'prosemirror_json.yml')
# Other constants used for processing files
- GLFM_SPEC_TXT_HEADER = <<~GLFM_SPEC_TXT_HEADER
+ GLFM_SPEC_TXT_HEADER = <<~MARKDOWN
---
title: GitLab Flavored Markdown (GLFM) Spec
version: alpha
...
- GLFM_SPEC_TXT_HEADER
+ MARKDOWN
INTRODUCTION_HEADER_LINE_TEXT = /\A# Introduction\Z/.freeze
END_TESTS_COMMENT_LINE_TEXT = /\A<!-- END TESTS -->\Z/.freeze
MARKDOWN_TEMPFILE_BASENAME = %w[MARKDOWN_TEMPFILE_ .yml].freeze
+ METADATA_TEMPFILE_BASENAME = %w[METADATA_TEMPFILE_ .yml].freeze
STATIC_HTML_TEMPFILE_BASENAME = %w[STATIC_HTML_TEMPFILE_ .yml].freeze
WYSIWYG_HTML_AND_JSON_TEMPFILE_BASENAME = %w[WYSIWYG_HTML_AND_JSON_TEMPFILE_ .yml].freeze
end
diff --git a/scripts/lib/glfm/parse_examples.rb b/scripts/lib/glfm/parse_examples.rb
index 14634bcfb3e..a15a6ecc47b 100644
--- a/scripts/lib/glfm/parse_examples.rb
+++ b/scripts/lib/glfm/parse_examples.rb
@@ -26,7 +26,7 @@ module Glfm
EXAMPLE_BACKTICKS_LENGTH = 32
EXAMPLE_BACKTICKS_STRING = '`' * EXAMPLE_BACKTICKS_LENGTH
- # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
+ # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity, Metrics/AbcSize
def parse_examples(spec_txt_lines)
line_number = 0
start_line = 0
@@ -41,6 +41,7 @@ module Glfm
h1_regex = /\A# / # new logic compared to original Python code
h2_regex = /\A## / # new logic compared to original Python code
+ h3_regex = /\A### / # new logic compared to original Python code
header_regex = /\A#+ / # Added beginning of line anchor to original Python code
spec_txt_lines.each do |line|
@@ -102,12 +103,24 @@ module Glfm
# reset the headers array if we found a new H1
headers = [] if line =~ h1_regex
- # headers should be size 2 or less [<H1_headertext>, <H2_headertext>]
- # pop the last entry from the headers array if we are in an H2 and found a new H2
- headers.pop if headers.length == 2 && line =~ h2_regex
+ # headers should be size 3 or less [<H1_headertext>, <H2_headertext>, <H3_headertext>]
+
+ if headers.length == 1 && line =~ h3_regex
+ errmsg = "Error: The H3 '#{headertext}' may not be nested directly within the H1 '#{headers[0]}'. " \
+ " Add an H2 header before the H3 header."
+ raise errmsg
+ end
+
+ if (headers.length == 2 || headers.length == 3) && line =~ h2_regex
+ # drop everything but the first entry from the headers array if we are in an H2 and found a new H2
+ headers = [headers[0]]
+ elsif headers.length == 3 && line =~ h3_regex
+ # pop the last entry from the headers array if we are in an H3 and found a new H3
+ headers.pop
+ end
# push the new header text to the headers array
- headers << headertext if line =~ h1_regex || line =~ h2_regex
+ headers << headertext if line =~ h1_regex || line =~ h2_regex || line =~ h3_regex
else
# Else if we are in regular text...
@@ -119,7 +132,7 @@ module Glfm
# no-op - skips any other non-header regular text lines
end
end
- # rubocop:enable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
+ # rubocop:enable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity, Metrics/AbcSize
tests
end
diff --git a/scripts/lib/glfm/render_static_html.rb b/scripts/lib/glfm/render_static_html.rb
index a0bda05b41e..8d72aec7c3b 100644
--- a/scripts/lib/glfm/render_static_html.rb
+++ b/scripts/lib/glfm/render_static_html.rb
@@ -1,80 +1,73 @@
# frozen_string_literal: true
+require 'spec_helper'
require_relative 'constants'
require_relative 'shared'
# Purpose:
# - Reads a set of markdown examples from a hash which has been serialized to disk
-# - Converts each example to static HTML using the `markdown` helper
+# - Sets up the appropriate fixture data for the markdown examples
+# - Converts each example to static HTML using the appropriate API markdown endpoint
# - Writes the HTML for each example to a hash which is serialized to disk
#
-# It should be invoked via `rails runner` from the Rails root directory.
-# It is intended to be invoked from the `update_example_snapshots.rb` script class.
-module Glfm
- class RenderStaticHtml
- include Constants
- include Shared
-
- def process
- markdown_yml_path = ARGV[0]
- markdown_hash = YAML.load_file(markdown_yml_path)
+# Requirements:
+# The input and output files are specified via these environment variables:
+# - INPUT_MARKDOWN_YML_PATH
+# - OUTPUT_STATIC_HTML_TEMPFILE_PATH
+#
+# Although it is implemented as an RSpec test, it is not a unit test. We use
+# RSpec because that is the simplest environment in which we can use the
+# FactoryBot factory methods to create persisted model objects with stable
+# and consistent data values, to ensure consistent example snapshot HTML
+# across various machines and environments. RSpec also makes it easy to invoke
+# the API and obtain the response.
+#
+# It is intended to be invoked as a helper subprocess from the `update_example_snapshots.rb`
+# script class. It's not intended to be run or used directly. This usage is also reinforced
+# by not naming the file with a `_spec.rb` ending.
+RSpec.describe 'Render Static HTML', :api, type: :request do # rubocop:disable RSpec/TopLevelDescribePath
+ include Glfm::Constants
+ include Glfm::Shared
- context = build_context
+ # noinspection RailsParamDefResolve (RubyMine can't find the shared context from this file location)
+ include_context 'with GLFM example snapshot fixtures'
- # NOTE: We COULD parallelize this loop like the Javascript WYSIWYG example generation does,
- # but it wouldn't save much time. Most of the time is spent loading the Rails environment
- # via `rails runner`. In initial testing, this loop only took ~7 seconds while the entire
- # script took ~20 seconds. Unfortunately, there's no easy way to execute
- # `Banzai.render_and_post_process` without using `rails runner`
- static_html_hash = markdown_hash.transform_values do |markdown|
- Banzai.render_and_post_process(markdown, context)
- end
+ it 'can create a project dependency graph using factories' do
+ markdown_hash = YAML.safe_load(File.open(ENV.fetch('INPUT_MARKDOWN_YML_PATH')), symbolize_names: true)
+ metadata_hash = YAML.safe_load(File.open(ENV.fetch('INPUT_METADATA_YML_PATH')), symbolize_names: true)
- static_html_tempfile_path = Dir::Tmpname.create(STATIC_HTML_TEMPFILE_BASENAME) do |path|
- tmpfile = File.open(path, 'w')
- YAML.dump(static_html_hash, tmpfile)
- tmpfile.close
- end
+ # NOTE: We cannot parallelize this loop like the Javascript WYSIWYG example generation does,
+ # because the rspec `post` API cannot be parallelized (it is not thread-safe, it can't find
+ # the controller).
+ static_html_hash = markdown_hash.transform_values.with_index do |markdown, index|
+ name = markdown_hash.keys[index]
+ api_url = metadata_hash.dig(name, :api_request_override_path) || (api "/markdown")
- # Write the path to the output file to stdout
- print static_html_tempfile_path
- end
+ post api_url, params: { text: markdown, gfm: true }
+ # noinspection RubyResolve
+ expect(response).to be_successful
- private
+ returned_html_value =
+ begin
+ parsed_response = Gitlab::Json.parse(response.body, symbolize_names: true)
+ # Some responses have the HTML in the `html` key, others in the `body` key.
+ parsed_response[:body] || parsed_response[:html]
+ rescue JSON::ParserError
+ # if we got a parsing error, just return the raw response body for debugging purposes.
+ response.body
+ end
- def build_context
- user_username = 'glfm_user_username'
- user = User.find_by_username(user_username) ||
- User.create!(
- email: "glfm_user_email@example.com",
- name: "glfm_user_name",
- password: "glfm_user_password",
- username: user_username
- )
+ returned_html_value
+ end
- # Ensure that we never try to hit Gitaly, even if we
- # reload the project
- Project.define_method(:skip_disk_validation) do
- true
- end
+ write_output_file(static_html_hash)
+ end
- project_name = 'glfm_project_name'
- project = Project.find_by_name(project_name) ||
- Project.create!(
- creator: user,
- description: "glfm_project_description",
- name: project_name,
- namespace: user.namespace,
- path: 'glfm_project_path'
- )
+ private
- {
- only_path: false,
- current_user: user,
- project: project
- }
- end
+ def write_output_file(static_html_hash)
+ tmpfile = File.open(ENV.fetch('OUTPUT_STATIC_HTML_TEMPFILE_PATH'), 'w')
+ yaml_string = dump_yaml_with_formatting(static_html_hash)
+ write_file(tmpfile, yaml_string)
end
end
-
-Glfm::RenderStaticHtml.new.process
diff --git a/scripts/lib/glfm/shared.rb b/scripts/lib/glfm/shared.rb
index f11c66eb8be..b529d9ba94f 100644
--- a/scripts/lib/glfm/shared.rb
+++ b/scripts/lib/glfm/shared.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'fileutils'
require 'open3'
+require 'active_support/core_ext/hash/keys'
module Glfm
module Shared
@@ -39,5 +40,38 @@ module Glfm
warn(stdout_and_stderr_str)
raise
end
+
+ # Construct an AST so we can control YAML formatting for
+ # YAML block scalar literals and key quoting.
+ #
+ # Note that when Psych dumps the markdown to YAML, it will
+ # automatically use the default "clip" behavior of the Block Chomping Indicator (`|`)
+ # https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator,
+ # when the markdown strings contain a trailing newline. The type of
+ # Block Chomping Indicator is automatically determined, you cannot specify it
+ # manually.
+ def dump_yaml_with_formatting(hash, literal_scalars: false)
+ stringified_keys_hash = hash.deep_stringify_keys
+ visitor = Psych::Visitors::YAMLTree.create
+ visitor << stringified_keys_hash
+ ast = visitor.tree
+
+ # Force all scalars to have literal formatting (using Block Chomping Indicator instead of quotes)
+ if literal_scalars
+ ast.grep(Psych::Nodes::Scalar).each do |node|
+ node.style = Psych::Nodes::Scalar::LITERAL
+ end
+ end
+
+ # Do not quote the keys
+ ast.grep(Psych::Nodes::Mapping).each do |node|
+ node.children.each_slice(2) do |k, _|
+ k.quoted = false
+ k.style = Psych::Nodes::Scalar::PLAIN
+ end
+ end
+
+ ast.to_yaml
+ end
end
end
diff --git a/scripts/lib/glfm/update_example_snapshots.rb b/scripts/lib/glfm/update_example_snapshots.rb
index d8d6cf3cdbc..7dc0d0f7c4b 100644
--- a/scripts/lib/glfm/update_example_snapshots.rb
+++ b/scripts/lib/glfm/update_example_snapshots.rb
@@ -29,8 +29,6 @@ module Glfm
def process(skip_static_and_wysiwyg: false)
output('Updating example snapshots...')
- setup_environment
-
output('(Skipping static HTML generation)') if skip_static_and_wysiwyg
output("Reading #{GLFM_SPEC_TXT_PATH}...")
@@ -49,19 +47,11 @@ module Glfm
private
- def setup_environment
- # Set 'GITLAB_TEST_FOOTNOTE_ID' in order to override random number generation in
- # Banzai::Filter::FootnoteFilter#random_number, and thus avoid the need to
- # perform normalization on the value. See:
- # https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#normalization
- ENV['GITLAB_TEST_FOOTNOTE_ID'] = '42'
- end
-
def add_example_names(all_examples)
# NOTE: This method and the parse_examples method assume:
# 1. Section 2 is the first section which contains examples
- # 2. Examples are always nested exactly 2 levels deep in an H2
- # 3. There may exist H3 headings with no examples (e.g. "Motivation" in the GLFM spec.txt)
+ # 2. Examples are always nested in an H2 or an H3, never directly in an H1
+ # 3. There may exist headings with no examples (e.g. "Motivation" in the GLFM spec.txt)
# 4. The Appendix doesn't ever contain any examples, so it doesn't show up
# in the H1 header count. So, even though due to the concatenation it appears before the
# GitLab examples sections, it doesn't result in their header counts being off by +1.
@@ -70,35 +60,49 @@ module Glfm
# GFM `spec_test.py` script (but it's NOT in the original CommonMark `spec_test.py`).
# 6. If a section contains ONLY disabled examples, the section numbering will still be
# incremented to match the rendered HTML specification section numbering.
- # 7. Every H2 must contain at least one example, but it is allowed that they are all disabled.
+ # 7. Every H2 or H3 must contain at least one example, but it is allowed that they are
+ # all disabled.
h1_count = 1 # examples start in H1 section 2; section 1 is the overview with no examples.
h2_count = 0
+ h3_count = 0
previous_h1 = ''
previous_h2 = ''
- index_within_h2 = 0
+ previous_h3 = ''
+ index_within_current_heading = 0
all_examples.each do |example|
headers = example[:headers]
if headers[0] != previous_h1
h1_count += 1
h2_count = 0
+ h3_count = 0
previous_h1 = headers[0]
end
if headers[1] != previous_h2
h2_count += 1
+ h3_count = 0
previous_h2 = headers[1]
- index_within_h2 = 0
+ index_within_current_heading = 0
end
- index_within_h2 += 1
+ if headers[2] && headers[2] != previous_h3
+ h3_count += 1
+ previous_h3 = headers[2]
+ index_within_current_heading = 0
+ end
+
+ index_within_current_heading += 1
# convert headers array to lowercase string with underscores, and double underscores between headers
formatted_headers_text = headers.join('__').tr('-', '_').tr(' ', '_').downcase
- hierarchy_level = "#{h1_count.to_s.rjust(2, '0')}_#{h2_count.to_s.rjust(2, '0')}"
- position_within_section = index_within_h2.to_s.rjust(3, '0')
+ hierarchy_level =
+ "#{h1_count.to_s.rjust(2, '0')}_" \
+ "#{h2_count.to_s.rjust(2, '0')}_" \
+ "#{h3_count.to_s.rjust(2, '0')}"
+ position_within_section = index_within_current_heading.to_s.rjust(3, '0')
name = "#{hierarchy_level}__#{formatted_headers_text}__#{position_within_section}"
converted_name = name.tr('(', '').tr(')', '') # remove any parens from the name
example[:name] = converted_name
@@ -111,7 +115,7 @@ module Glfm
def write_snapshot_example_files(all_examples, skip_static_and_wysiwyg:)
output("Reading #{GLFM_EXAMPLE_STATUS_YML_PATH}...")
- glfm_examples_statuses = YAML.safe_load(File.open(GLFM_EXAMPLE_STATUS_YML_PATH))
+ glfm_examples_statuses = YAML.safe_load(File.open(GLFM_EXAMPLE_STATUS_YML_PATH), symbolize_names: true)
validate_glfm_example_status_yml(glfm_examples_statuses)
write_examples_index_yml(all_examples)
@@ -123,9 +127,13 @@ module Glfm
return
end
- markdown_yml_tempfile_path = write_markdown_yml_tempfile
- static_html_hash = generate_static_html(markdown_yml_tempfile_path)
- wysiwyg_html_and_json_hash = generate_wysiwyg_html_and_json(markdown_yml_tempfile_path)
+ # NOTE: We pass the INPUT_MARKDOWN_YML_PATH and INPUT_METADATA_YML_PATH via
+ # environment variables to the static/wysiwyg HTML generation scripts. This is because they
+ # are implemented as subprocesses which invoke rspec/jest scripts, and rspec/jest do not make
+ # it straightforward to pass arguments via the command line.
+ ENV['INPUT_MARKDOWN_YML_PATH'], ENV['INPUT_METADATA_YML_PATH'] = copy_tempfiles_for_subprocesses
+ static_html_hash = generate_static_html
+ wysiwyg_html_and_json_hash = generate_wysiwyg_html_and_json
write_html_yml(all_examples, static_html_hash, wysiwyg_html_and_json_hash, glfm_examples_statuses)
@@ -135,8 +143,8 @@ module Glfm
def validate_glfm_example_status_yml(glfm_examples_statuses)
glfm_examples_statuses.each do |example_name, statuses|
next unless statuses &&
- statuses['skip_update_example_snapshots'] &&
- statuses.any? { |key, value| key.include?('skip_update_example_snapshot_') && !!value }
+ statuses[:skip_update_example_snapshots] &&
+ statuses.any? { |key, value| key.to_s.include?('skip_update_example_snapshot_') && !!value }
raise "Error: '#{example_name}' must not have any 'skip_update_example_snapshot_*' values specified " \
"if 'skip_update_example_snapshots' is truthy"
@@ -147,66 +155,90 @@ module Glfm
generate_and_write_for_all_examples(
all_examples, ES_EXAMPLES_INDEX_YML_PATH, literal_scalars: false
) do |example, hash|
- hash[example.fetch(:name)] = {
+ name = example.fetch(:name).to_sym
+ hash[name] = {
'spec_txt_example_position' => example.fetch(:example),
- 'source_specification' =>
- if example[:extensions].empty?
- 'commonmark'
- elsif example[:extensions].include?('gitlab')
- 'gitlab'
- else
- 'github'
- end
+ 'source_specification' => source_specification_for_extensions(example.fetch(:extensions))
}
end
end
+ def source_specification_for_extensions(extensions)
+ unprocessed_extensions = extensions.map(&:to_sym)
+ unprocessed_extensions.delete(:disabled)
+
+ source_specification =
+ if unprocessed_extensions.empty?
+ 'commonmark'
+ elsif unprocessed_extensions.include?(:gitlab)
+ unprocessed_extensions.delete(:gitlab)
+ 'gitlab'
+ else
+ 'github'
+ end
+
+ # We should only be left with at most one extension, which is an optional name for the example
+ raise "Error: Invalid extension(s) found: #{unprocessed_extensions.join(', ')}" if unprocessed_extensions.size > 1
+
+ source_specification
+ end
+
def write_markdown_yml(all_examples)
generate_and_write_for_all_examples(all_examples, ES_MARKDOWN_YML_PATH) do |example, hash|
- hash[example.fetch(:name)] = example.fetch(:markdown)
+ name = example.fetch(:name).to_sym
+ hash[name] = example.fetch(:markdown)
end
end
- def write_markdown_yml_tempfile
- # NOTE: We must copy the markdown YAML file to a separate temporary file for the
- # `render_static_html.rb` script to read it, because the script is run in a
- # separate process, and during unit testing we are unable to substitute the mock
- # StringIO when reading the input file in the subprocess.
- Dir::Tmpname.create(MARKDOWN_TEMPFILE_BASENAME) do |path|
- io = File.open(ES_MARKDOWN_YML_PATH)
- io.seek(0) # rewind the file. This is necessary when testing with a mock StringIO
- contents = io.read
- write_file(path, contents)
+ def copy_tempfiles_for_subprocesses
+ # NOTE: We must copy the input YAML files used by the `render_static_html.rb`
+ # and `render_wysiwyg_html_and_json.js` scripts to a separate temporary file in order for
+ # the scripts to read them, because the scripts are run in
+ # separate subprocesses, and during unit testing we are unable to substitute the mock
+ # StringIO when reading the input files in the subprocess.
+ {
+ ES_MARKDOWN_YML_PATH => MARKDOWN_TEMPFILE_BASENAME,
+ GLFM_EXAMPLE_METADATA_YML_PATH => METADATA_TEMPFILE_BASENAME
+ }.map do |original_file_path, tempfile_basename|
+ Dir::Tmpname.create(tempfile_basename) do |path|
+ io = File.open(original_file_path)
+ io.seek(0) # rewind the file. This is necessary when testing with a mock StringIO
+ contents = io.read
+ write_file(path, contents)
+ end
end
end
- def generate_static_html(markdown_yml_tempfile_path)
+ def generate_static_html
output("Generating static HTML from markdown examples...")
- # NOTE 1: We shell out to perform the conversion of markdown to static HTML via the internal Rails app
- # helper method. This allows us to avoid using the Rails API or environment in this script,
- # which makes developing and running the unit tests for this script much faster,
+ # NOTE 1: We shell out to perform the conversion of markdown to static HTML by invoking a
+ # separate subprocess. This allows us to avoid using the Rails API or environment in this
+ # script, which makes developing and running the unit tests for this script much faster,
# because they can use 'fast_spec_helper' which does not require the entire Rails environment.
- # NOTE 2: We pass the input file path as a command line argument, and receive the output
- # tempfile path as a return value. This is simplest in the case where we are invoking Ruby.
- cmd = %(rails runner #{__dir__}/render_static_html.rb #{markdown_yml_tempfile_path})
- cmd_output = run_external_cmd(cmd)
- # NOTE: Running under a debugger can add extra output, only take the last line
- static_html_tempfile_path = cmd_output.split("\n").last
+ # NOTE 2: We run this as an RSpec process, for the same reasons we run via Jest process below:
+ # because that's the easiest way to ensure a reliable, fully-configured environment in which
+ # to execute the markdown-generation logic. Also, in the static/backend case, Rspec
+ # provides the easiest and most reliable way to generate example data via FactoryBot
+ # creation of stable model records. This ensures consistent snapshot values across
+ # machines/environments.
+
+ # Dir::Tmpname.create requires a block, but we are using the non-block form to get the path
+ # via the return value, so we pass an empty block to avoid an error.
+ static_html_tempfile_path = Dir::Tmpname.create(STATIC_HTML_TEMPFILE_BASENAME) {}
+ ENV['OUTPUT_STATIC_HTML_TEMPFILE_PATH'] = static_html_tempfile_path
+
+ cmd = %(bin/rspec #{__dir__}/render_static_html.rb)
+ run_external_cmd(cmd)
output("Reading generated static HTML from tempfile #{static_html_tempfile_path}...")
- YAML.load_file(static_html_tempfile_path)
+ YAML.safe_load(File.open(static_html_tempfile_path), symbolize_names: true)
end
- def generate_wysiwyg_html_and_json(markdown_yml_tempfile_path)
+ def generate_wysiwyg_html_and_json
output("Generating WYSIWYG HTML and prosemirror JSON from markdown examples...")
- # NOTE: Unlike when we invoke a Ruby script, here we pass the input and output file paths
- # via environment variables. This is because it's not straightforward/clean to pass command line
- # arguments when we are invoking `yarn jest ...`
- ENV['INPUT_MARKDOWN_YML_PATH'] = markdown_yml_tempfile_path
-
# Dir::Tmpname.create requires a block, but we are using the non-block form to get the path
# via the return value, so we pass an empty block to avoid an error.
wysiwyg_html_and_json_tempfile_path = Dir::Tmpname.create(WYSIWYG_HTML_AND_JSON_TEMPFILE_BASENAME) {}
@@ -217,26 +249,26 @@ module Glfm
output("Reading generated WYSIWYG HTML and prosemirror JSON from tempfile " \
"#{wysiwyg_html_and_json_tempfile_path}...")
- YAML.load_file(wysiwyg_html_and_json_tempfile_path)
+ YAML.safe_load(File.open(wysiwyg_html_and_json_tempfile_path), symbolize_names: true)
end
def write_html_yml(all_examples, static_html_hash, wysiwyg_html_and_json_hash, glfm_examples_statuses)
generate_and_write_for_all_examples(
- all_examples, ES_HTML_YML_PATH, glfm_examples_statuses
+ all_examples, ES_HTML_YML_PATH, glfm_examples_statuses: glfm_examples_statuses
) do |example, hash, existing_hash|
- name = example.fetch(:name)
+ name = example.fetch(:name).to_sym
example_statuses = glfm_examples_statuses[name] || {}
- static = if example_statuses['skip_update_example_snapshot_html_static']
- existing_hash.dig(name, 'static')
+ static = if example_statuses[:skip_update_example_snapshot_html_static]
+ existing_hash.dig(name, :static)
else
static_html_hash[name]
end
- wysiwyg = if example_statuses['skip_update_example_snapshot_html_wysiwyg']
- existing_hash.dig(name, 'wysiwyg')
+ wysiwyg = if example_statuses[:skip_update_example_snapshot_html_wysiwyg]
+ existing_hash.dig(name, :wysiwyg)
else
- wysiwyg_html_and_json_hash.dig(name, 'html')
+ wysiwyg_html_and_json_hash.dig(name, :html)
end
hash[name] = {
@@ -249,14 +281,14 @@ module Glfm
def write_prosemirror_json_yml(all_examples, wysiwyg_html_and_json_hash, glfm_examples_statuses)
generate_and_write_for_all_examples(
- all_examples, ES_PROSEMIRROR_JSON_YML_PATH, glfm_examples_statuses
+ all_examples, ES_PROSEMIRROR_JSON_YML_PATH, glfm_examples_statuses: glfm_examples_statuses
) do |example, hash, existing_hash|
- name = example.fetch(:name)
+ name = example.fetch(:name).to_sym
- json = if glfm_examples_statuses.dig(name, 'skip_update_example_snapshot_prosemirror_json')
+ json = if glfm_examples_statuses.dig(name, :skip_update_example_snapshot_prosemirror_json)
existing_hash[name]
else
- wysiwyg_html_and_json_hash.dig(name, 'json')
+ wysiwyg_html_and_json_hash.dig(name, :json)
end
# Do not assign nil values
@@ -265,15 +297,15 @@ module Glfm
end
def generate_and_write_for_all_examples(
- all_examples, output_file_path, glfm_examples_statuses = {}, literal_scalars: true
+ all_examples, output_file_path, glfm_examples_statuses: {}, literal_scalars: true
)
preserve_existing = !glfm_examples_statuses.empty?
output("#{preserve_existing ? 'Creating/Updating' : 'Creating/Overwriting'} #{output_file_path}...")
- existing_hash = preserve_existing ? YAML.safe_load(File.open(output_file_path)) : {}
+ existing_hash = preserve_existing ? YAML.safe_load(File.open(output_file_path), symbolize_names: true) : {}
output_hash = all_examples.each_with_object({}) do |example, hash|
- name = example.fetch(:name)
- if (reason = glfm_examples_statuses.dig(name, 'skip_update_example_snapshots'))
+ name = example.fetch(:name).to_sym
+ if (reason = glfm_examples_statuses.dig(name, :skip_update_example_snapshots))
# Output the reason for skipping the example, but only once, not multiple times for each file
output("Skipping '#{name}'. Reason: #{reason}") unless glfm_examples_statuses.dig(name, :already_printed)
# We just store the `:already_printed` flag in the hash entry itself. Then we
@@ -293,37 +325,5 @@ module Glfm
yaml_string = dump_yaml_with_formatting(output_hash, literal_scalars: literal_scalars)
write_file(output_file_path, yaml_string)
end
-
- # Construct an AST so we can control YAML formatting for
- # YAML block scalar literals and key quoting.
- #
- # Note that when Psych dumps the markdown to YAML, it will
- # automatically use the default "clip" behavior of the Block Chomping Indicator (`|`)
- # https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator,
- # when the markdown strings contain a trailing newline. The type of
- # Block Chomping Indicator is automatically determined, you cannot specify it
- # manually.
- def dump_yaml_with_formatting(hash, literal_scalars:)
- visitor = Psych::Visitors::YAMLTree.create
- visitor << hash
- ast = visitor.tree
-
- # Force all scalars to have literal formatting (using Block Chomping Indicator instead of quotes)
- if literal_scalars
- ast.grep(Psych::Nodes::Scalar).each do |node|
- node.style = Psych::Nodes::Scalar::LITERAL
- end
- end
-
- # Do not quote the keys
- ast.grep(Psych::Nodes::Mapping).each do |node|
- node.children.each_slice(2) do |k, _|
- k.quoted = false
- k.style = Psych::Nodes::Scalar::ANY
- end
- end
-
- ast.to_yaml
- end
end
end
diff --git a/scripts/lint-doc.sh b/scripts/lint-doc.sh
index afc04da19a7..f954b2d8106 100755
--- a/scripts/lint-doc.sh
+++ b/scripts/lint-doc.sh
@@ -128,7 +128,7 @@ function run_locally_or_in_docker() {
$cmd $args
elif hash docker 2>/dev/null
then
- docker run -t -v ${PWD}:/gitlab -w /gitlab --rm registry.gitlab.com/gitlab-org/gitlab-docs/lint-markdown:alpine-3.15-vale-2.15.5-markdownlint-0.31.1 ${cmd} ${args}
+ docker run -t -v ${PWD}:/gitlab -w /gitlab --rm registry.gitlab.com/gitlab-org/gitlab-docs/lint-markdown:alpine-3.16-vale-2.20.1-markdownlint-0.32.2 ${cmd} ${args}
else
echo
echo " ✖ ERROR: '${cmd}' not found. Install '${cmd}' or Docker to proceed." >&2
diff --git a/scripts/rspec_check_order_dependence b/scripts/rspec_check_order_dependence
new file mode 100755
index 00000000000..91d4c5938a7
--- /dev/null
+++ b/scripts/rspec_check_order_dependence
@@ -0,0 +1,76 @@
+#!/usr/bin/env bash
+
+## Usage: scripts/rspec_check_order_dependence <files...>
+#
+# List of RSpec files to be checked for their order dependency.
+#
+# If the files pass the following checks it's likely they are not
+# order-dependent and are removed from `spec/support/rspec_order_todo.yml`
+# to make them run in random order.
+#
+# The following checks are available:
+# * Run specs in _defined_ order
+# * Run specs in _reverse_ order
+# * Run specs in _random_ order
+
+if [ $# -eq 0 ]; then
+ echo "Usage: $0 <files...>"
+ exit
+fi
+
+TODO_YAML='./spec/support/rspec_order_todo.yml'
+RSPEC_ARGS=(--format progress)
+
+abort() {
+ echo "$@"
+ echo "Aborting..."
+ exit 1
+}
+
+for file in "$@"
+do
+ # Drop potential file prefix `./`
+ file=${file#./}
+
+ # Match only the prefix so we can specify a directory to match all the files
+ # under it. For example, `spec/rubocop` will match, test and remove all TODO
+ # entries starting with `./spec/rubocop`.
+ grep -E -- "- './$file" "$TODO_YAML" > /dev/null || abort "Could not find '$file' in '$TODO_YAML'"
+done
+
+set -xe
+
+bin/rspec --order defined "${RSPEC_ARGS[@]}" "$@"
+RSPEC_ORDER=reverse bin/rspec "${RSPEC_ARGS[@]}" "$@"
+bin/rspec --order random "${RSPEC_ARGS[@]}" "$@"
+
+set +xe
+
+green='\033[0;32m'
+clear='\033[0m' # No Color
+
+echo -e "$green"
+echo "
+The files passed all checks!
+
+They are likely not order-dependent and can be run in random order and thus
+are being removed from 'spec/support/rspec_order_todo.yml':
+"
+
+for file in "$@"
+do
+ # Drop potential file prefix `./`
+ file=${file#./}
+
+ echo " * Removing '$file'"
+
+ # Escape forward slashes to make it compatible with sed below
+ escaped_file=${file//\//\\/}
+
+ # We must use -i.bak to make sed work on Linux and MacOS.
+ # See https://riptutorial.com/sed/topic/9436/bsd-macos-sed-vs--gnu-sed-vs--the-posix-sed-specification
+ sed -i.bak "/- '.\/$escaped_file/d" "$TODO_YAML"
+ rm "$TODO_YAML.bak"
+done
+
+echo -e "$clear"
diff --git a/scripts/rspec_helpers.sh b/scripts/rspec_helpers.sh
index b31e3663eaa..5d7bd844c2c 100644
--- a/scripts/rspec_helpers.sh
+++ b/scripts/rspec_helpers.sh
@@ -269,7 +269,7 @@ function rspec_paralellized_job() {
debug_rspec_variables
if [[ -n $RSPEC_TESTS_MAPPING_ENABLED ]]; then
- tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" --filter "tmp/matching_tests.txt" || rspec_run_status=$?
+ tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" --filter "${RSPEC_MATCHING_TESTS_PATH}" || rspec_run_status=$?
else
tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" || rspec_run_status=$?
fi
@@ -360,9 +360,22 @@ function rspec_fail_fast() {
function rspec_matched_foss_tests() {
local test_file_count_threshold=20
local matching_tests_file=${1}
+ local foss_matching_tests_file="${matching_tests_file}-foss"
+
+ # Keep only files that exist (i.e. exclude EE-specific files)
+ cat ${matching_tests_file} | ruby -e 'puts $stdin.read.split(" ").select { |f| File.exist?(f) && f.include?("spec/") }.join(" ")' > "${foss_matching_tests_file}"
+
+ echo "Matching tests file:"
+ cat ${matching_tests_file}
+ echo -e "\n\n"
+
+ echo "FOSS matching tests file:"
+ cat ${foss_matching_tests_file}
+ echo -e "\n\n"
+
local rspec_opts=${2}
- local test_files="$(cat "${matching_tests_file}")"
- local test_file_count=$(wc -w "${matching_tests_file}" | awk {'print $1'})
+ local test_files="$(cat ${foss_matching_tests_file})"
+ local test_file_count=$(wc -w "${foss_matching_tests_file}" | awk {'print $1'})
if [[ "${test_file_count}" -gt "${test_file_count_threshold}" ]]; then
echo "This job is intentionally failed because there are more than ${test_file_count_threshold} FOSS test files matched,"
diff --git a/scripts/rubocop-parse b/scripts/rubocop-parse
new file mode 100755
index 00000000000..4c82be5934b
--- /dev/null
+++ b/scripts/rubocop-parse
@@ -0,0 +1,73 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+# Emit AST from parsed Ruby code by RuboCop.
+#
+# This is an alternative to `ruby-parser` shipped with `parser` gem.
+#
+# Usage:
+# rubocop-parse -e 'puts "hello"'
+# (send nil :puts
+# (str "hello"))
+#
+# rubocop-parse -e 'puts "hello"' -v 3.0
+# (send nil :puts
+# (str "hello"))
+#
+# rubocop-parse app/models/project.rb
+# (begin
+# (send nil :require
+# (str "carrierwave/orm/activerecord"))
+# (class
+# (const nil :Project)
+# (const nil :ApplicationRecord)
+# (begin
+# (send nil :include
+# ...
+
+require_relative '../config/bundler_setup'
+
+require 'rubocop'
+require 'optparse'
+
+def print_ast(file, source, version)
+ version ||= RuboCop::ConfigStore.new.for_file(file).target_ruby_version
+ puts RuboCop::AST::ProcessedSource.new(source, version).ast.to_s
+end
+
+options = Struct.new(:eval, :ruby_version, :print_help, keyword_init: true).new
+
+parser = OptionParser.new do |opts|
+ opts.banner = "Usage: #{$0} [-e code] [FILE...]"
+
+ opts.on('-e FRAGMENT', '--eval FRAGMENT', 'Process a fragment of Ruby code') do |code|
+ options.eval = code
+ end
+
+ opts.on('-v RUBY_VERSION', '--ruby-version RUBY_VERSION',
+ 'Parse as Ruby would. Defaults to RuboCop TargetRubyVersion setting.') do |ruby_version|
+ options.ruby_version = Float(ruby_version)
+ end
+
+ opts.on('-h', '--help') do
+ options.print_help = true
+ end
+end
+
+args = parser.parse!
+
+if options.print_help
+ puts parser
+ exit
+end
+
+print_ast('', options.eval, options.ruby_version) if options.eval
+
+args.each do |arg|
+ if File.file?(arg)
+ source = File.read(arg)
+ print_ast(arg, source, options.ruby_version)
+ else
+ warn "Skipping non-file #{arg.inspect}"
+ end
+end
diff --git a/scripts/static-analysis b/scripts/static-analysis
index 1e9fe1cc724..53f84c19ac6 100755
--- a/scripts/static-analysis
+++ b/scripts/static-analysis
@@ -46,11 +46,11 @@ class StaticAnalysis
# around this we will only enable this task on EE installations.
TASKS_WITH_DURATIONS_SECONDS = [
(Gitlab.ee? ? Task.new(%w[bin/rake gettext:updated_check], 360) : nil),
- Task.new(%w[yarn run lint:prettier], 160),
- Task.new(%w[bin/rake gettext:lint], 85),
- Task.new(%W[scripts/license-check.sh #{project_path}], 20),
- Task.new(%w[bin/rake lint:static_verification], 35),
- Task.new(%w[scripts/rubocop-max-files-in-cache-check], 20),
+ Task.new(%w[yarn run lint:prettier], 200),
+ Task.new(%w[bin/rake gettext:lint], 105),
+ Task.new(%W[scripts/license-check.sh #{project_path}], 200),
+ Task.new(%w[bin/rake lint:static_verification], 40),
+ Task.new(%w[scripts/rubocop-max-files-in-cache-check], 25),
Task.new(%w[bin/rake config_lint], 10),
Task.new(%w[bin/rake gitlab:sidekiq:all_queues_yml:check], 15),
(Gitlab.ee? ? Task.new(%w[bin/rake gitlab:sidekiq:sidekiq_queues_yml:check], 11) : nil),
diff --git a/scripts/trigger-build.rb b/scripts/trigger-build.rb
index 57cc6a8551e..b368bbdb1f1 100755
--- a/scripts/trigger-build.rb
+++ b/scripts/trigger-build.rb
@@ -144,12 +144,10 @@ module Trigger
end
def base_variables
- # Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA for omnibus checkouts due to pipeline for merged results,
- # and fallback to CI_COMMIT_SHA for the `detached` pipelines.
{
'GITLAB_REF_SLUG' => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_REF_SLUG'],
'TRIGGERED_USER' => ENV['TRIGGERED_USER'] || ENV['GITLAB_USER_NAME'],
- 'TOP_UPSTREAM_SOURCE_SHA' => Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA']
+ 'TOP_UPSTREAM_SOURCE_SHA' => ENV['CI_COMMIT_SHA']
}
end
@@ -161,53 +159,6 @@ module Trigger
end
end
- class Omnibus < Base
- def self.access_token
- # Default to "Multi-pipeline (from 'gitlab-org/gitlab' 'package-and-qa' job)" at https://gitlab.com/gitlab-org/build/omnibus-gitlab-mirror/-/settings/access_tokens
- ENV['OMNIBUS_GITLAB_PROJECT_ACCESS_TOKEN'] || super
- end
-
- private
-
- def downstream_project_path
- ENV.fetch('OMNIBUS_PROJECT_PATH', 'gitlab-org/build/omnibus-gitlab-mirror')
- end
-
- def ref_param_name
- 'OMNIBUS_BRANCH'
- end
-
- def primary_ref
- 'master'
- end
-
- def trigger_stable_branch_if_detected?
- true
- end
-
- def extra_variables
- # Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA (MR HEAD commit) so that the image is in sync with the assets and QA images.
- # See https://docs.gitlab.com/ee/development/testing_guide/end_to_end/index.html#with-pipeline-for-merged-results.
- # We also set IMAGE_TAG so the GitLab Docker image is tagged with that SHA.
- source_sha = Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA']
-
- {
- 'GITLAB_VERSION' => source_sha,
- 'IMAGE_TAG' => source_sha,
- 'QA_IMAGE' => ENV['QA_IMAGE'],
- 'SKIP_QA_DOCKER' => 'true',
- 'ALTERNATIVE_SOURCES' => 'true',
- 'SECURITY_SOURCES' => Trigger.security? ? 'true' : 'false',
- 'ee' => Trigger.ee? ? 'true' : 'false',
- 'QA_BRANCH' => ENV['QA_BRANCH'] || 'master',
- 'CACHE_UPDATE' => ENV['OMNIBUS_GITLAB_CACHE_UPDATE'],
- 'GITLAB_QA_OPTIONS' => ENV['GITLAB_QA_OPTIONS'],
- 'QA_TESTS' => ENV['QA_TESTS'],
- 'ALLURE_JOB_NAME' => ENV['ALLURE_JOB_NAME']
- }
- end
- end
-
class CNG < Base
def variables
# Delete variables that aren't useful when using native triggers.
@@ -232,14 +183,11 @@ module Trigger
end
def extra_variables
- # Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA (MR HEAD commit) so that the image is in sync with the assets and QA images.
- source_sha = Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA']
-
{
"TRIGGER_BRANCH" => ref,
- "GITLAB_VERSION" => source_sha,
+ "GITLAB_VERSION" => ENV['CI_COMMIT_SHA'],
"GITLAB_TAG" => ENV['CI_COMMIT_TAG'], # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
- "GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : source_sha,
+ "GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_SHA'],
"FORCE_RAILS_IMAGE_BUILDS" => 'true',
"CE_PIPELINE" => Trigger.ee? ? nil : "true", # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
"EE_PIPELINE" => Trigger.ee? ? "true" : nil # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
@@ -403,10 +351,9 @@ module Trigger
def extra_variables
{
- # Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA for omnibus checkouts due to pipeline for merged results
- # and fallback to CI_COMMIT_SHA for the `detached` pipelines.
'GITLAB_COMMIT_SHA' => Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA'],
- 'TRIGGERED_USER_LOGIN' => ENV['GITLAB_USER_LOGIN']
+ 'TRIGGERED_USER_LOGIN' => ENV['GITLAB_USER_LOGIN'],
+ 'TOP_UPSTREAM_SOURCE_SHA' => Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA']
}
end
@@ -482,8 +429,6 @@ end
if $0 == __FILE__
case ARGV[0]
- when 'omnibus'
- Trigger::Omnibus.new.invoke!(downstream_job_name: 'Trigger:qa-test').wait!
when 'cng'
Trigger::CNG.new.invoke!.wait!
when 'gitlab-com-database-testing'
diff --git a/scripts/utils.sh b/scripts/utils.sh
index 8db525abc93..10b7f856ee6 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -38,6 +38,8 @@ function bundle_install_script() {
exit 1;
fi;
+ echo -e "section_start:`date +%s`:bundle-install[collapsed=true]\r\e[0KInstalling gems"
+
gem --version
bundle --version
gem install bundler --no-document --conservative --version 2.3.15
@@ -48,7 +50,7 @@ function bundle_install_script() {
echo "${BUNDLE_WITHOUT}"
bundle config
- run_timed_command "bundle install ${BUNDLE_INSTALL_FLAGS} ${extra_install_args} && bundle check"
+ run_timed_command "bundle install ${BUNDLE_INSTALL_FLAGS} ${extra_install_args}"
if [[ $(bundle info pg) ]]; then
# When we test multiple versions of PG in the same pipeline, we have a single `setup-test-env`
@@ -56,6 +58,8 @@ function bundle_install_script() {
# Uncomment the following line if multiple versions of PG are tested in the same pipeline.
run_timed_command "bundle pristine pg"
fi
+
+ echo -e "section_end:`date +%s`:bundle-install\r\e[0K"
}
function setup_db_user_only() {
diff --git a/scripts/verify-tff-mapping b/scripts/verify-tff-mapping
index 9eb1d43c65b..b4974f71ebf 100755
--- a/scripts/verify-tff-mapping
+++ b/scripts/verify-tff-mapping
@@ -116,6 +116,18 @@ tests = [
explanation: 'Whats New should map to its respective spec',
source: 'data/whats_new/202101140001_13_08.yml',
expected: ['spec/lib/release_highlights/validator_spec.rb']
+ },
+
+ {
+ explanation: 'Spec for every sidekiq worker',
+ source: 'app/workers/new_worker.rb',
+ expected: ['spec/workers/every_sidekiq_worker_spec.rb']
+ },
+
+ {
+ explanation: 'Known events',
+ source: 'lib/gitlab/usage_data_counters/known_events/common.yml',
+ expected: ['spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb', 'spec/lib/gitlab/usage_data_spec.rb']
}
]