| author | GitLab Bot <gitlab-bot@gitlab.com> | 2020-12-17 11:59:07 +0000 |
|---|---|---|
| committer | GitLab Bot <gitlab-bot@gitlab.com> | 2020-12-17 11:59:07 +0000 |
| commit | 8b573c94895dc0ac0e1d9d59cf3e8745e8b539ca (patch) | |
| tree | 544930fb309b30317ae9797a9683768705d664c4 /scripts | |
| parent | 4b1de649d0168371549608993deac953eb692019 (diff) | |
| download | gitlab-ce-8b573c94895dc0ac0e1d9d59cf3e8745e8b539ca.tar.gz | |
Add latest changes from gitlab-org/gitlab@13-7-stable-ee (tag: v13.7.0-rc42)
Diffstat (limited to 'scripts')
| mode | path | lines changed |
|---|---|---|
| -rwxr-xr-x | scripts/api/cancel_pipeline | 58 |
| -rwxr-xr-x | scripts/api/download_job_artifact | 94 |
| -rwxr-xr-x | scripts/api/get_job_id | 140 |
| -rwxr-xr-x | scripts/api/play_job | 60 |
| -rwxr-xr-x | scripts/build_assets_image | 10 |
| -rwxr-xr-x | scripts/frontend/test.js | 123 |
| -rwxr-xr-x | scripts/get-job-id | 43 |
| -rwxr-xr-x | scripts/gitaly-test-build | 2 |
| -rwxr-xr-x | scripts/gitaly-test-spawn | 1 |
| -rw-r--r-- | scripts/gitaly_test.rb | 17 |
| -rwxr-xr-x | scripts/lint-doc.sh | 27 |
| -rwxr-xr-x | scripts/regenerate-schema | 7 |
| -rw-r--r-- | scripts/rspec_helpers.sh | 60 |
| -rwxr-xr-x | scripts/update-workhorse | 59 |
| -rwxr-xr-x | scripts/used-feature-flags | 5 |
| -rw-r--r-- | scripts/utils.sh | 57 |
| -rwxr-xr-x | scripts/verify-tff-mapping | 10 |
17 files changed, 499 insertions, 274 deletions
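The new scripts/api helpers replace both the S3-backed test-metadata downloads and the old get-job-id / play_job shell helpers. As a rough usage sketch, not part of the commit itself: the flags come from the option parsers in the diffs below, the project path, job name and report variable are the ones rspec_helpers.sh and utils.sh use, and the scripts fall back to $CI_PROJECT_ID, $CI_PIPELINE_ID and $GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN when flags are omitted.

    # Look up a job on master by name (project defaults to $CI_PROJECT_ID if --project is omitted).
    job_id=$(scripts/api/get_job_id --project "gitlab-org/gitlab" \
      -q "status=success" -q "ref=master" --job-name "update-tests-metadata")

    # Download one of that job's artifacts, falling back to an empty report on failure,
    # mirroring what retrieve_tests_metadata does in rspec_helpers.sh below.
    scripts/api/download_job_artifact --project "gitlab-org/gitlab" \
      --job-id "${job_id}" --artifact-path "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" \
      || echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"

    # Cancel the current pipeline (what fail_pipeline_early in utils.sh now calls).
    scripts/api/cancel_pipeline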
diff --git a/scripts/api/cancel_pipeline b/scripts/api/cancel_pipeline
new file mode 100755
index 00000000000..0965877a69a
--- /dev/null
+++ b/scripts/api/cancel_pipeline
@@ -0,0 +1,58 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'rubygems'
+require 'gitlab'
+require 'optparse'
+require_relative 'get_job_id'
+
+class CancelPipeline
+  DEFAULT_OPTIONS = {
+    project: ENV['CI_PROJECT_ID'],
+    pipeline_id: ENV['CI_PIPELINE_ID'],
+    api_token: ENV['GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN']
+  }.freeze
+
+  def initialize(options)
+    @project = options.delete(:project)
+    @pipeline_id = options.delete(:pipeline_id)
+
+    Gitlab.configure do |config|
+      config.endpoint = 'https://gitlab.com/api/v4'
+      config.private_token = options.delete(:api_token)
+    end
+  end
+
+  def execute
+    Gitlab.cancel_pipeline(project, pipeline_id)
+  end
+
+  private
+
+  attr_reader :project, :pipeline_id
+end
+
+if $0 == __FILE__
+  options = CancelPipeline::DEFAULT_OPTIONS.dup
+
+  OptionParser.new do |opts|
+    opts.on("-p", "--project PROJECT", String, "Project where to find the job (defaults to $CI_PROJECT_ID)") do |value|
+      options[:project] = value
+    end
+
+    opts.on("-i", "--pipeline-id PIPELINE_ID", String, "A pipeline ID (defaults to $CI_PIPELINE_ID)") do |value|
+      options[:pipeline_id] = value
+    end
+
+    opts.on("-t", "--api-token API_TOKEN", String, "A value API token with the `read_api` scope") do |value|
+      options[:api_token] = value
+    end
+
+    opts.on("-h", "--help", "Prints this help") do
+      puts opts
+      exit
+    end
+  end.parse!
+
+  CancelPipeline.new(options).execute
+end
diff --git a/scripts/api/download_job_artifact b/scripts/api/download_job_artifact
new file mode 100755
index 00000000000..8e2207c6fa7
--- /dev/null
+++ b/scripts/api/download_job_artifact
@@ -0,0 +1,94 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'rubygems'
+require 'optparse'
+require 'fileutils'
+require 'uri'
+require 'cgi'
+require 'net/http'
+
+class ArtifactFinder
+  DEFAULT_OPTIONS = {
+    project: ENV['CI_PROJECT_ID'],
+    api_token: ENV['GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN']
+  }.freeze
+
+  def initialize(options)
+    @project = options.delete(:project)
+    @job_id = options.delete(:job_id)
+    @api_token = options.delete(:api_token)
+    @artifact_path = options.delete(:artifact_path)
+
+    warn "No API token given." unless api_token
+  end
+
+  def execute
+    url = "https://gitlab.com/api/v4/projects/#{CGI.escape(project)}/jobs/#{job_id}/artifacts"
+
+    if artifact_path
+      FileUtils.mkdir_p(File.dirname(artifact_path))
+      url += "/#{artifact_path}"
+    end
+
+    fetch(url)
+  end
+
+  private
+
+  attr_reader :project, :job_id, :api_token, :artifact_path
+
+  def fetch(uri_str, limit = 10)
+    raise 'Too many HTTP redirects' if limit == 0
+
+    uri = URI(uri_str)
+    request = Net::HTTP::Get.new(uri)
+    request['Private-Token'] = api_token if api_token
+
+    Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
+      http.request(request) do |response|
+        case response
+        when Net::HTTPSuccess then
+          File.open(artifact_path || 'artifacts.zip', 'w') do |file|
+            response.read_body(&file.method(:write))
+          end
+        when Net::HTTPRedirection then
+          location = response['location']
+          warn "Redirected (#{limit - 1} redirections remaining)."
+          fetch(location, limit - 1)
+        else
+          raise "Unexpected response: #{response.value}"
+        end
+      end
+    end
+  end
+end
+
+if $0 == __FILE__
+  options = ArtifactFinder::DEFAULT_OPTIONS.dup
+
+  OptionParser.new do |opts|
+    opts.on("-p", "--project PROJECT", String, "Project where to find the job (defaults to $CI_PROJECT_ID)") do |value|
+      options[:project] = value
+    end
+
+    opts.on("-j", "--job-id JOB_ID", String, "A job ID") do |value|
+      options[:job_id] = value
+    end
+
+    opts.on("-a", "--artifact-path ARTIFACT_PATH", String, "A valid artifact path") do |value|
+      options[:artifact_path] = value
+    end
+
+    opts.on("-t", "--api-token API_TOKEN", String, "A value API token with the `read_api` scope") do |value|
+      options[:api_token] = value
+    end
+
+    opts.on("-h", "--help", "Prints this help") do
+      puts opts
+      exit
+    end
+  end.parse!
+
+  ArtifactFinder.new(options).execute
+end
diff --git a/scripts/api/get_job_id b/scripts/api/get_job_id
new file mode 100755
index 00000000000..c7fe859db91
--- /dev/null
+++ b/scripts/api/get_job_id
@@ -0,0 +1,140 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'rubygems'
+require 'gitlab'
+require 'optparse'
+
+class JobFinder
+  DEFAULT_OPTIONS = {
+    project: ENV['CI_PROJECT_ID'],
+    pipeline_id: ENV['CI_PIPELINE_ID'],
+    pipeline_query: {},
+    job_query: {},
+    api_token: ENV['GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN']
+  }.freeze
+
+  def initialize(options)
+    @project = options.delete(:project)
+    @pipeline_query = options.delete(:pipeline_query)
+    @job_query = options.delete(:job_query)
+    @pipeline_id = options.delete(:pipeline_id)
+    @job_name = options.delete(:job_name)
+    @artifact_path = options.delete(:artifact_path)
+
+    # Force the token to be a string so that if api_token is nil, it's set to '', allowing unauthenticated requests (for forks).
+    api_token = options.delete(:api_token).to_s
+
+    warn "No API token given." if api_token.empty?
+
+    Gitlab.configure do |config|
+      config.endpoint = 'https://gitlab.com/api/v4'
+      config.private_token = api_token
+    end
+  end
+
+  def execute
+    find_job_with_artifact || find_job_with_filtered_pipelines || find_job_in_pipeline
+  end
+
+  private
+
+  attr_reader :project, :pipeline_query, :job_query, :pipeline_id, :job_name, :artifact_path
+
+  def find_job_with_artifact
+    return if artifact_path.nil?
+
+    Gitlab.pipelines(project, pipeline_query_params).auto_paginate do |pipeline|
+      Gitlab.pipeline_jobs(project, pipeline.id, job_query_params).auto_paginate do |job|
+        return job if found_job_with_artifact?(job) # rubocop:disable Cop/AvoidReturnFromBlocks
+      end
+    end
+
+    raise 'Job not found!'
+  end
+
+  def find_job_with_filtered_pipelines
+    return if pipeline_query.empty?
+
+    Gitlab.pipelines(project, pipeline_query_params).auto_paginate do |pipeline|
+      Gitlab.pipeline_jobs(project, pipeline.id, job_query_params).auto_paginate do |job|
+        return job if found_job_by_name?(job) # rubocop:disable Cop/AvoidReturnFromBlocks
+      end
+    end
+
+    raise 'Job not found!'
+  end
+
+  def find_job_in_pipeline
+    return unless pipeline_id
+
+    Gitlab.pipeline_jobs(project, pipeline_id, job_query_params).auto_paginate do |job|
+      return job if found_job_by_name?(job) # rubocop:disable Cop/AvoidReturnFromBlocks
+    end
+
+    raise 'Job not found!'
+  end
+
+  def found_job_with_artifact?(job)
+    artifact_url = "https://gitlab.com/api/v4/projects/#{CGI.escape(project)}/jobs/#{job.id}/artifacts/#{artifact_path}"
+    response = HTTParty.head(artifact_url) # rubocop:disable Gitlab/HTTParty
+    response.success?
+  end
+
+  def found_job_by_name?(job)
+    job.name == job_name
+  end
+
+  def pipeline_query_params
+    @pipeline_query_params ||= { per_page: 100, **pipeline_query }
+  end
+
+  def job_query_params
+    @job_query_params ||= { per_page: 100, **job_query }
+  end
+end
+
+if $0 == __FILE__
+  options = JobFinder::DEFAULT_OPTIONS.dup
+
+  OptionParser.new do |opts|
+    opts.on("-p", "--project PROJECT", String, "Project where to find the job (defaults to $CI_PROJECT_ID)") do |value|
+      options[:project] = value
+    end
+
+    opts.on("-i", "--pipeline-id pipeline_id", String, "A pipeline ID (defaults to $CI_PIPELINE_ID)") do |value|
+      options[:pipeline_id] = value
+    end
+
+    opts.on("-q", "--pipeline-query pipeline_query", String, "Query to pass to the Pipeline API request") do |value|
+      options[:pipeline_query].merge!(Hash[*value.split('=')])
+    end
+
+    opts.on("-Q", "--job-query job_query", String, "Query to pass to the Job API request") do |value|
+      options[:job_query].merge!(Hash[*value.split('=')])
+    end
+
+    opts.on("-j", "--job-name job_name", String, "A job name that needs to exist in the found pipeline") do |value|
+      options[:job_name] = value
+    end
+
+    opts.on("-a", "--artifact-path ARTIFACT_PATH", String, "A valid artifact path") do |value|
+      options[:artifact_path] = value
+    end
+
+    opts.on("-t", "--api-token API_TOKEN", String, "A value API token with the `read_api` scope") do |value|
+      options[:api_token] = value
+    end
+
+    opts.on("-h", "--help", "Prints this help") do
+      puts opts
+      exit
+    end
+  end.parse!
+
+  job = JobFinder.new(options).execute
+
+  return if job.nil?
+
+  puts job.id
+end
diff --git a/scripts/api/play_job b/scripts/api/play_job
new file mode 100755
index 00000000000..199f7e65633
--- /dev/null
+++ b/scripts/api/play_job
@@ -0,0 +1,60 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'rubygems'
+require 'gitlab'
+require 'optparse'
+require_relative 'get_job_id'
+
+class PlayJob
+  DEFAULT_OPTIONS = {
+    project: ENV['CI_PROJECT_ID'],
+    pipeline_id: ENV['CI_PIPELINE_ID'],
+    api_token: ENV['GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN']
+  }.freeze
+
+  def initialize(options)
+    @project = options.delete(:project)
+    @options = options
+
+    Gitlab.configure do |config|
+      config.endpoint = 'https://gitlab.com/api/v4'
+      config.private_token = options.fetch(:api_token)
+    end
+  end
+
+  def execute
+    job = JobFinder.new(project, options.slice(:api_token, :pipeline_id, :job_name).merge(scope: 'manual')).execute
+
+    Gitlab.job_play(project, job.id)
+  end
+
+  private
+
+  attr_reader :project, :options
+end
+
+if $0 == __FILE__
+  options = PlayJob::DEFAULT_OPTIONS.dup
+
+  OptionParser.new do |opts|
+    opts.on("-p", "--project PROJECT", String, "Project where to find the job (defaults to $CI_PROJECT_ID)") do |value|
+      options[:project] = value
+    end
+
+    opts.on("-j", "--job-name JOB_NAME", String, "A job name that needs to exist in the found pipeline") do |value|
+      options[:job_name] = value
+    end
+
+    opts.on("-t", "--api-token API_TOKEN", String, "A value API token with the `read_api` scope") do |value|
+      options[:api_token] = value
+    end
+
+    opts.on("-h", "--help", "Prints this help") do
+      puts opts
+      exit
+    end
+  end.parse!
+
+  PlayJob.new(options).execute
+end
diff --git a/scripts/build_assets_image b/scripts/build_assets_image
index e6a5f036fe5..12beddfa184 100755
--- a/scripts/build_assets_image
+++ b/scripts/build_assets_image
@@ -20,21 +20,13 @@ cp Dockerfile.assets assets_container.build/
 
 COMMIT_REF_SLUG_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_SLUG}
 COMMIT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_SHA}
+COMMIT_REF_NAME_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_NAME}
 
 DESTINATIONS="--destination=$COMMIT_REF_SLUG_DESTINATION --destination=$COMMIT_SHA_DESTINATION"
 
-# For EE branch builds, add a truncated SHA destination for later use by Omnibus
-# auto-deploy builds
-if [[ "${ASSETS_IMAGE_NAME}" == "gitlab-assets-ee" ]] && [ -n "$CI_COMMIT_BRANCH" ]
-then
-  COMMIT_SHORT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_SHA:0:11}
-  DESTINATIONS="$DESTINATIONS --destination=$COMMIT_SHORT_SHA_DESTINATION"
-fi
-
 # Also tag the image with GitLab version, if running on a tag pipeline, so
 # other projects can simply use that instead of computing the slug.
 if [ -n "$CI_COMMIT_TAG" ]; then
-  COMMIT_REF_NAME_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_NAME}
   DESTINATIONS="$DESTINATIONS --destination=$COMMIT_REF_NAME_DESTINATION"
 fi
diff --git a/scripts/frontend/test.js b/scripts/frontend/test.js
deleted file mode 100755
index 71a8bebf0f2..00000000000
--- a/scripts/frontend/test.js
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env node
-
-const { spawn } = require('child_process');
-const { EOL } = require('os');
-const program = require('commander');
-const chalk = require('chalk');
-
-const SUCCESS_CODE = 0;
-const JEST_ROUTE = 'spec/frontend';
-const KARMA_ROUTE = 'spec/javascripts';
-const COMMON_ARGS = ['--colors'];
-const jestArgs = [...COMMON_ARGS, '--passWithNoTests'];
-const karmaArgs = [...COMMON_ARGS, '--no-fail-on-empty-test-suite'];
-
-program
-  .usage('[options] <file ...>')
-  .option('-p, --parallel', 'Run tests suites in parallel')
-  .option(
-    '-w, --watch',
-    'Rerun tests when files change (tests will be run in parallel if this enabled)',
-  )
-  .parse(process.argv);
-
-const shouldParallelize = program.parallel || program.watch;
-
-const isSuccess = code => code === SUCCESS_CODE;
-
-const combineExitCodes = codes => {
-  const firstFail = codes.find(x => !isSuccess(x));
-
-  return firstFail === undefined ? SUCCESS_CODE : firstFail;
-};
-
-const skipIfFail = fn => code => (isSuccess(code) ? fn() : code);
-
-const endWithEOL = str => (str[str.length - 1] === '\n' ? str : `${str}${EOL}`);
-
-const runTests = paths => {
-  if (shouldParallelize) {
-    return Promise.all([runJest(paths), runKarma(paths)]).then(combineExitCodes);
-  } else {
-    return runJest(paths).then(skipIfFail(() => runKarma(paths)));
-  }
-};
-
-const spawnYarnScript = (cmd, args) => {
-  return new Promise((resolve, reject) => {
-    const proc = spawn('yarn', ['run', cmd, ...args]);
-    const output = data => {
-      const text = data
-        .toString()
-        .split(/\r?\n/g)
-        .map((line, idx, { length }) =>
-          idx === length - 1 && !line ? line : `${chalk.gray(cmd)}: ${line}`,
-        )
-        .join(EOL);
-
-      return endWithEOL(text);
-    };
-
-    proc.stdout.on('data', data => {
-      process.stdout.write(output(data));
-    });
-
-    proc.stderr.on('data', data => {
-      process.stderr.write(output(data));
-    });
-
-    proc.on('close', code => {
-      process.stdout.write(output(`exited with code ${code}`));
-
-      // We resolve even on a failure code because a `reject` would cause
-      // Promise.all to reject immediately (without waiting for other promises)
-      // to finish.
-      resolve(code);
-    });
-  });
-};
-
-const runJest = args => {
-  return spawnYarnScript('jest', [...jestArgs, ...toJestArgs(args)]);
-};
-
-const runKarma = args => {
-  return spawnYarnScript('karma', [...karmaArgs, ...toKarmaArgs(args)]);
-};
-
-const replacePath = to => path =>
-  path
-    .replace(JEST_ROUTE, to)
-    .replace(KARMA_ROUTE, to)
-    .replace('app/assets/javascripts', to);
-
-const replacePathForJest = replacePath(JEST_ROUTE);
-
-const replacePathForKarma = replacePath(KARMA_ROUTE);
-
-const toJestArgs = paths => paths.map(replacePathForJest);
-
-const toKarmaArgs = paths =>
-  paths.reduce((acc, path) => acc.concat('-f', replacePathForKarma(path)), []);
-
-const main = paths => {
-  if (program.watch) {
-    jestArgs.push('--watch');
-    karmaArgs.push('--single-run', 'false', '--auto-watch');
-  }
-  runTests(paths).then(code => {
-    console.log('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~');
-    if (isSuccess(code)) {
-      console.log(chalk.bgGreen(chalk.black('All tests passed :)')));
-    } else {
-      console.log(chalk.bgRed(chalk.white(`Some tests failed :(`)));
-    }
-    console.log('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~');
-
-    if (!isSuccess(code)) {
-      process.exit(code);
-    }
-  });
-};
-
-main(program.args);
diff --git a/scripts/get-job-id b/scripts/get-job-id
deleted file mode 100755
index a5d34dc545b..00000000000
--- a/scripts/get-job-id
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-require 'gitlab'
-require 'optparse'
-
-#
-# Configure credentials to be used with gitlab gem
-#
-Gitlab.configure do |config|
-  config.endpoint = 'https://gitlab.com/api/v4'
-  config.private_token = ENV['GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN']
-end
-
-options = {}
-OptionParser.new do |opts|
-  opts.on("-s", "--scope=SCOPE", "Find job with matching scope") do |scope|
-    options[:scope] = scope
-  end
-end.parse!
-
-class PipelineJobFinder
-  def initialize(project_id, pipeline_id, job_name, options)
-    @project_id = project_id
-    @pipeline_id = pipeline_id
-    @job_name = job_name
-    @options = options
-  end
-
-  def execute
-    Gitlab.pipeline_jobs(@project_id, @pipeline_id, @options).auto_paginate do |job|
-      break job if job.name == @job_name
-    end
-  end
-end
-
-project_id, pipeline_id, job_name = ARGV
-
-job = PipelineJobFinder.new(project_id, pipeline_id, job_name, options).execute
-
-return if job.nil?
-
-puts job.id
diff --git a/scripts/gitaly-test-build b/scripts/gitaly-test-build
index 5254d957afd..00927646046 100755
--- a/scripts/gitaly-test-build
+++ b/scripts/gitaly-test-build
@@ -19,8 +19,10 @@ class GitalyTestBuild
     # Starting gitaly further validates its configuration
     gitaly_pid = start_gitaly
+    gitaly2_pid = start_gitaly2
     praefect_pid = start_praefect
 
     Process.kill('TERM', gitaly_pid)
+    Process.kill('TERM', gitaly2_pid)
     Process.kill('TERM', praefect_pid)
 
     # Make the 'gitaly' executable look newer than 'GITALY_SERVER_VERSION'.
diff --git a/scripts/gitaly-test-spawn b/scripts/gitaly-test-spawn
index 8e16b2bb656..c2ff9cd08aa 100755
--- a/scripts/gitaly-test-spawn
+++ b/scripts/gitaly-test-spawn
@@ -15,6 +15,7 @@ class GitalyTestSpawn
     # In local development this pid file is used by rspec.
     IO.write(File.expand_path('../tmp/tests/gitaly.pid', __dir__), start_gitaly)
+    IO.write(File.expand_path('../tmp/tests/gitaly2.pid', __dir__), start_gitaly2)
     IO.write(File.expand_path('../tmp/tests/praefect.pid', __dir__), start_praefect)
   end
 end
diff --git a/scripts/gitaly_test.rb b/scripts/gitaly_test.rb
index 54bf07b3773..559ad8f4345 100644
--- a/scripts/gitaly_test.rb
+++ b/scripts/gitaly_test.rb
@@ -62,21 +62,36 @@ module GitalyTest
     case service
     when :gitaly
       File.join(tmp_tests_gitaly_dir, 'config.toml')
+    when :gitaly2
+      File.join(tmp_tests_gitaly_dir, 'gitaly2.config.toml')
     when :praefect
       File.join(tmp_tests_gitaly_dir, 'praefect.config.toml')
     end
   end
 
+  def service_binary(service)
+    case service
+    when :gitaly, :gitaly2
+      'gitaly'
+    when :praefect
+      'praefect'
+    end
+  end
+
   def start_gitaly
     start(:gitaly)
   end
 
+  def start_gitaly2
+    start(:gitaly2)
+  end
+
   def start_praefect
     start(:praefect)
  end
 
   def start(service)
-    args = ["#{tmp_tests_gitaly_dir}/#{service}"]
+    args = ["#{tmp_tests_gitaly_dir}/#{service_binary(service)}"]
     args.push("-config") if service == :praefect
     args.push(config_path(service))
 
     pid = spawn(env, *args, [:out, :err] => "log/#{service}-test.log")
diff --git a/scripts/lint-doc.sh b/scripts/lint-doc.sh
index 9ae6ce400da..23e7cb6c455 100755
--- a/scripts/lint-doc.sh
+++ b/scripts/lint-doc.sh
@@ -18,6 +18,21 @@ then
   ((ERRORCODE++))
 fi
 
+# Test for non-standard spaces (NBSP, NNBSP) in documentation.
+echo '=> Checking for non-standard spaces...'
+echo
+grep --extended-regexp --binary-file=without-match --recursive '[ ]' doc/ >/dev/null 2>&1
+if [ $? -eq 0 ]
+then
+  echo '✖ ERROR: Non-standard spaces (NBSP, NNBSP) should not be used in documentation.
+  https://docs.gitlab.com/ee/development/documentation/styleguide/index.html#spaces-between-words
+  Replace with standard spaces:' >&2
+  # Find the spaces, then add color codes with sed to highlight each NBSP or NNBSP in the output.
+  grep --extended-regexp --binary-file=without-match --recursive --color=auto '[ ]' doc \
+    | sed -e ''/ /s//`printf "\033[0;101m \033[0m"`/'' -e ''/ /s//`printf "\033[0;101m \033[0m"`/''
+  ((ERRORCODE++))
+fi
+
 # Ensure that the CHANGELOG.md does not contain duplicate versions
 DUPLICATE_CHANGELOG_VERSIONS=$(grep --extended-regexp '^## .+' CHANGELOG.md | sed -E 's| \(.+\)||' | sort -r | uniq -d)
 echo '=> Checking for CHANGELOG.md duplicate entries...'
@@ -65,10 +80,16 @@ then
   echo "Merge request pipeline (detached) detected. Testing all files."
 else
   MERGE_BASE=$(git merge-base ${CI_MERGE_REQUEST_TARGET_BRANCH_SHA} ${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA})
-  MD_DOC_PATH=$(git diff --name-only "${MERGE_BASE}..${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}" 'doc/*.md')
-  if [ -n "${MD_DOC_PATH}" ]
+  if git diff --name-only "${MERGE_BASE}..${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}" | grep -E "\.vale|\.markdownlint|lint-doc\.sh"
   then
-    echo -e "Merged results pipeline detected. Testing only the following files:\n${MD_DOC_PATH}"
+    MD_DOC_PATH=${MD_DOC_PATH:-doc}
+    echo "Vale, Markdownlint, or lint-doc.sh configuration changed. Testing all files."
+  else
+    MD_DOC_PATH=$(git diff --name-only "${MERGE_BASE}..${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA}" 'doc/*.md')
+    if [ -n "${MD_DOC_PATH}" ]
+    then
+      echo -e "Merged results pipeline detected. Testing only the following files:\n${MD_DOC_PATH}"
+    fi
   fi
 fi
diff --git a/scripts/regenerate-schema b/scripts/regenerate-schema
index e2d46d14218..65c73a8116a 100755
--- a/scripts/regenerate-schema
+++ b/scripts/regenerate-schema
@@ -33,6 +33,7 @@ class SchemaRegenerator
     checkout_clean_schema
     hide_migrations
     remove_schema_migration_files
+    stop_spring
     reset_db
     unhide_migrations
     migrate
@@ -149,6 +150,12 @@ class SchemaRegenerator
   end
 
   ##
+  # Stop spring before modifying the database
+  def stop_spring
+    run %q[bin/spring stop]
+  end
+
+  ##
   # Run rake task to reset the database.
   def reset_db
     run %q[bin/rails db:reset RAILS_ENV=test]
diff --git a/scripts/rspec_helpers.sh b/scripts/rspec_helpers.sh
index 5f003d032b7..5b724c9251b 100644
--- a/scripts/rspec_helpers.sh
+++ b/scripts/rspec_helpers.sh
@@ -3,12 +3,18 @@
 function retrieve_tests_metadata() {
   mkdir -p knapsack/ rspec_flaky/ rspec_profiling/
 
+  local project_path="gitlab-org/gitlab"
+  local test_metadata_job_id
+
+  # Ruby
+  test_metadata_job_id=$(scripts/api/get_job_id --project "${project_path}" -q "status=success" -q "ref=master" -q "username=gitlab-bot" -Q "scope=success" --job-name "update-tests-metadata")
+
   if [[ ! -f "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ]]; then
-    wget -O "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
+    scripts/api/download_job_artifact --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
   fi
 
   if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then
-    wget -O "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
+    scripts/api/download_job_artifact --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
   fi
 }
 
@@ -16,29 +22,11 @@ function update_tests_metadata() {
   echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
 
   scripts/merge-reports "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" knapsack/rspec*.json
-  if [[ -n "${TESTS_METADATA_S3_BUCKET}" ]]; then
-    if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then
-      scripts/sync-reports put "${TESTS_METADATA_S3_BUCKET}" "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
-    else
-      echo "Not uplaoding report to S3 as the pipeline is not a scheduled one."
-    fi
-  fi
-
   rm -f knapsack/rspec*.json
 
-  scripts/merge-reports "${FLAKY_RSPEC_SUITE_REPORT_PATH}" rspec_flaky/all_*.json
-  export FLAKY_RSPEC_GENERATE_REPORT="true"
+  scripts/merge-reports "${FLAKY_RSPEC_SUITE_REPORT_PATH}" rspec_flaky/all_*.json
   scripts/flaky_examples/prune-old-flaky-examples "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
-
-  if [[ -n ${TESTS_METADATA_S3_BUCKET} ]]; then
-    if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then
-      scripts/sync-reports put "${TESTS_METADATA_S3_BUCKET}" "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
-    else
-      echo "Not uploading report to S3 as the pipeline is not a scheduled one."
-    fi
-  fi
-
   rm -f rspec_flaky/all_*.json rspec_flaky/new_*.json
 
   if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then
@@ -51,8 +39,13 @@ function update_tests_metadata() {
 function retrieve_tests_mapping() {
   mkdir -p crystalball/
 
+  local project_path="gitlab-org/gitlab"
+  local test_metadata_with_mapping_job_id
+
+  test_metadata_with_mapping_job_id=$(scripts/api/get_job_id --project "${project_path}" -q "status=success" -q "ref=master" -q "username=gitlab-bot" -Q "scope=success" --job-name "update-tests-metadata" --artifact-path "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz")
+
   if [[ ! -f "${RSPEC_PACKED_TESTS_MAPPING_PATH}" ]]; then
-    (wget -O "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz" "http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz" && gzip -d "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz") || echo "{}" > "${RSPEC_PACKED_TESTS_MAPPING_PATH}"
+    (scripts/api/download_job_artifact --project "${project_path}" --job-id "${test_metadata_with_mapping_job_id}" --artifact-path "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz" && gzip -d "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz") || echo "{}" > "${RSPEC_PACKED_TESTS_MAPPING_PATH}"
   fi
 
   scripts/unpack-test-mapping "${RSPEC_PACKED_TESTS_MAPPING_PATH}" "${RSPEC_TESTS_MAPPING_PATH}"
@@ -65,24 +58,13 @@ function update_tests_mapping() {
   fi
 
   scripts/generate-test-mapping "${RSPEC_TESTS_MAPPING_PATH}" crystalball/rspec*.yml
-
   scripts/pack-test-mapping "${RSPEC_TESTS_MAPPING_PATH}" "${RSPEC_PACKED_TESTS_MAPPING_PATH}"
-
   gzip "${RSPEC_PACKED_TESTS_MAPPING_PATH}"
-
-  if [[ -n "${TESTS_METADATA_S3_BUCKET}" ]]; then
-    if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then
-      scripts/sync-reports put "${TESTS_METADATA_S3_BUCKET}" "${RSPEC_PACKED_TESTS_MAPPING_PATH}.gz"
-    else
-      echo "Not uploading report to S3 as the pipeline is not a scheduled one."
-    fi
-  fi
-
-  rm -f crystalball/rspec*.yml
+  rm -f crystalball/rspec*.yml "${RSPEC_PACKED_TESTS_MAPPING_PATH}"
 }
 
 function crystalball_rspec_data_exists() {
-  compgen -G "crystalball/rspec*.yml" > /dev/null;
+  compgen -G "crystalball/rspec*.yml" >/dev/null
 }
 
 function rspec_simple_job() {
@@ -143,7 +125,13 @@ function rspec_paralellized_job() {
 
   export MEMORY_TEST_PATH="tmp/memory_test/${report_name}_memory.csv"
 
-  knapsack rspec "-Ispec -rspec_helper --color --format documentation --format RspecJunitFormatter --out junit_rspec.xml ${rspec_opts}"
+  local rspec_args="-Ispec -rspec_helper --color --format documentation --format RspecJunitFormatter --out junit_rspec.xml ${rspec_opts}"
+
+  if [[ -n $RSPEC_TESTS_MAPPING_ENABLED ]]; then
+    tooling/bin/parallel_rspec --rspec_args "${rspec_args}" --filter "tmp/matching_tests.txt"
+  else
+    tooling/bin/parallel_rspec --rspec_args "${rspec_args}"
+  fi
 
   date
 }
diff --git a/scripts/update-workhorse b/scripts/update-workhorse
new file mode 100755
index 00000000000..2c43b249fe4
--- /dev/null
+++ b/scripts/update-workhorse
@@ -0,0 +1,59 @@
+#!/bin/sh
+set -e
+WORKHORSE_DIR=workhorse/
+WORKHORSE_REF="$(cat GITLAB_WORKHORSE_VERSION)"
+WORKHORSE_URL=${GITLAB_WORKHORSE_URL:-https://gitlab.com/gitlab-org/gitlab-workhorse.git}
+
+if [ $# -gt 1 ] || ([ $# = 1 ] && [ x$1 != xcheck ]); then
+  echo "Usage: update-workhorse [check]"
+  exit 1
+fi
+
+if echo "$WORKHORSE_REF" | grep -q '^[0-9]\+\.[0-9]\+\.[0-9]\+' ; then
+  # Assume this is a tagged release
+  WORKHORSE_REF="v${WORKHORSE_REF}"
+fi
+
+clean="$(git status --porcelain)"
+if [ -n "$clean" ] ; then
+  echo 'error: working directory is not clean:'
+  echo "$clean"
+  exit 1
+fi
+
+git fetch "$WORKHORSE_URL" "$WORKHORSE_REF"
+git rm -rf --quiet -- "$WORKHORSE_DIR"
+git read-tree --prefix="$WORKHORSE_DIR" -u FETCH_HEAD
+
+status="$(git status --porcelain)"
+
+if [ x$1 = xcheck ]; then
+  if [ -n "$status" ]; then
+    cat <<MSG
+error: $WORKHORSE_DIR does not match $WORKHORSE_REF
+
+During the transition period of https://gitlab.com/groups/gitlab-org/-/epics/4826,
+the workhorse/ directory in this repository is read-only. To make changes:
+
+1. Submit a MR to https://gitlab.com/gitlab-org/gitlab-workhorse
+2. Once your MR is merged, have a new gitlab-workhorse tag made
+   by a maintainer
+3. Update the GITLAB_WORKHORSE_VERSION file in this repository
+4. Run scripts/update-workhorse to update the workhorse/ directory
+
+MSG
+    exit 1
+  fi
+  exit 0
+fi
+
+if [ -z "$status" ]; then
+  echo "warn: $WORKHORSE_DIR is already up to date, exiting without commit"
+  exit 0
+fi
+
+tree=$(git write-tree)
+msg="Update vendored workhorse to $WORKHORSE_REF"
+commit=$(git commit-tree -p HEAD -p FETCH_HEAD^{commit} -m "$msg" "$tree")
+git update-ref HEAD "$commit"
+git log -1
diff --git a/scripts/used-feature-flags b/scripts/used-feature-flags
index 07b8d2063ef..7ef3dbafd36 100755
--- a/scripts/used-feature-flags
+++ b/scripts/used-feature-flags
@@ -1,5 +1,7 @@
 #!/usr/bin/env ruby
 
+require 'set'
+
 class String
   def red
     "\e[31m#{self}\e[0m"
@@ -37,6 +39,9 @@ flags_paths.each do |flags_path|
   Dir.glob(flags_path).each do |path|
     feature_flag_name = File.basename(path, '.yml')
 
+    # TODO: we need a better way of tracking use of Gitaly FF across Gitaly and GitLab
+    next if feature_flag_name.start_with?('gitaly_')
+
     all_flags[feature_flag_name] = File.exist?(File.join('tmp', 'feature_flags', feature_flag_name + '.used'))
   end
 end
diff --git a/scripts/utils.sh b/scripts/utils.sh
index 3829bcdf24e..6747efa73d7 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -36,7 +36,7 @@ function install_gitlab_gem() {
 }
 
 function install_tff_gem() {
-  gem install test_file_finder --version 0.1.0
+  gem install test_file_finder --version 0.1.1
 }
 
 function run_timed_command() {
@@ -87,65 +87,14 @@ function echosuccess() {
   fi
 }
 
-function get_job_id() {
-  local job_name="${1}"
-  local query_string="${2:+&${2}}"
-  local api_token="${API_TOKEN-${GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN}}"
-  if [ -z "${api_token}" ]; then
-    echoerr "Please provide an API token with \$API_TOKEN or \$GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN."
-    return
-  fi
-
-  local max_page=3
-  local page=1
-
-  while true; do
-    local url="https://gitlab.com/api/v4/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/jobs?per_page=100&page=${page}${query_string}"
-    echoinfo "GET ${url}"
-
-    local job_id
-    job_id=$(curl --silent --show-error --header "PRIVATE-TOKEN: ${api_token}" "${url}" | jq "map(select(.name == \"${job_name}\")) | map(.id) | last")
-    [[ "${job_id}" == "null" && "${page}" -lt "$max_page" ]] || break
-
-    let "page++"
-  done
-
-  if [[ "${job_id}" == "null" ]]; then # jq prints "null" for non-existent attribute
-    echoerr "The '${job_name}' job ID couldn't be retrieved!"
-  else
-    echoinfo "The '${job_name}' job ID is ${job_id}"
-    echo "${job_id}"
-  fi
-}
-
-function play_job() {
-  local job_name="${1}"
-  local job_id
-  job_id=$(get_job_id "${job_name}" "scope=manual");
-  if [ -z "${job_id}" ]; then return; fi
-
-  local api_token="${API_TOKEN-${GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN}}"
-  if [ -z "${api_token}" ]; then
-    echoerr "Please provide an API token with \$API_TOKEN or \$GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN."
-    return
-  fi
-
-  local url="https://gitlab.com/api/v4/projects/${CI_PROJECT_ID}/jobs/${job_id}/play"
-  echoinfo "POST ${url}"
-
-  local job_url
-  job_url=$(curl --silent --show-error --request POST --header "PRIVATE-TOKEN: ${api_token}" "${url}" | jq ".web_url")
-  echoinfo "Manual job '${job_name}' started at: ${job_url}"
-}
-
 function fail_pipeline_early() {
   local dont_interrupt_me_job_id
-  dont_interrupt_me_job_id=$(get_job_id 'dont-interrupt-me' 'scope=success')
+  dont_interrupt_me_job_id=$(scripts/api/get_job_id --job-query "scope=success" --job-name "dont-interrupt-me")
 
   if [[ -n "${dont_interrupt_me_job_id}" ]]; then
     echoinfo "This pipeline cannot be interrupted due to \`dont-interrupt-me\` job ${dont_interrupt_me_job_id}"
   else
     echoinfo "Failing pipeline early for fast feedback due to test failures in rspec fail-fast."
-    curl --request POST --header "PRIVATE-TOKEN: ${GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN}" "https://${CI_SERVER_HOST}/api/v4/projects/${CI_PROJECT_ID}/pipelines/${CI_PIPELINE_ID}/cancel"
+    scripts/api/cancel_pipeline
   fi
 }
diff --git a/scripts/verify-tff-mapping b/scripts/verify-tff-mapping
index 1f73753be82..1c66e19df50 100755
--- a/scripts/verify-tff-mapping
+++ b/scripts/verify-tff-mapping
@@ -28,9 +28,9 @@ tests = [
   },
 
   {
-    explanation: 'Some EE extensions also map to its EE class spec, but this is not recommended: https://docs.gitlab.com/ee/development/ee_features.html#testing-ee-features-based-on-ce-features',
-    source: 'ee/app/models/ee/user.rb',
-    expected: ['ee/spec/models/user_spec.rb', 'spec/models/user_spec.rb']
+    explanation: 'Some EE extensions have specs placement that do not follow the recommendation: https://docs.gitlab.com/ee/development/ee_features.html#testing-ee-features-based-on-ce-features. `tff` should still find these misplaced specs.',
+    source: 'ee/app/models/ee/project.rb',
+    expected: ['ee/spec/models/project_spec.rb', 'spec/models/project_spec.rb']
   },
 
   {
@@ -53,8 +53,8 @@ tests = [
 
   {
     explanation: 'EE spec code should map to itself',
-    source: 'ee/spec/models/user_spec.rb',
-    expected: ['ee/spec/models/user_spec.rb']
+    source: 'ee/spec/models/ee/user_spec.rb',
+    expected: ['ee/spec/models/ee/user_spec.rb', 'spec/models/user_spec.rb']
   },
 
   {