author     Mike Greiling <mike@pixelcog.com>  2018-09-28 18:33:41 +0000
committer  Mike Greiling <mike@pixelcog.com>  2018-09-28 18:33:41 +0000
commit     cb1e4a8abb33a32da9bec300e0259d50fa2afea9 (patch)
tree       c4fdd4076025bb5b612bc88debab62709c311e5f /scripts
parent     2a6e33d838caa813e248de6af603201f5addb7eb (diff)
parent     d5734de3d60983fcb3d3f3099d47fe9831e453ad (diff)
download   gitlab-ce-cb1e4a8abb33a32da9bec300e0259d50fa2afea9.tar.gz
Merge branch 'master' into 'remove-dropzonejs-rails'
# Conflicts:
#   Gemfile.lock
Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/codequality                         19
-rw-r--r--  scripts/create_mysql_user.sh                 1
-rw-r--r--  scripts/create_postgres_user.sh              4
-rw-r--r--  scripts/frontend/extract_gettext_all.js     72
-rw-r--r--  scripts/frontend/frontend_script_utils.js    8
-rw-r--r--  scripts/frontend/postinstall.js             22
-rw-r--r--  scripts/frontend/prettier.js                95
-rwxr-xr-x  scripts/gitaly-test-build                   37
-rwxr-xr-x  scripts/gitaly-test-spawn                   26
-rw-r--r--  scripts/gitaly_test.rb                      97
-rwxr-xr-x  scripts/lint-doc.sh                         36
-rwxr-xr-x  scripts/lint-rugged                          5
-rwxr-xr-x  scripts/no-ee-check                          7
-rw-r--r--  scripts/prepare_build.sh                    20
-rwxr-xr-x  scripts/prune-old-flaky-specs               28
-rwxr-xr-x  scripts/rails5-gemfile-lock-check           19
-rwxr-xr-x  scripts/review_apps/automated_cleanup.rb   109
-rwxr-xr-x  scripts/review_apps/review-apps.sh          184
-rw-r--r--  scripts/schema_changed.sh                   13
-rwxr-xr-x  scripts/trigger-build                       207
-rwxr-xr-x  scripts/trigger-build-docs                  20
-rwxr-xr-x  scripts/trigger-build-omnibus               107
-rw-r--r--  scripts/utils.sh                            18
23 files changed, 920 insertions(+), 234 deletions(-)
diff --git a/scripts/codequality b/scripts/codequality
deleted file mode 100755
index 2f3ccef7d2d..00000000000
--- a/scripts/codequality
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-set -eo pipefail
-
-code_path=$(pwd)
-
-# docker run --tty will merge stderr and stdout, we don't need this on CI or
-# it will break codequality json file
-[ "$CI" != "" ] || docker_tty="--tty"
-
-# The codebase and instructions for the following image can be found at https://gitlab.com/gitlab-org/codeclimate-rubocop/wikis/home
-docker pull dev.gitlab.org:5005/gitlab/gitlab-build-images:gitlab-codeclimate-rubocop-0-52-1 > /dev/null
-docker tag dev.gitlab.org:5005/gitlab/gitlab-build-images:gitlab-codeclimate-rubocop-0-52-1 codeclimate/codeclimate-rubocop:gitlab-codeclimate-rubocop-0-52-1 > /dev/null
-
-exec docker run --rm $docker_tty --env CODECLIMATE_CODE="$code_path" \
- --volume "$code_path":/code \
- --volume /var/run/docker.sock:/var/run/docker.sock \
- --volume /tmp/cc:/tmp/cc \
- "codeclimate/codeclimate:${CODECLIMATE_VERSION:-0.71.1}" "$@"
diff --git a/scripts/create_mysql_user.sh b/scripts/create_mysql_user.sh
index 286b1325f1d..35f68c581f3 100644
--- a/scripts/create_mysql_user.sh
+++ b/scripts/create_mysql_user.sh
@@ -1,7 +1,6 @@
#!/bin/bash
mysql --user=root --host=mysql <<EOF
-CREATE DATABASE IF NOT EXISTS gitlabhq_test DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci;
CREATE USER IF NOT EXISTS 'gitlab'@'%';
GRANT ALL PRIVILEGES ON gitlabhq_test.* TO 'gitlab'@'%';
FLUSH PRIVILEGES;
diff --git a/scripts/create_postgres_user.sh b/scripts/create_postgres_user.sh
index 8a744df3226..8a049bcd7fb 100644
--- a/scripts/create_postgres_user.sh
+++ b/scripts/create_postgres_user.sh
@@ -1,8 +1,6 @@
#!/bin/bash
psql -h postgres -U postgres postgres <<EOF
-DROP DATABASE IF EXISTS gitlabhq_test;
-CREATE DATABASE gitlabhq_test;
CREATE USER gitlab;
-GRANT ALL PRIVILEGES ON DATABASE gitlabhq_test TO gitlab;
+GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO gitlab;
EOF
diff --git a/scripts/frontend/extract_gettext_all.js b/scripts/frontend/extract_gettext_all.js
new file mode 100644
index 00000000000..725522a3540
--- /dev/null
+++ b/scripts/frontend/extract_gettext_all.js
@@ -0,0 +1,72 @@
+const argumentsParser = require('commander');
+
+const { GettextExtractor, JsExtractors } = require('gettext-extractor');
+const {
+ decorateJSParserWithVueSupport,
+ decorateExtractorWithHelpers,
+} = require('gettext-extractor-vue');
+const ensureSingleLine = require('../../app/assets/javascripts/locale/ensure_single_line.js');
+
+const args = argumentsParser
+ .option('-f, --file <file>', 'Extract message from one single file')
+ .option('-a, --all', 'Extract message from all js/vue files')
+ .parse(process.argv);
+
+const extractor = decorateExtractorWithHelpers(new GettextExtractor());
+
+extractor.addMessageTransformFunction(ensureSingleLine);
+
+const jsParser = extractor.createJsParser([
+ // Place all the possible expressions to extract here:
+ JsExtractors.callExpression('__', {
+ arguments: {
+ text: 0,
+ },
+ }),
+ JsExtractors.callExpression('n__', {
+ arguments: {
+ text: 0,
+ textPlural: 1,
+ },
+ }),
+ JsExtractors.callExpression('s__', {
+ arguments: {
+ text: 0,
+ },
+ }),
+]);
+
+const vueParser = decorateJSParserWithVueSupport(jsParser);
+
+function printJson() {
+ const messages = extractor.getMessages().reduce((result, message) => {
+ let text = message.text;
+ if (message.textPlural) {
+ text += `\u0000${message.textPlural}`;
+ }
+
+ message.references.forEach(reference => {
+ const filename = reference.replace(/:\d+$/, '');
+
+ if (!Array.isArray(result[filename])) {
+ result[filename] = [];
+ }
+
+ result[filename].push([text, reference]);
+ });
+
+ return result;
+ }, {});
+
+ console.log(JSON.stringify(messages));
+}
+
+if (args.file) {
+ vueParser.parseFile(args.file).then(() => printJson());
+} else if (args.all) {
+ vueParser.parseFilesGlob('{ee/app,app}/assets/javascripts/**/*.{js,vue}').then(() => printJson());
+} else {
+ console.warn('ERROR: Please use the script correctly:');
+ args.outputHelp();
+ process.exit(1);
+}
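
For reference, a minimal sketch of invoking this extractor, based only on the commander options defined above (any yarn wrapper script is outside this diff; the file path is a placeholder):

    # extract strings from every js/vue file under app/ and ee/app/, printing JSON to stdout
    node scripts/frontend/extract_gettext_all.js --all
    # extract strings from a single file
    node scripts/frontend/extract_gettext_all.js --file path/to/some_component.vue
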
diff --git a/scripts/frontend/frontend_script_utils.js b/scripts/frontend/frontend_script_utils.js
index 2c06747255c..e42b912d359 100644
--- a/scripts/frontend/frontend_script_utils.js
+++ b/scripts/frontend/frontend_script_utils.js
@@ -1,4 +1,3 @@
-/* eslint import/no-commonjs: "off" */
const execFileSync = require('child_process').execFileSync;
const exec = (command, args) => {
@@ -18,12 +17,7 @@ const execGitCmd = args =>
module.exports = {
getStagedFiles: fileExtensionFilter => {
- const gitOptions = [
- 'diff',
- '--name-only',
- '--cached',
- '--diff-filter=ACMRTUB',
- ];
+ const gitOptions = ['diff', '--name-only', '--cached', '--diff-filter=ACMRTUB'];
if (fileExtensionFilter) gitOptions.push(...fileExtensionFilter);
return execGitCmd(gitOptions);
},
diff --git a/scripts/frontend/postinstall.js b/scripts/frontend/postinstall.js
new file mode 100644
index 00000000000..682039a41b3
--- /dev/null
+++ b/scripts/frontend/postinstall.js
@@ -0,0 +1,22 @@
+const chalk = require('chalk');
+
+// check that fsevents is available if we're on macOS
+if (process.platform === 'darwin') {
+ try {
+ require.resolve('fsevents');
+ } catch (e) {
+ console.error(`${chalk.red('error')} Dependency postinstall check failed.`);
+ console.error(
+ chalk.red(`
+ The fsevents driver is not installed properly.
+ If you are running a new version of Node, please
+ ensure that it is supported by the fsevents library.
+
+ You can try installing again with \`${chalk.cyan('yarn install --force')}\`
+ `)
+ );
+ process.exit(1);
+ }
+}
+
+console.log(`${chalk.green('success')} Dependency postinstall check passed.`);
diff --git a/scripts/frontend/prettier.js b/scripts/frontend/prettier.js
index 863572bf64d..b66ba885701 100644
--- a/scripts/frontend/prettier.js
+++ b/scripts/frontend/prettier.js
@@ -1,16 +1,23 @@
-/* eslint import/no-commonjs: "off", import/no-extraneous-dependencies: "off", no-console: "off" */
const glob = require('glob');
const prettier = require('prettier');
const fs = require('fs');
+const path = require('path');
+const prettierIgnore = require('ignore')();
const getStagedFiles = require('./frontend_script_utils').getStagedFiles;
const mode = process.argv[2] || 'check';
const shouldSave = mode === 'save' || mode === 'save-all';
const allFiles = mode === 'check-all' || mode === 'save-all';
+let dirPath = process.argv[3] || '';
+if (dirPath && dirPath.charAt(dirPath.length - 1) !== '/') dirPath += '/';
const config = {
patterns: ['**/*.js', '**/*.vue', '**/*.scss'],
+ /*
+ * The ignore patterns below are just to reduce search time with glob, as it includes the
+ * folders with the most ignored assets, the actual `.prettierignore` will be used later on
+ */
ignore: ['**/node_modules/**', '**/vendor/**', '**/public/**'],
parsers: {
js: 'babylon',
@@ -18,18 +25,31 @@ const config = {
scss: 'css',
},
};
+
+/*
+ * Unfortunately the prettier API does not expose support for `.prettierignore` files, they however
+ * use the ignore package, so we do the same. We simply cannot use the glob package, because
+ * gitignore style is not compatible with globs ignore style.
+ */
+prettierIgnore.add(
+ fs
+ .readFileSync(path.join(__dirname, '../../', '.prettierignore'))
+ .toString()
+ .trim()
+ .split(/\r?\n/)
+);
+
const availableExtensions = Object.keys(config.parsers);
-console.log(`Loading ${allFiles ? 'All' : 'Staged'} Files ...`);
+console.log(`Loading ${allFiles ? 'All' : 'Selected'} Files ...`);
-const stagedFiles = allFiles
- ? null
- : getStagedFiles(availableExtensions.map(ext => `*.${ext}`));
+const stagedFiles =
+ allFiles || dirPath ? null : getStagedFiles(availableExtensions.map(ext => `*.${ext}`));
if (stagedFiles) {
if (!stagedFiles.length || (stagedFiles.length === 1 && !stagedFiles[0])) {
console.log('No matching staged files.');
- return;
+ process.exit(1);
}
console.log(`Matching staged Files : ${stagedFiles.length}`);
}
@@ -41,30 +61,33 @@ let files;
if (allFiles) {
const ignore = config.ignore;
const patterns = config.patterns;
- const globPattern =
- patterns.length > 1 ? `{${patterns.join(',')}}` : `${patterns.join(',')}`;
- files = glob
- .sync(globPattern, { ignore })
- .filter(f => allFiles || stagedFiles.includes(f));
+ const globPattern = patterns.length > 1 ? `{${patterns.join(',')}}` : `${patterns.join(',')}`;
+ files = glob.sync(globPattern, { ignore }).filter(f => allFiles || stagedFiles.includes(f));
+} else if (dirPath) {
+ const ignore = config.ignore;
+ const patterns = config.patterns.map(item => {
+ return dirPath + item;
+ });
+ const globPattern = patterns.length > 1 ? `{${patterns.join(',')}}` : `${patterns.join(',')}`;
+ files = glob.sync(globPattern, { ignore });
} else {
- files = stagedFiles.filter(f =>
- availableExtensions.includes(f.split('.').pop()),
- );
+ files = stagedFiles.filter(f => availableExtensions.includes(f.split('.').pop()));
}
+files = prettierIgnore.filter(files);
+
if (!files.length) {
console.log('No Files found to process with Prettier');
- return;
+ process.exit(1);
}
console.log(`${shouldSave ? 'Updating' : 'Checking'} ${files.length} file(s)`);
-prettier
- .resolveConfig('.')
- .then(options => {
- console.log('Found options : ', options);
- files.forEach(file => {
- try {
+files.forEach(file => {
+ try {
+ prettier
+ .resolveConfig(file)
+ .then(options => {
const fileExtension = file.split('.').pop();
Object.assign(options, {
parser: config.parsers[fileExtension],
@@ -81,23 +104,23 @@ prettier
} else if (!prettier.check(input, options)) {
if (!didWarn) {
console.log(
- '\n===============================\nGitLab uses Prettier to format all JavaScript code.\nPlease format each file listed below or run "yarn prettier-staged-save"\n===============================\n',
+ '\n===============================\nGitLab uses Prettier to format all JavaScript code.\nPlease format each file listed below or run "yarn prettier-staged-save"\n===============================\n'
);
didWarn = true;
}
console.log(`Prettify Manually : ${file}`);
}
- } catch (error) {
- didError = true;
- console.log(`\n\nError with ${file}: ${error.message}`);
- }
- });
-
- if (didWarn || didError) {
- process.exit(1);
- }
- })
- .catch(e => {
- console.log(`Error on loading the Config File: ${e.message}`);
- process.exit(1);
- });
+ })
+ .catch(e => {
+ console.log(`Error on loading the Config File: ${e.message}`);
+ process.exit(1);
+ });
+ } catch (error) {
+ didError = true;
+ console.log(`\n\nError with ${file}: ${error.message}`);
+ }
+});
+
+if (didWarn || didError) {
+ process.exit(1);
+}
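
The rewritten script now accepts an optional directory as a third argument. A minimal sketch of the resulting invocations, derived from the mode/dirPath handling above (in practice these runs are usually wrapped in yarn scripts such as prettier-staged-save, as the warning text suggests; the directory below is a placeholder):

    node scripts/frontend/prettier.js check       # check staged files (default mode)
    node scripts/frontend/prettier.js save        # rewrite staged files in place
    node scripts/frontend/prettier.js check-all   # check every matching file in the repo
    node scripts/frontend/prettier.js check path/to/dir   # limit the run to one directory
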
diff --git a/scripts/gitaly-test-build b/scripts/gitaly-test-build
index b42ae2a2595..374401caf89 100755
--- a/scripts/gitaly-test-build
+++ b/scripts/gitaly-test-build
@@ -2,28 +2,29 @@
require 'fileutils'
+require_relative 'gitaly_test'
+
# This script assumes tmp/tests/gitaly already contains the correct
# Gitaly version. We just have to compile it and run its 'bundle
-# install'. We have this separate script for that because weird things
-# were happening in CI when we have a 'bundle exec' process that later
-# called 'bundle install' using a different Gemfile, as happens with
-# gitlab-ce and gitaly.
+# install'. We have this separate script for that to avoid bundle
+# poisoning in CI. This script should only be run in CI.
+class GitalyTestBuild
+ include GitalyTest
-tmp_tests_gitaly_dir = File.expand_path('../tmp/tests/gitaly', __dir__)
+ def run
+ abort 'gitaly build failed' unless system(env, 'make', chdir: tmp_tests_gitaly_dir)
-# Use the top-level bundle vendor folder so that we don't reinstall gems twice
-bundle_vendor_path = File.expand_path('../vendor', __dir__)
+ check_gitaly_config!
-env = {
- # This ensure the `clean` config set in `scripts/prepare_build.sh` isn't taken into account
- 'BUNDLE_IGNORE_CONFIG' => 'true',
- 'BUNDLE_GEMFILE' => File.join(tmp_tests_gitaly_dir, 'ruby', 'Gemfile'),
- 'BUNDLE_FLAGS' => "--jobs=4 --path=#{bundle_vendor_path} --retry=3"
-}
+ # Starting gitaly further validates its configuration
+ pid = start_gitaly
+ Process.kill('TERM', pid)
-abort 'gitaly build failed' unless system(env, 'make', chdir: tmp_tests_gitaly_dir)
+ # Make the 'gitaly' executable look newer than 'GITALY_SERVER_VERSION'.
+ # Without this a gitaly executable created in the setup-test-env job
+ # will look stale compared to GITALY_SERVER_VERSION.
+ FileUtils.touch(File.join(tmp_tests_gitaly_dir, 'gitaly'), mtime: Time.now + (1 << 24))
+ end
+end
-# Make the 'gitaly' executable look newer than 'GITALY_SERVER_VERSION'.
-# Without this a gitaly executable created in the setup-test-env job
-# will look stale compared to GITALY_SERVER_VERSION.
-FileUtils.touch(File.join(tmp_tests_gitaly_dir, 'gitaly'), mtime: Time.now + (1 << 24))
+GitalyTestBuild.new.run
diff --git a/scripts/gitaly-test-spawn b/scripts/gitaly-test-spawn
index ecb68c6acc6..e9f91f75650 100755
--- a/scripts/gitaly-test-spawn
+++ b/scripts/gitaly-test-spawn
@@ -1,9 +1,23 @@
#!/usr/bin/env ruby
-gitaly_dir = 'tmp/tests/gitaly'
-env = { 'HOME' => File.expand_path('tmp/tests'),
- 'GEM_PATH' => Gem.path.join(':') }
-args = %W[#{gitaly_dir}/gitaly #{gitaly_dir}/config.toml]
+# This script is used both in CI and in local development 'rspec' runs.
-# Print the PID of the spawned process
-puts spawn(env, *args, [:out, :err] => 'log/gitaly-test.log')
+require_relative 'gitaly_test'
+
+class GitalyTestSpawn
+ include GitalyTest
+
+ def run
+ check_gitaly_config!
+
+ # # Uncomment line below to see all gitaly logs merged into CI trace
+ # spawn('sleep 1; tail -f log/gitaly-test.log')
+
+ pid = start_gitaly
+
+ # In local development this pid file is used by rspec.
+ IO.write(File.expand_path('../tmp/tests/gitaly.pid', __dir__), pid)
+ end
+end
+
+GitalyTestSpawn.new.run
diff --git a/scripts/gitaly_test.rb b/scripts/gitaly_test.rb
new file mode 100644
index 00000000000..dee4c2eba7e
--- /dev/null
+++ b/scripts/gitaly_test.rb
@@ -0,0 +1,97 @@
+# This file contains environment settings for gitaly when it's running
+# as part of the gitlab-ce/ee test suite.
+#
+# Please be careful when modifying this file. Your changes must work
+# both for local development rspec runs, and in CI.
+
+require 'socket'
+
+module GitalyTest
+ def tmp_tests_gitaly_dir
+ File.expand_path('../tmp/tests/gitaly', __dir__)
+ end
+
+ def gemfile
+ File.join(tmp_tests_gitaly_dir, 'ruby', 'Gemfile')
+ end
+
+ def env
+ env_hash = {
+ 'HOME' => File.expand_path('tmp/tests'),
+ 'GEM_PATH' => Gem.path.join(':'),
+ 'BUNDLE_APP_CONFIG' => File.join(File.dirname(gemfile), '.bundle/config'),
+ 'BUNDLE_FLAGS' => "--jobs=4 --retry=3",
+ 'BUNDLE_INSTALL_FLAGS' => nil,
+ 'BUNDLE_GEMFILE' => gemfile,
+ 'RUBYOPT' => nil
+ }
+
+ if ENV['CI']
+ bundle_path = File.expand_path('../vendor/gitaly-ruby', __dir__)
+ env_hash['BUNDLE_FLAGS'] << " --path=#{bundle_path}"
+ end
+
+ env_hash
+ end
+
+ def config_path
+ File.join(tmp_tests_gitaly_dir, 'config.toml')
+ end
+
+ def start_gitaly
+ args = %W[#{tmp_tests_gitaly_dir}/gitaly #{config_path}]
+ pid = spawn(env, *args, [:out, :err] => 'log/gitaly-test.log')
+
+ begin
+ try_connect!
+ rescue
+ Process.kill('TERM', pid)
+ raise
+ end
+
+ pid
+ end
+
+ def check_gitaly_config!
+ puts 'Checking gitaly-ruby bundle...'
+ abort 'bundle check failed' unless system(env, 'bundle', 'check', chdir: File.dirname(gemfile))
+ end
+
+ def read_socket_path
+ # This code needs to work in an environment where we cannot use bundler,
+ # so we cannot easily use the toml-rb gem. This ad-hoc parser should be
+ # good enough.
+ config_text = IO.read(config_path)
+
+ config_text.lines.each do |line|
+ match_data = line.match(/^\s*socket_path\s*=\s*"([^"]*)"$/)
+
+ return match_data[1] if match_data
+ end
+
+ raise "failed to find socket_path in #{config_path}"
+ end
+
+ def try_connect!
+ print "Trying to connect to gitaly: "
+ timeout = 20
+ delay = 0.1
+ socket = read_socket_path
+
+ Integer(timeout / delay).times do
+ begin
+ UNIXSocket.new(socket)
+ puts ' OK'
+
+ return
+ rescue Errno::ENOENT, Errno::ECONNREFUSED
+ print '.'
+ sleep delay
+ end
+ end
+
+ puts ' FAILED'
+
+ raise "could not connect to #{socket}"
+ end
+end
diff --git a/scripts/lint-doc.sh b/scripts/lint-doc.sh
index e5242fee32b..848364b4a9b 100755
--- a/scripts/lint-doc.sh
+++ b/scripts/lint-doc.sh
@@ -3,9 +3,9 @@
cd "$(dirname "$0")/.."
# Use long options (e.g. --header instead of -H) for curl examples in documentation.
-echo 'Checking for curl short options...'
+echo '=> Checking for cURL short options...'
grep --extended-regexp --recursive --color=auto 'curl (.+ )?-[^- ].*' doc/ >/dev/null 2>&1
-if [ $? == 0 ]
+if [ $? -eq 0 ]
then
echo '✖ ERROR: Short options for curl should not be used in documentation!
Use long options (e.g., --header instead of -H):' >&2
@@ -15,7 +15,7 @@ fi
# Ensure that the CHANGELOG.md does not contain duplicate versions
DUPLICATE_CHANGELOG_VERSIONS=$(grep --extended-regexp '^## .+' CHANGELOG.md | sed -E 's| \(.+\)||' | sort -r | uniq -d)
-echo 'Checking for CHANGELOG.md duplicate entries...'
+echo '=> Checking for CHANGELOG.md duplicate entries...'
if [ "${DUPLICATE_CHANGELOG_VERSIONS}" != "" ]
then
echo '✖ ERROR: Duplicate versions in CHANGELOG.md:' >&2
@@ -25,7 +25,7 @@ fi
# Make sure no files in doc/ are executable
EXEC_PERM_COUNT=$(find doc/ app/ -type f -perm 755 | wc -l)
-echo 'Checking for executable permissions...'
+echo '=> Checking for executable permissions...'
if [ "${EXEC_PERM_COUNT}" -ne 0 ]
then
echo '✖ ERROR: Executable permissions should not be used in documentation! Use `chmod 644` to the files in question:' >&2
@@ -33,5 +33,33 @@ then
exit 1
fi
+# Do not use 'README.md', instead use 'index.md'
+# Number of 'README.md's as of 2018-03-26
+NUMBER_READMES_CE=42
+NUMBER_READMES_EE=46
+FIND_READMES=$(find doc/ -name "README.md" | wc -l)
+echo '=> Checking for new README.md files...'
+if [ "${CI_PROJECT_NAME}" == 'gitlab-ce' ]
+then
+ if [ ${FIND_READMES} -ne ${NUMBER_READMES_CE} ]
+ then
+ echo
+ echo ' ✖ ERROR: New README.md file(s) detected, prefer index.md over README.md.' >&2
+ echo ' https://docs.gitlab.com/ee/development/writing_documentation.html#location-and-naming-documents'
+ echo
+ exit 1
+ fi
+elif [ "${CI_PROJECT_NAME}" == 'gitlab-ee' ]
+then
+ if [ ${FIND_READMES} -ne $NUMBER_READMES_EE ]
+ then
+ echo
+ echo ' ✖ ERROR: New README.md file(s) detected, prefer index.md over README.md.' >&2
+ echo ' https://docs.gitlab.com/ee/development/writing_documentation.html#location-and-naming-documents'
+ echo
+ exit 1
+ fi
+fi
+
echo "✔ Linting passed"
exit 0
diff --git a/scripts/lint-rugged b/scripts/lint-rugged
index cabd083e9f9..d0c2c544c47 100755
--- a/scripts/lint-rugged
+++ b/scripts/lint-rugged
@@ -14,7 +14,10 @@ ALLOWED = [
'lib/tasks/gitlab/cleanup.rake',
# The only place where Rugged code is still allowed in production
- 'lib/gitlab/git/'
+ 'lib/gitlab/git/',
+
+ # Needed to avoid using the git binary to validate a branch name
+ 'lib/gitlab/git_ref_validator.rb'
].freeze
rugged_lines = IO.popen(%w[git grep -i -n rugged -- app config lib], &:read).lines
diff --git a/scripts/no-ee-check b/scripts/no-ee-check
new file mode 100755
index 00000000000..29d319dc822
--- /dev/null
+++ b/scripts/no-ee-check
@@ -0,0 +1,7 @@
+#!/usr/bin/env ruby
+ee_path = File.join(File.expand_path(__dir__), '../ee')
+
+if Dir.exist?(ee_path)
+ puts 'The repository contains /ee directory. There should be no /ee directory in CE repo.'
+ exit 1
+end
diff --git a/scripts/prepare_build.sh b/scripts/prepare_build.sh
index 206d62dbc78..75a3cea0448 100644
--- a/scripts/prepare_build.sh
+++ b/scripts/prepare_build.sh
@@ -11,7 +11,7 @@ fi
# Only install knapsack after bundle install! Otherwise oddly some native
# gems could not be found under some circumstance. No idea why, hours wasted.
-retry gem install knapsack
+retry gem install knapsack --no-ri --no-rdoc
cp config/gitlab.yml.example config/gitlab.yml
sed -i 's/bin_path: \/usr\/bin\/git/bin_path: \/usr\/local\/bin\/git/' config/gitlab.yml
@@ -49,20 +49,8 @@ sed -i 's/localhost/redis/g' config/redis.queues.yml
cp config/redis.shared_state.yml.example config/redis.shared_state.yml
sed -i 's/localhost/redis/g' config/redis.shared_state.yml
-# Some tasks (e.g. db:seed_fu) need to have a properly-configured database
-# user but not necessarily a full schema loaded
-if [ "$CREATE_DB_USER" != "false" ]; then
- if [ "$GITLAB_DATABASE" = 'postgresql' ]; then
- . scripts/create_postgres_user.sh
- else
- . scripts/create_mysql_user.sh
- fi
-fi
-
if [ "$SETUP_DB" != "false" ]; then
- bundle exec rake db:drop db:create db:schema:load db:migrate
-
- if [ "$GITLAB_DATABASE" = "mysql" ]; then
- bundle exec rake add_limits_mysql
- fi
+ setup_db
+elif getent hosts postgres || getent hosts mysql; then
+ setup_db_user_only
fi
diff --git a/scripts/prune-old-flaky-specs b/scripts/prune-old-flaky-specs
new file mode 100755
index 00000000000..a00a334fd6e
--- /dev/null
+++ b/scripts/prune-old-flaky-specs
@@ -0,0 +1,28 @@
+#!/usr/bin/env ruby
+
+# lib/rspec_flaky/flaky_examples_collection.rb is requiring
+# `active_support/hash_with_indifferent_access`, and we install the `activesupport`
+# gem manually on the CI
+require 'rubygems'
+
+# In newer Ruby, alias_method is not private then we don't need __send__
+singleton_class.__send__(:alias_method, :require_dependency, :require) # rubocop:disable GitlabSecurity/PublicSend
+$:.unshift(File.expand_path('../lib', __dir__))
+
+require 'rspec_flaky/report'
+
+report_file = ARGV.shift
+unless report_file
+ puts 'usage: prune-old-flaky-specs <report-file> <new-report-file>'
+ exit 1
+end
+
+new_report_file = ARGV.shift || report_file
+report = RspecFlaky::Report.load(report_file)
+puts "Loading #{report_file}..."
+puts "Current report has #{report.size} entries."
+
+new_report = report.prune_outdated
+
+puts "New report has #{new_report.size} entries: #{report.size - new_report.size} entries older than 90 days were removed."
+puts "Saved #{new_report_file}." if new_report.write(new_report_file)
diff --git a/scripts/rails5-gemfile-lock-check b/scripts/rails5-gemfile-lock-check
new file mode 100755
index 00000000000..da6f1b7145e
--- /dev/null
+++ b/scripts/rails5-gemfile-lock-check
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+echo -e "=> Checking if Gemfile.rails5.lock is up-to-date...\\n"
+
+cp Gemfile.rails5.lock Gemfile.rails5.lock.orig
+BUNDLE_GEMFILE=Gemfile.rails5 bundle install "$BUNDLE_INSTALL_FLAGS"
+diff -u Gemfile.rails5.lock.orig Gemfile.rails5.lock >/dev/null 2>&1
+
+if [ $? == 1 ]
+then
+ diff -u Gemfile.rails5.lock.orig Gemfile.rails5.lock
+
+ echo -e "\\n✖ ERROR: Gemfile.rails5.lock is not up-to-date!
+ Please run 'BUNDLE_GEMFILE=Gemfile.rails5 bundle install'\\n" >&2
+ exit 1
+fi
+
+echo "✔ Gemfile.rails5.lock is up-to-date"
+exit 0
diff --git a/scripts/review_apps/automated_cleanup.rb b/scripts/review_apps/automated_cleanup.rb
new file mode 100755
index 00000000000..ea53f89c844
--- /dev/null
+++ b/scripts/review_apps/automated_cleanup.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'gitlab'
+require_relative File.expand_path('../../lib/quality/helm_client.rb', __dir__)
+require_relative File.expand_path('../../lib/quality/kubernetes_client.rb', __dir__)
+
+class AutomatedCleanup
+ attr_reader :project_path, :gitlab_token, :cleaned_up_releases
+
+ def initialize(project_path: ENV['CI_PROJECT_PATH'], gitlab_token: ENV['GITLAB_BOT_REVIEW_APPS_CLEANUP_TOKEN'])
+ @project_path = project_path
+ @gitlab_token = gitlab_token
+ @cleaned_up_releases = []
+ end
+
+ def gitlab
+ @gitlab ||= begin
+ Gitlab.configure do |config|
+ config.endpoint = 'https://gitlab.com/api/v4'
+ # gitlab-bot's token "GitLab review apps cleanup"
+ config.private_token = gitlab_token
+ end
+
+ Gitlab
+ end
+ end
+
+ def helm
+ @helm ||= Quality::HelmClient.new
+ end
+
+ def kubernetes
+ @kubernetes ||= Quality::KubernetesClient.new
+ end
+
+ def perform_gitlab_environment_cleanup!(days_for_stop:, days_for_delete:)
+ puts "Checking for review apps not updated in the last #{days_for_stop} days..."
+
+ checked_environments = []
+ delete_threshold = threshold_time(days: days_for_delete)
+ stop_threshold = threshold_time(days: days_for_stop)
+ gitlab.deployments(project_path, per_page: 50).auto_paginate do |deployment|
+ next unless deployment.environment.name.start_with?('review/')
+ next if checked_environments.include?(deployment.environment.slug)
+
+ puts
+
+ checked_environments << deployment.environment.slug
+ deployed_at = Time.parse(deployment.created_at)
+
+ if deployed_at < delete_threshold
+ print_release_state(subject: 'Review app', release_name: deployment.environment.slug, release_date: deployment.created_at, action: 'deleting')
+ gitlab.delete_environment(project_path, deployment.environment.id)
+ cleaned_up_releases << deployment.environment.slug
+ elsif deployed_at < stop_threshold
+ print_release_state(subject: 'Review app', release_name: deployment.environment.slug, release_date: deployment.created_at, action: 'stopping')
+ gitlab.stop_environment(project_path, deployment.environment.id)
+ cleaned_up_releases << deployment.environment.slug
+ else
+ print_release_state(subject: 'Review app', release_name: deployment.environment.slug, release_date: deployment.created_at, action: 'leaving')
+ end
+ end
+ end
+
+ def perform_helm_releases_cleanup!(days:)
+ puts "Checking for Helm releases not updated in the last #{days} days..."
+
+ threshold_day = threshold_time(days: days)
+ helm.releases(args: ['--deployed', '--failed', '--date', '--reverse', '--max 25']).each do |release|
+ next if cleaned_up_releases.include?(release.name)
+
+ if release.last_update < threshold_day
+ print_release_state(subject: 'Release', release_name: release.name, release_date: release.last_update, action: 'cleaning')
+ helm.delete(release_name: release.name)
+ kubernetes.cleanup(release_name: release.name)
+ else
+ print_release_state(subject: 'Release', release_name: release.name, release_date: release.last_update, action: 'leaving')
+ end
+ end
+ end
+
+ def threshold_time(days:)
+ Time.now - days * 24 * 3600
+ end
+
+ def print_release_state(subject:, release_name:, release_date:, action:)
+ puts "\n#{subject} '#{release_name}' was last deployed on #{release_date}: #{action} it."
+ end
+end
+
+def timed(task)
+ start = Time.now
+ yield(self)
+ puts "#{task} finished in #{Time.now - start} seconds.\n"
+end
+
+automated_cleanup = AutomatedCleanup.new
+
+timed('Review apps cleanup') do
+ automated_cleanup.perform_gitlab_environment_cleanup!(days_for_stop: 5, days_for_delete: 6)
+end
+
+puts
+
+timed('Helm releases cleanup') do
+ automated_cleanup.perform_helm_releases_cleanup!(days: 7)
+end
+
+exit(0)
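
A minimal sketch of running this cleanup manually; the variable names come from the initializer above, while the example values are placeholders (in CI, CI_PROJECT_PATH is provided automatically):

    export CI_PROJECT_PATH="gitlab-org/gitlab-ce"              # project whose review apps are cleaned
    export GITLAB_BOT_REVIEW_APPS_CLEANUP_TOKEN="<bot token>"  # gitlab-bot API token, value elided
    ruby scripts/review_apps/automated_cleanup.rb
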
diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh
new file mode 100755
index 00000000000..78293464265
--- /dev/null
+++ b/scripts/review_apps/review-apps.sh
@@ -0,0 +1,184 @@
+[[ "$TRACE" ]] && set -x
+export TILLER_NAMESPACE="$KUBE_NAMESPACE"
+
+function check_kube_domain() {
+ if [ -z ${REVIEW_APPS_DOMAIN+x} ]; then
+ echo "In order to deploy or use Review Apps, REVIEW_APPS_DOMAIN variable must be set"
+ echo "You can do it in Auto DevOps project settings or defining a variable at group or project level"
+ echo "You can also manually add it in .gitlab-ci.yml"
+ false
+ else
+ true
+ fi
+}
+
+function download_gitlab_chart() {
+ curl -o gitlab.tar.bz2 https://gitlab.com/charts/gitlab/-/archive/$GITLAB_HELM_CHART_REF/gitlab-$GITLAB_HELM_CHART_REF.tar.bz2
+ tar -xjf gitlab.tar.bz2
+ cd gitlab-$GITLAB_HELM_CHART_REF
+
+ helm init --client-only
+ helm repo add gitlab https://charts.gitlab.io
+ helm dependency update
+ helm dependency build
+}
+
+function ensure_namespace() {
+ kubectl describe namespace "$KUBE_NAMESPACE" || kubectl create namespace "$KUBE_NAMESPACE"
+}
+
+function install_tiller() {
+ echo "Checking Tiller..."
+ helm init --upgrade
+ kubectl rollout status -n "$TILLER_NAMESPACE" -w "deployment/tiller-deploy"
+ if ! helm version --debug; then
+ echo "Failed to init Tiller."
+ return 1
+ fi
+ echo ""
+}
+
+function create_secret() {
+ echo "Create secret..."
+
+ kubectl create secret generic -n "$KUBE_NAMESPACE" \
+ $CI_ENVIRONMENT_SLUG-gitlab-initial-root-password \
+ --from-literal=password=$REVIEW_APPS_ROOT_PASSWORD \
+ --dry-run -o json | kubectl apply -f -
+}
+
+function previousDeployFailed() {
+ set +e
+ echo "Checking for previous deployment of $CI_ENVIRONMENT_SLUG"
+ deployment_status=$(helm status $CI_ENVIRONMENT_SLUG >/dev/null 2>&1)
+ status=$?
+ # if `status` is `0`, deployment exists, has a status
+ if [ $status -eq 0 ]; then
+ echo "Previous deployment found, checking status"
+ deployment_status=$(helm status $CI_ENVIRONMENT_SLUG | grep ^STATUS | cut -d' ' -f2)
+ echo "Previous deployment state: $deployment_status"
+ if [[ "$deployment_status" == "FAILED" || "$deployment_status" == "PENDING_UPGRADE" || "$deployment_status" == "PENDING_INSTALL" ]]; then
+ status=0;
+ else
+ status=1;
+ fi
+ else
+ echo "Previous deployment NOT found."
+ fi
+ set -e
+ return $status
+}
+
+function deploy() {
+ track="${1-stable}"
+ name="$CI_ENVIRONMENT_SLUG"
+
+ if [[ "$track" != "stable" ]]; then
+ name="$name-$track"
+ fi
+
+ replicas="1"
+ service_enabled="false"
+ postgres_enabled="$POSTGRES_ENABLED"
+ gitlab_migrations_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-rails-ce"
+ gitlab_sidekiq_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-sidekiq-ce"
+ gitlab_unicorn_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-unicorn-ce"
+ gitlab_gitaly_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitaly"
+ gitlab_shell_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-shell"
+ gitlab_workhorse_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-workhorse-ce"
+
+ if [[ "$CI_PROJECT_NAME" == "gitlab-ee" ]]; then
+ gitlab_migrations_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-rails-ee"
+ gitlab_sidekiq_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-sidekiq-ee"
+ gitlab_unicorn_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-unicorn-ee"
+ gitlab_workhorse_image_repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-workhorse-ee"
+ fi
+
+ # canary uses stable db
+ [[ "$track" == "canary" ]] && postgres_enabled="false"
+
+ env_track=$( echo $track | tr -s '[:lower:]' '[:upper:]' )
+ env_slug=$( echo ${CI_ENVIRONMENT_SLUG//-/_} | tr -s '[:lower:]' '[:upper:]' )
+
+ if [[ "$track" == "stable" ]]; then
+ # for stable track get number of replicas from `PRODUCTION_REPLICAS`
+ eval new_replicas=\$${env_slug}_REPLICAS
+ service_enabled="true"
+ else
+ # for all tracks get number of replicas from `CANARY_PRODUCTION_REPLICAS`
+ eval new_replicas=\$${env_track}_${env_slug}_REPLICAS
+ fi
+ if [[ -n "$new_replicas" ]]; then
+ replicas="$new_replicas"
+ fi
+
+ # Cleanup and previous installs, as FAILED and PENDING_UPGRADE will cause errors with `upgrade`
+ if [ "$CI_ENVIRONMENT_SLUG" != "production" ] && previousDeployFailed ; then
+ echo "Deployment in bad state, cleaning up $CI_ENVIRONMENT_SLUG"
+ delete
+ cleanup
+ fi
+ helm repo add gitlab https://charts.gitlab.io/
+ helm dep update .
+
+HELM_CMD=$(cat << EOF
+ helm upgrade --install \
+ --wait \
+ --timeout 600 \
+ --set releaseOverride="$CI_ENVIRONMENT_SLUG" \
+ --set global.hosts.hostSuffix="$HOST_SUFFIX" \
+ --set global.hosts.domain="$REVIEW_APPS_DOMAIN" \
+ --set certmanager.install=false \
+ --set global.ingress.configureCertmanager=false \
+ --set global.ingress.tls.secretName=tls-cert \
+ --set global.ingress.annotations."external-dns\.alpha\.kubernetes\.io/ttl"="10"
+ --set gitlab.unicorn.resources.requests.cpu=200m \
+ --set gitlab.sidekiq.resources.requests.cpu=100m \
+ --set gitlab.gitlab-shell.resources.requests.cpu=100m \
+ --set redis.resources.requests.cpu=100m \
+ --set minio.resources.requests.cpu=100m \
+ --set gitlab.migrations.image.repository="$gitlab_migrations_image_repository" \
+ --set gitlab.migrations.image.tag="$CI_COMMIT_REF_NAME" \
+ --set gitlab.sidekiq.image.repository="$gitlab_sidekiq_image_repository" \
+ --set gitlab.sidekiq.image.tag="$CI_COMMIT_REF_NAME" \
+ --set gitlab.unicorn.image.repository="$gitlab_unicorn_image_repository" \
+ --set gitlab.unicorn.image.tag="$CI_COMMIT_REF_NAME" \
+ --set gitlab.gitaly.image.repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitaly" \
+ --set gitlab.gitaly.image.tag="v$GITALY_VERSION" \
+ --set gitlab.gitlab-shell.image.repository="registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-shell" \
+ --set gitlab.gitlab-shell.image.tag="v$GITLAB_SHELL_VERSION" \
+ --set gitlab.unicorn.workhorse.image="$gitlab_workhorse_image_repository" \
+ --set gitlab.unicorn.workhorse.tag="$CI_COMMIT_REF_NAME" \
+ --namespace="$KUBE_NAMESPACE" \
+ --version="$CI_PIPELINE_ID-$CI_JOB_ID" \
+ "$name" \
+ .
+EOF
+)
+
+ echo "Deploying with:"
+ echo $HELM_CMD
+
+ eval $HELM_CMD
+}
+
+function delete() {
+ track="${1-stable}"
+ name="$CI_ENVIRONMENT_SLUG"
+
+ if [[ "$track" != "stable" ]]; then
+ name="$name-$track"
+ fi
+
+ echo "Deleting release '$name'..."
+ helm delete --purge "$name" || true
+}
+
+function cleanup() {
+ echo "Cleaning up $CI_ENVIRONMENT_SLUG..."
+ kubectl -n "$KUBE_NAMESPACE" get ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa 2>&1 \
+ | grep "$CI_ENVIRONMENT_SLUG" \
+ | awk '{print $1}' \
+ | xargs kubectl -n "$KUBE_NAMESPACE" delete \
+ || true
+}
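
The file only defines functions; the calling order below is an assumption about how a review-app deploy job would chain them, since the .gitlab-ci.yml wiring is not part of this diff:

    # hypothetical deploy job body
    . scripts/review_apps/review-apps.sh
    check_kube_domain
    download_gitlab_chart
    ensure_namespace
    install_tiller
    create_secret
    deploy
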
diff --git a/scripts/schema_changed.sh b/scripts/schema_changed.sh
index 5de2b35571d..b5e510c2367 100644
--- a/scripts/schema_changed.sh
+++ b/scripts/schema_changed.sh
@@ -1,9 +1,14 @@
-function schema_changed() {
- if [[ ! -z `git diff --name-only -- db/schema.rb` ]]; then
- echo "db/schema.rb after rake db:migrate:reset is different from one in the repository"
+#!/bin/sh
+
+schema_changed() {
+ if [ ! -z "$(git diff --name-only -- db/schema.rb)" ]; then
+ printf "db/schema.rb after rake db:migrate:reset is different from one in the repository"
+ printf "The diff is as follows:\n"
+ diff=$(git diff -p --binary -- db/schema.rb)
+ printf "%s" "$diff"
exit 1
else
- echo "db/schema.rb after rake db:migrate:reset matches one in the repository"
+ printf "db/schema.rb after rake db:migrate:reset matches one in the repository"
fi
}
diff --git a/scripts/trigger-build b/scripts/trigger-build
new file mode 100755
index 00000000000..0b5fd5995dd
--- /dev/null
+++ b/scripts/trigger-build
@@ -0,0 +1,207 @@
+#!/usr/bin/env ruby
+
+require 'gitlab'
+
+#
+# Configure credentials to be used with gitlab gem
+#
+Gitlab.configure do |config|
+ config.endpoint = 'https://gitlab.com/api/v4'
+ config.private_token = ENV['GITLAB_QA_ACCESS_TOKEN'] # gitlab-qa bot access token
+end
+
+module Trigger
+ TOKEN = ENV['BUILD_TRIGGER_TOKEN']
+
+ def self.ee?
+ ENV['CI_PROJECT_NAME'] == 'gitlab-ee' || File.exist?('CHANGELOG-EE.md')
+ end
+
+ class Base
+ def initialize(api_token)
+ Gitlab.private_token = api_token
+ end
+
+ def invoke!(post_comment: false)
+ pipeline = Gitlab.run_trigger(
+ downstream_project_path,
+ Trigger::TOKEN,
+ ref,
+ variables)
+
+ puts "Triggered #{pipeline.web_url}"
+ puts "Waiting for downstream pipeline status"
+
+ begin
+ Trigger::CommitComment.post!(downstream_project_path, pipeline) if post_comment
+ rescue Gitlab::Error::Error => error
+ puts "Ignoring the following error: #{error}"
+ end
+ Trigger::Pipeline.new(downstream_project_path, pipeline.id)
+ end
+
+ private
+
+ # Must be overridden
+ def downstream_project_path
+ raise NotImplementedError
+ end
+
+ # Must be overridden
+ def ref
+ raise NotImplementedError
+ end
+
+ # Can be overridden
+ def extra_variables
+ {}
+ end
+
+ # Can be overridden
+ def version_param_value(version_file)
+ File.read(version_file).strip
+ end
+
+ def variables
+ base_variables.merge(extra_variables).merge(version_file_variables)
+ end
+
+ def base_variables
+ {
+ 'TRIGGERED_USER' => ENV['GITLAB_USER_NAME'],
+ 'TRIGGER_SOURCE' => ENV['CI_JOB_URL']
+ }
+ end
+
+ # Read version files from all components
+ def version_file_variables
+ Dir.glob("*_VERSION").each_with_object({}) do |version_file, params|
+ params[version_file] = version_param_value(version_file)
+ end
+ end
+ end
+
+ class Omnibus < Base
+ private
+
+ def downstream_project_path
+ 'gitlab-org/omnibus-gitlab'.freeze
+ end
+
+ def ref
+ ENV['OMNIBUS_BRANCH'] || 'master'
+ end
+
+ def extra_variables
+ {
+ 'GITLAB_VERSION' => ENV['CI_COMMIT_SHA'],
+ 'ALTERNATIVE_SOURCES' => 'true',
+ 'ee' => Trigger.ee? ? 'true' : 'false'
+ }
+ end
+ end
+
+ class CNG < Base
+ private
+
+ def downstream_project_path
+ ENV['CNG_PROJECT_PATH'] || 'gitlab-org/build/CNG-mirror'
+ end
+
+ def ref
+ ENV['CNG_BRANCH'] || 'master'
+ end
+
+ def extra_variables
+ edition = Trigger.ee? ? 'EE' : 'CE'
+
+ {
+ "GITLAB_#{edition}_VERSION" => ENV['CI_COMMIT_REF_NAME'],
+ "#{edition}_PIPELINE" => 'true'
+ }
+ end
+
+ def version_param_value(_version_file)
+ raw_version = super
+
+ # if the version matches semver format, treat it as a tag and prepend `v`
+ if raw_version =~ Regexp.compile(/^\d+\.\d+\.\d+(-rc\d+)?(-ee)?$/)
+ "v#{raw_version}"
+ else
+ raw_version
+ end
+ end
+ end
+
+ class CommitComment
+ def self.post!(downstream_project_path, downstream_pipeline)
+ Gitlab.create_commit_comment(
+ ENV['CI_PROJECT_PATH'],
+ ENV['CI_COMMIT_SHA'],
+ "The [`#{ENV['CI_JOB_NAME']}`](#{ENV['CI_JOB_URL']}) job from pipeline #{ENV['CI_PIPELINE_URL']} triggered #{downstream_pipeline.web_url} downstream.")
+ end
+ end
+
+ class Pipeline
+ INTERVAL = 60 # seconds
+ MAX_DURATION = 3600 * 3 # 3 hours
+
+ attr_reader :project, :id
+
+ def initialize(project, id)
+ @project = project
+ @id = id
+ @start = Time.now.to_i
+
+ # gitlab-bot's token "GitLab multi-project pipeline polling"
+ Gitlab.private_token = ENV['GITLAB_BOT_MULTI_PROJECT_PIPELINE_POLLING_TOKEN']
+ end
+
+ def wait!
+ loop do
+ raise "Pipeline timed out after waiting for #{duration} minutes!" if timeout?
+
+ case status
+ when :created, :pending, :running
+ print "."
+ sleep INTERVAL
+ when :success
+ puts "Pipeline succeeded in #{duration} minutes!"
+ break
+ else
+ raise "Pipeline did not succeed!"
+ end
+
+ STDOUT.flush
+ end
+ end
+
+ def timeout?
+ Time.now.to_i > (@start + MAX_DURATION)
+ end
+
+ def duration
+ (Time.now.to_i - @start) / 60
+ end
+
+ def status
+ Gitlab.pipeline(project, id).status.to_sym
+ rescue Gitlab::Error::Error => error
+ puts "Ignoring the following error: #{error}"
+ # Ignore GitLab API hiccups. If GitLab is really down, we'll hit the job
+ # timeout anyway.
+ :running
+ end
+ end
+end
+
+case ARGV[0]
+when 'omnibus'
+ Trigger::Omnibus.new(ENV['GITLAB_QA_ACCESS_TOKEN']).invoke!(post_comment: true).wait!
+when 'cng'
+ Trigger::CNG.new(ENV['GITLAB_QA_ACCESS_TOKEN']).invoke!.wait!
+else
+ puts "Please provide a valid option:
+ omnibus - Triggers a pipeline that builds the omnibus-gitlab package
+ cng - Triggers a pipeline that builds images used by the GitLab helm chart"
+end
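
Usage, taken from the script's own help text; both token variables are expected to be present in the CI environment (GITLAB_QA_ACCESS_TOKEN for API calls, BUILD_TRIGGER_TOKEN for the trigger itself):

    scripts/trigger-build omnibus   # triggers a pipeline that builds the omnibus-gitlab package
    scripts/trigger-build cng       # triggers a pipeline that builds the images used by the GitLab Helm chart
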
diff --git a/scripts/trigger-build-docs b/scripts/trigger-build-docs
index c9aaba91aa0..dfc8ee6050a 100755
--- a/scripts/trigger-build-docs
+++ b/scripts/trigger-build-docs
@@ -6,8 +6,8 @@ require 'gitlab'
# Configure credentials to be used with gitlab gem
#
Gitlab.configure do |config|
- config.endpoint = 'https://gitlab.com/api/v4'
- config.private_token = ENV["DOCS_API_TOKEN"] # GitLab Docs bot access token with Developer access to gitlab-docs
+ config.endpoint = 'https://gitlab.com/api/v4'
+ config.private_token = ENV["DOCS_API_TOKEN"] # GitLab Docs bot access token with Developer access to gitlab-docs
end
#
@@ -16,18 +16,14 @@ end
GITLAB_DOCS_REPO = 'gitlab-com/gitlab-docs'.freeze
#
-# Truncate the remote docs branch name if it's more than 63 characters
-# otherwise we hit the filesystem limit and the directory name where
-# NGINX serves the site won't match the branch name.
+# Truncate the remote docs branch name otherwise we hit the filesystem
+# limit and the directory name where NGINX serves the site won't match
+# the branch name.
#
def docs_branch
# The maximum string length a file can have on a filesystem (ext4)
- # is 63 characters. Let's use something smaller to be 100% sure.
- max = 42
- # Prefix the remote branch with the slug of the project in order
- # to avoid name conflicts in the rare case the branch name already
- # exists in the docs repo and truncate to max length.
- "#{slug}-#{ENV["CI_COMMIT_REF_SLUG"]}"[0...max]
+ # is 63 characters. CI_ENVIRONMENT_SLUG is limited to 24 characters.
+ ENV["CI_ENVIRONMENT_SLUG"]
end
#
@@ -103,7 +99,7 @@ def trigger_pipeline
puts "=> Follow the status of the triggered pipeline:"
puts ""
- puts "https://gitlab.com/gitlab-com/gitlab-docs/pipelines/#{pipeline.id}"
+ puts pipeline.web_url
puts ""
puts "=> In a few minutes, you will be able to preview your changes under the following URL:"
puts ""
diff --git a/scripts/trigger-build-omnibus b/scripts/trigger-build-omnibus
deleted file mode 100755
index 85ea4aa74ac..00000000000
--- a/scripts/trigger-build-omnibus
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env ruby
-
-require 'net/http'
-require 'json'
-require 'cgi'
-
-module Omnibus
- PROJECT_PATH = 'gitlab-org/omnibus-gitlab'.freeze
-
- class Trigger
- TOKEN = ENV['BUILD_TRIGGER_TOKEN']
-
- def initialize
- @uri = URI("https://gitlab.com/api/v4/projects/#{CGI.escape(Omnibus::PROJECT_PATH)}/trigger/pipeline")
- @params = env_params.merge(file_params).merge(token: TOKEN)
- end
-
- def invoke!
- res = Net::HTTP.post_form(@uri, @params)
- id = JSON.parse(res.body)['id']
-
- if id
- puts "Triggered https://gitlab.com/#{Omnibus::PROJECT_PATH}/pipelines/#{id}"
- puts "Waiting for downstream pipeline status"
- else
- raise "Trigger failed! The response from the trigger is: #{res.body}"
- end
-
- Omnibus::Pipeline.new(id)
- end
-
- private
-
- def ee?
- File.exist?('CHANGELOG-EE.md')
- end
-
- def env_params
- {
- "ref" => ENV["OMNIBUS_BRANCH"] || "master",
- "variables[GITLAB_VERSION]" => ENV["CI_COMMIT_SHA"],
- "variables[ALTERNATIVE_SOURCES]" => true,
- "variables[ee]" => ee? ? 'true' : 'false',
- "variables[TRIGGERED_USER]" => ENV["GITLAB_USER_NAME"],
- "variables[TRIGGER_SOURCE]" => "https://gitlab.com/gitlab-org/#{ENV['CI_PROJECT_NAME']}/-/jobs/#{ENV['CI_JOB_ID']}"
- }
- end
-
- def file_params
- Hash.new.tap do |params|
- Dir.glob("*_VERSION").each do |version_file|
- params["variables[#{version_file}]"] = File.read(version_file).strip
- end
- end
- end
- end
-
- class Pipeline
- INTERVAL = 60 # seconds
- MAX_DURATION = 3600 * 3 # 3 hours
-
- def initialize(id)
- @start = Time.now.to_i
- @uri = URI("https://gitlab.com/api/v4/projects/#{CGI.escape(Omnibus::PROJECT_PATH)}/pipelines/#{id}")
- end
-
- def wait!
- loop do
- raise "Pipeline timed out after waiting for #{duration} minutes!" if timeout?
-
- case status
- when :created, :pending, :running
- print "."
- sleep INTERVAL
- when :success
- puts "Omnibus pipeline succeeded in #{duration} minutes!"
- break
- else
- raise "Omnibus pipeline did not succeed!"
- end
-
- STDOUT.flush
- end
- end
-
- def timeout?
- Time.now.to_i > (@start + MAX_DURATION)
- end
-
- def duration
- (Time.now.to_i - @start) / 60
- end
-
- def status
- req = Net::HTTP::Get.new(@uri)
- req['PRIVATE-TOKEN'] = ENV['GITLAB_QA_ACCESS_TOKEN']
-
- res = Net::HTTP.start(@uri.hostname, @uri.port, use_ssl: true) do |http|
- http.request(req)
- end
-
- JSON.parse(res.body)['status'].to_s.to_sym
- end
- end
-end
-
-Omnibus::Trigger.new.invoke!.wait!
diff --git a/scripts/utils.sh b/scripts/utils.sh
index 6faa701f0ce..2d2ba115563 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -12,3 +12,21 @@ retry() {
done
return 1
}
+
+setup_db_user_only() {
+ if [ "$GITLAB_DATABASE" = "postgresql" ]; then
+ . scripts/create_postgres_user.sh
+ else
+ . scripts/create_mysql_user.sh
+ fi
+}
+
+setup_db() {
+ setup_db_user_only
+
+ bundle exec rake db:drop db:create db:schema:load db:migrate
+
+ if [ "$GITLAB_DATABASE" = "mysql" ]; then
+ bundle exec rake add_limits_mysql
+ fi
+}
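
A minimal sketch of how the new helpers are meant to be used, mirroring the prepare_build.sh hunk above (the sourcing of utils.sh happens outside this diff, so that line is an assumption):

    . scripts/utils.sh
    setup_db            # drops, recreates, loads and migrates the schema (calls setup_db_user_only first)
    setup_db_user_only  # when only a database user is needed, e.g. for db:seed_fu-style tasks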