510 files changed, 7907 insertions, 4558 deletions
diff --git a/.codeclimate.yml b/.codeclimate.yml index dc8ac60fb44..ecac24b68d7 100644 --- a/.codeclimate.yml +++ b/.codeclimate.yml @@ -1,7 +1,5 @@ --- engines: - brakeman: - enabled: true bundler-audit: enabled: true duplication: diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c1d78ef2d48..b4afa953175 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -321,6 +321,7 @@ setup-test-env: expire_in: 7d paths: - tmp/tests + - config/secrets.yml rspec-pg 0 27: *rspec-metadata-pg rspec-pg 1 27: *rspec-metadata-pg @@ -737,8 +738,9 @@ cache gems: gitlab_git_test: <<: *dedicated-runner <<: *except-docs-and-qa - <<: *pull-cache variables: SETUP_DB: "false" + before_script: [] + cache: {} script: - spec/support/prepare-gitlab-git-test-for-commit --check-for-changes diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml index 442d61bcf4f..7a12c8473f3 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -704,7 +704,9 @@ Style/RedundantSelf: # Configuration parameters: EnforcedStyle, AllowInnerSlashes. # SupportedStyles: slashes, percent_r, mixed Style/RegexpLiteral: - Enabled: false + Enabled: true + EnforcedStyle: mixed + AllowInnerSlashes: false # Offense count: 36 # Cop supports --auto-correct. diff --git a/CHANGELOG.md b/CHANGELOG.md index 248c85304a9..5fc97c06f7c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,22 @@ documentation](doc/development/changelog.md) for instructions on adding your own entry. +## 10.4.2 (2018-01-30) + +### Fixed (6 changes) + +- Fix copy/paste on iOS devices due to a bug in webkit. !15804 +- Fix missing "allow users to request access" option in public project permissions. !16485 +- Fix encoding issue when counting commit count. !16637 +- Fixes destination already exists, and some particular service errors on Import/Export error. !16714 +- Fix cache clear bug withg using : on Windows. !16740 +- Use has_table_privilege for TRIGGER on PostgreSQL. + +### Changed (1 change) + +- Vendor Auto DevOps template with DAST security checks enabled. 
!16691 + + ## 10.4.1 (2018-01-24) ### Fixed (4 changes) diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION index b7c0622b4f4..4a7076db09a 100644 --- a/GITALY_SERVER_VERSION +++ b/GITALY_SERVER_VERSION @@ -1 +1 @@ -0.74.0 +0.77.0 @@ -132,7 +132,7 @@ gem 'asciidoctor-plantuml', '0.0.7' gem 'rouge', '~> 2.0' gem 'truncato', '~> 0.7.9' gem 'bootstrap_form', '~> 2.7.0' -gem 'nokogiri', '~> 1.8.1' +gem 'nokogiri', '~> 1.8.2' # Diffs gem 'diffy', '~> 3.1.0' @@ -325,7 +325,7 @@ group :development, :test do gem 'spinach-rerun-reporter', '~> 0.0.2' gem 'rspec_profiling', '~> 0.0.5' gem 'rspec-set', '~> 0.1.3' - gem 'rspec-parameterized' + gem 'rspec-parameterized', require: false # Prevent occasions where minitest is not bundled in packaged versions of ruby (see #3826) gem 'minitest', '~> 5.7.0' @@ -406,7 +406,7 @@ group :ed25519 do end # Gitaly GRPC client -gem 'gitaly-proto', '~> 0.78.0', require: 'gitaly' +gem 'gitaly-proto', '~> 0.83.0', require: 'gitaly' gem 'toml-rb', '~> 0.3.15', require: false diff --git a/Gemfile.lock b/Gemfile.lock index 5532888d179..2ddf8221a06 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -285,7 +285,7 @@ GEM po_to_json (>= 1.0.0) rails (>= 3.2.0) gherkin-ruby (0.3.2) - gitaly-proto (0.78.0) + gitaly-proto (0.83.0) google-protobuf (~> 3.1) grpc (~> 1.0) github-linguist (4.7.6) @@ -304,7 +304,7 @@ GEM mime-types (>= 1.16) posix-spawn (~> 0.3) gitlab-markup (1.6.3) - gitlab-styles (2.3.1) + gitlab-styles (2.3.2) rubocop (~> 0.51) rubocop-gitlab-security (~> 0.1.0) rubocop-rspec (~> 1.19) @@ -340,7 +340,7 @@ GEM mime-types (~> 3.0) representable (~> 3.0) retriable (>= 2.0, < 4.0) - google-protobuf (3.4.1.1) + google-protobuf (3.5.1.1) googleapis-common-protos-types (1.0.1) google-protobuf (~> 3.0) googleauth (0.5.3) @@ -513,7 +513,7 @@ GEM net-ldap (0.16.0) net-ssh (4.1.0) netrc (0.11.0) - nokogiri (1.8.1) + nokogiri (1.8.2) mini_portile2 (~> 2.3.0) numerizer (0.1.1) oauth (0.5.1) @@ -1056,7 +1056,7 @@ DEPENDENCIES gettext (~> 3.2.2) gettext_i18n_rails (~> 1.8.0) gettext_i18n_rails_js (~> 1.2.0) - gitaly-proto (~> 0.78.0) + gitaly-proto (~> 0.83.0) github-linguist (~> 4.7.0) gitlab-flowdock-git-hook (~> 1.0.1) gitlab-markup (~> 1.6.2) @@ -1100,7 +1100,7 @@ DEPENDENCIES mysql2 (~> 0.4.10) net-ldap net-ssh (~> 4.1.0) - nokogiri (~> 1.8.1) + nokogiri (~> 1.8.2) oauth2 (~> 1.4) octokit (~> 4.6.2) oj (~> 2.17.4) diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js index 7cb81bf4d5b..1f34c6b50c2 100644 --- a/app/assets/javascripts/api.js +++ b/app/assets/javascripts/api.js @@ -1,9 +1,9 @@ -import $ from 'jquery'; +import _ from 'underscore'; import axios from './lib/utils/axios_utils'; const Api = { groupsPath: '/api/:version/groups.json', - groupPath: '/api/:version/groups/:id.json', + groupPath: '/api/:version/groups/:id', namespacesPath: '/api/:version/namespaces.json', groupProjectsPath: '/api/:version/groups/:id/projects.json', projectsPath: '/api/:version/projects.json', @@ -23,42 +23,44 @@ const Api = { group(groupId, callback) { const url = Api.buildUrl(Api.groupPath) .replace(':id', groupId); - return $.ajax({ - url, - dataType: 'json', - }) - .done(group => callback(group)); + return axios.get(url) + .then(({ data }) => { + callback(data); + + return data; + }); }, // Return groups list. 
Filtered by query groups(query, options, callback) { const url = Api.buildUrl(Api.groupsPath); - return $.ajax({ - url, - data: Object.assign({ + return axios.get(url, { + params: Object.assign({ search: query, per_page: 20, }, options), - dataType: 'json', }) - .done(groups => callback(groups)); + .then(({ data }) => { + callback(data); + + return data; + }); }, // Return namespaces list. Filtered by query namespaces(query, callback) { const url = Api.buildUrl(Api.namespacesPath); - return $.ajax({ - url, - data: { + return axios.get(url, { + params: { search: query, per_page: 20, }, - dataType: 'json', - }).done(namespaces => callback(namespaces)); + }) + .then(({ data }) => callback(data)); }, // Return projects list. Filtered by query - projects(query, options, callback) { + projects(query, options, callback = _.noop) { const url = Api.buildUrl(Api.projectsPath); const defaults = { search: query, @@ -70,12 +72,14 @@ const Api = { defaults.membership = true; } - return $.ajax({ - url, - data: Object.assign(defaults, options), - dataType: 'json', + return axios.get(url, { + params: Object.assign(defaults, options), }) - .done(projects => callback(projects)); + .then(({ data }) => { + callback(data); + + return data; + }); }, // Return single project @@ -97,41 +101,34 @@ const Api = { url = Api.buildUrl(Api.groupLabelsPath).replace(':namespace_path', namespacePath); } - return $.ajax({ - url, - type: 'POST', - data: { label: data }, - dataType: 'json', + return axios.post(url, { + label: data, }) - .done(label => callback(label)) - .fail(message => callback(message.responseJSON)); + .then(res => callback(res.data)) + .catch(e => callback(e.response.data)); }, // Return group projects list. Filtered by query groupProjects(groupId, query, callback) { const url = Api.buildUrl(Api.groupProjectsPath) .replace(':id', groupId); - return $.ajax({ - url, - data: { + return axios.get(url, { + params: { search: query, per_page: 20, }, - dataType: 'json', }) - .done(projects => callback(projects)); + .then(({ data }) => callback(data)); }, commitMultiple(id, data) { // see https://docs.gitlab.com/ce/api/commits.html#create-a-commit-with-multiple-files-and-actions const url = Api.buildUrl(Api.commitPath) .replace(':id', encodeURIComponent(id)); - return this.wrapAjaxCall({ - url, - type: 'POST', - contentType: 'application/json; charset=utf-8', - data: JSON.stringify(data), - dataType: 'json', + return axios.post(url, JSON.stringify(data), { + headers: { + 'Content-Type': 'application/json; charset=utf-8', + }, }); }, @@ -140,40 +137,37 @@ const Api = { .replace(':id', encodeURIComponent(id)) .replace(':branch', branch); - return this.wrapAjaxCall({ - url, - type: 'GET', - contentType: 'application/json; charset=utf-8', - dataType: 'json', - }); + return axios.get(url); }, // Return text for a specific license licenseText(key, data, callback) { const url = Api.buildUrl(Api.licensePath) .replace(':key', key); - return $.ajax({ - url, - data, + return axios.get(url, { + params: data, }) - .done(license => callback(license)); + .then(res => callback(res.data)); }, gitignoreText(key, callback) { const url = Api.buildUrl(Api.gitignorePath) .replace(':key', key); - return $.get(url, gitignore => callback(gitignore)); + return axios.get(url) + .then(({ data }) => callback(data)); }, gitlabCiYml(key, callback) { const url = Api.buildUrl(Api.gitlabCiYmlPath) .replace(':key', key); - return $.get(url, file => callback(file)); + return axios.get(url) + .then(({ data }) => callback(data)); }, dockerfileYml(key, 
callback) { const url = Api.buildUrl(Api.dockerfilePath).replace(':key', key); - $.get(url, callback); + return axios.get(url) + .then(({ data }) => callback(data)); }, issueTemplate(namespacePath, projectPath, key, type, callback) { @@ -182,23 +176,18 @@ const Api = { .replace(':type', type) .replace(':project_path', projectPath) .replace(':namespace_path', namespacePath); - $.ajax({ - url, - dataType: 'json', - }) - .done(file => callback(null, file)) - .fail(callback); + return axios.get(url) + .then(({ data }) => callback(null, data)) + .catch(callback); }, users(query, options) { const url = Api.buildUrl(this.usersPath); - return Api.wrapAjaxCall({ - url, - data: Object.assign({ + return axios.get(url, { + params: Object.assign({ search: query, per_page: 20, }, options), - dataType: 'json', }); }, @@ -209,21 +198,6 @@ const Api = { } return urlRoot + url.replace(':version', gon.api_version); }, - - wrapAjaxCall(options) { - return new Promise((resolve, reject) => { - // jQuery 2 is not Promises/A+ compatible (missing catch) - $.ajax(options) // eslint-disable-line promise/catch-or-return - .then(data => resolve(data), - (jqXHR, textStatus, errorThrown) => { - const error = new Error(`${options.url}: ${errorThrown}`); - error.textStatus = textStatus; - if (jqXHR && jqXHR.responseJSON) error.responseJSON = jqXHR.responseJSON; - reject(error); - }, - ); - }); - }, }; export default Api; diff --git a/app/assets/javascripts/awards_handler.js b/app/assets/javascripts/awards_handler.js index 622764107ad..87109a802e5 100644 --- a/app/assets/javascripts/awards_handler.js +++ b/app/assets/javascripts/awards_handler.js @@ -1,8 +1,10 @@ /* eslint-disable class-methods-use-this */ import _ from 'underscore'; import Cookies from 'js-cookie'; +import { __ } from './locale'; import { isInIssuePage, updateTooltipTitle } from './lib/utils/common_utils'; -import Flash from './flash'; +import flash from './flash'; +import axios from './lib/utils/axios_utils'; const animationEndEventString = 'animationend webkitAnimationEnd MSAnimationEnd oAnimationEnd'; const transitionEndEventString = 'transitionend webkitTransitionEnd oTransitionEnd MSTransitionEnd'; @@ -441,13 +443,15 @@ class AwardsHandler { if (this.isUserAuthored($emojiButton)) { this.userAuthored($emojiButton); } else { - $.post(awardUrl, { + axios.post(awardUrl, { name: emoji, - }, (data) => { + }) + .then(({ data }) => { if (data.ok) { callback(); } - }).fail(() => new Flash('Something went wrong on our end.')); + }) + .catch(() => flash(__('Something went wrong on our end.'))); } } diff --git a/app/assets/javascripts/blob/viewer/index.js b/app/assets/javascripts/blob/viewer/index.js index 54132e8537b..612f604e725 100644 --- a/app/assets/javascripts/blob/viewer/index.js +++ b/app/assets/javascripts/blob/viewer/index.js @@ -1,5 +1,6 @@ import Flash from '../../flash'; import { handleLocationHash } from '../../lib/utils/common_utils'; +import axios from '../../lib/utils/axios_utils'; export default class BlobViewer { constructor() { @@ -127,25 +128,18 @@ export default class BlobViewer { const viewer = viewerParam; const url = viewer.getAttribute('data-url'); - return new Promise((resolve, reject) => { - if (!url || viewer.getAttribute('data-loaded') || viewer.getAttribute('data-loading')) { - resolve(viewer); - return; - } + if (!url || viewer.getAttribute('data-loaded') || viewer.getAttribute('data-loading')) { + return Promise.resolve(viewer); + } - viewer.setAttribute('data-loading', 'true'); + viewer.setAttribute('data-loading', 'true'); - 
$.ajax({ - url, - dataType: 'JSON', - }) - .fail(reject) - .done((data) => { + return axios.get(url) + .then(({ data }) => { viewer.innerHTML = data.html; viewer.setAttribute('data-loaded', 'true'); - resolve(viewer); + return viewer; }); - }); } } diff --git a/app/assets/javascripts/blob_edit/edit_blob.js b/app/assets/javascripts/blob_edit/edit_blob.js index b37988a674d..a25f7fb3dcd 100644 --- a/app/assets/javascripts/blob_edit/edit_blob.js +++ b/app/assets/javascripts/blob_edit/edit_blob.js @@ -1,5 +1,8 @@ /* global ace */ +import axios from '~/lib/utils/axios_utils'; +import createFlash from '~/flash'; +import { __ } from '~/locale'; import TemplateSelectorMediator from '../blob/file_template_mediator'; export default class EditBlob { @@ -56,12 +59,14 @@ export default class EditBlob { if (paneId === '#preview') { this.$toggleButton.hide(); - return $.post(currentLink.data('preview-url'), { + axios.post(currentLink.data('preview-url'), { content: this.editor.getValue(), - }, (response) => { - currentPane.empty().append(response); - return currentPane.renderGFM(); - }); + }) + .then(({ data }) => { + currentPane.empty().append(data); + currentPane.renderGFM(); + }) + .catch(() => createFlash(__('An error occurred previewing the blob'))); } this.$toggleButton.show(); diff --git a/app/assets/javascripts/clusters/clusters_bundle.js b/app/assets/javascripts/clusters/clusters_bundle.js index 637d0dbde23..4dddb6eb0d6 100644 --- a/app/assets/javascripts/clusters/clusters_bundle.js +++ b/app/assets/javascripts/clusters/clusters_bundle.js @@ -14,6 +14,7 @@ import { import ClustersService from './services/clusters_service'; import ClustersStore from './stores/clusters_store'; import applications from './components/applications.vue'; +import setupToggleButtons from '../toggle_buttons'; /** * Cluster page has 2 separate parts: @@ -48,12 +49,9 @@ export default class Clusters { installPrometheusEndpoint: installPrometheusPath, }); - this.toggle = this.toggle.bind(this); this.installApplication = this.installApplication.bind(this); this.showToken = this.showToken.bind(this); - this.toggleButton = document.querySelector('.js-toggle-cluster'); - this.toggleInput = document.querySelector('.js-toggle-input'); this.errorContainer = document.querySelector('.js-cluster-error'); this.successContainer = document.querySelector('.js-cluster-success'); this.creatingContainer = document.querySelector('.js-cluster-creating'); @@ -63,6 +61,7 @@ export default class Clusters { this.tokenField = document.querySelector('.js-cluster-token'); initSettingsPanels(); + setupToggleButtons(document.querySelector('.js-cluster-enable-toggle-area')); this.initApplications(); if (this.store.state.status !== 'created') { @@ -101,13 +100,11 @@ export default class Clusters { } addListeners() { - this.toggleButton.addEventListener('click', this.toggle); if (this.showTokenButton) this.showTokenButton.addEventListener('click', this.showToken); eventHub.$on('installApplication', this.installApplication); } removeListeners() { - this.toggleButton.removeEventListener('click', this.toggle); if (this.showTokenButton) this.showTokenButton.removeEventListener('click', this.showToken); eventHub.$off('installApplication', this.installApplication); } @@ -151,11 +148,6 @@ export default class Clusters { this.updateContainer(prevStatus, this.store.state.status, this.store.state.statusReason); } - toggle() { - this.toggleButton.classList.toggle('is-checked'); - this.toggleInput.setAttribute('value', 
this.toggleButton.classList.contains('is-checked').toString()); - } - showToken() { const type = this.tokenField.getAttribute('type'); diff --git a/app/assets/javascripts/clusters/clusters_index.js b/app/assets/javascripts/clusters/clusters_index.js index 6844d1dbd83..2e3ad244375 100644 --- a/app/assets/javascripts/clusters/clusters_index.js +++ b/app/assets/javascripts/clusters/clusters_index.js @@ -1,58 +1,20 @@ import Flash from '../flash'; import { s__ } from '../locale'; +import setupToggleButtons from '../toggle_buttons'; import ClustersService from './services/clusters_service'; -/** - * Toggles loading and disabled classes. - * @param {HTMLElement} button - */ -const toggleLoadingButton = (button) => { - if (button.getAttribute('disabled')) { - button.removeAttribute('disabled'); - } else { - button.setAttribute('disabled', true); - } - - button.classList.toggle('is-loading'); -}; -/** - * Toggles checked class for the given button - * @param {HTMLElement} button - */ -const toggleValue = (button) => { - button.classList.toggle('is-checked'); +export default () => { + const clusterList = document.querySelector('.js-clusters-list'); + // The empty state won't have a clusterList + if (clusterList) { + setupToggleButtons( + document.querySelector('.js-clusters-list'), + (value, toggle) => + ClustersService.updateCluster(toggle.dataset.endpoint, { cluster: { enabled: value } }) + .catch((err) => { + Flash(s__('ClusterIntegration|Something went wrong on our end.')); + throw err; + }), + ); + } }; - -/** - * Handles toggle buttons in the cluster's table. - * - * When the user clicks the toggle button for each cluster, it: - * - toggles the button - * - shows a loading and disables button - * - Makes a put request to the given endpoint - * Once we receive the response, either: - * 1) Show updated status in case of successfull response - * 2) Show initial status in case of failed response - */ -export default function setClusterTableToggles() { - document.querySelectorAll('.js-toggle-cluster-list') - .forEach(button => button.addEventListener('click', (e) => { - const toggleButton = e.currentTarget; - const endpoint = toggleButton.getAttribute('data-endpoint'); - - toggleValue(toggleButton); - toggleLoadingButton(toggleButton); - - const value = toggleButton.classList.contains('is-checked'); - - ClustersService.updateCluster(endpoint, { cluster: { enabled: value } }) - .then(() => { - toggleLoadingButton(toggleButton); - }) - .catch(() => { - toggleLoadingButton(toggleButton); - toggleValue(toggleButton); - Flash(s__('ClusterIntegration|Something went wrong on our end.')); - }); - })); -} diff --git a/app/assets/javascripts/commits.js b/app/assets/javascripts/commits.js index 3a03cbf6b90..4b2f75fffde 100644 --- a/app/assets/javascripts/commits.js +++ b/app/assets/javascripts/commits.js @@ -5,6 +5,7 @@ import { pluralize } from './lib/utils/text_utility'; import { localTimeAgo } from './lib/utils/datetime_utility'; import Pager from './pager'; +import axios from './lib/utils/axios_utils'; export default (function () { const CommitsList = {}; @@ -43,29 +44,30 @@ export default (function () { CommitsList.filterResults = function () { const form = $('.commits-search-form'); const search = CommitsList.searchField.val(); - if (search === CommitsList.lastSearch) return; + if (search === CommitsList.lastSearch) return Promise.resolve(); const commitsUrl = form.attr('action') + '?' 
+ form.serialize(); CommitsList.content.fadeTo('fast', 0.5); - return $.ajax({ - type: 'GET', - url: form.attr('action'), - data: form.serialize(), - complete: function () { - return CommitsList.content.fadeTo('fast', 1.0); - }, - success: function (data) { + const params = form.serializeArray().reduce((acc, obj) => Object.assign(acc, { + [obj.name]: obj.value, + }), {}); + + return axios.get(form.attr('action'), { + params, + }) + .then(({ data }) => { CommitsList.lastSearch = search; CommitsList.content.html(data.html); - return history.replaceState({ - page: commitsUrl, + CommitsList.content.fadeTo('fast', 1.0); + // Change url so if user reload a page - search results are saved + history.replaceState({ + page: commitsUrl, }, document.title, commitsUrl); - }, - error: function () { + }) + .catch(() => { + CommitsList.content.fadeTo('fast', 1.0); CommitsList.lastSearch = null; - }, - dataType: 'json', - }); + }); }; // Prepare loaded data. diff --git a/app/assets/javascripts/compare.js b/app/assets/javascripts/compare.js index 144caf1d278..e2a008e8904 100644 --- a/app/assets/javascripts/compare.js +++ b/app/assets/javascripts/compare.js @@ -1,5 +1,6 @@ /* eslint-disable func-names, space-before-function-paren, wrap-iife, quotes, no-var, object-shorthand, consistent-return, no-unused-vars, comma-dangle, vars-on-top, prefer-template, max-len */ import { localTimeAgo } from './lib/utils/datetime_utility'; +import axios from './lib/utils/axios_utils'; export default class Compare { constructor(opts) { @@ -41,17 +42,14 @@ export default class Compare { } getTargetProject() { - return $.ajax({ - url: this.opts.targetProjectUrl, - data: { - target_project_id: $("input[name='merge_request[target_project_id]']").val() - }, - beforeSend: function() { - return $('.mr_target_commit').empty(); + $('.mr_target_commit').empty(); + + return axios.get(this.opts.targetProjectUrl, { + params: { + target_project_id: $("input[name='merge_request[target_project_id]']").val(), }, - success: function(html) { - return $('.js-target-branch-dropdown .dropdown-content').html(html); - } + }).then(({ data }) => { + $('.js-target-branch-dropdown .dropdown-content').html(data); }); } @@ -68,22 +66,19 @@ export default class Compare { }); } - static sendAjax(url, loading, target, data) { - var $target; - $target = $(target); - return $.ajax({ - url: url, - data: data, - beforeSend: function() { - loading.show(); - return $target.empty(); - }, - success: function(html) { - loading.hide(); - $target.html(html); - var className = '.' + $target[0].className.replace(' ', '.'); - localTimeAgo($('.js-timeago', className)); - } + static sendAjax(url, loading, target, params) { + const $target = $(target); + + loading.show(); + $target.empty(); + + return axios.get(url, { + params, + }).then(({ data }) => { + loading.hide(); + $target.html(data); + const className = '.' 
+ $target[0].className.replace(' ', '.'); + localTimeAgo($('.js-timeago', className)); }); } } diff --git a/app/assets/javascripts/compare_autocomplete.js b/app/assets/javascripts/compare_autocomplete.js index e633ef8a29e..59899e97be1 100644 --- a/app/assets/javascripts/compare_autocomplete.js +++ b/app/assets/javascripts/compare_autocomplete.js @@ -1,4 +1,7 @@ /* eslint-disable func-names, space-before-function-paren, one-var, no-var, one-var-declaration-per-line, object-shorthand, comma-dangle, prefer-arrow-callback, no-else-return, newline-per-chained-call, wrap-iife, max-len */ +import { __ } from './locale'; +import axios from './lib/utils/axios_utils'; +import flash from './flash'; export default function initCompareAutocomplete() { $('.js-compare-dropdown').each(function() { @@ -10,15 +13,14 @@ export default function initCompareAutocomplete() { const $filterInput = $('input[type="search"]', $dropdownContainer); $dropdown.glDropdown({ data: function(term, callback) { - return $.ajax({ - url: $dropdown.data('refs-url'), - data: { + axios.get($dropdown.data('refsUrl'), { + params: { ref: $dropdown.data('ref'), search: term, - } - }).done(function(refs) { - return callback(refs); - }); + }, + }).then(({ data }) => { + callback(data); + }).catch(() => flash(__('Error fetching refs'))); }, selectable: true, filterable: true, diff --git a/app/assets/javascripts/create_item_dropdown.js b/app/assets/javascripts/create_item_dropdown.js index 488db023ee7..42e9e568170 100644 --- a/app/assets/javascripts/create_item_dropdown.js +++ b/app/assets/javascripts/create_item_dropdown.js @@ -12,6 +12,7 @@ export default class CreateItemDropdown { this.fieldName = options.fieldName; this.onSelect = options.onSelect || (() => {}); this.getDataOption = options.getData; + this.createNewItemFromValueOption = options.createNewItemFromValue; this.$dropdown = options.$dropdown; this.$dropdownContainer = this.$dropdown.parent(); this.$dropdownFooter = this.$dropdownContainer.find('.dropdown-footer'); @@ -30,15 +31,15 @@ export default class CreateItemDropdown { filterable: true, remote: false, search: { - fields: ['title'], + fields: ['text'], }, selectable: true, toggleLabel(selected) { - return (selected && 'id' in selected) ? selected.title : this.defaultToggleLabel; + return (selected && 'id' in selected) ? 
_.escape(selected.title) : this.defaultToggleLabel; }, fieldName: this.fieldName, text(item) { - return _.escape(item.title); + return _.escape(item.text); }, id(item) { return _.escape(item.id); @@ -51,6 +52,11 @@ export default class CreateItemDropdown { }); } + clearDropdown() { + this.$dropdownContainer.find('.dropdown-content').html(''); + this.$dropdownContainer.find('.dropdown-input-field').val(''); + } + bindEvents() { this.$createButton.on('click', this.onClickCreateWildcard.bind(this)); } @@ -58,9 +64,13 @@ export default class CreateItemDropdown { onClickCreateWildcard(e) { e.preventDefault(); + this.refreshData(); + this.$dropdown.data('glDropdown').selectRowAtIndex(); + } + + refreshData() { // Refresh the dropdown's data, which ends up calling `getData` this.$dropdown.data('glDropdown').remote.execute(); - this.$dropdown.data('glDropdown').selectRowAtIndex(); } getData(term, callback) { @@ -79,20 +89,28 @@ export default class CreateItemDropdown { }); } - toggleCreateNewButton(item) { - if (item) { - this.selectedItem = { - title: item, - id: item, - text: item, - }; + createNewItemFromValue(newValue) { + if (this.createNewItemFromValueOption) { + return this.createNewItemFromValueOption(newValue); + } + + return { + title: newValue, + id: newValue, + text: newValue, + }; + } + + toggleCreateNewButton(newValue) { + if (newValue) { + this.selectedItem = this.createNewItemFromValue(newValue); this.$dropdownContainer .find('.js-dropdown-create-new-item code') - .text(item); + .text(newValue); } - this.toggleFooter(!item); + this.toggleFooter(!newValue); } toggleFooter(toggleState) { diff --git a/app/assets/javascripts/create_merge_request_dropdown.js b/app/assets/javascripts/create_merge_request_dropdown.js index bc23a72762f..482d83621e2 100644 --- a/app/assets/javascripts/create_merge_request_dropdown.js +++ b/app/assets/javascripts/create_merge_request_dropdown.js @@ -1,5 +1,6 @@ /* eslint-disable no-new */ import _ from 'underscore'; +import axios from './lib/utils/axios_utils'; import Flash from './flash'; import DropLab from './droplab/drop_lab'; import ISetter from './droplab/plugins/input_setter'; @@ -74,60 +75,52 @@ export default class CreateMergeRequestDropdown { } checkAbilityToCreateBranch() { - return $.ajax({ - type: 'GET', - dataType: 'json', - url: this.canCreatePath, - beforeSend: () => this.setUnavailableButtonState(), - }) - .done((data) => { - this.setUnavailableButtonState(false); - - if (data.can_create_branch) { - this.available(); - this.enable(); - - if (!this.droplabInitialized) { - this.droplabInitialized = true; - this.initDroplab(); - this.bindEvents(); + this.setUnavailableButtonState(); + + axios.get(this.canCreatePath) + .then(({ data }) => { + this.setUnavailableButtonState(false); + + if (data.can_create_branch) { + this.available(); + this.enable(); + + if (!this.droplabInitialized) { + this.droplabInitialized = true; + this.initDroplab(); + this.bindEvents(); + } + } else if (data.has_related_branch) { + this.hide(); } - } else if (data.has_related_branch) { - this.hide(); - } - }).fail(() => { - this.unavailable(); - this.disable(); - new Flash('Failed to check if a new branch can be created.'); - }); + }) + .catch(() => { + this.unavailable(); + this.disable(); + Flash('Failed to check if a new branch can be created.'); + }); } createBranch() { - return $.ajax({ - method: 'POST', - dataType: 'json', - url: this.createBranchPath, - beforeSend: () => (this.isCreatingBranch = true), - }) - .done((data) => { - this.branchCreated = true; - 
window.location.href = data.url; - }) - .fail(() => new Flash('Failed to create a branch for this issue. Please try again.')); + this.isCreatingBranch = true; + + return axios.post(this.createBranchPath) + .then(({ data }) => { + this.branchCreated = true; + window.location.href = data.url; + }) + .catch(() => Flash('Failed to create a branch for this issue. Please try again.')); } createMergeRequest() { - return $.ajax({ - method: 'POST', - dataType: 'json', - url: this.createMrPath, - beforeSend: () => (this.isCreatingMergeRequest = true), - }) - .done((data) => { - this.mergeRequestCreated = true; - window.location.href = data.url; - }) - .fail(() => new Flash('Failed to create Merge Request. Please try again.')); + this.isCreatingMergeRequest = true; + + return axios.post(this.createMrPath) + .then(({ data }) => { + this.mergeRequestCreated = true; + window.location.href = data.url; + }) + .catch(() => Flash('Failed to create Merge Request. Please try again.')); } disable() { @@ -200,39 +193,33 @@ export default class CreateMergeRequestDropdown { getRef(ref, target = 'all') { if (!ref) return false; - return $.ajax({ - method: 'GET', - dataType: 'json', - url: this.refsPath + ref, - beforeSend: () => { - this.isGettingRef = true; - }, - }) - .always(() => { - this.isGettingRef = false; - }) - .done((data) => { - const branches = data[Object.keys(data)[0]]; - const tags = data[Object.keys(data)[1]]; - let result; + return axios.get(this.refsPath + ref) + .then(({ data }) => { + const branches = data[Object.keys(data)[0]]; + const tags = data[Object.keys(data)[1]]; + let result; + + if (target === 'branch') { + result = CreateMergeRequestDropdown.findByValue(branches, ref); + } else { + result = CreateMergeRequestDropdown.findByValue(branches, ref, true) || + CreateMergeRequestDropdown.findByValue(tags, ref, true); + this.suggestedRef = result; + } - if (target === 'branch') { - result = CreateMergeRequestDropdown.findByValue(branches, ref); - } else { - result = CreateMergeRequestDropdown.findByValue(branches, ref, true) || - CreateMergeRequestDropdown.findByValue(tags, ref, true); - this.suggestedRef = result; - } + this.isGettingRef = false; - return this.updateInputState(target, ref, result); - }) - .fail(() => { - this.unavailable(); - this.disable(); - new Flash('Failed to get ref.'); + return this.updateInputState(target, ref, result); + }) + .catch(() => { + this.unavailable(); + this.disable(); + new Flash('Failed to get ref.'); - return false; - }); + this.isGettingRef = false; + + return false; + }); } getTargetData(target) { @@ -332,12 +319,12 @@ export default class CreateMergeRequestDropdown { xhr = this.createBranch(); } - xhr.fail(() => { + xhr.catch(() => { this.isCreatingMergeRequest = false; this.isCreatingBranch = false; - }); - xhr.always(() => this.enable()); + this.enable(); + }); this.disable(); } diff --git a/app/assets/javascripts/dropzone_input.js b/app/assets/javascripts/dropzone_input.js index 550dbdda922..ba89e5726fa 100644 --- a/app/assets/javascripts/dropzone_input.js +++ b/app/assets/javascripts/dropzone_input.js @@ -2,6 +2,7 @@ import Dropzone from 'dropzone'; import _ from 'underscore'; import './preview_markdown'; import csrf from './lib/utils/csrf'; +import axios from './lib/utils/axios_utils'; Dropzone.autoDiscover = false; @@ -235,25 +236,21 @@ export default function dropzoneInput(form) { uploadFile = (item, filename) => { const formData = new FormData(); formData.append('file', item, filename); - return $.ajax({ - url: uploadsPath, - type: 'POST', 
- data: formData, - dataType: 'json', - processData: false, - contentType: false, - headers: csrf.headers, - beforeSend: () => { - showSpinner(); - return closeAlertMessage(); - }, - success: (e, text, response) => { - const md = response.responseJSON.link.markdown; + + showSpinner(); + closeAlertMessage(); + + axios.post(uploadsPath, formData) + .then(({ data }) => { + const md = data.link.markdown; + insertToTextArea(filename, md); - }, - error: response => showError(response.responseJSON.message), - complete: () => closeSpinner(), - }); + closeSpinner(); + }) + .catch((e) => { + showError(e.response.data.message); + closeSpinner(); + }); }; updateAttachingMessage = (files, messageContainer) => { diff --git a/app/assets/javascripts/due_date_select.js b/app/assets/javascripts/due_date_select.js index ada985913bb..bd4c58b7cb1 100644 --- a/app/assets/javascripts/due_date_select.js +++ b/app/assets/javascripts/due_date_select.js @@ -1,6 +1,7 @@ /* global dateFormat */ import Pikaday from 'pikaday'; +import axios from './lib/utils/axios_utils'; import { parsePikadayDate, pikadayToString } from './lib/utils/datefix'; class DueDateSelect { @@ -125,37 +126,30 @@ class DueDateSelect { } submitSelectedDate(isDropdown) { - return $.ajax({ - type: 'PUT', - url: this.issueUpdateURL, - data: this.datePayload, - dataType: 'json', - beforeSend: () => { - const selectedDateValue = this.datePayload[this.abilityName].due_date; - const displayedDateStyle = this.displayedDate !== 'No due date' ? 'bold' : 'no-value'; + const selectedDateValue = this.datePayload[this.abilityName].due_date; + const displayedDateStyle = this.displayedDate !== 'No due date' ? 'bold' : 'no-value'; - this.$loading.removeClass('hidden').fadeIn(); + this.$loading.removeClass('hidden').fadeIn(); - if (isDropdown) { - this.$dropdown.trigger('loading.gl.dropdown'); - this.$selectbox.hide(); - } + if (isDropdown) { + this.$dropdown.trigger('loading.gl.dropdown'); + this.$selectbox.hide(); + } - this.$value.css('display', ''); - this.$valueContent.html(`<span class='${displayedDateStyle}'>${this.displayedDate}</span>`); - this.$sidebarValue.html(this.displayedDate); + this.$value.css('display', ''); + this.$valueContent.html(`<span class='${displayedDateStyle}'>${this.displayedDate}</span>`); + this.$sidebarValue.html(this.displayedDate); - return selectedDateValue.length ? - $('.js-remove-due-date-holder').removeClass('hidden') : - $('.js-remove-due-date-holder').addClass('hidden'); - }, - }).done(() => { - if (isDropdown) { - this.$dropdown.trigger('loaded.gl.dropdown'); - this.$dropdown.dropdown('toggle'); - } - return this.$loading.fadeOut(); - }); + $('.js-remove-due-date-holder').toggleClass('hidden', selectedDateValue.length); + + return axios.put(this.issueUpdateURL, this.datePayload) + .then(() => { + if (isDropdown) { + this.$dropdown.trigger('loaded.gl.dropdown'); + this.$dropdown.dropdown('toggle'); + } + return this.$loading.fadeOut(); + }); } } diff --git a/app/assets/javascripts/filterable_list.js b/app/assets/javascripts/filterable_list.js index 9e91f72b2ea..a10f027de53 100644 --- a/app/assets/javascripts/filterable_list.js +++ b/app/assets/javascripts/filterable_list.js @@ -1,4 +1,5 @@ import _ from 'underscore'; +import axios from './lib/utils/axios_utils'; /** * Makes search request for content when user types a value in the search input. 
@@ -54,32 +55,26 @@ export default class FilterableList { this.listFilterElement.removeEventListener('input', this.debounceFilter); } - filterResults(queryData) { + filterResults(params) { if (this.isBusy) { return false; } $(this.listHolderElement).fadeTo(250, 0.5); - return $.ajax({ - url: this.getFilterEndpoint(), - data: queryData, - type: 'GET', - dataType: 'json', - context: this, - complete: this.onFilterComplete, - beforeSend: () => { - this.isBusy = true; - }, - success: (response, textStatus, xhr) => { - this.onFilterSuccess(response, xhr, queryData); - }, - }); + this.isBusy = true; + + return axios.get(this.getFilterEndpoint(), { + params, + }).then((res) => { + this.onFilterSuccess(res, params); + this.onFilterComplete(); + }).catch(() => this.onFilterComplete()); } - onFilterSuccess(response, xhr, queryData) { - if (response.html) { - this.listHolderElement.innerHTML = response.html; + onFilterSuccess(response, queryData) { + if (response.data.html) { + this.listHolderElement.innerHTML = response.data.html; } // Change url so if user reload a page - search results are saved diff --git a/app/assets/javascripts/gl_dropdown.js b/app/assets/javascripts/gl_dropdown.js index 64f258aed64..15df7a7f989 100644 --- a/app/assets/javascripts/gl_dropdown.js +++ b/app/assets/javascripts/gl_dropdown.js @@ -2,6 +2,7 @@ /* global fuzzaldrinPlus */ import _ from 'underscore'; import fuzzaldrinPlus from 'fuzzaldrin-plus'; +import axios from './lib/utils/axios_utils'; import { visitUrl } from './lib/utils/url_utility'; import { isObject } from './lib/utils/type_utility'; @@ -212,25 +213,17 @@ GitLabDropdownRemote = (function() { }; GitLabDropdownRemote.prototype.fetchData = function() { - return $.ajax({ - url: this.dataEndpoint, - dataType: this.options.dataType, - beforeSend: (function(_this) { - return function() { - if (_this.options.beforeSend) { - return _this.options.beforeSend(); - } - }; - })(this), - success: (function(_this) { - return function(data) { - if (_this.options.success) { - return _this.options.success(data); - } - }; - })(this) - }); - // Fetch the data through ajax if the data is a string + if (this.options.beforeSend) { + this.options.beforeSend(); + } + + // Fetch the data through ajax if the data is a string + return axios.get(this.dataEndpoint) + .then(({ data }) => { + if (this.options.success) { + return this.options.success(data); + } + }); }; return GitLabDropdownRemote; diff --git a/app/assets/javascripts/graphs/graphs_show.js b/app/assets/javascripts/graphs/graphs_show.js index 36bad6db3e1..b670e907a5c 100644 --- a/app/assets/javascripts/graphs/graphs_show.js +++ b/app/assets/javascripts/graphs/graphs_show.js @@ -1,11 +1,13 @@ +import flash from '../flash'; +import { __ } from '../locale'; +import axios from '../lib/utils/axios_utils'; import ContributorsStatGraph from './stat_graph_contributors'; document.addEventListener('DOMContentLoaded', () => { - $.ajax({ - type: 'GET', - url: document.querySelector('.js-graphs-show').dataset.projectGraphPath, - dataType: 'json', - success(data) { + const url = document.querySelector('.js-graphs-show').dataset.projectGraphPath; + + axios.get(url) + .then(({ data }) => { const graph = new ContributorsStatGraph(); graph.init(data); @@ -16,6 +18,6 @@ document.addEventListener('DOMContentLoaded', () => { $('.stat-graph').fadeIn(); $('.loading-graph').hide(); - }, - }); + }) + .catch(() => flash(__('Error fetching contributors data.'))); }); diff --git a/app/assets/javascripts/group_label_subscription.js 
b/app/assets/javascripts/group_label_subscription.js index befaebb635e..df9429b1e02 100644 --- a/app/assets/javascripts/group_label_subscription.js +++ b/app/assets/javascripts/group_label_subscription.js @@ -1,3 +1,7 @@ +import axios from './lib/utils/axios_utils'; +import flash from './flash'; +import { __ } from './locale'; + export default class GroupLabelSubscription { constructor(container) { const $container = $(container); @@ -13,14 +17,12 @@ export default class GroupLabelSubscription { event.preventDefault(); const url = this.$unsubscribeButtons.attr('data-url'); - - $.ajax({ - type: 'POST', - url, - }).done(() => { - this.toggleSubscriptionButtons(); - this.$unsubscribeButtons.removeAttr('data-url'); - }); + axios.post(url) + .then(() => { + this.toggleSubscriptionButtons(); + this.$unsubscribeButtons.removeAttr('data-url'); + }) + .catch(() => flash(__('There was an error when unsubscribing from this label.'))); } subscribe(event) { @@ -31,12 +33,9 @@ export default class GroupLabelSubscription { this.$unsubscribeButtons.attr('data-url', url); - $.ajax({ - type: 'POST', - url, - }).done(() => { - this.toggleSubscriptionButtons(); - }); + axios.post(url) + .then(() => this.toggleSubscriptionButtons()) + .catch(() => flash(__('There was an error when subscribing to this label.'))); } toggleSubscriptionButtons() { diff --git a/app/assets/javascripts/groups/groups_filterable_list.js b/app/assets/javascripts/groups/groups_filterable_list.js index 2db233b09da..31d56d15c23 100644 --- a/app/assets/javascripts/groups/groups_filterable_list.js +++ b/app/assets/javascripts/groups/groups_filterable_list.js @@ -1,6 +1,6 @@ import FilterableList from '~/filterable_list'; import eventHub from './event_hub'; -import { getParameterByName } from '../lib/utils/common_utils'; +import { normalizeHeaders, getParameterByName } from '../lib/utils/common_utils'; export default class GroupFilterableList extends FilterableList { constructor({ form, filter, holder, filterEndpoint, pagePath, dropdownSel, filterInputField }) { @@ -94,23 +94,14 @@ export default class GroupFilterableList extends FilterableList { this.form.querySelector(`[name="${this.filterInputField}"]`).value = ''; } - onFilterSuccess(data, xhr, queryData) { + onFilterSuccess(res, queryData) { const currentPath = this.getPagePath(queryData); - const paginationData = { - 'X-Per-Page': xhr.getResponseHeader('X-Per-Page'), - 'X-Page': xhr.getResponseHeader('X-Page'), - 'X-Total': xhr.getResponseHeader('X-Total'), - 'X-Total-Pages': xhr.getResponseHeader('X-Total-Pages'), - 'X-Next-Page': xhr.getResponseHeader('X-Next-Page'), - 'X-Prev-Page': xhr.getResponseHeader('X-Prev-Page'), - }; - window.history.replaceState({ page: currentPath, }, document.title, currentPath); - eventHub.$emit('updateGroups', data, Object.prototype.hasOwnProperty.call(queryData, this.filterInputField)); - eventHub.$emit('updatePagination', paginationData); + eventHub.$emit('updateGroups', res.data, Object.prototype.hasOwnProperty.call(queryData, this.filterInputField)); + eventHub.$emit('updatePagination', normalizeHeaders(res.headers)); } } diff --git a/app/assets/javascripts/ide/stores/actions.js b/app/assets/javascripts/ide/stores/actions.js index 96a87744df5..d007d0ae78f 100644 --- a/app/assets/javascripts/ide/stores/actions.js +++ b/app/assets/javascripts/ide/stores/actions.js @@ -71,7 +71,7 @@ export const setResizingStatus = ({ commit }, resizing) => { export const checkCommitStatus = ({ state }) => service .getBranchData(state.currentProjectId, 
state.currentBranchId) - .then((data) => { + .then(({ data }) => { const { id } = data.commit; const selectedBranch = state.projects[state.currentProjectId].branches[state.currentBranchId]; @@ -90,7 +90,7 @@ export const commitChanges = ( ) => service .commit(state.currentProjectId, payload) - .then((data) => { + .then(({ data }) => { const { branch } = payload; if (!data.short_id) { flash(data.message, 'alert', document, null, false, true); @@ -147,8 +147,8 @@ export const commitChanges = ( }) .catch((err) => { let errMsg = 'Error committing changes. Please try again.'; - if (err.responseJSON && err.responseJSON.message) { - errMsg += ` (${stripHtml(err.responseJSON.message)})`; + if (err.response.data && err.response.data.message) { + errMsg += ` (${stripHtml(err.response.data.message)})`; } flash(errMsg, 'alert', document, null, false, true); window.dispatchEvent(new Event('resize')); diff --git a/app/assets/javascripts/ide/stores/actions/branch.js b/app/assets/javascripts/ide/stores/actions/branch.js index 589ec28c6a4..bc6fd2d4163 100644 --- a/app/assets/javascripts/ide/stores/actions/branch.js +++ b/app/assets/javascripts/ide/stores/actions/branch.js @@ -10,7 +10,7 @@ export const getBranchData = ( !state.projects[`${projectId}`].branches[branchId]) || force) { service.getBranchData(`${projectId}`, branchId) - .then((data) => { + .then(({ data }) => { const { id } = data.commit; commit(types.SET_BRANCH, { projectPath: `${projectId}`, branchName: branchId, branch: data }); commit(types.SET_BRANCH_WORKING_REFERENCE, { projectId, branchId, reference: id }); diff --git a/app/assets/javascripts/integrations/integration_settings_form.js b/app/assets/javascripts/integrations/integration_settings_form.js index 32415a8791f..3f27cfc2f6d 100644 --- a/app/assets/javascripts/integrations/integration_settings_form.js +++ b/app/assets/javascripts/integrations/integration_settings_form.js @@ -1,4 +1,5 @@ -import Flash from '../flash'; +import axios from '../lib/utils/axios_utils'; +import flash from '../flash'; export default class IntegrationSettingsForm { constructor(formSelector) { @@ -95,29 +96,26 @@ export default class IntegrationSettingsForm { */ testSettings(formData) { this.toggleSubmitBtnState(true); - $.ajax({ - type: 'PUT', - url: this.testEndPoint, - data: formData, - }) - .done((res) => { - if (res.error) { - new Flash(`${res.message} ${res.service_response}`, 'alert', document, { - title: 'Save anyway', - clickHandler: (e) => { - e.preventDefault(); - this.$form.submit(); - }, - }); - } else { - this.$form.submit(); - } - }) - .fail(() => { - new Flash('Something went wrong on our end.'); - }) - .always(() => { - this.toggleSubmitBtnState(false); - }); + + return axios.put(this.testEndPoint, formData) + .then(({ data }) => { + if (data.error) { + flash(`${data.message} ${data.service_response}`, 'alert', document, { + title: 'Save anyway', + clickHandler: (e) => { + e.preventDefault(); + this.$form.submit(); + }, + }); + } else { + this.$form.submit(); + } + + this.toggleSubmitBtnState(false); + }) + .catch(() => { + flash('Something went wrong on our end.'); + this.toggleSubmitBtnState(false); + }); } } diff --git a/app/assets/javascripts/issuable_bulk_update_actions.js b/app/assets/javascripts/issuable_bulk_update_actions.js index b124fafec70..8c1b2e78ca4 100644 --- a/app/assets/javascripts/issuable_bulk_update_actions.js +++ b/app/assets/javascripts/issuable_bulk_update_actions.js @@ -1,5 +1,6 @@ /* eslint-disable comma-dangle, quotes, consistent-return, func-names, 
array-callback-return, space-before-function-paren, prefer-arrow-callback, max-len, no-unused-expressions, no-sequences, no-underscore-dangle, no-unused-vars, no-param-reassign */ import _ from 'underscore'; +import axios from './lib/utils/axios_utils'; import Flash from './flash'; export default { @@ -22,15 +23,9 @@ export default { }, submit() { - const _this = this; - const xhr = $.ajax({ - url: this.form.attr('action'), - method: this.form.attr('method'), - dataType: 'JSON', - data: this.getFormDataAsObject() - }); - xhr.done(() => window.location.reload()); - xhr.fail(() => this.onFormSubmitFailure()); + axios[this.form.attr('method')](this.form.attr('action'), this.getFormDataAsObject()) + .then(() => window.location.reload()) + .catch(() => this.onFormSubmitFailure()); }, onFormSubmitFailure() { diff --git a/app/assets/javascripts/issuable_index.js b/app/assets/javascripts/issuable_index.js index c3e0acdff66..0683ca82a38 100644 --- a/app/assets/javascripts/issuable_index.js +++ b/app/assets/javascripts/issuable_index.js @@ -1,3 +1,6 @@ +import axios from './lib/utils/axios_utils'; +import flash from './flash'; +import { __ } from './locale'; import IssuableBulkUpdateSidebar from './issuable_bulk_update_sidebar'; import IssuableBulkUpdateActions from './issuable_bulk_update_actions'; @@ -20,23 +23,24 @@ export default class IssuableIndex { } static resetIncomingEmailToken() { - $('.incoming-email-token-reset').on('click', (e) => { + const $resetToken = $('.incoming-email-token-reset'); + + $resetToken.on('click', (e) => { e.preventDefault(); - $.ajax({ - type: 'PUT', - url: $('.incoming-email-token-reset').attr('href'), - dataType: 'json', - success(response) { - $('#issuable_email').val(response.new_address).focus(); - }, - beforeSend() { - $('.incoming-email-token-reset').text('resetting...'); - }, - complete() { - $('.incoming-email-token-reset').text('reset it'); - }, - }); + $resetToken.text('resetting...'); + + axios.put($resetToken.attr('href')) + .then(({ data }) => { + $('#issuable_email').val(data.new_address).focus(); + + $resetToken.text('reset it'); + }) + .catch(() => { + flash(__('There was an error when reseting email token.')); + + $resetToken.text('reset it'); + }); }); } } diff --git a/app/assets/javascripts/label_manager.js b/app/assets/javascripts/label_manager.js index ac2f636df0f..61b40f79db1 100644 --- a/app/assets/javascripts/label_manager.js +++ b/app/assets/javascripts/label_manager.js @@ -1,7 +1,8 @@ /* eslint-disable comma-dangle, class-methods-use-this, no-underscore-dangle, no-param-reassign, no-unused-vars, consistent-return, func-names, space-before-function-paren, max-len */ import Sortable from 'vendor/Sortable'; -import Flash from './flash'; +import flash from './flash'; +import axios from './lib/utils/axios_utils'; export default class LabelManager { constructor({ togglePriorityButton, prioritizedLabels, otherLabels } = {}) { @@ -50,11 +51,12 @@ export default class LabelManager { if (persistState == null) { persistState = true; } - let xhr; const _this = this; const url = $label.find('.js-toggle-priority').data('url'); let $target = this.prioritizedLabels; let $from = this.otherLabels; + const rollbackLabelPosition = this.rollbackLabelPosition.bind(this, $label, action); + if (action === 'remove') { $target = this.otherLabels; $from = this.prioritizedLabels; @@ -71,40 +73,34 @@ export default class LabelManager { return; } if (action === 'remove') { - xhr = $.ajax({ - url, - type: 'DELETE' - }); + axios.delete(url) + 
.catch(rollbackLabelPosition); + // Restore empty message if (!$from.find('li').length) { $from.find('.empty-message').removeClass('hidden'); } } else { - xhr = this.savePrioritySort($label, action); + this.savePrioritySort($label, action) + .catch(rollbackLabelPosition); } - return xhr.fail(this.rollbackLabelPosition.bind(this, $label, action)); } onPrioritySortUpdate() { - const xhr = this.savePrioritySort(); - return xhr.fail(function() { - return new Flash(this.errorMessage, 'alert'); - }); + this.savePrioritySort() + .catch(() => flash(this.errorMessage)); } savePrioritySort() { - return $.post({ - url: this.prioritizedLabels.data('url'), - data: { - label_ids: this.getSortedLabelsIds() - } + return axios.post(this.prioritizedLabels.data('url'), { + label_ids: this.getSortedLabelsIds(), }); } rollbackLabelPosition($label, originalAction) { const action = originalAction === 'remove' ? 'add' : 'remove'; this.toggleLabelPriority($label, action, false); - return new Flash(this.errorMessage, 'alert'); + flash(this.errorMessage); } getSortedLabelsIds() { diff --git a/app/assets/javascripts/lib/utils/common_utils.js b/app/assets/javascripts/lib/utils/common_utils.js index 03918762842..8018ec411c1 100644 --- a/app/assets/javascripts/lib/utils/common_utils.js +++ b/app/assets/javascripts/lib/utils/common_utils.js @@ -1,4 +1,5 @@ import { getLocationHash } from './url_utility'; +import axios from './axios_utils'; export const getPagePath = (index = 0) => $('body').attr('data-page').split(':')[index]; @@ -382,22 +383,16 @@ export const resetFavicon = () => { } }; -export const setCiStatusFavicon = (pageUrl) => { - $.ajax({ - url: pageUrl, - dataType: 'json', - success: (data) => { +export const setCiStatusFavicon = pageUrl => + axios.get(pageUrl) + .then(({ data }) => { if (data && data.favicon) { setFavicon(data.favicon); } else { resetFavicon(); } - }, - error: () => { - resetFavicon(); - }, - }); -}; + }) + .catch(resetFavicon); export const spriteIcon = (icon, className = '') => { const classAttribute = className.length > 0 ? `class="${className}"` : ''; diff --git a/app/assets/javascripts/lib/utils/users_cache.js b/app/assets/javascripts/lib/utils/users_cache.js index 88f8a622c00..b01ec6b81a3 100644 --- a/app/assets/javascripts/lib/utils/users_cache.js +++ b/app/assets/javascripts/lib/utils/users_cache.js @@ -8,16 +8,16 @@ class UsersCache extends Cache { } return Api.users('', { username }) - .then((users) => { - if (!users.length) { + .then(({ data }) => { + if (!data.length) { throw new Error(`User "${username}" could not be found!`); } - if (users.length > 1) { + if (data.length > 1) { throw new Error(`Expected username "${username}" to be unique!`); } - const user = users[0]; + const user = data[0]; this.internalStorage[username] = user; return user; }); diff --git a/app/assets/javascripts/mini_pipeline_graph_dropdown.js b/app/assets/javascripts/mini_pipeline_graph_dropdown.js index ca3d271663b..c7bccd483ac 100644 --- a/app/assets/javascripts/mini_pipeline_graph_dropdown.js +++ b/app/assets/javascripts/mini_pipeline_graph_dropdown.js @@ -1,5 +1,6 @@ /* eslint-disable no-new */ -import Flash from './flash'; +import flash from './flash'; +import axios from './lib/utils/axios_utils'; /** * In each pipelines table we have a mini pipeline graph for each pipeline. 
@@ -78,27 +79,22 @@ export default class MiniPipelineGraph { const button = e.relatedTarget; const endpoint = button.dataset.stageEndpoint; - return $.ajax({ - dataType: 'json', - type: 'GET', - url: endpoint, - beforeSend: () => { - this.renderBuildsList(button, ''); - this.toggleLoading(button); - }, - success: (data) => { + this.renderBuildsList(button, ''); + this.toggleLoading(button); + + axios.get(endpoint) + .then(({ data }) => { this.toggleLoading(button); this.renderBuildsList(button, data.html); this.stopDropdownClickPropagation(); - }, - error: () => { + }) + .catch(() => { this.toggleLoading(button); if ($(button).parent().hasClass('open')) { $(button).dropdown('toggle'); } - new Flash('An error occurred while fetching the builds.', 'alert'); - }, - }); + flash('An error occurred while fetching the builds.', 'alert'); + }); } /** diff --git a/app/assets/javascripts/monitoring/components/dashboard.vue b/app/assets/javascripts/monitoring/components/dashboard.vue index 025e38ea99a..5afae93724b 100644 --- a/app/assets/javascripts/monitoring/components/dashboard.vue +++ b/app/assets/javascripts/monitoring/components/dashboard.vue @@ -76,7 +76,13 @@ .then(data => this.store.storeDeploymentData(data)) .catch(() => new Flash('Error getting deployment information.')), ]) - .then(() => { this.showEmptyState = false; }) + .then(() => { + if (this.store.groups.length < 1) { + this.state = 'noData'; + return; + } + this.showEmptyState = false; + }) .catch(() => { this.state = 'unableToConnect'; }); }, diff --git a/app/assets/javascripts/monitoring/components/empty_state.vue b/app/assets/javascripts/monitoring/components/empty_state.vue index 87d1975d5ad..56cd60c583b 100644 --- a/app/assets/javascripts/monitoring/components/empty_state.vue +++ b/app/assets/javascripts/monitoring/components/empty_state.vue @@ -34,16 +34,23 @@ svgUrl: this.emptyGettingStartedSvgPath, title: 'Get started with performance monitoring', description: `Stay updated about the performance and health -of your environment by configuring Prometheus to monitor your deployments.`, + of your environment by configuring Prometheus to monitor your deployments.`, buttonText: 'Configure Prometheus', }, loading: { svgUrl: this.emptyLoadingSvgPath, title: 'Waiting for performance data', description: `Creating graphs uses the data from the Prometheus server. 
-If this takes a long time, ensure that data is available.`, + If this takes a long time, ensure that data is available.`, buttonText: 'View documentation', }, + noData: { + svgUrl: this.emptyUnableToConnectSvgPath, + title: 'No data found', + description: `You are connected to the Prometheus server, but there is currently + no data to display.`, + buttonText: 'Configure Prometheus', + }, unableToConnect: { svgUrl: this.emptyUnableToConnectSvgPath, title: 'Unable to connect to Prometheus server', diff --git a/app/assets/javascripts/network/branch_graph.js b/app/assets/javascripts/network/branch_graph.js index 5aad3908eb6..d3edcb724f1 100644 --- a/app/assets/javascripts/network/branch_graph.js +++ b/app/assets/javascripts/network/branch_graph.js @@ -1,5 +1,8 @@ /* eslint-disable func-names, space-before-function-paren, no-var, wrap-iife, quotes, comma-dangle, one-var, one-var-declaration-per-line, no-mixed-operators, no-loop-func, no-floating-decimal, consistent-return, no-unused-vars, prefer-template, prefer-arrow-callback, camelcase, max-len */ +import { __ } from '../locale'; +import axios from '../lib/utils/axios_utils'; +import flash from '../flash'; import Raphael from './raphael'; export default (function() { @@ -26,16 +29,13 @@ export default (function() { } BranchGraph.prototype.load = function() { - return $.ajax({ - url: this.options.url, - method: "get", - dataType: "json", - success: $.proxy(function(data) { + axios.get(this.options.url) + .then(({ data }) => { $(".loading", this.element).hide(); this.prepareData(data.days, data.commits); - return this.buildGraph(); - }, this) - }); + this.buildGraph(); + }) + .catch(() => __('Error fetching network graph.')); }; BranchGraph.prototype.prepareData = function(days, commits) { diff --git a/app/assets/javascripts/notes.js b/app/assets/javascripts/notes.js index a2b8e6f6495..bcb342f407f 100644 --- a/app/assets/javascripts/notes.js +++ b/app/assets/javascripts/notes.js @@ -16,6 +16,7 @@ import Autosize from 'autosize'; import 'vendor/jquery.caret'; // required by jquery.atwho import 'vendor/jquery.atwho'; import AjaxCache from '~/lib/utils/ajax_cache'; +import axios from './lib/utils/axios_utils'; import { getLocationHash } from './lib/utils/url_utility'; import Flash from './flash'; import CommentTypeToggle from './comment_type_toggle'; @@ -252,26 +253,20 @@ export default class Notes { return; } this.refreshing = true; - return $.ajax({ - url: this.notes_url, - headers: { 'X-Last-Fetched-At': this.last_fetched_at }, - dataType: 'json', - success: (function(_this) { - return function(data) { - var notes; - notes = data.notes; - _this.last_fetched_at = data.last_fetched_at; - _this.setPollingInterval(data.notes.length); - return $.each(notes, function(i, note) { - _this.renderNote(note); - }); - }; - })(this) - }).always((function(_this) { - return function() { - return _this.refreshing = false; - }; - })(this)); + axios.get(this.notes_url, { + headers: { + 'X-Last-Fetched-At': this.last_fetched_at, + }, + }).then(({ data }) => { + const notes = data.notes; + this.last_fetched_at = data.last_fetched_at; + this.setPollingInterval(data.notes.length); + $.each(notes, (i, note) => this.renderNote(note)); + + this.refreshing = false; + }).catch(() => { + this.refreshing = false; + }); } /** diff --git a/app/assets/javascripts/notifications_form.js b/app/assets/javascripts/notifications_form.js index 4534360d577..4e0afe13590 100644 --- a/app/assets/javascripts/notifications_form.js +++ b/app/assets/javascripts/notifications_form.js @@ -1,3 
+1,7 @@ +import { __ } from './locale'; +import axios from './lib/utils/axios_utils'; +import flash from './flash'; + export default class NotificationsForm { constructor() { this.toggleCheckbox = this.toggleCheckbox.bind(this); @@ -27,24 +31,20 @@ export default class NotificationsForm { saveEvent($checkbox, $parent) { const form = $parent.parents('form:first'); - return $.ajax({ - url: form.attr('action'), - method: form.attr('method'), - dataType: 'json', - data: form.serialize(), - beforeSend: () => { - this.showCheckboxLoadingSpinner($parent); - }, - }).done((data) => { - $checkbox.enable(); - if (data.saved) { - $parent.find('.custom-notification-event-loading').toggleClass('fa-spin fa-spinner fa-check is-done'); - setTimeout(() => { - $parent.removeClass('is-loading') - .find('.custom-notification-event-loading') - .toggleClass('fa-spin fa-spinner fa-check is-done'); - }, 2000); - } - }); + this.showCheckboxLoadingSpinner($parent); + + axios[form.attr('method')](form.attr('action'), form.serialize()) + .then(({ data }) => { + $checkbox.enable(); + if (data.saved) { + $parent.find('.custom-notification-event-loading').toggleClass('fa-spin fa-spinner fa-check is-done'); + setTimeout(() => { + $parent.removeClass('is-loading') + .find('.custom-notification-event-loading') + .toggleClass('fa-spin fa-spinner fa-check is-done'); + }, 2000); + } + }) + .catch(() => flash(__('There was an error saving your notification settings.'))); } } diff --git a/app/assets/javascripts/pager.js b/app/assets/javascripts/pager.js index 6552a88b606..fd3105b1960 100644 --- a/app/assets/javascripts/pager.js +++ b/app/assets/javascripts/pager.js @@ -1,4 +1,5 @@ import { getParameterByName } from '~/lib/utils/common_utils'; +import axios from './lib/utils/axios_utils'; import { removeParams } from './lib/utils/url_utility'; const ENDLESS_SCROLL_BOTTOM_PX = 400; @@ -22,24 +23,22 @@ export default { getOld() { this.loading.show(); - $.ajax({ - type: 'GET', - url: this.url, - data: `limit=${this.limit}&offset=${this.offset}`, - dataType: 'json', - error: () => this.loading.hide(), - success: (data) => { - this.append(data.count, this.prepareData(data.html)); - this.callback(); - - // keep loading until we've filled the viewport height - if (!this.disable && !this.isScrollable()) { - this.getOld(); - } else { - this.loading.hide(); - } + axios.get(this.url, { + params: { + limit: this.limit, + offset: this.offset, }, - }); + }).then(({ data }) => { + this.append(data.count, this.prepareData(data.html)); + this.callback(); + + // keep loading until we've filled the viewport height + if (!this.disable && !this.isScrollable()) { + this.getOld(); + } else { + this.loading.hide(); + } + }).catch(() => this.loading.hide()); }, append(count, html) { diff --git a/app/assets/javascripts/pages/admin/cohorts/usage_ping.js b/app/assets/javascripts/pages/admin/cohorts/usage_ping.js index 2389056bd02..914a9661c27 100644 --- a/app/assets/javascripts/pages/admin/cohorts/usage_ping.js +++ b/app/assets/javascripts/pages/admin/cohorts/usage_ping.js @@ -1,12 +1,13 @@ +import axios from '../../../lib/utils/axios_utils'; +import { __ } from '../../../locale'; +import flash from '../../../flash'; + export default function UsagePing() { - const usageDataUrl = $('.usage-data').data('endpoint'); + const el = document.querySelector('.usage-data'); - $.ajax({ - type: 'GET', - url: usageDataUrl, - dataType: 'html', - success(html) { - $('.usage-data').html(html); - }, - }); + axios.get(el.dataset.endpoint, { + responseType: 'text', + 
}).then(({ data }) => { + el.innerHTML = data; + }).catch(() => flash(__('Error fetching usage ping data.'))); } diff --git a/app/assets/javascripts/pages/dashboard/todos/index/todos.js b/app/assets/javascripts/pages/dashboard/todos/index/todos.js index e976a3d2f1d..b3f6a72fdcb 100644 --- a/app/assets/javascripts/pages/dashboard/todos/index/todos.js +++ b/app/assets/javascripts/pages/dashboard/todos/index/todos.js @@ -2,6 +2,9 @@ import { visitUrl } from '~/lib/utils/url_utility'; import UsersSelect from '~/users_select'; import { isMetaClick } from '~/lib/utils/common_utils'; +import { __ } from '../../../../locale'; +import flash from '../../../../flash'; +import axios from '../../../../lib/utils/axios_utils'; export default class Todos { constructor() { @@ -59,18 +62,12 @@ export default class Todos { const target = e.target; target.setAttribute('disabled', true); target.classList.add('disabled'); - $.ajax({ - type: 'POST', - url: target.dataset.href, - dataType: 'json', - data: { - '_method': target.dataset.method, - }, - success: (data) => { + + axios[target.dataset.method](target.dataset.href) + .then(({ data }) => { this.updateRowState(target); - return this.updateBadges(data); - }, - }); + this.updateBadges(data); + }).catch(() => flash(__('Error updating todo status.'))); } updateRowState(target) { @@ -98,19 +95,15 @@ export default class Todos { e.preventDefault(); const target = e.currentTarget; - const requestData = { '_method': target.dataset.method, ids: this.todo_ids }; target.setAttribute('disabled', true); target.classList.add('disabled'); - $.ajax({ - type: 'POST', - url: target.dataset.href, - dataType: 'json', - data: requestData, - success: (data) => { - this.updateAllState(target, data); - return this.updateBadges(data); - }, - }); + + axios[target.dataset.method](target.dataset.href, { + ids: this.todo_ids, + }).then(({ data }) => { + this.updateAllState(target, data); + this.updateBadges(data); + }).catch(() => flash(__('Error updating status for all todos.'))); } updateAllState(target, data) { diff --git a/app/assets/javascripts/pages/projects/project.js b/app/assets/javascripts/pages/projects/project.js index e30d558726b..863dac0d20e 100644 --- a/app/assets/javascripts/pages/projects/project.js +++ b/app/assets/javascripts/pages/projects/project.js @@ -1,7 +1,10 @@ /* eslint-disable func-names, space-before-function-paren, no-var, consistent-return, no-new, prefer-arrow-callback, no-return-assign, one-var, one-var-declaration-per-line, object-shorthand, no-else-return, newline-per-chained-call, no-shadow, vars-on-top, prefer-template, max-len */ import Cookies from 'js-cookie'; -import { visitUrl } from '../../lib/utils/url_utility'; +import { __ } from '~/locale'; +import { visitUrl } from '~/lib/utils/url_utility'; +import axios from '~/lib/utils/axios_utils'; +import flash from '~/flash'; import projectSelect from '../../project_select'; export default class Project { @@ -67,17 +70,15 @@ export default class Project { $dropdown = $(this); selected = $dropdown.data('selected'); return $dropdown.glDropdown({ - data: function(term, callback) { - return $.ajax({ - url: $dropdown.data('refs-url'), - data: { + data(term, callback) { + axios.get($dropdown.data('refs-url'), { + params: { ref: $dropdown.data('ref'), search: term, }, - dataType: 'json', - }).done(function(refs) { - return callback(refs); - }); + }) + .then(({ data }) => callback(data)) + .catch(() => flash(__('An error occurred while getting projects'))); }, selectable: true, filterable: true, diff --git 
a/app/assets/javascripts/pages/projects/tree/show/index.js b/app/assets/javascripts/pages/projects/tree/show/index.js index 28a0160f47d..c4b3356e478 100644 --- a/app/assets/javascripts/pages/projects/tree/show/index.js +++ b/app/assets/javascripts/pages/projects/tree/show/index.js @@ -1,3 +1,5 @@ +import Vue from 'vue'; +import commitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue'; import TreeView from '../../../../tree'; import ShortcutsNavigation from '../../../../shortcuts_navigation'; import BlobViewer from '../../../../blob/viewer'; @@ -11,5 +13,25 @@ export default () => { new NewCommitForm($('.js-create-dir-form')); // eslint-disable-line no-new $('#tree-slider').waitForImages(() => ajaxGet(document.querySelector('.js-tree-content').dataset.logsPath)); + + const commitPipelineStatusEl = document.getElementById('commit-pipeline-status'); + const statusLink = document.querySelector('.commit-actions .ci-status-link'); + if (statusLink != null) { + statusLink.remove(); + // eslint-disable-next-line no-new + new Vue({ + el: commitPipelineStatusEl, + components: { + commitPipelineStatus, + }, + render(createElement) { + return createElement('commit-pipeline-status', { + props: { + endpoint: commitPipelineStatusEl.dataset.endpoint, + }, + }); + }, + }); + } }; diff --git a/app/assets/javascripts/project_find_file.js b/app/assets/javascripts/project_find_file.js index 0da32b4a3cc..586d188350f 100644 --- a/app/assets/javascripts/project_find_file.js +++ b/app/assets/javascripts/project_find_file.js @@ -1,6 +1,9 @@ /* eslint-disable func-names, space-before-function-paren, no-var, prefer-rest-params, wrap-iife, quotes, consistent-return, one-var, one-var-declaration-per-line, no-cond-assign, max-len, object-shorthand, no-param-reassign, comma-dangle, prefer-template, no-unused-vars, no-return-assign */ import fuzzaldrinPlus from 'fuzzaldrin-plus'; +import axios from '~/lib/utils/axios_utils'; +import flash from '~/flash'; +import { __ } from '~/locale'; // highlight text(awefwbwgtc -> <b>a</b>wefw<b>b</b>wgt<b>c</b> ) const highlighter = function(element, text, matches) { @@ -72,19 +75,14 @@ export default class ProjectFindFile { // files pathes load load(url) { - return $.ajax({ - url: url, - method: "get", - dataType: "json", - success: (function(_this) { - return function(data) { - _this.element.find(".loading").hide(); - _this.filePaths = data; - _this.findFile(); - return _this.element.find(".files-slider tr.tree-item").eq(0).addClass("selected").focus(); - }; - })(this) - }); + axios.get(url) + .then(({ data }) => { + this.element.find('.loading').hide(); + this.filePaths = data; + this.findFile(); + this.element.find('.files-slider tr.tree-item').eq(0).addClass('selected').focus(); + }) + .catch(() => flash(__('An error occurred while loading filenames'))); } // render result diff --git a/app/assets/javascripts/projects/tree/components/commit_pipeline_status_component.vue b/app/assets/javascripts/projects/tree/components/commit_pipeline_status_component.vue new file mode 100644 index 00000000000..63f20a0041d --- /dev/null +++ b/app/assets/javascripts/projects/tree/components/commit_pipeline_status_component.vue @@ -0,0 +1,120 @@ +<script> + import Visibility from 'visibilityjs'; + import ciIcon from '~/vue_shared/components/ci_icon.vue'; + import loadingIcon from '~/vue_shared/components/loading_icon.vue'; + import Poll from '~/lib/utils/poll'; + import Flash from '~/flash'; + import { s__, sprintf } from '~/locale'; + import tooltip from 
'~/vue_shared/directives/tooltip'; + import CommitPipelineService from '../services/commit_pipeline_service'; + + export default { + directives: { + tooltip, + }, + components: { + ciIcon, + loadingIcon, + }, + props: { + endpoint: { + type: String, + required: true, + }, + /* This prop can be used to replace some of the `render_commit_status` + used across GitLab, this way we could use this vue component and add a + realtime status where it makes sense + realtime: { + type: Boolean, + required: false, + default: true, + }, */ + }, + data() { + return { + ciStatus: {}, + isLoading: true, + }; + }, + computed: { + statusTitle() { + return sprintf(s__('Commits|Commit: %{commitText}'), { commitText: this.ciStatus.text }); + }, + }, + mounted() { + this.service = new CommitPipelineService(this.endpoint); + this.initPolling(); + }, + methods: { + successCallback(res) { + const pipelines = res.data.pipelines; + if (pipelines.length > 0) { + // The pipeline entity always keeps the latest pipeline info on the `details.status` + this.ciStatus = pipelines[0].details.status; + } + this.isLoading = false; + }, + errorCallback() { + this.ciStatus = { + text: 'not found', + icon: 'status_notfound', + group: 'notfound', + }; + this.isLoading = false; + Flash(s__('Something went wrong on our end')); + }, + initPolling() { + this.poll = new Poll({ + resource: this.service, + method: 'fetchData', + successCallback: response => this.successCallback(response), + errorCallback: this.errorCallback, + }); + + if (!Visibility.hidden()) { + this.isLoading = true; + this.poll.makeRequest(); + } else { + this.fetchPipelineCommitData(); + } + + Visibility.change(() => { + if (!Visibility.hidden()) { + this.poll.restart(); + } else { + this.poll.stop(); + } + }); + }, + fetchPipelineCommitData() { + this.service.fetchData() + .then(this.successCallback) + .catch(this.errorCallback); + }, + }, + destroy() { + this.poll.stop(); + }, + }; +</script> +<template> + <div> + <loading-icon + label="Loading pipeline status" + size="3" + v-if="isLoading" + /> + <a + v-else + :href="ciStatus.details_path" + > + <ci-icon + v-tooltip + :title="statusTitle" + :aria-label="statusTitle" + data-container="body" + :status="ciStatus" + /> + </a> + </div> +</template> diff --git a/app/assets/javascripts/projects/tree/services/commit_pipeline_service.js b/app/assets/javascripts/projects/tree/services/commit_pipeline_service.js new file mode 100644 index 00000000000..4b4189bc2de --- /dev/null +++ b/app/assets/javascripts/projects/tree/services/commit_pipeline_service.js @@ -0,0 +1,11 @@ +import axios from '~/lib/utils/axios_utils'; + +export default class CommitPipelineService { + constructor(endpoint) { + this.endpoint = endpoint; + } + + fetchData() { + return axios.get(this.endpoint); + } +} diff --git a/app/assets/javascripts/render_math.js b/app/assets/javascripts/render_math.js index 15205d8a4e2..73b6aafdd12 100644 --- a/app/assets/javascripts/render_math.js +++ b/app/assets/javascripts/render_math.js @@ -7,7 +7,12 @@ // // <code class="js-render-math"></div> // - // Only load once + +import { __ } from './locale'; +import axios from './lib/utils/axios_utils'; +import flash from './flash'; + +// Only load once let katexLoaded = false; // Loop over all math elements and render math @@ -33,19 +38,26 @@ export default function renderMath($els) { if (katexLoaded) { renderWithKaTeX($els); } else { - $.get(gon.katex_css_url, () => { - const css = $('<link>', { - rel: 'stylesheet', - type: 'text/css', - href: gon.katex_css_url, - }); - 
css.appendTo('head'); - - // Load KaTeX js - $.getScript(gon.katex_js_url, () => { + axios.get(gon.katex_css_url) + .then(() => { + const css = $('<link>', { + rel: 'stylesheet', + type: 'text/css', + href: gon.katex_css_url, + }); + css.appendTo('head'); + }) + .then(() => axios.get(gon.katex_js_url, { + responseType: 'text', + })) + .then(({ data }) => { + // Add katex js to our document + $.globalEval(data); + }) + .then(() => { katexLoaded = true; renderWithKaTeX($els); // Run KaTeX - }); - }); + }) + .catch(() => flash(__('An error occurred while rendering KaTeX'))); } } diff --git a/app/assets/javascripts/sidebar/components/assignees/assignees.js b/app/assets/javascripts/sidebar/components/assignees/assignees.js index 7e5feac622c..643877b9d47 100644 --- a/app/assets/javascripts/sidebar/components/assignees/assignees.js +++ b/app/assets/javascripts/sidebar/components/assignees/assignees.js @@ -84,7 +84,7 @@ export default { return !this.showLess || (index < this.defaultRenderCount && this.showLess); }, avatarUrl(user) { - return user.avatar || user.avatar_url; + return user.avatar || user.avatar_url || gon.default_avatar_url; }, assigneeUrl(user) { return `${this.rootPath}${user.username}`; diff --git a/app/assets/javascripts/single_file_diff.js b/app/assets/javascripts/single_file_diff.js index 95e51bc4e7a..48dd91bdf06 100644 --- a/app/assets/javascripts/single_file_diff.js +++ b/app/assets/javascripts/single_file_diff.js @@ -1,5 +1,8 @@ /* eslint-disable func-names, prefer-arrow-callback, space-before-function-paren, no-var, prefer-rest-params, wrap-iife, one-var, one-var-declaration-per-line, consistent-return, no-param-reassign, max-len */ +import { __ } from './locale'; +import axios from './lib/utils/axios_utils'; +import createFlash from './flash'; import FilesCommentButton from './files_comment_button'; import imageDiffHelper from './image_diff/helpers/index'; import syntaxHighlight from './syntax_highlight'; @@ -60,30 +63,31 @@ export default class SingleFileDiff { getContentHTML(cb) { this.collapsedContent.hide(); this.loadingContent.show(); - $.get(this.diffForPath, (function(_this) { - return function(data) { - _this.loadingContent.hide(); + + axios.get(this.diffForPath) + .then(({ data }) => { + this.loadingContent.hide(); if (data.html) { - _this.content = $(data.html); - syntaxHighlight(_this.content); + this.content = $(data.html); + syntaxHighlight(this.content); } else { - _this.hasError = true; - _this.content = $(ERROR_HTML); + this.hasError = true; + this.content = $(ERROR_HTML); } - _this.collapsedContent.after(_this.content); + this.collapsedContent.after(this.content); if (typeof gl.diffNotesCompileComponents !== 'undefined') { gl.diffNotesCompileComponents(); } - const $file = $(_this.file); + const $file = $(this.file); FilesCommentButton.init($file); const canCreateNote = $file.closest('.files').is('[data-can-create-note]'); imageDiffHelper.initImageDiff($file[0], canCreateNote); if (cb) cb(); - }; - })(this)); + }) + .catch(createFlash(__('An error occurred while retrieving diff'))); } } diff --git a/app/assets/javascripts/toggle_buttons.js b/app/assets/javascripts/toggle_buttons.js new file mode 100644 index 00000000000..974dc3ee052 --- /dev/null +++ b/app/assets/javascripts/toggle_buttons.js @@ -0,0 +1,61 @@ +import $ from 'jquery'; +import Flash from './flash'; +import { __ } from './locale'; +import { convertPermissionToBoolean } from './lib/utils/common_utils'; + +/* + example HAML: + ``` + %button.js-project-feature-toggle.project-feature-toggle{ 
type: "button", + class: "#{'is-checked' if enabled?}", + 'aria-label': _('Toggle Cluster') } + %input{ type: "hidden", class: 'js-project-feature-toggle-input', value: enabled? } + ``` +*/ + +function updatetoggle(toggle, isOn) { + toggle.classList.toggle('is-checked', isOn); +} + +function onToggleClicked(toggle, input, clickCallback) { + const previousIsOn = convertPermissionToBoolean(input.value); + + // Visually change the toggle and start loading + updatetoggle(toggle, !previousIsOn); + toggle.setAttribute('disabled', true); + toggle.classList.toggle('is-loading', true); + + Promise.resolve(clickCallback(!previousIsOn, toggle)) + .then(() => { + // Actually change the input value + input.setAttribute('value', !previousIsOn); + }) + .catch(() => { + // Revert the visuals if something goes wrong + updatetoggle(toggle, previousIsOn); + }) + .then(() => { + // Remove the loading indicator in any case + toggle.removeAttribute('disabled'); + toggle.classList.toggle('is-loading', false); + + $(input).trigger('trigger-change'); + }) + .catch(() => { + Flash(__('Something went wrong when toggling the button')); + }); +} + +export default function setupToggleButtons(container, clickCallback = () => {}) { + const toggles = container.querySelectorAll('.js-project-feature-toggle'); + + toggles.forEach((toggle) => { + const input = toggle.querySelector('.js-project-feature-toggle-input'); + const isOn = convertPermissionToBoolean(input.value); + + // Get the visible toggle in sync with the hidden input + updatetoggle(toggle, isOn); + + toggle.addEventListener('click', onToggleClicked.bind(null, toggle, input, clickCallback)); + }); +} diff --git a/app/assets/javascripts/users/activity_calendar.js b/app/assets/javascripts/users/activity_calendar.js index 0581239d5a5..57306322aa4 100644 --- a/app/assets/javascripts/users/activity_calendar.js +++ b/app/assets/javascripts/users/activity_calendar.js @@ -1,7 +1,10 @@ import _ from 'underscore'; import { scaleLinear, scaleThreshold } from 'd3-scale'; import { select } from 'd3-selection'; -import { getDayName, getDayDifference } from '../lib/utils/datetime_utility'; +import { getDayName, getDayDifference } from '~/lib/utils/datetime_utility'; +import axios from '~/lib/utils/axios_utils'; +import flash from '~/flash'; +import { __ } from '~/locale'; const d3 = { select, scaleLinear, scaleThreshold }; @@ -98,7 +101,7 @@ export default class ActivityCalendar { const secondLastColMonth = this.timestampsTmp[group - 2][0].date.getMonth(); if (lastColMonth !== secondLastColMonth) { - extraWidthPadding = 3; + extraWidthPadding = 6; } return extraWidthPadding; @@ -221,14 +224,16 @@ export default class ActivityCalendar { this.currentSelectedDate.getDate(), ].join('-'); - $.ajax({ - url: this.calendarActivitiesPath, - data: { date }, - cache: false, - dataType: 'html', - beforeSend: () => $('.user-calendar-activities').html(LOADING_HTML), - success: data => $('.user-calendar-activities').html(data), - }); + $('.user-calendar-activities').html(LOADING_HTML); + + axios.get(this.calendarActivitiesPath, { + params: { + date, + }, + responseType: 'text', + }) + .then(({ data }) => $('.user-calendar-activities').html(data)) + .catch(() => flash(__('An error occurred while retrieving calendar activity'))); } else { this.currentSelectedDate = ''; $('.user-calendar-activities').html(''); diff --git a/app/assets/javascripts/users_select.js b/app/assets/javascripts/users_select.js index f249bd036d6..ab108906732 100644 --- a/app/assets/javascripts/users_select.js +++ 
b/app/assets/javascripts/users_select.js @@ -492,7 +492,7 @@ function UsersSelect(currentUser, els, options = {}) { renderRow: function(user) { var avatar, img, listClosingTags, listWithName, listWithUserName, username; username = user.username ? "@" + user.username : ""; - avatar = user.avatar_url ? user.avatar_url : false; + avatar = user.avatar_url ? user.avatar_url : gon.default_avatar_url; let selected = false; @@ -513,9 +513,7 @@ function UsersSelect(currentUser, els, options = {}) { if (user.beforeDivider != null) { `<li><a href='#' class='${selected === true ? 'is-active' : ''}'>${_.escape(user.name)}</a></li>`; } else { - if (avatar) { - img = "<img src='" + avatar + "' class='avatar avatar-inline' width='32' />"; - } + img = "<img src='" + avatar + "' class='avatar avatar-inline' width='32' />"; } return ` diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author.js b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author.js deleted file mode 100644 index 982b5e8e373..00000000000 --- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author.js +++ /dev/null @@ -1,28 +0,0 @@ -import tooltip from '../../vue_shared/directives/tooltip'; - -export default { - name: 'MRWidgetAuthor', - props: { - author: { type: Object, required: true }, - showAuthorName: { type: Boolean, required: false, default: true }, - showAuthorTooltip: { type: Boolean, required: false, default: false }, - }, - directives: { - tooltip, - }, - template: ` - <a - :href="author.webUrl || author.web_url" - class="author-link inline" - :v-tooltip="showAuthorTooltip" - :title="author.name"> - <img - :src="author.avatarUrl || author.avatar_url" - class="avatar avatar-inline s16" /> - <span - v-if="showAuthorName" - class="author">{{author.name}} - </span> - </a> - `, -}; diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author.vue new file mode 100644 index 00000000000..cb6e9858736 --- /dev/null +++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author.vue @@ -0,0 +1,53 @@ +<script> + import tooltip from '../../vue_shared/directives/tooltip'; + + export default { + name: 'MRWidgetAuthor', + directives: { + tooltip, + }, + props: { + author: { + type: Object, + required: true, + }, + showAuthorName: { + type: Boolean, + required: false, + default: true, + }, + showAuthorTooltip: { + type: Boolean, + required: false, + default: false, + }, + }, + computed: { + authorUrl() { + return this.author.webUrl || this.author.web_url; + }, + avatarUrl() { + return this.author.avatarUrl || this.author.avatar_url; + }, + }, + }; +</script> +<template> + <a + :href="authorUrl" + class="author-link inline" + :v-tooltip="showAuthorTooltip" + :title="author.name" + > + <img + :src="avatarUrl" + class="avatar avatar-inline s16" + /> + <span + class="author" + v-if="showAuthorName" + > + {{ author.name }} + </span> + </a> +</template> diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author_time.js b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author_time.js deleted file mode 100644 index 6d2ed5fda64..00000000000 --- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author_time.js +++ /dev/null @@ -1,27 +0,0 @@ -import MRWidgetAuthor from './mr_widget_author'; - -export default { - name: 'MRWidgetAuthorTime', - props: { - actionText: { type: String, 
required: true }, - author: { type: Object, required: true }, - dateTitle: { type: String, required: true }, - dateReadable: { type: String, required: true }, - }, - components: { - 'mr-widget-author': MRWidgetAuthor, - }, - template: ` - <h4 class="js-mr-widget-author"> - {{actionText}} - <mr-widget-author :author="author" /> - <time - :title="dateTitle" - data-toggle="tooltip" - data-placement="top" - data-container="body"> - {{dateReadable}} - </time> - </h4> - `, -}; diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author_time.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author_time.vue new file mode 100644 index 00000000000..8f1fd809a81 --- /dev/null +++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_author_time.vue @@ -0,0 +1,42 @@ +<script> + import mrWidgetAuthor from './mr_widget_author.vue'; + + export default { + name: 'MRWidgetAuthorTime', + components: { + mrWidgetAuthor, + }, + props: { + actionText: { + type: String, + required: true, + }, + author: { + type: Object, + required: true, + }, + dateTitle: { + type: String, + required: true, + }, + dateReadable: { + type: String, + required: true, + }, + }, + }; +</script> +<template> + <h4 class="js-mr-widget-author"> + {{ actionText }} + <mr-widget-author :author="author" /> + <time + :title="dateTitle" + data-toggle="tooltip" + data-placement="top" + data-container="body" + > + {{ dateReadable }} + </time> + </h4> +</template> diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.js b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.js deleted file mode 100644 index 85bfd03a3cf..00000000000 --- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.js +++ /dev/null @@ -1,116 +0,0 @@ -import tooltip from '../../vue_shared/directives/tooltip'; -import { pluralize } from '../../lib/utils/text_utility'; -import icon from '../../vue_shared/components/icon.vue'; - -export default { - name: 'MRWidgetHeader', - props: { - mr: { type: Object, required: true }, - }, - directives: { - tooltip, - }, - components: { - icon, - }, - computed: { - shouldShowCommitsBehindText() { - return this.mr.divergedCommitsCount > 0; - }, - commitsText() { - return pluralize('commit', this.mr.divergedCommitsCount); - }, - branchNameClipboardData() { - // This supports code in app/assets/javascripts/copy_to_clipboard.js that - // works around ClipboardJS limitations to allow the context-specific - // copy/pasting of plain text or GFM. - return JSON.stringify({ - text: this.mr.sourceBranch, - gfm: `\`${this.mr.sourceBranch}\``, - }); - }, - }, - methods: { - isBranchTitleLong(branchTitle) { - return branchTitle.length > 32; - }, - }, - template: ` - <div class="mr-source-target"> - <div class="normal"> - <strong> - Request to merge - <span - class="label-branch" - :class="{'label-truncated': isBranchTitleLong(mr.sourceBranch)}" - :title="isBranchTitleLong(mr.sourceBranch) ? 
mr.sourceBranch : ''" - data-placement="bottom" - :v-tooltip="isBranchTitleLong(mr.sourceBranch)" - v-html="mr.sourceBranchLink"></span> - <button - v-tooltip - class="btn btn-transparent btn-clipboard" - data-title="Copy branch name to clipboard" - :data-clipboard-text="branchNameClipboardData"> - <i - aria-hidden="true" - class="fa fa-clipboard"></i> - </button> - into - <span - class="label-branch" - :v-tooltip="isBranchTitleLong(mr.sourceBranch)" - :class="{'label-truncatedtooltip': isBranchTitleLong(mr.targetBranch)}" - :title="isBranchTitleLong(mr.targetBranch) ? mr.targetBranch : ''" - data-placement="bottom"> - <a :href="mr.targetBranchTreePath">{{mr.targetBranch}}</a> - </span> - </strong> - <span - v-if="shouldShowCommitsBehindText" - class="diverged-commits-count"> - (<a :href="mr.targetBranchPath">{{mr.divergedCommitsCount}} {{commitsText}} behind</a>) - </span> - </div> - <div v-if="mr.isOpen"> - <a - href="#modal_merge_info" - data-toggle="modal" - class="btn btn-sm inline"> - Check out branch - </a> - <span class="dropdown prepend-left-10"> - <a - class="btn btn-sm inline dropdown-toggle" - data-toggle="dropdown" - aria-label="Download as" - role="button"> - <icon - name="download"> - </icon> - <i - class="fa fa-caret-down" - aria-hidden="true"> - </i> - </a> - <ul class="dropdown-menu dropdown-menu-align-right"> - <li> - <a - :href="mr.emailPatchesPath" - download> - Email patches - </a> - </li> - <li> - <a - :href="mr.plainDiffPath" - download> - Plain diff - </a> - </li> - </ul> - </span> - </div> - </div> - `, -}; diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.vue new file mode 100644 index 00000000000..18a3787857d --- /dev/null +++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_header.vue @@ -0,0 +1,145 @@ +<script> + import tooltip from '~/vue_shared/directives/tooltip'; + import { n__ } from '~/locale'; + import icon from '~/vue_shared/components/icon.vue'; + import clipboardButton from '~/vue_shared/components/clipboard_button.vue'; + + export default { + name: 'MRWidgetHeader', + directives: { + tooltip, + }, + components: { + icon, + clipboardButton, + }, + props: { + mr: { + type: Object, + required: true, + }, + }, + computed: { + shouldShowCommitsBehindText() { + return this.mr.divergedCommitsCount > 0; + }, + commitsText() { + return n__('%d commit behind', '%d commits behind', this.mr.divergedCommitsCount); + }, + branchNameClipboardData() { + // This supports code in app/assets/javascripts/copy_to_clipboard.js that + // works around ClipboardJS limitations to allow the context-specific + // copy/pasting of plain text or GFM. + return JSON.stringify({ + text: this.mr.sourceBranch, + gfm: `\`${this.mr.sourceBranch}\``, + }); + }, + isSourceBranchLong() { + return this.isBranchTitleLong(this.mr.sourceBranch); + }, + isTargetBranchLong() { + return this.isBranchTitleLong(this.mr.targetBranch); + }, + }, + methods: { + isBranchTitleLong(branchTitle) { + return branchTitle.length > 32; + }, + }, + }; +</script> +<template> + <div class="mr-source-target"> + <div class="normal"> + <strong> + {{ s__("mrWidget|Request to merge") }} + <span + class="label-branch js-source-branch" + :class="{ 'label-truncated': isSourceBranchLong }" + :title="isSourceBranchLong ? 
mr.sourceBranch : ''" + data-placement="bottom" + :v-tooltip="isSourceBranchLong" + v-html="mr.sourceBranchLink" + > + </span> + + <clipboard-button + :text="branchNameClipboardData" + :title="__('Copy branch name to clipboard')" + /> + + {{ s__("mrWidget|into") }} + + <span + class="label-branch" + :v-tooltip="isTargetBranchLong" + :class="{ 'label-truncatedtooltip': isTargetBranchLong }" + :title="isTargetBranchLong ? mr.targetBranch : ''" + data-placement="bottom" + > + <a + :href="mr.targetBranchTreePath" + class="js-target-branch" + > + {{ mr.targetBranch }} + </a> + </span> + </strong> + <span + v-if="shouldShowCommitsBehindText" + class="diverged-commits-count" + > + (<a :href="mr.targetBranchPath">{{ commitsText }}</a>) + </span> + </div> + + <div v-if="mr.isOpen"> + <button + data-target="#modal_merge_info" + data-toggle="modal" + :disabled="mr.sourceBranchRemoved" + class="btn btn-sm btn-default inline js-check-out-branch" + type="button" + > + {{ s__("mrWidget|Check out branch") }} + </button> + <span class="dropdown prepend-left-10"> + <button + type="button" + class="btn btn-sm inline dropdown-toggle" + data-toggle="dropdown" + aria-label="Download as" + aria-haspopup="true" + aria-expanded="false" + > + <icon name="download" /> + <i + class="fa fa-caret-down" + aria-hidden="true"> + </i> + </button> + <ul class="dropdown-menu dropdown-menu-align-right"> + <li> + <a + class="js-download-email-patches" + :href="mr.emailPatchesPath" + download + > + {{ s__("mrWidget|Email patches") }} + </a> + </li> + <li> + <a + class="js-download-plain-diff" + :href="mr.plainDiffPath" + download + > + {{ s__("mrWidget|Plain diff") }} + </a> + </li> + </ul> + </span> + </div> + </div> +</template> diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_merge_help.js b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_merge_help.js deleted file mode 100644 index 1d9f9863dd9..00000000000 --- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_merge_help.js +++ /dev/null @@ -1,23 +0,0 @@ -export default { - name: 'MRWidgetMergeHelp', - props: { - missingBranch: { type: String, required: false, default: '' }, - }, - template: ` - <section class="mr-widget-help"> - <template - v-if="missingBranch"> - If the {{missingBranch}} branch exists in your local repository, you - </template> - <template v-else> - You - </template> - can merge this merge request manually using the - <a - data-toggle="modal" - href="#modal_merge_info"> - command line - </a> - </section> - `, -}; diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_merge_help.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_merge_help.vue new file mode 100644 index 00000000000..62b61e1f41f --- /dev/null +++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_merge_help.vue @@ -0,0 +1,41 @@ +<script> + import { sprintf, s__ } from '~/locale'; + + export default { + name: 'MRWidgetMergeHelp', + props: { + missingBranch: { + type: String, + required: false, + default: '', + }, + }, + computed: { + missingBranchInfo() { + return sprintf( + s__('mrWidget|If the %{branch} branch exists in your local repository, you can merge this merge request manually using the'), + { branch: this.missingBranch }, + ); + }, + }, + }; +</script> +<template> + <section class="mr-widget-help"> + <template v-if="missingBranch"> + {{ missingBranchInfo }} + </template> + <template v-else> + {{ s__("mrWidget|You can merge this merge 
request manually using the") }} + </template> + + <button + type="button" + class="btn-link btn-blank js-open-modal-help" + data-toggle="modal" + data-target="#modal_merge_info" + > + {{ s__("mrWidget|command line") }} + </button> + </section> +</template> diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_related_links.js b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_related_links.js deleted file mode 100644 index 563267ad044..00000000000 --- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_related_links.js +++ /dev/null @@ -1,37 +0,0 @@ -export default { - name: 'MRWidgetRelatedLinks', - props: { - relatedLinks: { type: Object, required: true }, - state: { type: String, required: false }, - }, - computed: { - hasLinks() { - const { closing, mentioned, assignToMe } = this.relatedLinks; - return closing || mentioned || assignToMe; - }, - closesText() { - if (this.state === 'merged') { - return 'Closed'; - } - if (this.state === 'closed') { - return 'Did not close'; - } - return 'Closes'; - }, - }, - template: ` - <section - v-if="hasLinks" - class="mr-info-list mr-links"> - <p v-if="relatedLinks.closing"> - {{closesText}} <span v-html="relatedLinks.closing"></span> - </p> - <p v-if="relatedLinks.mentioned"> - Mentions <span v-html="relatedLinks.mentioned"></span> - </p> - <p v-if="relatedLinks.assignToMe"> - <span v-html="relatedLinks.assignToMe"></span> - </p> - </section> - `, -}; diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_related_links.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_related_links.vue new file mode 100644 index 00000000000..88d0fcd70f5 --- /dev/null +++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_related_links.vue @@ -0,0 +1,43 @@ +<script> + import { s__ } from '~/locale'; + + export default { + name: 'MRWidgetRelatedLinks', + props: { + relatedLinks: { + type: Object, + required: true, + default: () => ({}), + }, + state: { + type: String, + required: false, + default: '', + }, + }, + computed: { + closesText() { + if (this.state === 'merged') { + return s__('mrWidget|Closed'); + } + if (this.state === 'closed') { + return s__('mrWidget|Did not close'); + } + return s__('mrWidget|Closes'); + }, + }, + }; +</script> +<template> + <section class="mr-info-list mr-links"> + <p v-if="relatedLinks.closing"> + {{ closesText }} <span v-html="relatedLinks.closing"></span> + </p> + <p v-if="relatedLinks.mentioned"> + {{ s__("mrWidget|Mentions") }} <span v-html="relatedLinks.mentioned"></span> + </p> + <p v-if="relatedLinks.assignToMe"> + <span v-html="relatedLinks.assignToMe"></span> + </p> + </section> +</template> diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_closed.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_closed.vue index 71bfdaf801e..68b691fc914 100644 --- a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_closed.vue +++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_closed.vue @@ -1,5 +1,5 @@ <script> - import mrWidgetAuthorTime from '../../components/mr_widget_author_time'; + import mrWidgetAuthorTime from '../../components/mr_widget_author_time.vue'; import statusIcon from '../mr_widget_status_icon.vue'; export default { diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.js 
b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.js deleted file mode 100644 index 76b0235af1b..00000000000 --- a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.js +++ /dev/null @@ -1,78 +0,0 @@ -import statusIcon from '../mr_widget_status_icon.vue'; -import eventHub from '../../event_hub'; - -export default { - name: 'MRWidgetFailedToMerge', - props: { - mr: { type: Object, required: true }, - }, - data() { - return { - timer: 10, - isRefreshing: false, - }; - }, - mounted() { - setInterval(() => { - this.updateTimer(); - }, 1000); - }, - created() { - eventHub.$emit('DisablePolling'); - }, - computed: { - timerText() { - return this.timer > 1 ? `${this.timer} seconds` : 'a second'; - }, - }, - methods: { - refresh() { - this.isRefreshing = true; - eventHub.$emit('MRWidgetUpdateRequested'); - eventHub.$emit('EnablePolling'); - }, - updateTimer() { - this.timer = this.timer - 1; - - if (this.timer === 0) { - this.refresh(); - } - }, - }, - components: { - statusIcon, - }, - template: ` - <div class="mr-widget-body media"> - <template v-if="isRefreshing"> - <status-icon status="loading" /> - <span class="media-body bold js-refresh-label"> - Refreshing now - </span> - </template> - <template v-else> - <status-icon status="warning" :show-disabled-button="true" /> - <div class="media-body space-children"> - <span class="bold"> - <span - class="has-error-message" - v-if="mr.mergeError"> - {{mr.mergeError}}. - </span> - <span v-else>Merge failed.</span> - <span - :class="{ 'has-custom-error': mr.mergeError }"> - Refreshing in {{timerText}} to show the updated status... - </span> - </span> - <button - @click="refresh" - class="btn btn-default btn-xs js-refresh-button" - type="button"> - Refresh now - </button> - </div> - </template> - </div> - `, -}; diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.vue new file mode 100644 index 00000000000..602b68ea572 --- /dev/null +++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.vue @@ -0,0 +1,105 @@ +<script> + import { n__ } from '~/locale'; + import statusIcon from '../mr_widget_status_icon.vue'; + import eventHub from '../../event_hub'; + + export default { + name: 'MRWidgetFailedToMerge', + + components: { + statusIcon, + }, + + props: { + mr: { + type: Object, + required: true, + default: () => ({}), + }, + }, + + data() { + return { + timer: 10, + isRefreshing: false, + }; + }, + + computed: { + timerText() { + return n__( + 'Refreshing in a second to show the updated status...', + 'Refreshing in %d seconds to show the updated status...', + this.timer, + ); + }, + }, + + mounted() { + setInterval(() => { + this.updateTimer(); + }, 1000); + }, + + created() { + eventHub.$emit('DisablePolling'); + }, + + methods: { + refresh() { + this.isRefreshing = true; + eventHub.$emit('MRWidgetUpdateRequested'); + eventHub.$emit('EnablePolling'); + }, + updateTimer() { + this.timer = this.timer - 1; + + if (this.timer === 0) { + this.refresh(); + } + }, + }, + + }; +</script> +<template> + <div class="mr-widget-body media"> + <template v-if="isRefreshing"> + <status-icon status="loading" /> + <span class="media-body bold js-refresh-label"> + {{ s__("mrWidget|Refreshing now") }} + </span> + </template> + <template v-else> + <status-icon + status="warning" + 
:show-disabled-button="true" + /> + <div class="media-body space-children"> + <span class="bold"> + <span + class="has-error-message" + v-if="mr.mergeError" + > + {{ mr.mergeError }}. + </span> + <span v-else> + {{ s__("mrWidget|Merge failed.") }} + </span> + <span + :class="{ 'has-custom-error': mr.mergeError }" + > + {{ timerText }} + </span> + </span> + <button + @click="refresh" + class="btn btn-default btn-xs js-refresh-button" + type="button" + > + {{ s__("mrWidget|Refresh now") }} + </button> + </div> + </template> + </div> +</template> diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds.js b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds.js deleted file mode 100644 index 357485b9e78..00000000000 --- a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds.js +++ /dev/null @@ -1,124 +0,0 @@ -import Flash from '../../../flash'; -import statusIcon from '../mr_widget_status_icon.vue'; -import MRWidgetAuthor from '../../components/mr_widget_author'; -import eventHub from '../../event_hub'; - -export default { - name: 'MRWidgetMergeWhenPipelineSucceeds', - props: { - mr: { type: Object, required: true }, - service: { type: Object, required: true }, - }, - components: { - 'mr-widget-author': MRWidgetAuthor, - statusIcon, - }, - data() { - return { - isCancellingAutoMerge: false, - isRemovingSourceBranch: false, - }; - }, - computed: { - canRemoveSourceBranch() { - const { shouldRemoveSourceBranch, canRemoveSourceBranch, - mergeUserId, currentUserId } = this.mr; - - return !shouldRemoveSourceBranch && canRemoveSourceBranch && mergeUserId === currentUserId; - }, - }, - methods: { - cancelAutomaticMerge() { - this.isCancellingAutoMerge = true; - this.service.cancelAutomaticMerge() - .then(res => res.data) - .then((data) => { - eventHub.$emit('UpdateWidgetData', data); - }) - .catch(() => { - this.isCancellingAutoMerge = false; - new Flash('Something went wrong. Please try again.'); // eslint-disable-line - }); - }, - removeSourceBranch() { - const options = { - sha: this.mr.sha, - merge_when_pipeline_succeeds: true, - should_remove_source_branch: true, - }; - - this.isRemovingSourceBranch = true; - this.service.mergeResource.save(options) - .then(res => res.data) - .then((data) => { - if (data.status === 'merge_when_pipeline_succeeds') { - eventHub.$emit('MRWidgetUpdateRequested'); - } - }) - .catch(() => { - this.isRemovingSourceBranch = false; - new Flash('Something went wrong. 
Please try again.'); // eslint-disable-line - }); - }, - }, - template: ` - <div class="mr-widget-body media"> - <status-icon status="success" /> - <div class="media-body"> - <h4 class="flex-container-block"> - <span class="append-right-10"> - Set by - <mr-widget-author :author="mr.setToMWPSBy" /> - to be merged automatically when the pipeline succeeds - </span> - <a - v-if="mr.canCancelAutomaticMerge" - @click.prevent="cancelAutomaticMerge" - :disabled="isCancellingAutoMerge" - role="button" - href="#" - class="btn btn-xs btn-default js-cancel-auto-merge"> - <i - v-if="isCancellingAutoMerge" - class="fa fa-spinner fa-spin" - aria-hidden="true" /> - Cancel automatic merge - </a> - </h4> - <section class="mr-info-list"> - <p>The changes will be merged into - <a - :href="mr.targetBranchPath" - class="label-branch"> - {{mr.targetBranch}} - </a> - </p> - <p v-if="mr.shouldRemoveSourceBranch"> - The source branch will be removed - </p> - <p - v-else - class="flex-container-block" - > - <span class="append-right-10"> - The source branch will not be removed - </span> - <a - v-if="canRemoveSourceBranch" - :disabled="isRemovingSourceBranch" - @click.prevent="removeSourceBranch" - role="button" - class="btn btn-xs btn-default js-remove-source-branch" - href="#"> - <i - v-if="isRemovingSourceBranch" - class="fa fa-spinner fa-spin" - aria-hidden="true" /> - Remove source branch - </a> - </p> - </section> - </div> - </div> - `, -}; diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds.vue new file mode 100644 index 00000000000..de98a77be6f --- /dev/null +++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds.vue @@ -0,0 +1,147 @@ +<script> + import Flash from '../../../flash'; + import statusIcon from '../mr_widget_status_icon.vue'; + import mrWidgetAuthor from '../../components/mr_widget_author.vue'; + import eventHub from '../../event_hub'; + + export default { + name: 'MRWidgetMergeWhenPipelineSucceeds', + components: { + mrWidgetAuthor, + statusIcon, + }, + props: { + mr: { + type: Object, + required: true, + default: () => ({}), + }, + service: { + type: Object, + required: true, + default: () => ({}), + }, + }, + data() { + return { + isCancellingAutoMerge: false, + isRemovingSourceBranch: false, + }; + }, + computed: { + canRemoveSourceBranch() { + const { + shouldRemoveSourceBranch, + canRemoveSourceBranch, + mergeUserId, + currentUserId, + } = this.mr; + + return !shouldRemoveSourceBranch && + canRemoveSourceBranch && + mergeUserId === currentUserId; + }, + }, + methods: { + cancelAutomaticMerge() { + this.isCancellingAutoMerge = true; + this.service.cancelAutomaticMerge() + .then(res => res.data) + .then((data) => { + eventHub.$emit('UpdateWidgetData', data); + }) + .catch(() => { + this.isCancellingAutoMerge = false; + Flash('Something went wrong. Please try again.'); + }); + }, + removeSourceBranch() { + const options = { + sha: this.mr.sha, + merge_when_pipeline_succeeds: true, + should_remove_source_branch: true, + }; + + this.isRemovingSourceBranch = true; + this.service.mergeResource.save(options) + .then(res => res.data) + .then((data) => { + if (data.status === 'merge_when_pipeline_succeeds') { + eventHub.$emit('MRWidgetUpdateRequested'); + } + }) + .catch(() => { + this.isRemovingSourceBranch = false; + Flash('Something went wrong. 
Please try again.'); + }); + }, + }, + }; +</script> +<template> + <div class="mr-widget-body media"> + <status-icon status="success" /> + <div class="media-body"> + <h4 class="flex-container-block"> + <span class="append-right-10"> + {{ s__("mrWidget|Set by") }} + <mr-widget-author :author="mr.setToMWPSBy" /> + {{ s__("mrWidget|to be merged automatically when the pipeline succeeds") }} + </span> + <a + v-if="mr.canCancelAutomaticMerge" + @click.prevent="cancelAutomaticMerge" + :disabled="isCancellingAutoMerge" + role="button" + href="#" + class="btn btn-xs btn-default js-cancel-auto-merge"> + <i + v-if="isCancellingAutoMerge" + class="fa fa-spinner fa-spin" + aria-hidden="true" + > + </i> + {{ s__("mrWidget|Cancel automatic merge") }} + </a> + </h4> + <section class="mr-info-list"> + <p> + {{ s__("mrWidget|The changes will be merged into") }} + <a + :href="mr.targetBranchPath" + class="label-branch" + > + {{ mr.targetBranch }} + </a> + </p> + <p v-if="mr.shouldRemoveSourceBranch"> + {{ s__("mrWidget|The source branch will be removed") }} + </p> + <p + v-else + class="flex-container-block" + > + <span class="append-right-10"> + {{ s__("mrWidget|The source branch will not be removed") }} + </span> + <a + v-if="canRemoveSourceBranch" + :disabled="isRemovingSourceBranch" + @click.prevent="removeSourceBranch" + role="button" + class="btn btn-xs btn-default js-remove-source-branch" + href="#" + > + <i + v-if="isRemovingSourceBranch" + class="fa fa-spinner fa-spin" + aria-hidden="true" + > + </i> + {{ s__("mrWidget|Remove source branch") }} + </a> + </p> + </section> + </div> + </div> +</template> diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merged.js b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merged.js deleted file mode 100644 index 7f8d78cab73..00000000000 --- a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merged.js +++ /dev/null @@ -1,139 +0,0 @@ -import Flash from '../../../flash'; -import mrWidgetAuthorTime from '../../components/mr_widget_author_time'; -import tooltip from '../../../vue_shared/directives/tooltip'; -import loadingIcon from '../../../vue_shared/components/loading_icon.vue'; -import statusIcon from '../mr_widget_status_icon.vue'; -import eventHub from '../../event_hub'; - -export default { - name: 'MRWidgetMerged', - props: { - mr: { type: Object, required: true }, - service: { type: Object, required: true }, - }, - data() { - return { - isMakingRequest: false, - }; - }, - directives: { - tooltip, - }, - components: { - 'mr-widget-author-and-time': mrWidgetAuthorTime, - loadingIcon, - statusIcon, - }, - computed: { - shouldShowRemoveSourceBranch() { - const { sourceBranchRemoved, isRemovingSourceBranch, canRemoveSourceBranch } = this.mr; - - return !sourceBranchRemoved && canRemoveSourceBranch && - !this.isMakingRequest && !isRemovingSourceBranch; - }, - shouldShowSourceBranchRemoving() { - const { sourceBranchRemoved, isRemovingSourceBranch } = this.mr; - return !sourceBranchRemoved && (isRemovingSourceBranch || this.isMakingRequest); - }, - shouldShowMergedButtons() { - const { canRevertInCurrentMR, canCherryPickInCurrentMR, revertInForkPath, - cherryPickInForkPath } = this.mr; - - return canRevertInCurrentMR || canCherryPickInCurrentMR || - revertInForkPath || cherryPickInForkPath; - }, - }, - methods: { - removeSourceBranch() { - this.isMakingRequest = true; - this.service.removeSourceBranch() - .then(res => res.data) - .then((data) => { - if (data.message === 
'Branch was removed') { - eventHub.$emit('MRWidgetUpdateRequested', () => { - this.isMakingRequest = false; - }); - } - }) - .catch(() => { - this.isMakingRequest = false; - new Flash('Something went wrong. Please try again.'); // eslint-disable-line - }); - }, - }, - template: ` - <div class="mr-widget-body media"> - <status-icon status="success" /> - <div class="media-body"> - <div class="space-children"> - <mr-widget-author-and-time - actionText="Merged by" - :author="mr.metrics.mergedBy" - :date-title="mr.metrics.mergedAt" - :date-readable="mr.metrics.readableMergedAt" /> - <a - v-if="mr.canRevertInCurrentMR" - v-tooltip - class="btn btn-close btn-xs" - href="#modal-revert-commit" - data-toggle="modal" - data-container="body" - title="Revert this merge request in a new merge request"> - Revert - </a> - <a - v-else-if="mr.revertInForkPath" - v-tooltip - class="btn btn-close btn-xs" - data-method="post" - :href="mr.revertInForkPath" - title="Revert this merge request in a new merge request"> - Revert - </a> - <a - v-if="mr.canCherryPickInCurrentMR" - v-tooltip - class="btn btn-default btn-xs" - href="#modal-cherry-pick-commit" - data-toggle="modal" - data-container="body" - title="Cherry-pick this merge request in a new merge request"> - Cherry-pick - </a> - <a - v-else-if="mr.cherryPickInForkPath" - v-tooltip - class="btn btn-default btn-xs" - data-method="post" - :href="mr.cherryPickInForkPath" - title="Cherry-pick this merge request in a new merge request"> - Cherry-pick - </a> - </div> - <section class="mr-info-list"> - <p> - The changes were merged into - <span class="label-branch"> - <a :href="mr.targetBranchPath">{{mr.targetBranch}}</a> - </span> - </p> - <p v-if="mr.sourceBranchRemoved">The source branch has been removed</p> - <p v-if="shouldShowRemoveSourceBranch" class="space-children"> - <span>You can remove source branch now</span> - <button - @click="removeSourceBranch" - :disabled="isMakingRequest" - type="button" - class="btn btn-xs btn-default js-remove-branch-button"> - Remove Source Branch - </button> - </p> - <p v-if="shouldShowSourceBranchRemoving"> - <loading-icon inline /> - <span>The source branch is being removed</span> - </p> - </section> - </div> - </div> - `, -}; diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merged.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merged.vue new file mode 100644 index 00000000000..c1618bc6ea0 --- /dev/null +++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merged.vue @@ -0,0 +1,192 @@ +<script> + import Flash from '~/flash'; + import tooltip from '~/vue_shared/directives/tooltip'; + import loadingIcon from '~/vue_shared/components/loading_icon.vue'; + import { s__, __ } from '~/locale'; + import mrWidgetAuthorTime from '../../components/mr_widget_author_time.vue'; + import statusIcon from '../mr_widget_status_icon.vue'; + import eventHub from '../../event_hub'; + + export default { + name: 'MRWidgetMerged', + directives: { + tooltip, + }, + components: { + mrWidgetAuthorTime, + loadingIcon, + statusIcon, + }, + props: { + mr: { + type: Object, + required: true, + default: () => ({}), + }, + service: { + type: Object, + required: true, + default: () => ({}), + }, + }, + data() { + return { + isMakingRequest: false, + }; + }, + computed: { + shouldShowRemoveSourceBranch() { + const { + sourceBranchRemoved, + isRemovingSourceBranch, + canRemoveSourceBranch, + } = this.mr; + + return !sourceBranchRemoved && + 
canRemoveSourceBranch && + !this.isMakingRequest && + !isRemovingSourceBranch; + }, + shouldShowSourceBranchRemoving() { + const { + sourceBranchRemoved, + isRemovingSourceBranch, + } = this.mr; + return !sourceBranchRemoved && + (isRemovingSourceBranch || this.isMakingRequest); + }, + shouldShowMergedButtons() { + const { + canRevertInCurrentMR, + canCherryPickInCurrentMR, + revertInForkPath, + cherryPickInForkPath, + } = this.mr; + + return canRevertInCurrentMR || + canCherryPickInCurrentMR || + revertInForkPath || + cherryPickInForkPath; + }, + revertTitle() { + return s__('mrWidget|Revert this merge request in a new merge request'); + }, + cherryPickTitle() { + return s__('mrWidget|Cherry-pick this merge request in a new merge request'); + }, + revertLabel() { + return s__('mrWidget|Revert'); + }, + cherryPickLabel() { + return s__('mrWidget|Cherry-pick'); + }, + }, + methods: { + removeSourceBranch() { + this.isMakingRequest = true; + + this.service.removeSourceBranch() + .then(res => res.data) + .then((data) => { + if (data.message === 'Branch was removed') { + eventHub.$emit('MRWidgetUpdateRequested', () => { + this.isMakingRequest = false; + }); + } + }) + .catch(() => { + this.isMakingRequest = false; + Flash(__('Something went wrong. Please try again.')); + }); + }, + }, + }; +</script> +<template> + <div class="mr-widget-body media"> + <status-icon status="success" /> + <div class="media-body"> + <div class="space-children"> + <mr-widget-author-time + :action-text="s__('mrWidget|Merged by')" + :author="mr.metrics.mergedBy" + :date-title="mr.metrics.mergedAt" + :date-readable="mr.metrics.readableMergedAt" + /> + <a + v-if="mr.canRevertInCurrentMR" + v-tooltip + class="btn btn-close btn-xs" + href="#modal-revert-commit" + data-toggle="modal" + data-container="body" + :title="revertTitle" + > + {{ revertLabel }} + </a> + <a + v-else-if="mr.revertInForkPath" + v-tooltip + class="btn btn-close btn-xs" + data-method="post" + :href="mr.revertInForkPath" + :title="revertTitle" + > + {{ revertLabel }} + </a> + <a + v-if="mr.canCherryPickInCurrentMR" + v-tooltip + class="btn btn-default btn-xs" + href="#modal-cherry-pick-commit" + data-toggle="modal" + data-container="body" + :title="cherryPickTitle" + > + {{ cherryPickLabel }} + </a> + <a + v-else-if="mr.cherryPickInForkPath" + v-tooltip + class="btn btn-default btn-xs" + data-method="post" + :href="mr.cherryPickInForkPath" + :title="cherryPickTitle" + > + {{ cherryPickLabel }} + </a> + </div> + <section class="mr-info-list"> + <p> + {{ s__("mrWidget|The changes were merged into") }} + <span class="label-branch"> + <a :href="mr.targetBranchPath">{{ mr.targetBranch }}</a> + </span> + </p> + <p v-if="mr.sourceBranchRemoved"> + {{ s__("mrWidget|The source branch has been removed") }} + </p> + <p + v-if="shouldShowRemoveSourceBranch" + class="space-children" + > + <span>{{ s__("mrWidget|You can remove source branch now") }}</span> + <button + @click="removeSourceBranch" + :disabled="isMakingRequest" + type="button" + class="btn btn-xs btn-default js-remove-branch-button" + > + {{ s__("mrWidget|Remove Source Branch") }} + </button> + </p> + <p v-if="shouldShowSourceBranchRemoving"> + <loading-icon :inline="true" /> + <span> + {{ s__("mrWidget|The source branch is being removed") }} + </span> + </p> + </section> + </div> + </div> +</template> diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_missing_branch.js b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_missing_branch.js 
index 303877d6fbf..7733fb74afe 100644 --- a/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_missing_branch.js +++ b/app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_missing_branch.js @@ -1,6 +1,6 @@ import statusIcon from '../mr_widget_status_icon.vue'; import tooltip from '../../../vue_shared/directives/tooltip'; -import mrWidgetMergeHelp from '../../components/mr_widget_merge_help'; +import mrWidgetMergeHelp from '../../components/mr_widget_merge_help.vue'; export default { name: 'MRWidgetMissingBranch', diff --git a/app/assets/javascripts/vue_merge_request_widget/dependencies.js b/app/assets/javascripts/vue_merge_request_widget/dependencies.js index 8651945a3da..7ca15537719 100644 --- a/app/assets/javascripts/vue_merge_request_widget/dependencies.js +++ b/app/assets/javascripts/vue_merge_request_widget/dependencies.js @@ -11,13 +11,13 @@ export { default as Vue } from 'vue'; export { default as SmartInterval } from '~/smart_interval'; -export { default as WidgetHeader } from './components/mr_widget_header'; -export { default as WidgetMergeHelp } from './components/mr_widget_merge_help'; +export { default as WidgetHeader } from './components/mr_widget_header.vue'; +export { default as WidgetMergeHelp } from './components/mr_widget_merge_help.vue'; export { default as WidgetPipeline } from './components/mr_widget_pipeline.vue'; export { default as WidgetDeployment } from './components/mr_widget_deployment'; -export { default as WidgetRelatedLinks } from './components/mr_widget_related_links'; -export { default as MergedState } from './components/states/mr_widget_merged'; -export { default as FailedToMerge } from './components/states/mr_widget_failed_to_merge'; +export { default as WidgetRelatedLinks } from './components/mr_widget_related_links.vue'; +export { default as MergedState } from './components/states/mr_widget_merged.vue'; +export { default as FailedToMerge } from './components/states/mr_widget_failed_to_merge.vue'; export { default as ClosedState } from './components/states/mr_widget_closed.vue'; export { default as MergingState } from './components/states/mr_widget_merging.vue'; export { default as WipState } from './components/states/mr_widget_wip'; @@ -31,7 +31,7 @@ export { default as SHAMismatchState } from './components/states/mr_widget_sha_m export { default as UnresolvedDiscussionsState } from './components/states/mr_widget_unresolved_discussions'; export { default as PipelineBlockedState } from './components/states/mr_widget_pipeline_blocked'; export { default as PipelineFailedState } from './components/states/mr_widget_pipeline_failed'; -export { default as MergeWhenPipelineSucceedsState } from './components/states/mr_widget_merge_when_pipeline_succeeds'; +export { default as MergeWhenPipelineSucceedsState } from './components/states/mr_widget_merge_when_pipeline_succeeds.vue'; export { default as RebaseState } from './components/states/mr_widget_rebase.vue'; export { default as AutoMergeFailed } from './components/states/mr_widget_auto_merge_failed.vue'; export { default as CheckingState } from './components/states/mr_widget_checking.vue'; diff --git a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.js b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.js index 98d33f9efaa..edf67fcd0a7 100644 --- a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.js +++ b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.js @@ -257,7 +257,8 @@ export default { 
<mr-widget-related-links v-if="shouldRenderRelatedLinks" :state="mr.state" - :related-links="mr.relatedLinks" /> + :related-links="mr.relatedLinks" + /> </div> <div class="mr-widget-footer" diff --git a/app/assets/javascripts/vue_shared/components/confirmation_input.vue b/app/assets/javascripts/vue_shared/components/confirmation_input.vue new file mode 100644 index 00000000000..1aa03ea6317 --- /dev/null +++ b/app/assets/javascripts/vue_shared/components/confirmation_input.vue @@ -0,0 +1,62 @@ +<script> + import _ from 'underscore'; + import { __, sprintf } from '~/locale'; + + export default { + props: { + inputId: { + type: String, + required: true, + }, + confirmationKey: { + type: String, + required: true, + }, + confirmationValue: { + type: String, + required: true, + }, + shouldEscapeConfirmationValue: { + type: Boolean, + required: false, + default: true, + }, + }, + computed: { + inputLabel() { + let value = this.confirmationValue; + if (this.shouldEscapeConfirmationValue) { + value = _.escape(value); + } + + return sprintf( + __('Type %{value} to confirm:'), + { value: `<code>${value}</code>` }, + false, + ); + }, + }, + methods: { + hasCorrectValue() { + return this.$refs.enteredValue.value === this.confirmationValue; + }, + }, + }; +</script> + +<template> + <div> + <label + v-html="inputLabel" + :for="inputId" + > + </label> + <input + :id="inputId" + :name="confirmationKey" + type="text" + ref="enteredValue" + class="form-control" + /> + </div> +</template> diff --git a/app/assets/stylesheets/pages/commits.scss b/app/assets/stylesheets/pages/commits.scss index aeaa33bd3bd..17801ed5910 100644 --- a/app/assets/stylesheets/pages/commits.scss +++ b/app/assets/stylesheets/pages/commits.scss @@ -195,6 +195,18 @@ .commit-actions { @media (min-width: $screen-sm-min) { font-size: 0; + + div { + display: inline; + } + + .fa-spinner { + font-size: 12px; + } + + span { + font-size: 6px; + } } .ci-status-link { @@ -219,6 +231,11 @@ font-size: 14px; font-weight: $gl-font-weight-bold; } + + .ci-status-icon { + position: relative; + top: 1px; + } } .commit, diff --git a/app/assets/stylesheets/pages/note_form.scss b/app/assets/stylesheets/pages/note_form.scss index bf8eb4c1f06..4a528bc2bb1 100644 --- a/app/assets/stylesheets/pages/note_form.scss +++ b/app/assets/stylesheets/pages/note_form.scss @@ -410,7 +410,6 @@ width: 298px; } - @media (max-width: $screen-xs-max) { display: flex; width: 100%; diff --git a/app/controllers/admin/gitaly_servers_controller.rb b/app/controllers/admin/gitaly_servers_controller.rb new file mode 100644 index 00000000000..11c4dfe3d8d --- /dev/null +++ b/app/controllers/admin/gitaly_servers_controller.rb @@ -0,0 +1,5 @@ +class Admin::GitalyServersController < Admin::ApplicationController + def index + @gitaly_servers = Gitaly::Server.all + end +end diff --git a/app/controllers/concerns/issuable_collections.rb b/app/controllers/concerns/issuable_collections.rb index b25e753a5ad..0d7ee06deb6 100644 --- a/app/controllers/concerns/issuable_collections.rb +++ b/app/controllers/concerns/issuable_collections.rb @@ -12,11 +12,9 @@ module IssuableCollections # rubocop:disable Gitlab/ModuleWithInstanceVariables def set_issuables_index - @issuables = issuables_collection - @issuables = @issuables.page(params[:page]) - @issuable_meta_data = issuable_meta_data(@issuables, collection_type) - @total_pages = issuable_page_count + @issuables = issuables_collection + set_pagination return if redirect_out_of_range(@total_pages) if params[:label_name].present? 
@@ -35,14 +33,26 @@ module IssuableCollections @users.push(author) if author end end + + def set_pagination + return if pagination_disabled? + + @issuables = @issuables.page(params[:page]) + @issuable_meta_data = issuable_meta_data(@issuables, collection_type) + @total_pages = issuable_page_count + end # rubocop:enable Gitlab/ModuleWithInstanceVariables + def pagination_disabled? + false + end + def issuables_collection finder.execute.preload(preload_for_collection) end def redirect_out_of_range(total_pages) - return false if total_pages.zero? + return false if total_pages.nil? || total_pages.zero? out_of_range = @issuables.current_page > total_pages # rubocop:disable Gitlab/ModuleWithInstanceVariables @@ -84,6 +94,7 @@ module IssuableCollections @filter_params[:project_id] = @project.id elsif @group @filter_params[:group_id] = @group.id + @filter_params[:include_subgroups] = true else # TODO: this filter ignore issues/mr created in public or # internal repos where you are not a member. Enable this filter diff --git a/app/controllers/concerns/uploads_actions.rb b/app/controllers/concerns/uploads_actions.rb index a6fb1f40001..61554029d09 100644 --- a/app/controllers/concerns/uploads_actions.rb +++ b/app/controllers/concerns/uploads_actions.rb @@ -1,6 +1,8 @@ module UploadsActions include Gitlab::Utils::StrongMemoize + UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze + def create link_to_file = UploadService.new(model, params[:file], uploader_class).execute @@ -17,34 +19,71 @@ module UploadsActions end end + # This should either + # - send the file directly + # - or redirect to its URL + # def show return render_404 unless uploader.exists? - disposition = uploader.image_or_video? ? 'inline' : 'attachment' - - expires_in 0.seconds, must_revalidate: true, private: true + if uploader.file_storage? + disposition = uploader.image_or_video? ? 'inline' : 'attachment' + expires_in 0.seconds, must_revalidate: true, private: true - send_file uploader.file.path, disposition: disposition + send_file uploader.file.path, disposition: disposition + else + redirect_to uploader.url + end end private + def uploader_class + raise NotImplementedError + end + + def upload_mount + mounted_as = params[:mounted_as] + mounted_as if UPLOAD_MOUNTS.include?(mounted_as) + end + + def uploader_mounted? + upload_model_class < CarrierWave::Mount::Extension && !upload_mount.nil? + end + def uploader strong_memoize(:uploader) do - return if show_model.nil? + if uploader_mounted? + model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend + else + build_uploader_from_upload || build_uploader_from_params + end + end + end - file_uploader = FileUploader.new(show_model, params[:secret]) - file_uploader.retrieve_from_store!(params[:filename]) + def build_uploader_from_upload + return nil unless params[:secret] && params[:filename] - file_uploader - end + upload_path = uploader_class.upload_path(params[:secret], params[:filename]) + upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_path) + upload&.build_uploader + end + + def build_uploader_from_params + uploader = uploader_class.new(model, params[:secret]) + uploader.retrieve_from_store!(params[:filename]) + uploader end def image_or_video? uploader && uploader.exists? && uploader.image_or_video? 
end - def uploader_class - FileUploader + def find_model + nil + end + + def model + strong_memoize(:model) { find_model } end end diff --git a/app/controllers/groups/uploads_controller.rb b/app/controllers/groups/uploads_controller.rb index e6bd9806401..f1578f75e88 100644 --- a/app/controllers/groups/uploads_controller.rb +++ b/app/controllers/groups/uploads_controller.rb @@ -7,29 +7,23 @@ class Groups::UploadsController < Groups::ApplicationController private - def show_model - strong_memoize(:show_model) do - group_id = params[:group_id] - - Group.find_by_full_path(group_id) - end + def upload_model_class + Group end - def authorize_upload_file! - render_404 unless can?(current_user, :upload_file, group) + def uploader_class + NamespaceFileUploader end - def uploader - strong_memoize(:uploader) do - file_uploader = uploader_class.new(show_model, params[:secret]) - file_uploader.retrieve_from_store!(params[:filename]) - file_uploader - end - end + def find_model + return @group if @group - def uploader_class - NamespaceFileUploader + group_id = params[:group_id] + + Group.find_by_full_path(group_id) end - alias_method :model, :group + def authorize_upload_file! + render_404 unless can?(current_user, :upload_file, group) + end end diff --git a/app/controllers/groups_controller.rb b/app/controllers/groups_controller.rb index eb53a522f90..bb652832cb1 100644 --- a/app/controllers/groups_controller.rb +++ b/app/controllers/groups_controller.rb @@ -118,10 +118,10 @@ class GroupsController < Groups::ApplicationController end def group_params - params.require(:group).permit(group_params_ce) + params.require(:group).permit(group_params_attributes) end - def group_params_ce + def group_params_attributes [ :avatar, :description, @@ -150,7 +150,6 @@ class GroupsController < Groups::ApplicationController @projects = GroupProjectsFinder.new(params: params, group: group, options: options, current_user: current_user) .execute .includes(:namespace) - .page(params[:page]) @events = EventCollection .new(@projects, offset: params[:offset].to_i, filter: event_filter) diff --git a/app/controllers/help_controller.rb b/app/controllers/help_controller.rb index 38f379dbf4f..a394521698c 100644 --- a/app/controllers/help_controller.rb +++ b/app/controllers/help_controller.rb @@ -5,7 +5,7 @@ class HelpController < ApplicationController # Taken from Jekyll # https://github.com/jekyll/jekyll/blob/3.5-stable/lib/jekyll/document.rb#L13 - YAML_FRONT_MATTER_REGEXP = %r!\A(---\s*\n.*?\n?)^((---|\.\.\.)\s*$\n?)!m + YAML_FRONT_MATTER_REGEXP = /\A(---\s*\n.*?\n?)^((---|\.\.\.)\s*$\n?)/m def index # Remove YAML frontmatter so that it doesn't look weird diff --git a/app/controllers/projects/lfs_storage_controller.rb b/app/controllers/projects/lfs_storage_controller.rb index 293869345bd..941638db427 100644 --- a/app/controllers/projects/lfs_storage_controller.rb +++ b/app/controllers/projects/lfs_storage_controller.rb @@ -60,7 +60,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController def store_file(oid, size, tmp_file) # Define tmp_file_path early because we use it in "ensure" - tmp_file_path = File.join("#{Gitlab.config.lfs.storage_path}/tmp/upload", tmp_file) + tmp_file_path = File.join(LfsObjectUploader.workhorse_upload_path, tmp_file) object = LfsObject.find_or_create_by(oid: oid, size: size) file_exists = object.file.exists? 
|| move_tmp_file_to_storage(object, tmp_file_path) diff --git a/app/controllers/projects/uploads_controller.rb b/app/controllers/projects/uploads_controller.rb index 4685bbe80b4..f5cf089ad98 100644 --- a/app/controllers/projects/uploads_controller.rb +++ b/app/controllers/projects/uploads_controller.rb @@ -1,6 +1,7 @@ class Projects::UploadsController < Projects::ApplicationController include UploadsActions + # These will kick you out if you don't have access. skip_before_action :project, :repository, if: -> { action_name == 'show' && image_or_video? } @@ -8,14 +9,20 @@ class Projects::UploadsController < Projects::ApplicationController private - def show_model - strong_memoize(:show_model) do - namespace = params[:namespace_id] - id = params[:project_id] + def upload_model_class + Project + end - Project.find_by_full_path("#{namespace}/#{id}") - end + def uploader_class + FileUploader end - alias_method :model, :project + def find_model + return @project if @project + + namespace = params[:namespace_id] + id = params[:project_id] + + Project.find_by_full_path("#{namespace}/#{id}") + end end diff --git a/app/controllers/projects_controller.rb b/app/controllers/projects_controller.rb index e6e2b219e6a..8158934322d 100644 --- a/app/controllers/projects_controller.rb +++ b/app/controllers/projects_controller.rb @@ -403,6 +403,6 @@ class ProjectsController < Projects::ApplicationController # to # localhost/group/project # - redirect_to request.original_url.sub(/\.git\/?\Z/, '') if params[:format] == 'git' + redirect_to request.original_url.sub(%r{\.git/?\Z}, '') if params[:format] == 'git' end end diff --git a/app/controllers/uploads_controller.rb b/app/controllers/uploads_controller.rb index 16a74f82d3f..3d227b0a955 100644 --- a/app/controllers/uploads_controller.rb +++ b/app/controllers/uploads_controller.rb @@ -1,19 +1,34 @@ class UploadsController < ApplicationController include UploadsActions + UnknownUploadModelError = Class.new(StandardError) + + MODEL_CLASSES = { + "user" => User, + "project" => Project, + "note" => Note, + "group" => Group, + "appearance" => Appearance, + "personal_snippet" => PersonalSnippet, + nil => PersonalSnippet + }.freeze + + rescue_from UnknownUploadModelError, with: :render_404 + skip_before_action :authenticate_user! + before_action :upload_mount_satisfied? before_action :find_model before_action :authorize_access!, only: [:show] before_action :authorize_create_access!, only: [:create] - private + def uploader_class + PersonalFileUploader + end def find_model return nil unless params[:id] - return render_404 unless upload_model && upload_mount - - @model = upload_model.find(params[:id]) + upload_model_class.find(params[:id]) end def authorize_access! 
@@ -53,55 +68,17 @@ class UploadsController < ApplicationController end end - def upload_model - upload_models = { - "user" => User, - "project" => Project, - "note" => Note, - "group" => Group, - "appearance" => Appearance, - "personal_snippet" => PersonalSnippet - } - - upload_models[params[:model]] - end - - def upload_mount - return true unless params[:mounted_as] - - upload_mounts = %w(avatar attachment file logo header_logo) - - if upload_mounts.include?(params[:mounted_as]) - params[:mounted_as] - end + def upload_model_class + MODEL_CLASSES[params[:model]] || raise(UnknownUploadModelError) end - def uploader - return @uploader if defined?(@uploader) - - case model - when nil - @uploader = PersonalFileUploader.new(nil, params[:secret]) - - @uploader.retrieve_from_store!(params[:filename]) - when PersonalSnippet - @uploader = PersonalFileUploader.new(model, params[:secret]) - - @uploader.retrieve_from_store!(params[:filename]) - else - @uploader = @model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend - - redirect_to @uploader.url unless @uploader.file_storage? - end - - @uploader + def upload_model_class_has_mounts? + upload_model_class < CarrierWave::Mount::Extension end - def uploader_class - PersonalFileUploader - end + def upload_mount_satisfied? + return true unless upload_model_class_has_mounts? - def model - @model ||= find_model + upload_model_class.uploader_options.has_key?(upload_mount) end end diff --git a/app/finders/group_projects_finder.rb b/app/finders/group_projects_finder.rb index f2d3b90b8e2..f73cf8adb4d 100644 --- a/app/finders/group_projects_finder.rb +++ b/app/finders/group_projects_finder.rb @@ -87,8 +87,17 @@ class GroupProjectsFinder < ProjectsFinder options.fetch(:only_shared, false) end + # subgroups are supported only for owned projects not for shared + def include_subgroups? + options.fetch(:include_subgroups, false) + end + def owned_projects - group.projects + if include_subgroups? + Project.where(namespace_id: group.self_and_descendants.select(:id)) + else + group.projects + end end def shared_projects diff --git a/app/finders/issuable_finder.rb b/app/finders/issuable_finder.rb index 493e7985d75..0fe3000ca01 100644 --- a/app/finders/issuable_finder.rb +++ b/app/finders/issuable_finder.rb @@ -43,6 +43,7 @@ class IssuableFinder search sort state + include_subgroups ].freeze ARRAY_PARAMS = { label_name: [], iids: [], assignee_username: [] }.freeze @@ -148,7 +149,8 @@ class IssuableFinder if current_user && params[:authorized_only].presence && !current_user_related? 
current_user.authorized_projects elsif group - GroupProjectsFinder.new(group: group, current_user: current_user).execute + finder_options = { include_subgroups: params[:include_subgroups], only_owned: true } + GroupProjectsFinder.new(group: group, current_user: current_user, options: finder_options).execute else ProjectsFinder.new(current_user: current_user, project_ids_relation: item_project_ids(items)).execute end diff --git a/app/helpers/application_helper.rb b/app/helpers/application_helper.rb index d13407a06c8..6530327698b 100644 --- a/app/helpers/application_helper.rb +++ b/app/helpers/application_helper.rb @@ -89,7 +89,7 @@ module ApplicationHelper end def default_avatar - 'no_avatar.png' + asset_path('no_avatar.png') end def last_commit(project) diff --git a/app/helpers/application_settings_helper.rb b/app/helpers/application_settings_helper.rb index 8ef561d90e6..e91e1d29d64 100644 --- a/app/helpers/application_settings_helper.rb +++ b/app/helpers/application_settings_helper.rb @@ -96,12 +96,12 @@ module ApplicationSettingsHelper ] end - def repository_storages_options_for_select + def repository_storages_options_for_select(selected) options = Gitlab.config.repositories.storages.map do |name, storage| ["#{name} - #{storage['path']}", name] end - options_for_select(options, @application_setting.repository_storages) + options_for_select(options, selected) end def sidekiq_queue_options_for_select diff --git a/app/helpers/sidekiq_helper.rb b/app/helpers/sidekiq_helper.rb index 55f4da0ef85..50aeb7f4b82 100644 --- a/app/helpers/sidekiq_helper.rb +++ b/app/helpers/sidekiq_helper.rb @@ -1,12 +1,12 @@ module SidekiqHelper - SIDEKIQ_PS_REGEXP = /\A + SIDEKIQ_PS_REGEXP = %r{\A (?<pid>\d+)\s+ (?<cpu>[\d\.,]+)\s+ (?<mem>[\d\.,]+)\s+ - (?<state>[DIEKNRSTVWXZNLpsl\+<>\/\d]+)\s+ + (?<state>[DIEKNRSTVWXZNLpsl\+<>/\d]+)\s+ (?<start>.+?)\s+ (?<command>(?:ruby\d+:\s+)?sidekiq.*\].*) - \z/x + \z}x def parse_sidekiq_ps(line) match = line.strip.match(SIDEKIQ_PS_REGEXP) diff --git a/app/helpers/submodule_helper.rb b/app/helpers/submodule_helper.rb index 1db9ae3839c..9151543dfdc 100644 --- a/app/helpers/submodule_helper.rb +++ b/app/helpers/submodule_helper.rb @@ -11,7 +11,7 @@ module SubmoduleHelper url = File.join(Gitlab.config.gitlab.url, @project.full_path) end - if url =~ /([^\/:]+)\/([^\/]+(?:\.git)?)\Z/ + if url =~ %r{([^/:]+)/([^/]+(?:\.git)?)\Z} namespace, project = $1, $2 gitlab_hosts = [Gitlab.config.gitlab.url, Gitlab.config.gitlab_shell.ssh_path_prefix] @@ -23,7 +23,7 @@ module SubmoduleHelper end end - namespace.sub!(/\A\//, '') + namespace.sub!(%r{\A/}, '') project.rstrip! 
project.sub!(/\.git\z/, '') @@ -47,11 +47,11 @@ module SubmoduleHelper protected def github_dot_com_url?(url) - url =~ /github\.com[\/:][^\/]+\/[^\/]+\Z/ + url =~ %r{github\.com[/:][^/]+/[^/]+\Z} end def gitlab_dot_com_url?(url) - url =~ /gitlab\.com[\/:][^\/]+\/[^\/]+\Z/ + url =~ %r{gitlab\.com[/:][^/]+/[^/]+\Z} end def self_url?(url, namespace, project) @@ -65,7 +65,7 @@ module SubmoduleHelper def relative_self_url?(url) # (./)?(../repo.git) || (./)?(../../project/repo.git) ) - url =~ /\A((\.\/)?(\.\.\/))(?!(\.\.)|(.*\/)).*(\.git)?\z/ || url =~ /\A((\.\/)?(\.\.\/){2})(?!(\.\.))([^\/]*)\/(?!(\.\.)|(.*\/)).*(\.git)?\z/ + url =~ %r{\A((\./)?(\.\./))(?!(\.\.)|(.*/)).*(\.git)?\z} || url =~ %r{\A((\./)?(\.\./){2})(?!(\.\.))([^/]*)/(?!(\.\.)|(.*/)).*(\.git)?\z} end def standard_links(host, namespace, project, commit) diff --git a/app/helpers/tree_helper.rb b/app/helpers/tree_helper.rb index 5b2ea38a03d..d39cac0f510 100644 --- a/app/helpers/tree_helper.rb +++ b/app/helpers/tree_helper.rb @@ -110,7 +110,7 @@ module TreeHelper # returns the relative path of the first subdir that doesn't have only one directory descendant def flatten_tree(root_path, tree) - return tree.flat_path.sub(/\A#{root_path}\//, '') if tree.flat_path.present? + return tree.flat_path.sub(%r{\A#{root_path}/}, '') if tree.flat_path.present? subtree = Gitlab::Git::Tree.where(@repository, @commit.id, tree.path) if subtree.count == 1 && subtree.first.dir? diff --git a/app/models/appearance.rb b/app/models/appearance.rb index 76cfe28742a..dcd14c08f3c 100644 --- a/app/models/appearance.rb +++ b/app/models/appearance.rb @@ -11,6 +11,7 @@ class Appearance < ActiveRecord::Base mount_uploader :logo, AttachmentUploader mount_uploader :header_logo, AttachmentUploader + has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent CACHE_KEY = 'current_appearance'.freeze diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb index df67fb243ad..78906e7a968 100644 --- a/app/models/ci/build.rb +++ b/app/models/ci/build.rb @@ -292,7 +292,7 @@ module Ci def repo_url auth = "gitlab-ci-token:#{ensure_token!}@" - project.http_url_to_repo.sub(/^https?:\/\//) do |prefix| + project.http_url_to_repo.sub(%r{^https?://}) do |prefix| prefix + auth end end @@ -466,7 +466,7 @@ module Ci if cache && project.jobs_cache_index cache = cache.merge( - key: "#{cache[:key]}:#{project.jobs_cache_index}") + key: "#{cache[:key]}_#{project.jobs_cache_index}") end [cache] diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb index c0263c0b4e2..3469d5d795c 100644 --- a/app/models/commit_status.rb +++ b/app/models/commit_status.rb @@ -141,7 +141,7 @@ class CommitStatus < ActiveRecord::Base end def group_name - name.to_s.gsub(/\d+[\s:\/\\]+\d+\s*/, '').strip + name.to_s.gsub(%r{\d+[\s:/\\]+\d+\s*}, '').strip end def failed_but_allowed? diff --git a/app/models/concerns/avatarable.rb b/app/models/concerns/avatarable.rb index 10659030910..d35e37935fb 100644 --- a/app/models/concerns/avatarable.rb +++ b/app/models/concerns/avatarable.rb @@ -1,6 +1,30 @@ module Avatarable extend ActiveSupport::Concern + included do + prepend ShadowMethods + + validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? } + validates :avatar, file_size: { maximum: 200.kilobytes.to_i } + + mount_uploader :avatar, AvatarUploader + end + + module ShadowMethods + def avatar_url(**args) + # We use avatar_path instead of overriding avatar_url because of carrierwave. 
+ # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864 + + avatar_path(only_path: args.fetch(:only_path, true)) || super + end + end + + def avatar_type + unless self.avatar.image? + self.errors.add :avatar, "only images allowed" + end + end + def avatar_path(only_path: true) return unless self[:avatar].present? diff --git a/app/models/concerns/discussion_on_diff.rb b/app/models/concerns/discussion_on_diff.rb index db9770fabf4..8b3c55387b3 100644 --- a/app/models/concerns/discussion_on_diff.rb +++ b/app/models/concerns/discussion_on_diff.rb @@ -37,6 +37,8 @@ module DiscussionOnDiff # Returns an array of at most 16 highlighted lines above a diff note def truncated_diff_lines(highlight: true) + return [] if diff_line.nil? && first_note.is_a?(LegacyDiffNote) + lines = highlight ? highlighted_diff_lines : diff_lines initial_line_index = [diff_line.index - NUMBER_OF_TRUNCATED_DIFF_LINES + 1, 0].max diff --git a/app/models/concerns/taskable.rb b/app/models/concerns/taskable.rb index d07041c2fdf..ccd6f0e0a7d 100644 --- a/app/models/concerns/taskable.rb +++ b/app/models/concerns/taskable.rb @@ -9,13 +9,13 @@ require 'task_list/filter' module Taskable COMPLETED = 'completed'.freeze INCOMPLETE = 'incomplete'.freeze - ITEM_PATTERN = / + ITEM_PATTERN = %r{ ^ \s*(?:[-+*]|(?:\d+\.)) # list prefix required - task item has to be always in a list \s+ # whitespace prefix has to be always presented for a list item (\[\s\]|\[[xX]\]) # checkbox (\s.+) # followed by whitespace and some text. - /x + }x def self.get_tasks(content) content.to_s.scan(ITEM_PATTERN).map do |checkbox, label| diff --git a/app/models/environment.rb b/app/models/environment.rb index bf69b4c50f0..2f6eae605ee 100644 --- a/app/models/environment.rb +++ b/app/models/environment.rb @@ -115,7 +115,7 @@ class Environment < ActiveRecord::Base def formatted_external_url return nil unless external_url - external_url.gsub(/\A.*?:\/\//, '') + external_url.gsub(%r{\A.*?://}, '') end def stop_action? diff --git a/app/models/group.rb b/app/models/group.rb index fddace03387..62b1322ebe6 100644 --- a/app/models/group.rb +++ b/app/models/group.rb @@ -29,18 +29,14 @@ class Group < Namespace has_many :variables, class_name: 'Ci::GroupVariable' has_many :custom_attributes, class_name: 'GroupCustomAttribute' - validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? } + has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent + validate :visibility_level_allowed_by_projects validate :visibility_level_allowed_by_sub_groups validate :visibility_level_allowed_by_parent - validates :avatar, file_size: { maximum: 200.kilobytes.to_i } - validates :two_factor_grace_period, presence: true, numericality: { greater_than_or_equal_to: 0 } - mount_uploader :avatar, AvatarUploader - has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent - after_create :post_create_hook after_destroy :post_destroy_hook after_save :update_two_factor_requirement @@ -116,12 +112,6 @@ class Group < Namespace visibility_level_allowed_by_sub_groups?(level) end - def avatar_url(**args) - # We use avatar_path instead of overriding avatar_url because of carrierwave. - # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864 - avatar_path(args) - end - def lfs_enabled? return false unless Gitlab.config.lfs.enabled return Gitlab.config.lfs.enabled if self[:lfs_enabled].nil? 
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb index 4accb08eaf9..d025062f562 100644 --- a/app/models/merge_request.rb +++ b/app/models/merge_request.rb @@ -618,12 +618,12 @@ class MergeRequest < ActiveRecord::Base can_be_merged? && !should_be_rebased? end - def mergeable_state?(skip_ci_check: false) + def mergeable_state?(skip_ci_check: false, skip_discussions_check: false) return false unless open? return false if work_in_progress? return false if broken? return false unless skip_ci_check || mergeable_ci_state? - return false unless mergeable_discussions_state? + return false unless skip_discussions_check || mergeable_discussions_state? true end @@ -989,13 +989,13 @@ class MergeRequest < ActiveRecord::Base merged_at = metrics&.merged_at notes_association = notes_with_associations - # It is not guaranteed that Note#created_at will be strictly later than - # MergeRequestMetric#merged_at. Nanoseconds on MySQL may break this - # comparison, as will a HA environment if clocks are not *precisely* - # synchronized. Add a minute's leeway to compensate for both possibilities - cutoff = merged_at - 1.minute - if merged_at + # It is not guaranteed that Note#created_at will be strictly later than + # MergeRequestMetric#merged_at. Nanoseconds on MySQL may break this + # comparison, as will a HA environment if clocks are not *precisely* + # synchronized. Add a minute's leeway to compensate for both possibilities + cutoff = merged_at - 1.minute + notes_association = notes_association.where('created_at >= ?', cutoff) end diff --git a/app/models/note.rb b/app/models/note.rb index 184fbd5f5ae..a84db8982e5 100644 --- a/app/models/note.rb +++ b/app/models/note.rb @@ -88,6 +88,7 @@ class Note < ActiveRecord::Base end end + # @deprecated attachments are handled by the MarkdownUploader mount_uploader :attachment, AttachmentUploader # Scopes diff --git a/app/models/project.rb b/app/models/project.rb index e19873f64ce..90f5df6265d 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -234,7 +234,7 @@ class Project < ActiveRecord::Base validates :creator, presence: true, on: :create validates :description, length: { maximum: 2000 }, allow_blank: true validates :ci_config_path, - format: { without: /(\.{2}|\A\/)/, + format: { without: %r{(\.{2}|\A/)}, message: 'cannot include leading slash or directory traversal.' }, length: { maximum: 255 }, allow_blank: true @@ -256,9 +256,6 @@ class Project < ActiveRecord::Base validates :star_count, numericality: { greater_than_or_equal_to: 0 } validate :check_limit, on: :create validate :check_repository_path_availability, on: :update, if: ->(project) { project.renamed? } - validate :avatar_type, - if: ->(project) { project.avatar.present? && project.avatar_changed?
} - validates :avatar, file_size: { maximum: 200.kilobytes.to_i } validate :visibility_level_allowed_by_group validate :visibility_level_allowed_as_fork validate :check_wiki_path_conflict @@ -266,7 +263,6 @@ class Project < ActiveRecord::Base presence: true, inclusion: { in: ->(_object) { Gitlab.config.repositories.storages.keys } } - mount_uploader :avatar, AvatarUploader has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent # Scopes @@ -289,7 +285,6 @@ class Project < ActiveRecord::Base scope :non_archived, -> { where(archived: false) } scope :for_milestones, ->(ids) { joins(:milestones).where('milestones.id' => ids).distinct } scope :with_push, -> { joins(:events).where('events.action = ?', Event::PUSHED) } - scope :with_project_feature, -> { joins('LEFT JOIN project_features ON projects.id = project_features.project_id') } scope :with_statistics, -> { includes(:statistics) } scope :with_shared_runners, -> { where(shared_runners_enabled: true) } @@ -568,6 +563,9 @@ class Project < ActiveRecord::Base RepositoryForkWorker.perform_async(id, forked_from_project.repository_storage_path, forked_from_project.disk_path) + elsif gitlab_project_import? + # Do not retry on Import/Export until https://gitlab.com/gitlab-org/gitlab-ce/issues/26189 is solved. + RepositoryImportWorker.set(retry: false).perform_async(self.id) else RepositoryImportWorker.perform_async(self.id) end @@ -920,20 +918,12 @@ class Project < ActiveRecord::Base issues_tracker.to_param == 'jira' end - def avatar_type - unless self.avatar.image? - self.errors.add :avatar, 'only images allowed' - end - end - def avatar_in_git repository.avatar end def avatar_url(**args) - # We use avatar_path instead of overriding avatar_url because of carrierwave. - # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864 - avatar_path(args) || (Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git) + Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git end # For compatibility with old code @@ -1335,7 +1325,7 @@ class Project < ActiveRecord::Base host = "#{subdomain}.#{Settings.pages.host}".downcase # The host in URL always needs to be downcased - url = Gitlab.config.pages.url.sub(/^https?:\/\//) do |prefix| + url = Gitlab.config.pages.url.sub(%r{^https?://}) do |prefix| "#{prefix}#{subdomain}." end.downcase diff --git a/app/models/project_services/asana_service.rb b/app/models/project_services/asana_service.rb index 9ce2d1153a7..109258d1eb7 100644 --- a/app/models/project_services/asana_service.rb +++ b/app/models/project_services/asana_service.rb @@ -84,7 +84,7 @@ http://app.asana.com/-/account_api' # - fix/ed/es/ing # - close/s/d # - closing - issue_finder = /(fix\w*|clos[ei]\w*+)?\W*(?:https:\/\/app\.asana\.com\/\d+\/\d+\/(\d+)|#(\d+))/i + issue_finder = %r{(fix\w*|clos[ei]\w*+)?\W*(?:https://app\.asana\.com/\d+/\d+/(\d+)|#(\d+))}i message.scan(issue_finder).each do |tuple| # tuple will be diff --git a/app/models/project_services/emails_on_push_service.rb b/app/models/project_services/emails_on_push_service.rb index 1a236e232f9..b604d860a87 100644 --- a/app/models/project_services/emails_on_push_service.rb +++ b/app/models/project_services/emails_on_push_service.rb @@ -2,7 +2,7 @@ class EmailsOnPushService < Service boolean_accessor :send_from_committer_email boolean_accessor :disable_diffs prop_accessor :recipients - validates :recipients, presence: true, if: :activated? + validates :recipients, presence: true, if: :valid_recipients? 
def title 'Emails on push' diff --git a/app/models/project_services/irker_service.rb b/app/models/project_services/irker_service.rb index 19357f90810..27bdf708c80 100644 --- a/app/models/project_services/irker_service.rb +++ b/app/models/project_services/irker_service.rb @@ -4,7 +4,7 @@ class IrkerService < Service prop_accessor :server_host, :server_port, :default_irc_uri prop_accessor :recipients, :channels boolean_accessor :colorize_messages - validates :recipients, presence: true, if: :activated? + validates :recipients, presence: true, if: :valid_recipients? before_validation :get_channels diff --git a/app/models/project_services/issue_tracker_service.rb b/app/models/project_services/issue_tracker_service.rb index 31984c5d7ed..5fb15c383ca 100644 --- a/app/models/project_services/issue_tracker_service.rb +++ b/app/models/project_services/issue_tracker_service.rb @@ -10,9 +10,9 @@ class IssueTrackerService < Service # overriden patterns. See ReferenceRegexes::EXTERNAL_PATTERN def self.reference_pattern(only_long: false) if only_long - %r{(\b[A-Z][A-Z0-9_]+-)(?<issue>\d+)} + /(\b[A-Z][A-Z0-9_]+-)(?<issue>\d+)/ else - %r{(\b[A-Z][A-Z0-9_]+-|#{Issue.reference_prefix})(?<issue>\d+)} + /(\b[A-Z][A-Z0-9_]+-|#{Issue.reference_prefix})(?<issue>\d+)/ end end diff --git a/app/models/project_services/jira_service.rb b/app/models/project_services/jira_service.rb index 2be35b6ea9d..30eafe31454 100644 --- a/app/models/project_services/jira_service.rb +++ b/app/models/project_services/jira_service.rb @@ -19,7 +19,7 @@ class JiraService < IssueTrackerService # {PROJECT-KEY}-{NUMBER} Examples: JIRA-1, PROJECT-1 def self.reference_pattern(only_long: true) - @reference_pattern ||= %r{(?<issue>\b([A-Z][A-Z0-9_]+-)\d+)} + @reference_pattern ||= /(?<issue>\b([A-Z][A-Z0-9_]+-)\d+)/ end def initialize_properties @@ -43,7 +43,7 @@ class JiraService < IssueTrackerService username: self.username, password: self.password, site: URI.join(url, '/').to_s, - context_path: url.path, + context_path: url.path.chomp('/'), auth_type: :basic, read_timeout: 120, use_cookies: true, diff --git a/app/models/project_services/pipelines_email_service.rb b/app/models/project_services/pipelines_email_service.rb index 6a3118a11b8..9c7b58dead5 100644 --- a/app/models/project_services/pipelines_email_service.rb +++ b/app/models/project_services/pipelines_email_service.rb @@ -1,7 +1,7 @@ class PipelinesEmailService < Service prop_accessor :recipients boolean_accessor :notify_only_broken_pipelines - validates :recipients, presence: true, if: :activated? + validates :recipients, presence: true, if: :valid_recipients? def initialize_properties self.properties ||= { notify_only_broken_pipelines: true } diff --git a/app/models/project_wiki.rb b/app/models/project_wiki.rb index a0af749a93f..459d1673125 100644 --- a/app/models/project_wiki.rb +++ b/app/models/project_wiki.rb @@ -124,6 +124,12 @@ class ProjectWiki update_project_activity end + def page_formatted_data(page) + page_title, page_dir = page_title_and_dir(page.title) + + wiki.page_formatted_data(title: page_title, dir: page_dir, version: page.version) + end + def page_title_and_dir(title) title_array = title.split("/") title = title_array.pop diff --git a/app/models/repository.rb b/app/models/repository.rb index 5b06dc5a39b..edfb236a91a 100644 --- a/app/models/repository.rb +++ b/app/models/repository.rb @@ -255,6 +255,8 @@ class Repository # This will still fail if the file is corrupted (e.g. 
0 bytes) raw_repository.write_ref(keep_around_ref_name(sha), sha, shell: false) + rescue Gitlab::Git::CommandError => ex + Rails.logger.error "Unable to create keep-around reference for repository #{path}: #{ex}" end def kept_around?(sha) @@ -491,7 +493,7 @@ class Repository raw_repository.root_ref else # When the repo does not exist we raise this error so no data is cached. - raise Rugged::ReferenceError + raise Gitlab::Git::Repository::NoRepository end end cache_method :root_ref @@ -525,11 +527,7 @@ class Repository def commit_count_for_ref(ref) return 0 unless exists? - begin - cache.fetch(:"commit_count_#{ref}") { raw_repository.commit_count(ref) } - rescue Rugged::ReferenceError - 0 - end + cache.fetch(:"commit_count_#{ref}") { raw_repository.commit_count(ref) } end delegate :branch_names, to: :raw_repository @@ -653,26 +651,14 @@ class Repository end def last_commit_for_path(sha, path) - raw_repository.gitaly_migrate(:last_commit_for_path) do |is_enabled| - if is_enabled - last_commit_for_path_by_gitaly(sha, path) - else - last_commit_for_path_by_rugged(sha, path) - end - end + commit_by(oid: last_commit_id_for_path(sha, path)) end def last_commit_id_for_path(sha, path) key = path.blank? ? "last_commit_id_for_path:#{sha}" : "last_commit_id_for_path:#{sha}:#{Digest::SHA1.hexdigest(path)}" cache.fetch(key) do - raw_repository.gitaly_migrate(:last_commit_for_path) do |is_enabled| - if is_enabled - last_commit_for_path_by_gitaly(sha, path).id - else - last_commit_id_for_path_by_shelling_out(sha, path) - end - end + raw_repository.last_commit_id_for_path(sha, path) end end @@ -800,16 +786,6 @@ class Repository with_cache_hooks { raw.multi_action(user, **options) } end - def can_be_merged?(source_sha, target_branch) - raw_repository.gitaly_migrate(:can_be_merged) do |is_enabled| - if is_enabled - gitaly_can_be_merged?(source_sha, find_branch(target_branch).target) - else - rugged_can_be_merged?(source_sha, target_branch) - end - end - end - def merge(user, source_sha, merge_request, message) with_cache_hooks do raw_repository.merge(user, source_sha, merge_request.target_branch, message) do |commit_id| @@ -876,26 +852,18 @@ class Repository @root_ref_sha ||= commit(root_ref).sha end - delegate :merged_branch_names, to: :raw_repository + delegate :merged_branch_names, :can_be_merged?, to: :raw_repository def merge_base(first_commit_id, second_commit_id) first_commit_id = commit(first_commit_id).try(:id) || first_commit_id second_commit_id = commit(second_commit_id).try(:id) || second_commit_id raw_repository.merge_base(first_commit_id, second_commit_id) - rescue Rugged::ReferenceError - nil end def ancestor?(ancestor_id, descendant_id) return false if ancestor_id.nil? || descendant_id.nil? - Gitlab::GitalyClient.migrate(:is_ancestor) do |is_enabled| - if is_enabled - raw_repository.ancestor?(ancestor_id, descendant_id) - else - rugged_is_ancestor?(ancestor_id, descendant_id) - end - end + raw_repository.ancestor?(ancestor_id, descendant_id) end def fetch_as_mirror(url, forced: false, refmap: :all_refs, remote_name: nil) @@ -983,7 +951,7 @@ class Repository end instance_variable_set(ivar, value) - rescue Rugged::ReferenceError, Gitlab::Git::Repository::NoRepository + rescue Gitlab::Git::Repository::NoRepository # Even if the above `#exists?` check passes these errors might still # occur (for example because of a non-existing HEAD). 
We want to # gracefully handle this and not cache anything @@ -1077,30 +1045,7 @@ class Repository Gitlab::Metrics.add_event(event, { path: full_path }.merge(tags)) end - def last_commit_for_path_by_gitaly(sha, path) - c = raw_repository.gitaly_commit_client.last_commit_for_path(sha, path) - commit_by(oid: c) - end - - def last_commit_for_path_by_rugged(sha, path) - sha = last_commit_id_for_path_by_shelling_out(sha, path) - commit_by(oid: sha) - end - - def last_commit_id_for_path_by_shelling_out(sha, path) - args = %W(rev-list --max-count=1 #{sha} -- #{path}) - raw_repository.run_git_with_timeout(args, Gitlab::Git::Popen::FAST_GIT_PROCESS_TIMEOUT).first.strip - end - def initialize_raw_repository Gitlab::Git::Repository.new(project.repository_storage, disk_path + '.git', Gitlab::GlRepository.gl_repository(project, is_wiki)) end - - def gitaly_can_be_merged?(their_commit, our_commit) - !raw_repository.gitaly_conflicts_client(our_commit, their_commit).conflicts? - end - - def rugged_can_be_merged?(their_commit, our_commit) - !rugged.merge_commits(our_commit, their_commit).conflicts? - end end diff --git a/app/models/service.rb b/app/models/service.rb index 96a064697f0..369cae2e85f 100644 --- a/app/models/service.rb +++ b/app/models/service.rb @@ -2,6 +2,8 @@ # and implement a set of methods class Service < ActiveRecord::Base include Sortable + include Importable + serialize :properties, JSON # rubocop:disable Cop/ActiveRecordSerialize default_value_for :active, false @@ -295,4 +297,8 @@ class Service < ActiveRecord::Base project.cache_has_external_wiki end end + + def valid_recipients? + activated? && !importing? + end end diff --git a/app/models/upload.rb b/app/models/upload.rb index f194d7bdb80..fb55fd8007b 100644 --- a/app/models/upload.rb +++ b/app/models/upload.rb @@ -9,22 +9,11 @@ class Upload < ActiveRecord::Base validates :model, presence: true validates :uploader, presence: true - before_save :calculate_checksum, if: :foreground_checksum? - after_commit :schedule_checksum, unless: :foreground_checksum? + before_save :calculate_checksum!, if: :foreground_checksummable? + after_commit :schedule_checksum, if: :checksummable? - def self.remove_path(path) - where(path: path).destroy_all - end - - def self.record(uploader) - remove_path(uploader.relative_path) - - create( - size: uploader.file.size, - path: uploader.relative_path, - model: uploader.model, - uploader: uploader.class.to_s - ) + def self.hexdigest(path) + Digest::SHA256.file(path).hexdigest end def absolute_path @@ -33,10 +22,18 @@ class Upload < ActiveRecord::Base uploader_class.absolute_path(self) end - def calculate_checksum - return unless exist? + def calculate_checksum! + self.checksum = nil + return unless checksummable? - self.checksum = Digest::SHA256.file(absolute_path).hexdigest + self.checksum = self.class.hexdigest(absolute_path) + end + + def build_uploader + uploader_class.new(model).tap do |uploader| + uploader.upload = self + uploader.retrieve_from_store!(identifier) + end end def exist? @@ -45,8 +42,16 @@ class Upload < ActiveRecord::Base private - def foreground_checksum? - size <= CHECKSUM_THRESHOLD + def checksummable? + checksum.nil? && local? && exist? + end + + def local? + true + end + + def foreground_checksummable? + checksummable? 
&& size <= CHECKSUM_THRESHOLD end def schedule_checksum @@ -57,6 +62,10 @@ class Upload < ActiveRecord::Base !path.start_with?('/') end + def identifier + File.basename(path) + end + def uploader_class Object.const_get(uploader) end end diff --git a/app/models/user.rb b/app/models/user.rb index 9403da98268..89e787c3274 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -137,6 +137,7 @@ class User < ActiveRecord::Base has_many :assigned_merge_requests, dependent: :nullify, foreign_key: :assignee_id, class_name: "MergeRequest" # rubocop:disable Cop/ActiveRecordDependent has_many :custom_attributes, class_name: 'UserCustomAttribute' + has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent # # Validations @@ -159,12 +160,10 @@ class User < ActiveRecord::Base validate :namespace_uniq, if: :username_changed? validate :namespace_move_dir_allowed, if: :username_changed? - validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? } validate :unique_email, if: :email_changed? validate :owns_notification_email, if: :notification_email_changed? validate :owns_public_email, if: :public_email_changed? validate :signup_domain_valid?, on: :create, if: ->(user) { !user.created_by_id } - validates :avatar, file_size: { maximum: 200.kilobytes.to_i } before_validation :sanitize_attrs before_validation :set_notification_email, if: :email_changed? @@ -225,9 +224,6 @@ class User < ActiveRecord::Base end end - mount_uploader :avatar, AvatarUploader - has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent - # Scopes scope :admins, -> { where(admin: true) } scope :blocked, -> { with_states(:blocked, :ldap_blocked) } @@ -527,12 +523,6 @@ class User < ActiveRecord::Base end end - def avatar_type - unless avatar.image? - errors.add :avatar, "only images allowed" - end - end - def unique_email if !emails.exists?(email: email) && Email.exists?(email: email) errors.add(:email, 'has already been taken') @@ -842,13 +832,13 @@ class User < ActiveRecord::Base end def full_website_url - return "http://#{website_url}" if website_url !~ /\Ahttps?:\/\// + return "http://#{website_url}" if website_url !~ %r{\Ahttps?://} website_url end def short_website_url - website_url.sub(/\Ahttps?:\/\//, '') + website_url.sub(%r{\Ahttps?://}, '') end def all_ssh_keys @@ -860,9 +850,7 @@ class User < ActiveRecord::Base end def avatar_url(size: nil, scale: 2, **args) - # We use avatar_path instead of overriding avatar_url because of carrierwave. - # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864 - avatar_path(args) || GravatarService.new.execute(email, size, scale, username: username) + GravatarService.new.execute(email, size, scale, username: username) end def primary_email_verified? diff --git a/app/models/wiki_page.rb b/app/models/wiki_page.rb index bdfef677ef3..e6254183baf 100644 --- a/app/models/wiki_page.rb +++ b/app/models/wiki_page.rb @@ -107,7 +107,10 @@ class WikiPage # The processed/formatted content of this page. def formatted_content - @attributes[:formatted_content] ||= @page&.formatted_data + # Assuming @page exists, nil formatted_data means we didn't load it + # beforehand (i.e. page was fetched by Gitaly), so we fetch it separately. + # If the page was fetched by Gollum, formatted_data would've been a String. + @attributes[:formatted_content] ||= @page&.formatted_data || @wiki.page_formatted_data(@page) end # The markup format for the page.
diff --git a/app/policies/ci/pipeline_schedule_policy.rb b/app/policies/ci/pipeline_schedule_policy.rb index abcf536b2f7..dc7a4aed577 100644 --- a/app/policies/ci/pipeline_schedule_policy.rb +++ b/app/policies/ci/pipeline_schedule_policy.rb @@ -10,6 +10,10 @@ module Ci can?(:developer_access) && pipeline_schedule.owned_by?(@user) end + condition(:non_owner_of_schedule) do + !pipeline_schedule.owned_by?(@user) + end + rule { can?(:developer_access) }.policy do enable :play_pipeline_schedule end @@ -19,6 +23,10 @@ module Ci enable :admin_pipeline_schedule end + rule { can?(:master_access) & non_owner_of_schedule }.policy do + enable :take_ownership_pipeline_schedule + end + rule { protected_ref }.prevent :play_pipeline_schedule end end diff --git a/app/serializers/merge_request_widget_entity.rb b/app/serializers/merge_request_widget_entity.rb index 48cd2317f46..fbfe480503b 100644 --- a/app/serializers/merge_request_widget_entity.rb +++ b/app/serializers/merge_request_widget_entity.rb @@ -48,7 +48,18 @@ class MergeRequestWidgetEntity < IssuableEntity expose :merge_ongoing?, as: :merge_ongoing expose :work_in_progress?, as: :work_in_progress expose :source_branch_exists?, as: :source_branch_exists - expose :mergeable_discussions_state?, as: :mergeable_discussions_state + + expose :mergeable_discussions_state?, as: :mergeable_discussions_state do |merge_request| + # This avoids calling MergeRequest#mergeable_discussions_state without + # considering the state of the MR first. If an MR isn't mergeable, we can + # safely short-circuit it. + if merge_request.mergeable_state?(skip_ci_check: true, skip_discussions_check: true) + merge_request.mergeable_discussions_state? + else + false + end + end + expose :branch_missing?, as: :branch_missing expose :commits_count expose :cannot_be_merged?, as: :has_conflicts diff --git a/app/services/merge_requests/build_service.rb b/app/services/merge_requests/build_service.rb index 22b9b91a957..2ae855d078b 100644 --- a/app/services/merge_requests/build_service.rb +++ b/app/services/merge_requests/build_service.rb @@ -1,7 +1,9 @@ module MergeRequests class BuildService < MergeRequests::BaseService + include Gitlab::Utils::StrongMemoize + def execute - @issue_iid = params.delete(:issue_iid) + @params_issue_iid = params.delete(:issue_iid) self.merge_request = MergeRequest.new(params) merge_request.compare_commits = [] @@ -123,7 +125,7 @@ module MergeRequests # def assign_title_and_description assign_title_and_description_from_single_commit - assign_title_from_issue + assign_title_from_issue if target_project.issues_enabled? || target_project.external_issue_tracker merge_request.title ||= source_branch.titleize.humanize merge_request.title = wip_title if compare_commits.empty? @@ -132,9 +134,9 @@ module MergeRequests end def append_closes_description - return unless issue_iid + return unless issue - closes_issue = "Closes ##{issue_iid}" + closes_issue = "Closes #{issue.to_reference}" if description.present? merge_request.description += closes_issue.prepend("\n\n") @@ -154,13 +156,27 @@ module MergeRequests end def assign_title_from_issue - return unless issue && issue.is_a?(Issue) + return unless issue + + merge_request.title = "Resolve \"#{issue.title}\"" if issue.is_a?(Issue) - merge_request.title = "Resolve \"#{issue.title}\"" + unless merge_request.title + branch_title = source_branch.downcase.remove(issue_iid.downcase).titleize.humanize + merge_request.title = "Resolve #{issue_iid}" + merge_request.title += " \"#{branch_title}\"" unless branch_title.empty?
+ end end def issue_iid - @issue_iid ||= source_branch.match(/\A(\d+)-/).try(:[], 1) + strong_memoize(:issue_iid) do + @params_issue_iid || begin + id = if target_project.external_issue_tracker + source_branch.match(target_project.external_issue_reference_pattern).try(:[], 0) + end + + id || source_branch.match(/\A(\d+)-/).try(:[], 1) + end + end end def issue diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb index 9f05535d4d4..18c40ce8992 100644 --- a/app/services/merge_requests/refresh_service.rb +++ b/app/services/merge_requests/refresh_service.rb @@ -9,7 +9,8 @@ module MergeRequests Gitlab::GitalyClient.allow_n_plus_1_calls(&method(:find_new_commits)) # Be sure to close outstanding MRs before reloading them to avoid generating an # empty diff during a manual merge - close_merge_requests + close_upon_missing_source_branch_ref + post_merge_manually_merged reload_merge_requests reset_merge_when_pipeline_succeeds mark_pending_todos_done @@ -29,11 +30,22 @@ module MergeRequests private + def close_upon_missing_source_branch_ref + # MergeRequest#reload_diff ignores not opened MRs. This means it won't + # create an `empty` diff for `closed` MRs without a source branch, keeping + # the latest diff state as the last _valid_ one. + merge_requests_for_source_branch.reject(&:source_branch_exists?).each do |mr| + MergeRequests::CloseService + .new(mr.target_project, @current_user) + .execute(mr) + end + end + # Collect open merge requests that target same branch we push into # and close if push to master include last commit from merge request # We need this to close(as merged) merge requests that were merged into # target branch manually - def close_merge_requests + def post_merge_manually_merged commit_ids = @commits.map(&:id) merge_requests = @project.merge_requests.preload(:latest_merge_request_diff).opened.where(target_branch: @branch_name).to_a merge_requests = merge_requests.select(&:diff_head_commit) @@ -78,6 +90,10 @@ module MergeRequests merge_request.mark_as_unchecked UpdateHeadPipelineForMergeRequestWorker.perform_async(merge_request.id) end + + # Upcoming method calls need the refreshed version of + # @source_merge_requests diffs (for MergeRequest#commit_shas for instance). + merge_requests_for_source_branch(reload: true) end def reset_merge_when_pipeline_succeeds @@ -183,7 +199,8 @@ module MergeRequests merge_requests.uniq.select(&:source_project) end - def merge_requests_for_source_branch + def merge_requests_for_source_branch(reload: false) + @source_merge_requests = nil if reload @source_merge_requests ||= merge_requests_for(@branch_name) end diff --git a/app/services/projects/hashed_storage/migrate_attachments_service.rb b/app/services/projects/hashed_storage/migrate_attachments_service.rb index f8aaec8a9c0..bc897d891d5 100644 --- a/app/services/projects/hashed_storage/migrate_attachments_service.rb +++ b/app/services/projects/hashed_storage/migrate_attachments_service.rb @@ -14,9 +14,9 @@ module Projects @old_path = project.full_path @new_path = project.disk_path - origin = FileUploader.dynamic_path_segment(project) + origin = FileUploader.absolute_base_dir(project) project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments] - target = FileUploader.dynamic_path_segment(project) + target = FileUploader.absolute_base_dir(project) result = move_folder!(origin, target) project.save! 
diff --git a/app/uploaders/attachment_uploader.rb b/app/uploaders/attachment_uploader.rb index 109eb2fea0b..4930fb2fca7 100644 --- a/app/uploaders/attachment_uploader.rb +++ b/app/uploaders/attachment_uploader.rb @@ -1,10 +1,12 @@ class AttachmentUploader < GitlabUploader - include RecordsUploads include UploaderHelper + include RecordsUploads::Concern storage :file - def store_dir - "#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}" + private + + def dynamic_segment + File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s) end end diff --git a/app/uploaders/avatar_uploader.rb b/app/uploaders/avatar_uploader.rb index cbb79376d5f..5c8e1cea62e 100644 --- a/app/uploaders/avatar_uploader.rb +++ b/app/uploaders/avatar_uploader.rb @@ -1,25 +1,24 @@ class AvatarUploader < GitlabUploader - include RecordsUploads include UploaderHelper + include RecordsUploads::Concern storage :file - def store_dir - "#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}" - end - def exists? model.avatar.file && model.avatar.file.present? end - # We set move_to_store and move_to_cache to 'false' to prevent stealing - # the avatar file from a project when forking it. - # https://gitlab.com/gitlab-org/gitlab-ce/issues/26158 - def move_to_store + def move_to_cache false end - def move_to_cache + def move_to_store false end + + private + + def dynamic_segment + File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s) + end end diff --git a/app/uploaders/file_mover.rb b/app/uploaders/file_mover.rb index 00c2888d224..e7af1483d23 100644 --- a/app/uploaders/file_mover.rb +++ b/app/uploaders/file_mover.rb @@ -21,7 +21,8 @@ class FileMover end def update_markdown - updated_text = model.read_attribute(update_field).gsub(temp_file_uploader.to_markdown, uploader.to_markdown) + updated_text = model.read_attribute(update_field) + .gsub(temp_file_uploader.markdown_link, uploader.markdown_link) model.update_attribute(update_field, updated_text) true diff --git a/app/uploaders/file_uploader.rb b/app/uploaders/file_uploader.rb index 0b591e3bbbb..85ae9863b13 100644 --- a/app/uploaders/file_uploader.rb +++ b/app/uploaders/file_uploader.rb @@ -1,23 +1,38 @@ +# This class breaks the actual CarrierWave concept. +# Every uploader should use a base_dir that is model agnostic so we can build +# back URLs from base_dir-relative paths saved in the `Upload` model. +# +# As the `.base_dir` is model dependent and **not** saved in the upload model (see #upload_path) +# there is no way to build back the correct file path without the model, which defies +# CarrierWave way of storing files. +# class FileUploader < GitlabUploader - include RecordsUploads include UploaderHelper + include RecordsUploads::Concern MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)} + DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)} storage :file - def self.absolute_path(upload_record) + def self.root + File.join(options.storage_path, 'uploads') + end + + def self.absolute_path(upload) File.join( - self.dynamic_path_segment(upload_record.model), - upload_record.path + absolute_base_dir(upload.model), + upload.path # already contain the dynamic_segment, see #upload_path ) end - # Not using `GitlabUploader.base_dir` because all project namespaces are in - # the `public/uploads` dir. 
- # - def self.base_dir - root_dir + def self.base_dir(model) + model_path_segment(model) + end + + # used in migrations and import/exports + def self.absolute_base_dir(model) + File.join(root, base_dir(model)) end # Returns the part of `store_dir` that can change based on the model's current @@ -29,63 +44,96 @@ class FileUploader < GitlabUploader # model - Object that responds to `full_path` and `disk_path` # # Returns a String without a trailing slash - def self.dynamic_path_segment(model) + def self.model_path_segment(model) if model.hashed_storage?(:attachments) - dynamic_path_builder(model.disk_path) + model.disk_path else - dynamic_path_builder(model.full_path) + model.full_path end end - # Auxiliary method to build dynamic path segment when not using a project model - # - # Prefer to use the `.dynamic_path_segment` as it includes Hashed Storage specific logic - def self.dynamic_path_builder(path) - File.join(CarrierWave.root, base_dir, path) + def self.upload_path(secret, identifier) + File.join(secret, identifier) + end + + def self.generate_secret + SecureRandom.hex end attr_accessor :model - attr_reader :secret def initialize(model, secret = nil) @model = model - @secret = secret || generate_secret + @secret = secret end - def store_dir - File.join(dynamic_path_segment, @secret) + def base_dir + self.class.base_dir(@model) end - def relative_path - self.file.path.sub("#{dynamic_path_segment}/", '') + # we don't need to know the actual path, an uploader instance should be + # able to yield the file content on demand, so we should build the digest + def absolute_path + self.class.absolute_path(@upload) end - def to_markdown - to_h[:markdown] + def upload_path + self.class.upload_path(dynamic_segment, identifier) end - def to_h - filename = image_or_video? ? self.file.basename : self.file.filename - escaped_filename = filename.gsub("]", "\\]") + def model_path_segment + self.class.model_path_segment(@model) + end + + def store_dir + File.join(base_dir, dynamic_segment) + end - markdown = "[#{escaped_filename}](#{secure_url})" + def markdown_link + markdown = "[#{markdown_name}](#{secure_url})" markdown.prepend("!") if image_or_video? || dangerous? + markdown + end + def to_h { - alt: filename, + alt: markdown_name, url: secure_url, - markdown: markdown + markdown: markdown_link } end + def filename + self.file.filename + end + + # the upload does not hold the secret, but holds the path + # which contains the secret: extract it + def upload=(value) + if matches = DYNAMIC_PATH_PATTERN.match(value.path) + @secret = matches[:secret] + @identifier = matches[:identifier] + end + + super + end + + def secret + @secret ||= self.class.generate_secret + end + private - def dynamic_path_segment - self.class.dynamic_path_segment(model) + def markdown_name + (image_or_video? ? 
File.basename(filename, File.extname(filename)) : filename).gsub("]", "\\]") end - def generate_secret - SecureRandom.hex + def identifier + @identifier ||= filename + end + + def dynamic_segment + secret end def secure_url diff --git a/app/uploaders/gitlab_uploader.rb b/app/uploaders/gitlab_uploader.rb index 7f72b3ce471..b12829efe73 100644 --- a/app/uploaders/gitlab_uploader.rb +++ b/app/uploaders/gitlab_uploader.rb @@ -1,28 +1,32 @@ class GitlabUploader < CarrierWave::Uploader::Base - def self.absolute_path(upload_record) - File.join(CarrierWave.root, upload_record.path) - end + class_attribute :options - def self.root_dir - 'uploads' - end + class << self + # DSL setter + def storage_options(options) + self.options = options + end - # When object storage is used, keep the `root_dir` as `base_dir`. - # The files aren't really in folders there, they just have a name. - # The files that contain user input in their name, also contain a hash, so - # the names are still unique - # - # This method is overridden in the `FileUploader` - def self.base_dir - return root_dir unless file_storage? + def root + options.storage_path + end - File.join(root_dir, '-', 'system') - end + # represent the directory namespacing at the class level + def base_dir + options.fetch('base_dir', '') + end - def self.file_storage? - self.storage == CarrierWave::Storage::File + def file_storage? + storage == CarrierWave::Storage::File + end + + def absolute_path(upload_record) + File.join(root, upload_record.path) + end end + storage_options Gitlab.config.uploads + delegate :base_dir, :file_storage?, to: :class def file_cache_storage? @@ -31,34 +35,28 @@ class GitlabUploader < CarrierWave::Uploader::Base # Reduce disk IO def move_to_cache - true + file_storage? end # Reduce disk IO def move_to_store - true - end - - # Designed to be overridden by child uploaders that have a dynamic path - # segment -- that is, a path that changes based on mutable attributes of its - # associated model - # - # For example, `FileUploader` builds the storage path based on the associated - # project model's `path_with_namespace` value, which can change when the - # project or its containing namespace is moved or renamed. - def relative_path - self.file.path.sub("#{root}/", '') + file_storage? end def exists? file.present? end - # Override this if you don't want to save files by default to the Rails.root directory + def store_dir + File.join(base_dir, dynamic_segment) + end + + def cache_dir + File.join(root, base_dir, 'tmp/cache') + end + def work_dir - # Default path set by CarrierWave: - # https://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L182 - CarrierWave.tmp_path + File.join(root, base_dir, 'tmp/work') end def filename @@ -67,6 +65,13 @@ class GitlabUploader < CarrierWave::Uploader::Base private + # Designed to be overridden by child uploaders that have a dynamic path + # segment -- that is, a path that changes based on mutable attributes of its + # associated model + def dynamic_segment + raise(NotImplementedError) + end + # To prevent files from moving across filesystems, override the default # implementation: # http://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L181-L183 @@ -74,6 +79,6 @@ class GitlabUploader < CarrierWave::Uploader::Base # To be safe, keep this directory outside of the the cache directory # because calling CarrierWave.clean_cache_files! will remove any files in # the cache directory. 
- File.join(work_dir, @cache_id, version_name.to_s, for_file) + File.join(work_dir, cache_id, version_name.to_s, for_file) end end diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb index 15dfb5a5763..0abb462ab7d 100644 --- a/app/uploaders/job_artifact_uploader.rb +++ b/app/uploaders/job_artifact_uploader.rb @@ -1,13 +1,7 @@ class JobArtifactUploader < GitlabUploader - storage :file + extend Workhorse::UploadPath - def self.local_store_path - Gitlab.config.artifacts.path - end - - def self.artifacts_upload_path - File.join(self.local_store_path, 'tmp/uploads/') - end + storage_options Gitlab.config.artifacts def size return super if model.size.nil? @@ -16,24 +10,12 @@ class JobArtifactUploader < GitlabUploader end def store_dir - default_local_path - end - - def cache_dir - File.join(self.class.local_store_path, 'tmp/cache') - end - - def work_dir - File.join(self.class.local_store_path, 'tmp/work') + dynamic_segment end private - def default_local_path - File.join(self.class.local_store_path, default_path) - end - - def default_path + def dynamic_segment creation_date = model.created_at.utc.strftime('%Y_%m_%d') File.join(disk_hash[0..1], disk_hash[2..3], disk_hash, diff --git a/app/uploaders/legacy_artifact_uploader.rb b/app/uploaders/legacy_artifact_uploader.rb index 4f7f8a63108..28c458d3ff1 100644 --- a/app/uploaders/legacy_artifact_uploader.rb +++ b/app/uploaders/legacy_artifact_uploader.rb @@ -1,33 +1,15 @@ class LegacyArtifactUploader < GitlabUploader - storage :file + extend Workhorse::UploadPath - def self.local_store_path - Gitlab.config.artifacts.path - end - - def self.artifacts_upload_path - File.join(self.local_store_path, 'tmp/uploads/') - end + storage_options Gitlab.config.artifacts def store_dir - default_local_path - end - - def cache_dir - File.join(self.class.local_store_path, 'tmp/cache') - end - - def work_dir - File.join(self.class.local_store_path, 'tmp/work') + dynamic_segment end private - def default_local_path - File.join(self.class.local_store_path, default_path) - end - - def default_path + def dynamic_segment File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s) end end diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb index d11ebf0f9ca..e04c97ce179 100644 --- a/app/uploaders/lfs_object_uploader.rb +++ b/app/uploaders/lfs_object_uploader.rb @@ -1,19 +1,24 @@ class LfsObjectUploader < GitlabUploader - storage :file + extend Workhorse::UploadPath - def store_dir - "#{Gitlab.config.lfs.storage_path}/#{model.oid[0, 2]}/#{model.oid[2, 2]}" + # LfsObject are in `tmp/upload` instead of `tmp/uploads` + def self.workhorse_upload_path + File.join(root, 'tmp/upload') end - def cache_dir - "#{Gitlab.config.lfs.storage_path}/tmp/cache" - end + storage_options Gitlab.config.lfs def filename model.oid[4..-1] end - def work_dir - File.join(Gitlab.config.lfs.storage_path, 'tmp', 'work') + def store_dir + dynamic_segment + end + + private + + def dynamic_segment + File.join(model.oid[0, 2], model.oid[2, 2]) end end diff --git a/app/uploaders/namespace_file_uploader.rb b/app/uploaders/namespace_file_uploader.rb index 672126e9ec2..993e85fbc13 100644 --- a/app/uploaders/namespace_file_uploader.rb +++ b/app/uploaders/namespace_file_uploader.rb @@ -1,15 +1,19 @@ class NamespaceFileUploader < FileUploader - def self.base_dir - File.join(root_dir, '-', 'system', 'namespace') + # Re-Override + def self.root + options.storage_path end - def self.dynamic_path_segment(model) 
- dynamic_path_builder(model.id.to_s) + def self.base_dir(model) + File.join(options.base_dir, 'namespace', model_path_segment(model)) end - private + def self.model_path_segment(model) + File.join(model.id.to_s) + end - def secure_url - File.join('/uploads', @secret, file.filename) + # Re-Override + def store_dir + File.join(base_dir, dynamic_segment) end end diff --git a/app/uploaders/personal_file_uploader.rb b/app/uploaders/personal_file_uploader.rb index 3298ad104ec..e7d9ecd3222 100644 --- a/app/uploaders/personal_file_uploader.rb +++ b/app/uploaders/personal_file_uploader.rb @@ -1,23 +1,27 @@ class PersonalFileUploader < FileUploader - def self.dynamic_path_segment(model) - File.join(CarrierWave.root, model_path(model)) + # Re-Override + def self.root + options.storage_path end - def self.base_dir - File.join(root_dir, '-', 'system') + def self.base_dir(model) + File.join(options.base_dir, model_path_segment(model)) end - private + def self.model_path_segment(model) + return 'temp/' unless model - def secure_url - File.join(self.class.model_path(model), secret, file.filename) + File.join(model.class.to_s.underscore, model.id.to_s) + end + + # Revert-Override + def store_dir + File.join(base_dir, dynamic_segment) end - def self.model_path(model) - if model - File.join("/#{base_dir}", model.class.to_s.underscore, model.id.to_s) - else - File.join("/#{base_dir}", 'temp') - end + private + + def secure_url + File.join('/', base_dir, secret, file.filename) end end diff --git a/app/uploaders/records_uploads.rb b/app/uploaders/records_uploads.rb index feb4f04d7b7..dfb8dccec57 100644 --- a/app/uploaders/records_uploads.rb +++ b/app/uploaders/records_uploads.rb @@ -1,35 +1,61 @@ module RecordsUploads - extend ActiveSupport::Concern + module Concern + extend ActiveSupport::Concern - included do - after :store, :record_upload - before :remove, :destroy_upload - end + attr_accessor :upload - # After storing an attachment, create a corresponding Upload record - # - # NOTE: We're ignoring the argument passed to this callback because we want - # the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the - # `Tempfile` object the callback gets. - # - # Called `after :store` - def record_upload(_tempfile = nil) - return unless model - return unless file_storage? - return unless file.exists? - - Upload.record(self) - end + included do + after :store, :record_upload + before :remove, :destroy_upload + end + + # After storing an attachment, create a corresponding Upload record + # + # NOTE: We're ignoring the argument passed to this callback because we want + # the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the + # `Tempfile` object the callback gets. + # + # Called `after :store` + def record_upload(_tempfile = nil) + return unless model + return unless file && file.exists? + + Upload.transaction do + uploads.where(path: upload_path).delete_all + upload.destroy! if upload + + self.upload = build_upload_from_uploader(self) + upload.save! + end + end + + def upload_path + File.join(store_dir, filename.to_s) + end + + private + + def uploads + Upload.order(id: :desc).where(uploader: self.class.to_s) + end - private + def build_upload_from_uploader(uploader) + Upload.new( + size: uploader.file.size, + path: uploader.upload_path, + model: uploader.model, + uploader: uploader.class.to_s + ) + end - # Before removing an attachment, destroy any Upload records at the same path - # - # Called `before :remove` - def destroy_upload(*args) - return unless file_storage? 
- return unless file + # Before removing an attachment, destroy any Upload records at the same path + # + # Called `before :remove` + def destroy_upload(*args) + return unless file && file.exists? - Upload.remove_path(relative_path) + self.upload = nil + uploads.where(path: upload_path).delete_all + end end end diff --git a/app/uploaders/uploader_helper.rb b/app/uploaders/uploader_helper.rb index 7635c20ab3a..fd446d31092 100644 --- a/app/uploaders/uploader_helper.rb +++ b/app/uploaders/uploader_helper.rb @@ -32,14 +32,7 @@ module UploaderHelper def extension_match?(extensions) return false unless file - extension = - if file.respond_to?(:extension) - file.extension - else - # Not all CarrierWave storages respond to :extension - File.extname(file.path).delete('.') - end - + extension = file.try(:extension) || File.extname(file.path).delete('.') extensions.include?(extension.downcase) end end diff --git a/app/uploaders/workhorse.rb b/app/uploaders/workhorse.rb new file mode 100644 index 00000000000..782032cf516 --- /dev/null +++ b/app/uploaders/workhorse.rb @@ -0,0 +1,7 @@ +module Workhorse + module UploadPath + def workhorse_upload_path + File.join(root, base_dir, 'tmp/uploads') + end + end +end diff --git a/app/views/admin/application_settings/_form.html.haml b/app/views/admin/application_settings/_form.html.haml index ba4ca88a8a9..fb5e6f337a7 100644 --- a/app/views/admin/application_settings/_form.html.haml +++ b/app/views/admin/application_settings/_form.html.haml @@ -537,7 +537,8 @@ .form-group = f.label :repository_storages, 'Storage paths for new projects', class: 'control-label col-sm-2' .col-sm-10 - = f.select :repository_storages, repository_storages_options_for_select, {include_hidden: false}, multiple: true, class: 'form-control' + = f.select :repository_storages, repository_storages_options_for_select(@application_setting.repository_storages), + {include_hidden: false}, multiple: true, class: 'form-control' .help-block Manage repository storage paths. Learn more in the = succeed "." do diff --git a/app/views/admin/dashboard/index.html.haml b/app/views/admin/dashboard/index.html.haml index 509f559c120..d251f75a8fd 100644 --- a/app/views/admin/dashboard/index.html.haml +++ b/app/views/admin/dashboard/index.html.haml @@ -138,20 +138,12 @@ GitLab API %span.pull-right = API::API::version - %p - Gitaly - %span.pull-right - = Gitlab::GitalyClient.expected_server_version - if Gitlab.config.pages.enabled %p GitLab Pages %span.pull-right = Gitlab::Pages::VERSION %p - Git - %span.pull-right - = Gitlab::Git.version - %p Ruby %span.pull-right #{RUBY_VERSION}p#{RUBY_PATCHLEVEL} @@ -163,6 +155,8 @@ = Gitlab::Database.adapter_name %span.pull-right = Gitlab::Database.version + %p + = link_to "Gitaly Servers", admin_gitaly_servers_path .row .col-md-4 .info-well diff --git a/app/views/admin/gitaly_servers/index.html.haml b/app/views/admin/gitaly_servers/index.html.haml new file mode 100644 index 00000000000..231f94dc95d --- /dev/null +++ b/app/views/admin/gitaly_servers/index.html.haml @@ -0,0 +1,31 @@ +- breadcrumb_title _("Gitaly Servers") + +%h3.page-title= _("Gitaly Servers") +%hr +.gitaly_servers + - if @gitaly_servers.any? 
+ .table-holder + %table.table.responsive-table + %thead.hidden-sm.hidden-xs + %tr + %th= _("Storage") + %th= n_("Gitaly|Address") + %th= _("Server version") + %th= _("Git version") + %th= _("Up to date") + - @gitaly_servers.each do |server| + %tr + %td + = server.storage + %td + = server.address + %td + = server.server_version + %td + = server.git_binary_version + %td + = boolean_to_icon(server.up_to_date?) + - else + .empty-state + .text-center + %h4= _("No connection could be made to a Gitaly Server, please check your logs!") diff --git a/app/views/help/index.html.haml b/app/views/help/index.html.haml index b8692009225..fdd72ead2cb 100644 --- a/app/views/help/index.html.haml +++ b/app/views/help/index.html.haml @@ -5,15 +5,16 @@ = markdown_field(current_application_settings, :help_page_text) %hr -- unless current_application_settings.help_page_hide_commercial_content? - %h1 - GitLab - Community Edition - - if user_signed_in? - %span= Gitlab::VERSION - %small= link_to Gitlab::REVISION, Gitlab::COM_URL + namespace_project_commits_path('gitlab-org', 'gitlab-ce', Gitlab::REVISION) - = version_status_badge +%h1 + GitLab + Community Edition + - if user_signed_in? + %span= Gitlab::VERSION + %small= link_to Gitlab::REVISION, Gitlab::COM_URL + namespace_project_commits_path('gitlab-org', 'gitlab-ce', Gitlab::REVISION) + = version_status_badge + %hr +- unless current_application_settings.help_page_hide_commercial_content? %p.slead GitLab is open source software to collaborate on code. %br diff --git a/app/views/layouts/devise.html.haml b/app/views/layouts/devise.html.haml index 4e9ea33e675..a95c834dcfd 100644 --- a/app/views/layouts/devise.html.haml +++ b/app/views/layouts/devise.html.haml @@ -15,7 +15,7 @@ .col-sm-7.brand-holder.pull-left %h1 = brand_title - = brand_image + = brand_image - if brand_item&.description? = brand_text - else diff --git a/app/views/layouts/header/_new_dropdown.haml b/app/views/layouts/header/_new_dropdown.haml index 088f2785092..eb32f393310 100644 --- a/app/views/layouts/header/_new_dropdown.haml +++ b/app/views/layouts/header/_new_dropdown.haml @@ -1,5 +1,5 @@ %li.header-new.dropdown - = link_to new_project_path, class: "header-new-dropdown-toggle has-tooltip", title: "New...", ref: 'tooltip', aria: { label: "New..." }, data: { toggle: 'dropdown', placement: 'bottom', container: 'body' } do + = link_to new_project_path, class: "header-new-dropdown-toggle has-tooltip qa-new-menu-toggle", title: "New...", ref: 'tooltip', aria: { label: "New..." 
}, data: { toggle: 'dropdown', placement: 'bottom', container: 'body' } do = sprite_icon('plus-square', size: 16) = sprite_icon('angle-down', css_class: 'caret-down') .dropdown-menu-nav.dropdown-menu-align-right diff --git a/app/views/projects/_last_push.html.haml b/app/views/projects/_last_push.html.haml index 56eecece54c..6f5eb828902 100644 --- a/app/views/projects/_last_push.html.haml +++ b/app/views/projects/_last_push.html.haml @@ -14,5 +14,5 @@ #{time_ago_with_tooltip(event.created_at)} .pull-right - = link_to new_mr_path_from_push_event(event), title: _("New merge request"), class: "btn btn-info btn-sm" do + = link_to new_mr_path_from_push_event(event), title: _("New merge request"), class: "btn btn-info btn-sm qa-create-merge-request" do #{ _('Create merge request') } diff --git a/app/views/projects/clusters/_cluster.html.haml b/app/views/projects/clusters/_cluster.html.haml index 3943dfc0856..20ee8086f93 100644 --- a/app/views/projects/clusters/_cluster.html.haml +++ b/app/views/projects/clusters/_cluster.html.haml @@ -12,11 +12,12 @@ .table-section.section-10 .table-mobile-header{ role: "rowheader" } .table-mobile-content - %button{ type: "button", - class: "js-toggle-cluster-list project-feature-toggle #{'is-checked' if cluster.enabled?} #{'is-disabled' if !cluster.can_toggle_cluster?}", + %button.js-project-feature-toggle.project-feature-toggle{ type: "button", + class: "#{'is-checked' if cluster.enabled?} #{'is-disabled' if !cluster.can_toggle_cluster?}", "aria-label": s_("ClusterIntegration|Toggle Cluster"), disabled: !cluster.can_toggle_cluster?, data: { endpoint: namespace_project_cluster_path(@project.namespace, @project, cluster, format: :json) } } + %input.js-project-feature-toggle-input{ type: "hidden", value: cluster.enabled? } = icon("spinner spin", class: "loading-icon") %span.toggle-icon = sprite_icon('status_success_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-checked') diff --git a/app/views/projects/clusters/_integration_form.html.haml b/app/views/projects/clusters/_integration_form.html.haml index 9d593ffc021..0af6e6e0577 100644 --- a/app/views/projects/clusters/_integration_form.html.haml +++ b/app/views/projects/clusters/_integration_form.html.haml @@ -10,13 +10,12 @@ = s_('ClusterIntegration|Cluster integration is enabled for this project.') - else = s_('ClusterIntegration|Cluster integration is disabled for this project.') - %label.append-bottom-10 - = field.hidden_field :enabled, { class: 'js-toggle-input'} - + %label.append-bottom-10.js-cluster-enable-toggle-area %button{ type: 'button', - class: "js-toggle-cluster project-feature-toggle #{'is-checked' unless !@cluster.enabled?} #{'is-disabled' unless can?(current_user, :update_cluster, @cluster)}", + class: "js-project-feature-toggle project-feature-toggle #{'is-checked' if @cluster.enabled?} #{'is-disabled' unless can?(current_user, :update_cluster, @cluster)}", "aria-label": s_("ClusterIntegration|Toggle Cluster"), disabled: !can?(current_user, :update_cluster, @cluster) } + = field.hidden_field :enabled, { class: 'js-project-feature-toggle-input'} %span.toggle-icon = sprite_icon('status_success_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-checked') = sprite_icon('status_failed_borderless', size: 16, css_class: 'toggle-icon-svg toggle-status-unchecked') diff --git a/app/views/projects/commits/_commit.html.haml b/app/views/projects/commits/_commit.html.haml index d66066a6d0b..90272ad9554 100644 --- a/app/views/projects/commits/_commit.html.haml +++ 
b/app/views/projects/commits/_commit.html.haml @@ -51,6 +51,7 @@ - if commit.status(ref) = render_commit_status(commit, ref: ref) + #commit-pipeline-status{ data: { endpoint: pipelines_project_commit_path(project, commit.id) } } = link_to commit.short_id, link, class: "commit-sha btn btn-transparent btn-link" = clipboard_button(text: commit.id, title: _("Copy commit SHA to clipboard")) = link_to_browse_code(project, commit) diff --git a/app/views/projects/issues/_new_branch.html.haml b/app/views/projects/issues/_new_branch.html.haml index 331d62cf247..37b00a14fc6 100644 --- a/app/views/projects/issues/_new_branch.html.haml +++ b/app/views/projects/issues/_new_branch.html.haml @@ -15,10 +15,10 @@ %span.text Checking branch availability… .btn-group.available.hide - %button.btn.js-create-merge-request.btn-default{ type: 'button', data: { action: data_action } } + %button.btn.js-create-merge-request.btn-success.btn-inverted{ type: 'button', data: { action: data_action } } = value - %button.btn.create-merge-request-dropdown-toggle.dropdown-toggle.btn-default.js-dropdown-toggle{ type: 'button', data: { dropdown: { trigger: '#create-merge-request-dropdown' } } } + %button.btn.create-merge-request-dropdown-toggle.dropdown-toggle.btn-success.btn-inverted.js-dropdown-toggle{ type: 'button', data: { dropdown: { trigger: '#create-merge-request-dropdown' } } } = icon('caret-down') %ul#create-merge-request-dropdown.create-merge-request-dropdown-menu.dropdown-menu.dropdown-menu-align-right.gl-show-field-errors{ data: { dropdown: true } } diff --git a/app/views/projects/pipeline_schedules/_pipeline_schedule.html.haml b/app/views/projects/pipeline_schedules/_pipeline_schedule.html.haml index 800e234275c..a8692b83b07 100644 --- a/app/views/projects/pipeline_schedules/_pipeline_schedule.html.haml +++ b/app/views/projects/pipeline_schedules/_pipeline_schedule.html.haml @@ -29,9 +29,10 @@ - if can?(current_user, :play_pipeline_schedule, pipeline_schedule) = link_to play_pipeline_schedule_path(pipeline_schedule), method: :post, title: s_('Play'), class: 'btn' do = icon('play') - - if can?(current_user, :update_pipeline_schedule, pipeline_schedule) + - if can?(current_user, :take_ownership_pipeline_schedule, pipeline_schedule) = link_to take_ownership_pipeline_schedule_path(pipeline_schedule), method: :post, title: s_('PipelineSchedules|Take ownership'), class: 'btn' do = s_('PipelineSchedules|Take ownership') + - if can?(current_user, :update_pipeline_schedule, pipeline_schedule) = link_to edit_pipeline_schedule_path(pipeline_schedule), title: _('Edit'), class: 'btn' do = icon('pencil') - if can?(current_user, :admin_pipeline_schedule, pipeline_schedule) diff --git a/app/views/projects/tags/_tag.html.haml b/app/views/projects/tags/_tag.html.haml index 467f19b4c56..55e45a5e954 100644 --- a/app/views/projects/tags/_tag.html.haml +++ b/app/views/projects/tags/_tag.html.haml @@ -32,5 +32,5 @@ = icon("pencil") - if can?(current_user, :admin_project, @project) - = link_to project_tag_path(@project, tag.name), class: "btn btn-remove remove-row has-tooltip #{protected_tag?(@project, tag) ? 'disabled' : ''}", title: s_('TagsPage|Delete tag'), method: :delete, data: { confirm: s_('TagsPage|Deleting the %{tag_name} tag cannot be undone. Are you sure?') % { tag_name: tag.name }, container: 'body' }, remote: true do + = link_to project_tag_path(@project, tag.name), class: "btn btn-remove remove-row has-tooltip prepend-left-10 #{protected_tag?(@project, tag) ? 
'disabled' : ''}", title: s_('TagsPage|Delete tag'), method: :delete, data: { confirm: s_('TagsPage|Deleting the %{tag_name} tag cannot be undone. Are you sure?') % { tag_name: tag.name }, container: 'body' }, remote: true do = icon("trash-o") diff --git a/app/views/shared/form_elements/_description.html.haml b/app/views/shared/form_elements/_description.html.haml index f65bb6a29e6..38e9899ca4b 100644 --- a/app/views/shared/form_elements/_description.html.haml +++ b/app/views/shared/form_elements/_description.html.haml @@ -15,7 +15,7 @@ = render layout: 'projects/md_preview', locals: { url: preview_url, referenced_users: true } do = render 'projects/zen', f: form, attr: :description, - classes: 'note-textarea', + classes: 'note-textarea qa-issuable-form-description', placeholder: "Write a comment or drag your files here...", supports_quick_actions: supports_quick_actions = render 'shared/notes/hints', supports_quick_actions: supports_quick_actions diff --git a/app/views/shared/issuable/_form.html.haml b/app/views/shared/issuable/_form.html.haml index bb02dfa0d3a..79021a08719 100644 --- a/app/views/shared/issuable/_form.html.haml +++ b/app/views/shared/issuable/_form.html.haml @@ -65,7 +65,7 @@ %span.append-right-10 - if issuable.new_record? - = form.submit "Submit #{issuable.class.model_name.human.downcase}", class: 'btn btn-create' + = form.submit "Submit #{issuable.class.model_name.human.downcase}", class: 'btn btn-create qa-issuable-create-button' - else = form.submit 'Save changes', class: 'btn btn-save' diff --git a/app/views/shared/issuable/form/_title.html.haml b/app/views/shared/issuable/form/_title.html.haml index 64826d41d60..e81639f35ea 100644 --- a/app/views/shared/issuable/form/_title.html.haml +++ b/app/views/shared/issuable/form/_title.html.haml @@ -6,7 +6,7 @@ %div{ class: div_class } = form.text_field :title, required: true, maxlength: 255, autofocus: true, - autocomplete: 'off', class: 'form-control pad' + autocomplete: 'off', class: 'form-control pad qa-issuable-form-title' - if issuable.respond_to?(:work_in_progress?) %p.help-block diff --git a/app/workers/repository_import_worker.rb b/app/workers/repository_import_worker.rb index 31e2798c36b..d79b5ee5346 100644 --- a/app/workers/repository_import_worker.rb +++ b/app/workers/repository_import_worker.rb @@ -20,7 +20,11 @@ class RepositoryImportWorker # to those importers to mark the import process as complete. return if service.async? - raise result[:message] if result[:status] == :error + if result[:status] == :error + fail_import(project, result[:message]) if project.gitlab_project_import? + + raise result[:message] + end project.after_import end @@ -33,4 +37,8 @@ class RepositoryImportWorker Rails.logger.info("Project #{project.full_path} was in inconsistent state (#{project.import_status}) while importing.") false end + + def fail_import(project, message) + project.mark_import_as_failed(message) + end end diff --git a/app/workers/upload_checksum_worker.rb b/app/workers/upload_checksum_worker.rb index 9222760c031..65d40336f18 100644 --- a/app/workers/upload_checksum_worker.rb +++ b/app/workers/upload_checksum_worker.rb @@ -3,7 +3,7 @@ class UploadChecksumWorker def perform(upload_id) upload = Upload.find(upload_id) - upload.calculate_checksum + upload.calculate_checksum! upload.save! 
rescue ActiveRecord::RecordNotFound Rails.logger.error("UploadChecksumWorker: couldn't find upload #{upload_id}, skipping") diff --git a/bin/upgrade.rb b/bin/upgrade.rb deleted file mode 100755 index a5caecf8526..00000000000 --- a/bin/upgrade.rb +++ /dev/null @@ -1,3 +0,0 @@ -require_relative "../lib/gitlab/upgrader" - -Gitlab::Upgrader.new.execute diff --git a/changelogs/unreleased/21554-mark-new-user-as-external.yml b/changelogs/unreleased/21554-mark-new-user-as-external.yml new file mode 100644 index 00000000000..fb0826fc176 --- /dev/null +++ b/changelogs/unreleased/21554-mark-new-user-as-external.yml @@ -0,0 +1,5 @@ +--- +title: Login via OAuth now only marks new users as external +merge_request: 16672 +author: +type: fixed diff --git a/changelogs/unreleased/30106-group-issues.yml b/changelogs/unreleased/30106-group-issues.yml new file mode 100644 index 00000000000..d24996e6087 --- /dev/null +++ b/changelogs/unreleased/30106-group-issues.yml @@ -0,0 +1,5 @@ +--- +title: Include subgroup issues and merge requests on the group page +merge_request: +author: +type: changed diff --git a/changelogs/unreleased/32283-trending-projects-unique-constraint2.yml b/changelogs/unreleased/32283-trending-projects-unique-constraint2.yml new file mode 100644 index 00000000000..4fd6b6fddc4 --- /dev/null +++ b/changelogs/unreleased/32283-trending-projects-unique-constraint2.yml @@ -0,0 +1,5 @@ +--- +title: Add unique constraint to trending_projects#project_id. +merge_request: 16846 +author: +type: other diff --git a/changelogs/unreleased/32546-cannot-copy-paste-on-ios.yml b/changelogs/unreleased/32546-cannot-copy-paste-on-ios.yml deleted file mode 100644 index f4c44983736..00000000000 --- a/changelogs/unreleased/32546-cannot-copy-paste-on-ios.yml +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: Fix copy/paste on iOS devices due to a bug in webkit -merge_request: 15804 -author: -type: fixed diff --git a/changelogs/unreleased/34733-fix-default-avatar-when-gravatar-disabled.yml b/changelogs/unreleased/34733-fix-default-avatar-when-gravatar-disabled.yml new file mode 100644 index 00000000000..0791847b64d --- /dev/null +++ b/changelogs/unreleased/34733-fix-default-avatar-when-gravatar-disabled.yml @@ -0,0 +1,5 @@ +--- +title: Fix default avatar icon missing when Gravatar is disabled +merge_request: 16681 +author: Felix Geyer +type: fixed diff --git a/changelogs/unreleased/35285-user-interface-bugs-for-schedule-pipelines.yml b/changelogs/unreleased/35285-user-interface-bugs-for-schedule-pipelines.yml new file mode 100644 index 00000000000..f3a04469884 --- /dev/null +++ b/changelogs/unreleased/35285-user-interface-bugs-for-schedule-pipelines.yml @@ -0,0 +1,5 @@ +--- +title: Hide pipeline schedule take ownership for current owner +merge_request: 12986 +author: +type: fixed diff --git a/changelogs/unreleased/35779-realtime-update-of-pipeline-status-in-files-view.yml b/changelogs/unreleased/35779-realtime-update-of-pipeline-status-in-files-view.yml new file mode 100644 index 00000000000..82df00fe631 --- /dev/null +++ b/changelogs/unreleased/35779-realtime-update-of-pipeline-status-in-files-view.yml @@ -0,0 +1,5 @@ +--- +title: Add realtime ci status for the repository -> files view +merge_request: 16523 +author: +type: added diff --git a/changelogs/unreleased/40793-fix-mr-title-for-jira.yml b/changelogs/unreleased/40793-fix-mr-title-for-jira.yml new file mode 100644 index 00000000000..69461510a87 --- /dev/null +++ b/changelogs/unreleased/40793-fix-mr-title-for-jira.yml @@ -0,0 +1,5 @@ +--- +title: Prevent JIRA issue 
identifier from being humanized. +merge_request: 16491 +author: Andrew McCallum +type: fixed diff --git a/changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml b/changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml new file mode 100644 index 00000000000..f64fd66ef79 --- /dev/null +++ b/changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml @@ -0,0 +1,5 @@ +--- +title: Reduce the number of Prometheus metrics +merge_request: 16443 +author: +type: performance diff --git a/changelogs/unreleased/41802-add-space-to-edit-delete-tag-btns.yml b/changelogs/unreleased/41802-add-space-to-edit-delete-tag-btns.yml new file mode 100644 index 00000000000..f23a6452b0d --- /dev/null +++ b/changelogs/unreleased/41802-add-space-to-edit-delete-tag-btns.yml @@ -0,0 +1,5 @@ +--- +title: Adds spacing between edit and delete tag btn in tag list +merge_request: 16757 +author: Jacopo Beschi @jacopo-beschi +type: fixed diff --git a/changelogs/unreleased/42022-allow-users-to-request-access-not-visible-when-project-visibility-is-public.yml b/changelogs/unreleased/42022-allow-users-to-request-access-not-visible-when-project-visibility-is-public.yml deleted file mode 100644 index 38684cd3c44..00000000000 --- a/changelogs/unreleased/42022-allow-users-to-request-access-not-visible-when-project-visibility-is-public.yml +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: Fix missing "allow users to request access" option in public project permissions -merge_request: 16485 -author: -type: fixed diff --git a/changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml b/changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml new file mode 100644 index 00000000000..64340ab08cd --- /dev/null +++ b/changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml @@ -0,0 +1,5 @@ +--- +title: Fix 500 error when loading a merge request with an invalid comment +merge_request: 16795 +author: +type: fixed diff --git a/changelogs/unreleased/42161-gitaly-commitservice-encoding-undefinedconversionerror-u-c124-from-utf-8-to-ascii-8bit.yml b/changelogs/unreleased/42161-gitaly-commitservice-encoding-undefinedconversionerror-u-c124-from-utf-8-to-ascii-8bit.yml deleted file mode 100644 index c64bee9126e..00000000000 --- a/changelogs/unreleased/42161-gitaly-commitservice-encoding-undefinedconversionerror-u-c124-from-utf-8-to-ascii-8bit.yml +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: Fix encoding issue when counting commit count -merge_request: 16637 -author: -type: fixed diff --git a/changelogs/unreleased/42255-disable-mr-checkout-button-when-source-branch-deleted.yml b/changelogs/unreleased/42255-disable-mr-checkout-button-when-source-branch-deleted.yml new file mode 100644 index 00000000000..bd7e0d3a1b0 --- /dev/null +++ b/changelogs/unreleased/42255-disable-mr-checkout-button-when-source-branch-deleted.yml @@ -0,0 +1,5 @@ +--- +title: Disable MR check out button when source branch is deleted +merge_request: 16631 +author: Jacopo Beschi @jacopo-beschi +type: fixed diff --git a/changelogs/unreleased/42497-rubocop-style-regexpliteral.yml b/changelogs/unreleased/42497-rubocop-style-regexpliteral.yml new file mode 100644 index 00000000000..6053883bac4 --- /dev/null +++ b/changelogs/unreleased/42497-rubocop-style-regexpliteral.yml @@ -0,0 +1,5 @@ +--- +title: Enable RuboCop Style/RegexpLiteral +merge_request: 16752 +author: Takuya Noguchi +type: other diff --git 
a/changelogs/unreleased/42591-update-nokogiri.yml b/changelogs/unreleased/42591-update-nokogiri.yml new file mode 100644 index 00000000000..5f9587d2d92 --- /dev/null +++ b/changelogs/unreleased/42591-update-nokogiri.yml @@ -0,0 +1,5 @@ +--- +title: Update nokogiri to 1.8.2 +merge_request: 16807 +author: +type: security diff --git a/changelogs/unreleased/42696-gitlab-import-leaves-group_id-on-projectlabel.yml b/changelogs/unreleased/42696-gitlab-import-leaves-group_id-on-projectlabel.yml new file mode 100644 index 00000000000..c46cc86c99b --- /dev/null +++ b/changelogs/unreleased/42696-gitlab-import-leaves-group_id-on-projectlabel.yml @@ -0,0 +1,5 @@ +--- +title: Fix GitLab import leaving group_id on ProjectLabel +merge_request: 16877 +author: +type: fixed diff --git a/changelogs/unreleased/add-confirmation-input-for-modals.yml b/changelogs/unreleased/add-confirmation-input-for-modals.yml new file mode 100644 index 00000000000..ff1027bc55a --- /dev/null +++ b/changelogs/unreleased/add-confirmation-input-for-modals.yml @@ -0,0 +1,5 @@ +--- +title: Add confirmation-input component +merge_request: 16816 +author: +type: other diff --git a/changelogs/unreleased/contribution_calendar_label_cut_off.yml b/changelogs/unreleased/contribution_calendar_label_cut_off.yml new file mode 100644 index 00000000000..0b4a746bab8 --- /dev/null +++ b/changelogs/unreleased/contribution_calendar_label_cut_off.yml @@ -0,0 +1,5 @@ +--- +title: Contribution calendar label was cut off +merge_request: +author: Branka Martinovic +type: fixed diff --git a/changelogs/unreleased/cs-fix-commercial-content-check.yml b/changelogs/unreleased/cs-fix-commercial-content-check.yml new file mode 100644 index 00000000000..fec80e3ecd2 --- /dev/null +++ b/changelogs/unreleased/cs-fix-commercial-content-check.yml @@ -0,0 +1,6 @@ +--- +title: Fix version information not showing on help page if commercial content display + was disabled. 
+merge_request: 16743 +author: +type: fixed diff --git a/changelogs/unreleased/fix-improve-issue-note-dropdown.yml b/changelogs/unreleased/fix-improve-issue-note-dropdown.yml new file mode 100644 index 00000000000..aaf4811c64a --- /dev/null +++ b/changelogs/unreleased/fix-improve-issue-note-dropdown.yml @@ -0,0 +1,5 @@ +--- +title: Improve issue note dropdown and mr button +merge_request: 16758 +author: George Tsiolis +type: changed diff --git a/changelogs/unreleased/fix-install-docs.yml b/changelogs/unreleased/fix-install-docs.yml new file mode 100644 index 00000000000..c2c0dd1364b --- /dev/null +++ b/changelogs/unreleased/fix-install-docs.yml @@ -0,0 +1,5 @@ +--- +title: Update minimum git version to 2.9.5 +merge_request: 16683 +author: +type: other diff --git a/changelogs/unreleased/fix-postgresql-table-grant.yml b/changelogs/unreleased/fix-postgresql-table-grant.yml deleted file mode 100644 index 1c6559f6f73..00000000000 --- a/changelogs/unreleased/fix-postgresql-table-grant.yml +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: Use has_table_privilege for TRIGGER on PostgreSQL -merge_request: -author: -type: fixed diff --git a/changelogs/unreleased/issues-closed-at-steal.yml b/changelogs/unreleased/issues-closed-at-steal.yml new file mode 100644 index 00000000000..a5f0898995f --- /dev/null +++ b/changelogs/unreleased/issues-closed-at-steal.yml @@ -0,0 +1,5 @@ +--- +title: Finish any remaining jobs for issues.closed_at +merge_request: +author: +type: other diff --git a/changelogs/unreleased/osw-fix-lost-diffs-when-source-branch-deleted.yml b/changelogs/unreleased/osw-fix-lost-diffs-when-source-branch-deleted.yml new file mode 100644 index 00000000000..1cffb213f23 --- /dev/null +++ b/changelogs/unreleased/osw-fix-lost-diffs-when-source-branch-deleted.yml @@ -0,0 +1,5 @@ +--- +title: Close and do not reload MR diffs when source branch is deleted +merge_request: +author: +type: fixed diff --git a/changelogs/unreleased/osw-short-circuit-mergeable-disccusions-state.yml b/changelogs/unreleased/osw-short-circuit-mergeable-disccusions-state.yml new file mode 100644 index 00000000000..62931218861 --- /dev/null +++ b/changelogs/unreleased/osw-short-circuit-mergeable-disccusions-state.yml @@ -0,0 +1,5 @@ +--- +title: Stop checking if discussions are in a mergeable state if the MR isn't +merge_request: +author: +type: performance diff --git a/changelogs/unreleased/osw-system-notes-for-commits-regression.yml b/changelogs/unreleased/osw-system-notes-for-commits-regression.yml new file mode 100644 index 00000000000..6d0943c7716 --- /dev/null +++ b/changelogs/unreleased/osw-system-notes-for-commits-regression.yml @@ -0,0 +1,5 @@ +--- +title: Reload MRs memoization after diffs creation +merge_request: +author: +type: fixed diff --git a/changelogs/unreleased/osw-updates-merge-status-on-api-actions.yml b/changelogs/unreleased/osw-updates-merge-status-on-api-actions.yml new file mode 100644 index 00000000000..3854985e576 --- /dev/null +++ b/changelogs/unreleased/osw-updates-merge-status-on-api-actions.yml @@ -0,0 +1,5 @@ +--- +title: Return more consistent values for merge_status on MR APIs +merge_request: +author: +type: fixed diff --git a/changelogs/unreleased/sh-fix-events-collection.yml b/changelogs/unreleased/sh-fix-events-collection.yml new file mode 100644 index 00000000000..50af39d9caf --- /dev/null +++ b/changelogs/unreleased/sh-fix-events-collection.yml @@ -0,0 +1,5 @@ +--- +title: Fix not all events being shown in group dashboard +merge_request: +author: +type: fixed diff --git 
a/changelogs/unreleased/sh-fix-jira-trailing-slash.yml b/changelogs/unreleased/sh-fix-jira-trailing-slash.yml new file mode 100644 index 00000000000..786f6cd3727 --- /dev/null +++ b/changelogs/unreleased/sh-fix-jira-trailing-slash.yml @@ -0,0 +1,5 @@ +--- +title: Fix JIRA not working when a trailing slash is included +merge_request: +author: +type: fixed diff --git a/changelogs/unreleased/sh-fix-project-members-api-perf.yml b/changelogs/unreleased/sh-fix-project-members-api-perf.yml new file mode 100644 index 00000000000..c3fff933547 --- /dev/null +++ b/changelogs/unreleased/sh-fix-project-members-api-perf.yml @@ -0,0 +1,6 @@ +--- +title: Remove N+1 queries with /projects/:project_id/{access_requests,members} API + endpoints +merge_request: +author: +type: performance diff --git a/changelogs/unreleased/zj-gitaly-server-info.yml b/changelogs/unreleased/zj-gitaly-server-info.yml new file mode 100644 index 00000000000..cf6295f2bbc --- /dev/null +++ b/changelogs/unreleased/zj-gitaly-server-info.yml @@ -0,0 +1,5 @@ +--- +title: Add Gitaly Servers admin dashboard +merge_request: +author: +type: added diff --git a/config/application.rb b/config/application.rb index ea9a07cbde9..751307de975 100644 --- a/config/application.rb +++ b/config/application.rb @@ -6,6 +6,7 @@ Bundler.require(:default, Rails.env) module Gitlab class Application < Rails::Application + require_dependency Rails.root.join('lib/gitlab/redis/wrapper') require_dependency Rails.root.join('lib/gitlab/redis/cache') require_dependency Rails.root.join('lib/gitlab/redis/queues') require_dependency Rails.root.join('lib/gitlab/redis/shared_state') @@ -98,10 +99,11 @@ module Gitlab # Enable the asset pipeline config.assets.enabled = true + # Support legacy unicode file named img emojis, `1F939.png` config.assets.paths << Gemojione.images_path - config.assets.paths << "vendor/assets/fonts" - config.assets.precompile << "*.png" + config.assets.paths << "#{config.root}/vendor/assets/fonts" + config.assets.precompile << "print.css" config.assets.precompile << "notify.css" config.assets.precompile << "mailers/*.css" @@ -110,7 +112,6 @@ module Gitlab config.assets.precompile << "xterm/xterm.css" config.assets.precompile << "performance_bar.css" config.assets.precompile << "lib/ace.js" - config.assets.precompile << "vendor/assets/fonts/*" config.assets.precompile << "test.css" config.assets.precompile << "locale/**/app.js" diff --git a/config/dependency_decisions.yml b/config/dependency_decisions.yml index 5cd30dcde2a..778cca4297f 100644 --- a/config/dependency_decisions.yml +++ b/config/dependency_decisions.yml @@ -467,8 +467,8 @@ - - :license - pikaday - MIT - - :who: - :why: + - :who: + :why: :versions: [] :when: 2017-10-17 17:46:12.367554000 Z - - :license diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example index 238e1583770..33230b9355d 100644 --- a/config/gitlab.yml.example +++ b/config/gitlab.yml.example @@ -152,6 +152,12 @@ production: &base # The location where LFS objects are stored (default: shared/lfs-objects). # storage_path: shared/lfs-objects + ## Uploads (attachments, avatars, etc...) + uploads: + # The location where uploads objects are stored (default: public/). + # storage_path: public/ + # base_dir: uploads/-/system + ## GitLab Pages pages: enabled: false @@ -184,7 +190,7 @@ production: &base # ssl_url: "https://..." # default: https://secure.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon ## Auxiliary jobs - # Periodically executed jobs, to self-heal Gitlab, do external synchronizations, etc. 
+ # Periodically executed jobs, to self-heal GitLab, do external synchronizations, etc. # Please read here for more information: https://github.com/ondrejbartas/sidekiq-cron#adding-cron-job cron_jobs: # Flag stuck CI jobs as failed @@ -642,6 +648,10 @@ test: enabled: true lfs: enabled: false + artifacts: + path: tmp/tests/artifacts + uploads: + storage_path: tmp/tests/public gitlab: host: localhost port: 80 @@ -652,8 +662,6 @@ test: # user: YOUR_USERNAME pages: path: tmp/tests/pages - artifacts: - path: tmp/tests/artifacts repositories: storages: default: diff --git a/config/initializers/0_post_deployment_migrations.rb b/config/initializers/0_post_deployment_migrations.rb index 0068a03d214..3d81b869b52 100644 --- a/config/initializers/0_post_deployment_migrations.rb +++ b/config/initializers/0_post_deployment_migrations.rb @@ -2,11 +2,13 @@ # before other initializers as Rails may otherwise memoize a list of migrations # excluding the post deployment migrations. unless ENV['SKIP_POST_DEPLOYMENT_MIGRATIONS'] - path = Rails.root.join('db', 'post_migrate').to_s + Rails.application.config.paths['db'].each do |db_path| + path = Rails.root.join(db_path, 'post_migrate').to_s - Rails.application.config.paths['db/migrate'] << path + Rails.application.config.paths['db/migrate'] << path - # Rails memoizes migrations at certain points where it won't read the above - # path just yet. As such we must also update the following list of paths. - ActiveRecord::Migrator.migrations_paths << path + # Rails memoizes migrations at certain points where it won't read the above + # path just yet. As such we must also update the following list of paths. + ActiveRecord::Migrator.migrations_paths << path + end end diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb index 899e612ffbd..5ad46d47cb6 100644 --- a/config/initializers/1_settings.rb +++ b/config/initializers/1_settings.rb @@ -110,7 +110,7 @@ class Settings < Settingslogic url = "http://#{url}" unless url.start_with?('http') # Get rid of the path so that we don't even have to encode it - url_without_path = url.sub(%r{(https?://[^\/]+)/?.*}, '\1') + url_without_path = url.sub(%r{(https?://[^/]+)/?.*}, '\1') URI.parse(url_without_path).host end @@ -300,8 +300,10 @@ Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled'] # Settings['artifacts'] ||= Settingslogic.new({}) Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil? -Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts")) -Settings.artifacts['max_size'] ||= 100 # in megabytes +Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts")) +# Settings.artifact['path'] is deprecated, use `storage_path` instead +Settings.artifacts['path'] = Settings.artifacts['storage_path'] +Settings.artifacts['max_size'] ||= 100 # in megabytes # # Registry @@ -339,6 +341,13 @@ Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil? 
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects")) # +# Uploads +# +Settings['uploads'] ||= Settingslogic.new({}) +Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public') +Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system' + +# # Mattermost # Settings['mattermost'] ||= Settingslogic.new({}) @@ -469,10 +478,10 @@ end # repository_downloads_path value. # repositories_storages = Settings.repositories.storages.values -repository_downloads_path = Settings.gitlab['repository_downloads_path'].to_s.gsub(/\/$/, '') +repository_downloads_path = Settings.gitlab['repository_downloads_path'].to_s.gsub(%r{/$}, '') repository_downloads_full_path = File.expand_path(repository_downloads_path, Settings.gitlab['user_home']) -if repository_downloads_path.blank? || repositories_storages.any? { |rs| [repository_downloads_path, repository_downloads_full_path].include?(rs['path'].gsub(/\/$/, '')) } +if repository_downloads_path.blank? || repositories_storages.any? { |rs| [repository_downloads_path, repository_downloads_full_path].include?(rs['path'].gsub(%r{/$}, '')) } Settings.gitlab['repository_downloads_path'] = File.join(Settings.shared['path'], 'cache/archive') end diff --git a/config/initializers/active_record_data_types.rb b/config/initializers/active_record_data_types.rb index 0359e14b232..fda13d0c4cb 100644 --- a/config/initializers/active_record_data_types.rb +++ b/config/initializers/active_record_data_types.rb @@ -54,7 +54,7 @@ elsif Gitlab::Database.mysql? def initialize_type_map(mapping) super mapping - mapping.register_type(%r(timestamp)i) do |sql_type| + mapping.register_type(/timestamp/i) do |sql_type| precision = extract_precision(sql_type) ActiveRecord::ConnectionAdapters::AbstractMysqlAdapter::MysqlDateTimeWithTimeZone.new(precision: precision) end diff --git a/config/initializers/ar5_pg_10_support.rb b/config/initializers/ar5_pg_10_support.rb index 6fae770015c..a529c74a8ce 100644 --- a/config/initializers/ar5_pg_10_support.rb +++ b/config/initializers/ar5_pg_10_support.rb @@ -1,57 +1,59 @@ raise "Vendored ActiveRecord 5 code! Delete #{__FILE__}!" if ActiveRecord::VERSION::MAJOR >= 5 -require 'active_record/connection_adapters/postgresql_adapter' -require 'active_record/connection_adapters/postgresql/schema_statements' - -# -# Monkey-patch the refused Rails 4.2 patch at https://github.com/rails/rails/pull/31330 -# -# Updates sequence logic to support PostgreSQL 10. -# -# rubocop:disable all -module ActiveRecord - module ConnectionAdapters - - # We need #postgresql_version to be public as in ActiveRecord 5 for seed_fu - # to work. In ActiveRecord 4, it is protected. - # https://github.com/mbleigh/seed-fu/issues/123 - class PostgreSQLAdapter - public :postgresql_version - end +if Gitlab::Database.postgresql? + require 'active_record/connection_adapters/postgresql_adapter' + require 'active_record/connection_adapters/postgresql/schema_statements' + + # + # Monkey-patch the refused Rails 4.2 patch at https://github.com/rails/rails/pull/31330 + # + # Updates sequence logic to support PostgreSQL 10. + # + # rubocop:disable all + module ActiveRecord + module ConnectionAdapters + + # We need #postgresql_version to be public as in ActiveRecord 5 for seed_fu + # to work. In ActiveRecord 4, it is protected. 
+ # https://github.com/mbleigh/seed-fu/issues/123 + class PostgreSQLAdapter + public :postgresql_version + end - module PostgreSQL - module SchemaStatements - # Resets the sequence of a table's primary key to the maximum value. - def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc: - unless pk and sequence - default_pk, default_sequence = pk_and_sequence_for(table) + module PostgreSQL + module SchemaStatements + # Resets the sequence of a table's primary key to the maximum value. + def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc: + unless pk and sequence + default_pk, default_sequence = pk_and_sequence_for(table) - pk ||= default_pk - sequence ||= default_sequence - end + pk ||= default_pk + sequence ||= default_sequence + end - if @logger && pk && !sequence - @logger.warn "#{table} has primary key #{pk} with no default sequence" - end + if @logger && pk && !sequence + @logger.warn "#{table} has primary key #{pk} with no default sequence" + end - if pk && sequence - quoted_sequence = quote_table_name(sequence) - max_pk = select_value("SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}") - if max_pk.nil? - if postgresql_version >= 100000 - minvalue = select_value("SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass") - else - minvalue = select_value("SELECT min_value FROM #{quoted_sequence}") + if pk && sequence + quoted_sequence = quote_table_name(sequence) + max_pk = select_value("SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}") + if max_pk.nil? + if postgresql_version >= 100000 + minvalue = select_value("SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass") + else + minvalue = select_value("SELECT min_value FROM #{quoted_sequence}") + end end - end - select_value <<-end_sql, 'SCHEMA' - SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue}, #{max_pk ? true : false}) - end_sql + select_value <<-end_sql, 'SCHEMA' + SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue}, #{max_pk ? true : false}) + end_sql + end end end end end end + # rubocop:enable all end -# rubocop:enable all diff --git a/config/initializers/date_time_formats.rb b/config/initializers/date_time_formats.rb index 57568203cab..1939ced512d 100644 --- a/config/initializers/date_time_formats.rb +++ b/config/initializers/date_time_formats.rb @@ -2,8 +2,10 @@ # :medium - Nov 10, 2007 # :long - November 10, 2007 Date::DATE_FORMATS[:medium] = '%b %-d, %Y' +Date::DATE_FORMATS[:csv] = '%Y-%m-%d' # :short - 18 Jan 06:10 # :medium - Jan 18, 2007 6:10am # :long - January 18, 2007 06:10 Time::DATE_FORMATS[:medium] = '%b %-d, %Y %-I:%M%P' +Time::DATE_FORMATS[:csv] = '%Y-%m-%d %H:%M:%S' diff --git a/config/initializers/grape_route_helpers_fix.rb b/config/initializers/grape_route_helpers_fix.rb index d3cf9e453d0..612cca3dfbd 100644 --- a/config/initializers/grape_route_helpers_fix.rb +++ b/config/initializers/grape_route_helpers_fix.rb @@ -1,5 +1,21 @@ if defined?(GrapeRouteHelpers) module GrapeRouteHelpers + module AllRoutes + # Bringing in PR https://github.com/reprah/grape-route-helpers/pull/21 due to abandonment. + # + # Without the following fix, when two helper methods are the same, but have different arguments + # (for example: api_v1_cats_owners_path(id: 1) vs api_v1_cats_owners_path(id: 1, owner_id: 2)) + # if the helper method with the least number of arguments is defined first (because the route was defined first) + # then it will shadow the longer route. 
+ # + # The fix is to sort descending by amount of arguments + def decorated_routes + @decorated_routes ||= all_routes + .map { |r| DecoratedRoute.new(r) } + .sort_by { |r| -r.dynamic_path_segments.count } + end + end + class DecoratedRoute # GrapeRouteHelpers gem tries to parse the versions # from a string, not supporting Grape `version` array definition. diff --git a/config/initializers/peek.rb b/config/initializers/peek.rb index e74b95f1646..11759801112 100644 --- a/config/initializers/peek.rb +++ b/config/initializers/peek.rb @@ -7,10 +7,12 @@ if Gitlab::Database.mysql? require 'peek-mysql2' PEEK_DB_CLIENT = ::Mysql2::Client PEEK_DB_VIEW = Peek::Views::Mysql2 -else +elsif Gitlab::Database.postgresql? require 'peek-pg' PEEK_DB_CLIENT = ::PG::Connection PEEK_DB_VIEW = Peek::Views::PG +else + raise "Unsupported database adapter for peek!" end Peek.into PEEK_DB_VIEW diff --git a/config/locales/en.yml b/config/locales/en.yml index 8932db138d9..795e5d4e6bc 100644 --- a/config/locales/en.yml +++ b/config/locales/en.yml @@ -2,6 +2,18 @@ # See https://github.com/svenfuchs/rails-i18n/tree/master/rails%2Flocale for starting points. en: + hello: "Hello world" + activerecord: + attributes: + issue_link: + source: Source issue + target: Target issue + errors: + messages: + label_already_exists_at_group_level: "already exists at group level for %{group}. Please choose another one." + wrong_size: "is the wrong size (should be %{file_size})" + size_too_small: "is too small (should be at least %{file_size})" + size_too_big: "is too big (should be at most %{file_size})" views: pagination: previous: "Prev" diff --git a/config/routes/admin.rb b/config/routes/admin.rb index e22fb440abc..3cca1210e39 100644 --- a/config/routes/admin.rb +++ b/config/routes/admin.rb @@ -1,5 +1,5 @@ namespace :admin do - resources :users, constraints: { id: /[a-zA-Z.\/0-9_\-]+/ } do + resources :users, constraints: { id: %r{[a-zA-Z./0-9_\-]+} } do resources :keys, only: [:show, :destroy] resources :identities, except: [:show] resources :impersonation_tokens, only: [:index, :create] do @@ -24,6 +24,8 @@ namespace :admin do resource :impersonation, only: :destroy resources :abuse_reports, only: [:index, :destroy] + resources :gitaly_servers, only: [:index] + resources :spam_logs, only: [:index, :destroy] do member do post :mark_as_ham diff --git a/config/routes/group.rb b/config/routes/group.rb index 976837a246d..24c76bc55ab 100644 --- a/config/routes/group.rb +++ b/config/routes/group.rb @@ -35,7 +35,7 @@ constraints(GroupUrlConstrainer.new) do post :toggle_subscription, on: :member end - resources :milestones, constraints: { id: /[^\/]+/ }, only: [:index, :show, :edit, :update, :new, :create] do + resources :milestones, constraints: { id: %r{[^/]+} }, only: [:index, :show, :edit, :update, :new, :create] do member do get :merge_requests get :participants @@ -52,7 +52,7 @@ constraints(GroupUrlConstrainer.new) do resources :uploads, only: [:create] do collection do - get ":secret/:filename", action: :show, as: :show, constraints: { filename: /[^\/]+/ } + get ":secret/:filename", action: :show, as: :show, constraints: { filename: %r{[^/]+} } end end end diff --git a/config/routes/project.rb b/config/routes/project.rb index 0496bd85b4e..bcaa68c8ce5 100644 --- a/config/routes/project.rb +++ b/config/routes/project.rb @@ -40,7 +40,7 @@ constraints(ProjectUrlConstrainer.new) do # # Templates # - get '/templates/:template_type/:key' => 'templates#show', as: :template, constraints: { key: /[^\/]+/ } + get '/templates/:template_type/:key' 
=> 'templates#show', as: :template, constraints: { key: %r{[^/]+} } resource :avatar, only: [:show, :destroy] resources :commit, only: [:show], constraints: { id: /\h{7,40}/ } do @@ -55,7 +55,7 @@ constraints(ProjectUrlConstrainer.new) do end resource :pages, only: [:show, :destroy] do - resources :domains, only: [:show, :new, :create, :destroy], controller: 'pages_domains', constraints: { id: /[^\/]+/ } + resources :domains, only: [:show, :new, :create, :destroy], controller: 'pages_domains', constraints: { id: %r{[^/]+} } end resources :snippets, concerns: :awardable, constraints: { id: /\d+/ } do @@ -65,7 +65,7 @@ constraints(ProjectUrlConstrainer.new) do end end - resources :services, constraints: { id: /[^\/]+/ }, only: [:index, :edit, :update] do + resources :services, constraints: { id: %r{[^/]+} }, only: [:index, :edit, :update] do member do put :test end @@ -346,7 +346,7 @@ constraints(ProjectUrlConstrainer.new) do end end - resources :project_members, except: [:show, :new, :edit], constraints: { id: /[a-zA-Z.\/0-9_\-#%+]+/ }, concerns: :access_requestable do + resources :project_members, except: [:show, :new, :edit], constraints: { id: %r{[a-zA-Z./0-9_\-#%+]+} }, concerns: :access_requestable do collection do delete :leave @@ -379,7 +379,7 @@ constraints(ProjectUrlConstrainer.new) do resources :uploads, only: [:create] do collection do - get ":secret/:filename", action: :show, as: :show, constraints: { filename: /[^\/]+/ } + get ":secret/:filename", action: :show, as: :show, constraints: { filename: %r{[^/]+} } end end diff --git a/config/routes/uploads.rb b/config/routes/uploads.rb index d7bca8310e4..6370645bcb9 100644 --- a/config/routes/uploads.rb +++ b/config/routes/uploads.rb @@ -2,17 +2,17 @@ scope path: :uploads do # Note attachments and User/Group/Project avatars get "-/system/:model/:mounted_as/:id/:filename", to: "uploads#show", - constraints: { model: /note|user|group|project/, mounted_as: /avatar|attachment/, filename: /[^\/]+/ } + constraints: { model: /note|user|group|project/, mounted_as: /avatar|attachment/, filename: %r{[^/]+} } # show uploads for models, snippets (notes) available for now get '-/system/:model/:id/:secret/:filename', to: 'uploads#show', - constraints: { model: /personal_snippet/, id: /\d+/, filename: /[^\/]+/ } + constraints: { model: /personal_snippet/, id: /\d+/, filename: %r{[^/]+} } # show temporary uploads get '-/system/temp/:secret/:filename', to: 'uploads#show', - constraints: { filename: /[^\/]+/ } + constraints: { filename: %r{[^/]+} } # Appearance get "-/system/:model/:mounted_as/:id/:filename", @@ -22,7 +22,7 @@ scope path: :uploads do # Project markdown uploads get ":namespace_id/:project_id/:secret/:filename", to: "projects/uploads#show", - constraints: { namespace_id: /[a-zA-Z.0-9_\-]+/, project_id: /[a-zA-Z.0-9_\-]+/, filename: /[^\/]+/ } + constraints: { namespace_id: /[a-zA-Z.0-9_\-]+/, project_id: /[a-zA-Z.0-9_\-]+/, filename: %r{[^/]+} } # create uploads for models, snippets (notes) available for now post ':model', @@ -34,4 +34,4 @@ end # Redirect old note attachments path to new uploads path. 
get "files/note/:id/:filename", to: redirect("uploads/note/attachment/%{id}/%{filename}"), - constraints: { filename: /[^\/]+/ } + constraints: { filename: %r{[^/]+} } diff --git a/db/migrate/20180119135717_add_uploader_index_to_uploads.rb b/db/migrate/20180119135717_add_uploader_index_to_uploads.rb new file mode 100644 index 00000000000..a678c3d049f --- /dev/null +++ b/db/migrate/20180119135717_add_uploader_index_to_uploads.rb @@ -0,0 +1,20 @@ +# See http://doc.gitlab.com/ce/development/migration_style_guide.html +# for more information on how to write migrations for GitLab. + +class AddUploaderIndexToUploads < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + disable_ddl_transaction! + + def up + remove_concurrent_index :uploads, :path + add_concurrent_index :uploads, [:uploader, :path], using: :btree + end + + def down + remove_concurrent_index :uploads, [:uploader, :path] + add_concurrent_index :uploads, :path, using: :btree + end +end diff --git a/db/migrate/20180201102129_add_unique_constraint_to_trending_projects_project_id.rb b/db/migrate/20180201102129_add_unique_constraint_to_trending_projects_project_id.rb new file mode 100644 index 00000000000..02e53b8fa8a --- /dev/null +++ b/db/migrate/20180201102129_add_unique_constraint_to_trending_projects_project_id.rb @@ -0,0 +1,19 @@ +class AddUniqueConstraintToTrendingProjectsProjectId < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + disable_ddl_transaction! + + def up + add_concurrent_index :trending_projects, :project_id, unique: true, name: 'index_trending_projects_on_project_id_unique' + remove_concurrent_index_by_name :trending_projects, 'index_trending_projects_on_project_id' + rename_index :trending_projects, 'index_trending_projects_on_project_id_unique', 'index_trending_projects_on_project_id' + end + + def down + rename_index :trending_projects, 'index_trending_projects_on_project_id', 'index_trending_projects_on_project_id_old' + add_concurrent_index :trending_projects, :project_id + remove_concurrent_index_by_name :trending_projects, 'index_trending_projects_on_project_id_old' + end +end diff --git a/db/migrate/20180201145907_migrate_remaining_issues_closed_at.rb b/db/migrate/20180201145907_migrate_remaining_issues_closed_at.rb new file mode 100644 index 00000000000..7cb913bb2bf --- /dev/null +++ b/db/migrate/20180201145907_migrate_remaining_issues_closed_at.rb @@ -0,0 +1,42 @@ +# See http://doc.gitlab.com/ce/development/migration_style_guide.html +# for more information on how to write migrations for GitLab. + +class MigrateRemainingIssuesClosedAt < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + # Set this constant to true if this migration requires downtime. + DOWNTIME = false + + disable_ddl_transaction! + + class Issue < ActiveRecord::Base + self.table_name = 'issues' + include EachBatch + end + + def up + Gitlab::BackgroundMigration.steal('CopyColumn') + Gitlab::BackgroundMigration.steal('CleanupConcurrentTypeChange') + + # It's possible the cleanup job was killed which means we need to manually + # migrate any remaining rows. + migrate_remaining_rows if migrate_column_type? + end + + def down + end + + def migrate_remaining_rows + Issue.where('closed_at_for_type_change IS NULL AND closed_at IS NOT NULL').each_batch do |batch| + batch.update_all('closed_at_for_type_change = closed_at') + end + + cleanup_concurrent_column_type_change(:issues, :closed_at) + end + + def migrate_column_type? 
+ # Some environments may have already executed the previous version of this + # migration, thus we don't need to migrate those environments again. + column_for('issues', 'closed_at').type == :datetime # rubocop:disable Migration/Datetime + end +end diff --git a/db/post_migrate/20180202111106_remove_project_labels_group_id.rb b/db/post_migrate/20180202111106_remove_project_labels_group_id.rb new file mode 100644 index 00000000000..db7fd0d167d --- /dev/null +++ b/db/post_migrate/20180202111106_remove_project_labels_group_id.rb @@ -0,0 +1,19 @@ +# See http://doc.gitlab.com/ce/development/migration_style_guide.html +# for more information on how to write migrations for GitLab. + +class RemoveProjectLabelsGroupId < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + disable_ddl_transaction! + + def up + update_column_in_batches(:labels, :group_id, nil) do |table, query| + query.where(table[:type].eq('ProjectLabel').and(table[:group_id].not_eq(nil))) + end + end + + def down + end +end diff --git a/db/schema.rb b/db/schema.rb index 4e82a688725..c701a5f1e17 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -11,7 +11,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 20180115201419) do +ActiveRecord::Schema.define(version: 20180202111106) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" @@ -1727,7 +1727,7 @@ ActiveRecord::Schema.define(version: 20180115201419) do t.integer "project_id", null: false end - add_index "trending_projects", ["project_id"], name: "index_trending_projects_on_project_id", using: :btree + add_index "trending_projects", ["project_id"], name: "index_trending_projects_on_project_id", unique: true, using: :btree create_table "u2f_registrations", force: :cascade do |t| t.text "certificate" @@ -1755,7 +1755,7 @@ ActiveRecord::Schema.define(version: 20180115201419) do add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree add_index "uploads", ["model_id", "model_type"], name: "index_uploads_on_model_id_and_model_type", using: :btree - add_index "uploads", ["path"], name: "index_uploads_on_path", using: :btree + add_index "uploads", ["uploader", "path"], name: "index_uploads_on_uploader_and_path", using: :btree create_table "user_agent_details", force: :cascade do |t| t.string "user_agent", null: false diff --git a/doc/README.md b/doc/README.md index 330670587f5..c8b6b4f32b8 100644 --- a/doc/README.md +++ b/doc/README.md @@ -8,23 +8,13 @@ comments: false Welcome to [GitLab](https://about.gitlab.com/), a Git-based fully featured platform for software development! -GitLab offers the most scalable Git-based fully integrated platform for software development, with flexible products and subscription plans: +GitLab offers the most scalable Git-based fully integrated platform for software development, with flexible products and subscription plans. -- **GitLab Community Edition (CE)** is an [open source product](https://gitlab.com/gitlab-org/gitlab-ce/), -self-hosted, free to use. Every feature available in GitLab CE is also available on GitLab Enterprise Edition (Starter and Premium) and GitLab.com. 
-- **GitLab Enterprise Edition (EE)** is an [open-core product](https://gitlab.com/gitlab-org/gitlab-ee/), -self-hosted, fully featured solution of GitLab, available under distinct [subscriptions](https://about.gitlab.com/products/): **GitLab Enterprise Edition Starter (EES)**, **GitLab Enterprise Edition Premium (EEP)**, and **GitLab Enterprise Edition Ultimate (EEU)**. -- **GitLab.com**: SaaS GitLab solution, with [free and paid subscriptions](https://about.gitlab.com/gitlab-com/). GitLab.com is hosted by GitLab, Inc., and administrated by GitLab (users don't have access to admin settings). +With GitLab self-hosted, you deploy your own GitLab instance on-premises or on a private cloud of your choice. GitLab self-hosted is available for [free and with paid subscriptions](https://about.gitlab.com/products/): Libre, Starter, Premium, and Ultimate. -> **GitLab EE** contains all features available in **GitLab CE**, -plus premium features available in each version: **Enterprise Edition Starter** -(**EES**), **Enterprise Edition Premium** (**EEP**), and **Enterprise Edition Ultimate** -(**EEU**). Everything available in **EES** is also available in **EEP**. Every feature -available in **EEP** is also available in **EEU**. +GitLab.com is our SaaS offering. It's hosted, managed, and administered by GitLab, with [free and paid plans](https://about.gitlab.com/gitlab-com/) for individuals and teams: Free, Bronze, Silver, and Gold. ----- - -Shortcuts to GitLab's most visited docs: +## Shortcuts to GitLab's most visited docs | [GitLab CI/CD](ci/README.md) | Other | | :----- | :----- | @@ -134,14 +124,8 @@ Manage your [repositories](user/project/repository/index.md) from the UI (user i ## Administrator documentation -[Administration documentation](administration/index.md) applies to admin users of GitLab -self-hosted instances: - -- GitLab Community Edition -- GitLab [Enterprise Editions](https://about.gitlab.com/gitlab-ee/) - - Enterprise Edition Starter (EES) - - Enterprise Edition Premium (EEP) - - Enterprise Edition Ultimate (EEU) +[Administration documentation](administration/index.md) applies to admin users of [GitLab +self-hosted instances](#self-hosted-gitlab): Libre, Starter, Premium, Ultimate. Learn how to install, configure, update, upgrade, integrate, and maintain your own instance. Regular users don't have access to GitLab administration tools and settings. diff --git a/doc/api/repositories.md b/doc/api/repositories.md index 5fb25e40ed7..96609cd530f 100644 --- a/doc/api/repositories.md +++ b/doc/api/repositories.md @@ -114,6 +114,7 @@ Parameters: - `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user - `sha` (optional) - The commit SHA to download. A tag, branch reference or sha can be used. This defaults to the tip of the default branch if not specified +- `format` (optional) - The archive format. Default is `tar.gz`. 
Options are `tar.gz`, `tar.bz2`, `tbz`, `tbz2`, `tb2`, `bz2`, `tar`, `zip` ## Compare branches, tags or commits diff --git a/doc/api/repository_files.md b/doc/api/repository_files.md index a1a0b1b756c..c29dc22e12d 100644 --- a/doc/api/repository_files.md +++ b/doc/api/repository_files.md @@ -68,7 +68,7 @@ Example response: ```json { - "file_name": "app/project.rb", + "file_path": "app/project.rb", "branch": "master" } ``` @@ -98,7 +98,7 @@ Example response: ```json { - "file_name": "app/project.rb", + "file_path": "app/project.rb", "branch": "master" } ``` @@ -134,15 +134,6 @@ DELETE /projects/:id/repository/files/:file_path curl --request DELETE --header 'PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK' 'https://gitlab.example.com/api/v4/projects/13083/repository/files/app%2Fproject%2Erb?branch=master&author_email=author%40example.com&author_name=Firstname%20Lastname&commit_message=delete%20file' ``` -Example response: - -```json -{ - "file_name": "app/project.rb", - "branch": "master" -} -``` - Parameters: - `file_path` (required) - Url encoded full path to new file. Ex. lib%2Fclass%2Erb diff --git a/doc/development/automatic_ce_ee_merge.md b/doc/development/automatic_ce_ee_merge.md index 5a784b6de06..cf6314f9521 100644 --- a/doc/development/automatic_ce_ee_merge.md +++ b/doc/development/automatic_ce_ee_merge.md @@ -5,17 +5,32 @@ Enterprise Edition (look for the [`CE Upstream` merge requests]). This merge is done automatically in a [scheduled pipeline](https://gitlab.com/gitlab-org/release-tools/-/jobs/43201679). -If a merge is already in progress, the job [doesn't create a new one](https://gitlab.com/gitlab-org/release-tools/-/jobs/43157687). -**If you are pinged in a `CE Upstream` merge request to resolve a conflict, -please resolve the conflict as soon as possible or ask someone else to do it!** - ->**Note:** -It's ok to resolve more conflicts than the one that you are asked to resolve. In -that case, it's a good habit to ask for a double-check on your resolution by -someone who is familiar with the code you touched. +## What to do if you are pinged in a `CE Upstream` merge request to resolve a conflict? + +1. Please resolve the conflict as soon as possible or ask someone else to do it + - It's ok to resolve more conflicts than the one that you are asked to resolve. + In that case, it's a good habit to ask for a double-check on your resolution + by someone who is familiar with the code you touched. +1. Once you have resolved your conflicts, push to the branch (no force-push) +1. Assign the merge request to the next person that has to resolve a conflict +1. If all conflicts are resolved after your resolution is pushed, keep the merge + request assigned to you: **you are now responsible for the merge request to be + green** +1. If you need any help, you can ping the current [release managers], or ask in + the `#ce-to-ee` Slack channel + +A few notes about the automatic CE->EE merge job: + +- If a merge is already in progress, the job + [doesn't create a new one](https://gitlab.com/gitlab-org/release-tools/-/jobs/43157687). +- If there is nothing to merge (i.e. EE is up-to-date with CE), the job doesn't + create a new one +- The job posts messages to the `#ce-to-ee` Slack channel to inform what's the + current CE->EE merge status (e.g. 
"A new MR has been created", "A MR is still pending") [`CE Upstream` merge requests]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests?label_name%5B%5D=CE+upstream +[release managers]: https://about.gitlab.com/release-managers/ ## Always merge EE merge requests before their CE counterparts diff --git a/doc/development/file_storage.md b/doc/development/file_storage.md index cf00e24e11a..76354b92820 100644 --- a/doc/development/file_storage.md +++ b/doc/development/file_storage.md @@ -14,8 +14,8 @@ There are many places where file uploading is used, according to contexts: - User snippet attachments * Project - Project avatars - - Issues/MR Markdown attachments - - Issues/MR Legacy Markdown attachments + - Issues/MR/Notes Markdown attachments + - Issues/MR/Notes Legacy Markdown attachments - CI Build Artifacts - LFS Objects @@ -25,7 +25,7 @@ There are many places where file uploading is used, according to contexts: GitLab started saving everything on local disk. While directory location changed from previous versions, they are still not 100% standardized. You can see them below: -| Description | In DB? | Relative path | Uploader class | model_type | +| Description | In DB? | Relative path (from CarrierWave.root) | Uploader class | model_type | | ------------------------------------- | ------ | ----------------------------------------------------------- | ---------------------- | ---------- | | Instance logo | yes | uploads/-/system/appearance/logo/:id/:filename | `AttachmentUploader` | Appearance | | Header logo | yes | uploads/-/system/appearance/header_logo/:id/:filename | `AttachmentUploader` | Appearance | @@ -33,17 +33,107 @@ they are still not 100% standardized. You can see them below: | User avatars | yes | uploads/-/system/user/avatar/:id/:filename | `AvatarUploader` | User | | User snippet attachments | yes | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet | | Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project | -| Issues/MR Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project | -| Issues/MR Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note | +| Issues/MR/Notes Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project | +| Issues/MR/Notes Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note | | CI Artifacts (CE) | yes | shared/artifacts/:year_:month/:project_id/:id | `ArtifactUploader` | Ci::Build | | LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject | CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader` -while in EE they inherit the `ObjectStoreUploader` and store files in and S3 API compatible object store. +while in EE they inherit the `ObjectStorage` and store files in and S3 API compatible object store. -In the case of Issues/MR Markdown attachments, there is a different approach using the [Hashed Storage] layout, +In the case of Issues/MR/Notes Markdown attachments, there is a different approach using the [Hashed Storage] layout, instead of basing the path into a mutable variable `:project_path_with_namespace`, it's possible to use the hash of the project ID instead, if project migrates to the new approach (introduced in 10.2). 
+### Path segments + +Files are stored at multiple locations and use different path schemes. +All the `GitlabUploader` derived classes should comply with this path segment schema: + +``` +| GitlabUploader +| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- | +| `<gitlab_root>/public/` | `uploads/-/system/` | `user/avatar/:id/` | `:filename` | +| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- | +| `CarrierWave.root` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` | +| | `CarrierWave::Uploader#store_dir` | | + +| FileUploader +| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- | +| `<gitlab_root>/shared/` | `artifacts/` | `:year_:month/:id` | `:filename` | +| `<gitlab_root>/shared/` | `snippets/` | `:secret/` | `:filename` | +| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- | +| `CarrierWave.root` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` | +| | `CarrierWave::Uploader#store_dir` | | +| | | `FileUploader#upload_path | + +| ObjectStore::Concern (store = remote) +| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- | +| `<bucket_name>` | <ignored> | `user/avatar/:id/` | `:filename` | +| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- | +| `#fog_dir` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` | +| | | `ObjectStorage::Concern#store_dir` | | +| | | `ObjectStorage::Concern#upload_path | +``` + +The `RecordsUploads::Concern` concern will create an `Upload` entry for every file stored by a `GitlabUploader` persisting the dynamic parts of the path using +`GitlabUploader#dynamic_path`. You may then use the `Upload#build_uploader` method to manipulate the file. + +## Object Storage + +By including the `ObjectStorage::Concern` in the `GitlabUploader` derived class, you may enable the object storage for this uploader. To enable the object storage +in your uploader, you need to either 1) include `RecordsUpload::Concern` and prepend `ObjectStorage::Extension::RecordsUploads` or 2) mount the uploader and create a new field named `<mount>_store`. + +The `CarrierWave::Uploader#store_dir` is overriden to + + - `GitlabUploader.base_dir` + `GitlabUploader.dynamic_segment` when the store is LOCAL + - `GitlabUploader.dynamic_segment` when the store is REMOTE (the bucket name is used to namespace) + +### Using `ObjectStorage::Extension::RecordsUploads` + +> Note: this concern will automatically include `RecordsUploads::Concern` if not already included. + +The `ObjectStorage::Concern` uploader will search for the matching `Upload` to select the correct object store. The `Upload` is mapped using `#store_dirs + identifier` for each store (LOCAL/REMOTE). + +```ruby +class SongUploader < GitlabUploader + include RecordsUploads::Concern + include ObjectStorage::Concern + prepend ObjectStorage::Extension::RecordsUploads + + ... +end + +class Thing < ActiveRecord::Base + mount :theme, SongUploader # we have a great theme song! + + ... 
+end +``` + +### Using a mounted uploader + +The `ObjectStorage::Concern` will query the `model.<mount>_store` attribute to select the correct object store. +This column must be present in the model schema. + +```ruby +class SongUploader < GitlabUploader + include ObjectStorage::Concern + + ... +end + +class Thing < ActiveRecord::Base + attr_reader :theme_store # this is an ActiveRecord attribute + mount :theme, SongUploader # we have a great theme song! + + def theme_store + super || ObjectStorage::Store::LOCAL + end + + ... +end +``` + [CarrierWave]: https://github.com/carrierwaveuploader/carrierwave [Hashed Storage]: ../administration/repository_storage_types.md diff --git a/doc/install/installation.md b/doc/install/installation.md index 18e29271d0f..6eb8890cc4f 100644 --- a/doc/install/installation.md +++ b/doc/install/installation.md @@ -80,7 +80,7 @@ Make sure you have the right version of Git installed # Install Git sudo apt-get install -y git-core - # Make sure Git is version 2.14.3 or higher + # Make sure Git is version 2.9.5 or higher git --version Is the system packaged Git too old? Remove it and compile from source. @@ -93,9 +93,9 @@ Is the system packaged Git too old? Remove it and compile from source. # Download and compile from source cd /tmp - curl --remote-name --progress https://www.kernel.org/pub/software/scm/git/git-2.8.4.tar.gz - echo '626e319f8a24fc0866167ea5f6bf3e2f38f69d6cb2e59e150f13709ca3ebf301 git-2.8.4.tar.gz' | shasum -a256 -c - && tar -xzf git-2.8.4.tar.gz - cd git-2.8.4/ + curl --remote-name --progress https://www.kernel.org/pub/software/scm/git/git-2.14.3.tar.gz + echo '023ffff6d3ba8a1bea779dfecc0ed0bb4ad68ab8601d14435dd8c08416f78d7f git-2.14.3.tar.gz' | shasum -a256 -c - && tar -xzf git-2.14.3.tar.gz + cd git-2.14.3/ ./configure make prefix=/usr/local all diff --git a/doc/install/kubernetes/gitlab_runner_chart.md b/doc/install/kubernetes/gitlab_runner_chart.md index ca9c95aeced..1f53e12d5f8 100644 --- a/doc/install/kubernetes/gitlab_runner_chart.md +++ b/doc/install/kubernetes/gitlab_runner_chart.md @@ -31,7 +31,7 @@ The default configuration can always be found in the [values.yaml](https://gitla In order for GitLab Runner to function, your config file **must** specify the following: - - `gitlabURL` - the GitLab Server URL (with protocol) to register the runner against + - `gitlabUrl` - the GitLab Server URL (with protocol) to register the runner against - `runnerRegistrationToken` - The Registration Token for adding new Runners to the GitLab Server. This must be retrieved from your GitLab Instance. See the [GitLab Runner Documentation](../../ci/runners/README.md#creating-and-registering-a-runner) for more information. @@ -47,7 +47,7 @@ Here is a snippet of the important settings: ## The GitLab Server URL (with protocol) that want to register the runner against ## ref: https://docs.gitlab.com/runner/commands/README.html#gitlab-runner-register ## -gitlabURL: http://gitlab.your-domain.com/ +gitlabUrl: http://gitlab.your-domain.com/ ## The Registration Token for adding new Runners to the GitLab Server. This must ## be retreived from your GitLab Instance. diff --git a/doc/raketasks/backup_restore.md b/doc/raketasks/backup_restore.md index 76f33b765d3..bbd2d214fe4 100644 --- a/doc/raketasks/backup_restore.md +++ b/doc/raketasks/backup_restore.md @@ -169,15 +169,11 @@ For Omnibus GitLab packages: 1. 
[Reconfigure GitLab] for the changes to take effect -#### Digital Ocean Spaces and other S3-compatible providers +#### Digital Ocean Spaces -Not all S3 providers are fully-compatible with the Fog library. For example, -if you see `411 Length Required` errors after attempting to upload, you may -need to downgrade the `aws_signature_version` value from the default value to -2 [due to this issue](https://github.com/fog/fog-aws/issues/428). +This example can be used for a bucket in Amsterdam (AMS3). -1. For example, with [Digital Ocean Spaces](https://www.digitalocean.com/products/spaces/), -this example configuration can be used for a bucket in Amsterdam (AMS3): +1. Add the following to `/etc/gitlab/gitlab.rb`: ```ruby gitlab_rails['backup_upload_connection'] = { @@ -185,7 +181,6 @@ this example configuration can be used for a bucket in Amsterdam (AMS3): 'region' => 'ams3', 'aws_access_key_id' => 'AKIAKIAKI', 'aws_secret_access_key' => 'secret123', - 'aws_signature_version' => 2, 'endpoint' => 'https://ams3.digitaloceanspaces.com' } gitlab_rails['backup_upload_remote_directory'] = 'my.s3.bucket' @@ -193,6 +188,13 @@ this example configuration can be used for a bucket in Amsterdam (AMS3): 1. [Reconfigure GitLab] for the changes to take effect +#### Other S3 Providers + +Not all S3 providers are fully-compatible with the Fog library. For example, +if you see `411 Length Required` errors after attempting to upload, you may +need to downgrade the `aws_signature_version` value from the default value to +2 [due to this issue](https://github.com/fog/fog-aws/issues/428). + --- For installations from source: @@ -494,7 +496,7 @@ more of the following options: - `BACKUP=timestamp_of_backup` - Required if more than one backup exists. Read what the [backup timestamp is about](#backup-timestamp). -- `force=yes` - Do not ask if the authorized_keys file should get regenerated. +- `force=yes` - Does not ask if the authorized_keys file should get regenerated and assumes 'yes' for warning that database tables will be removed. ### Restore for installation from source diff --git a/doc/user/group/subgroups/img/create_subgroup_button.png b/doc/user/group/subgroups/img/create_subgroup_button.png Binary files differindex 000b54c2855..d1355d4b2c3 100644 --- a/doc/user/group/subgroups/img/create_subgroup_button.png +++ b/doc/user/group/subgroups/img/create_subgroup_button.png diff --git a/doc/user/group/subgroups/index.md b/doc/user/group/subgroups/index.md index 161a3af9903..2a982344e5f 100644 --- a/doc/user/group/subgroups/index.md +++ b/doc/user/group/subgroups/index.md @@ -90,7 +90,8 @@ structure. To create a subgroup: -1. In the group's dashboard go to the **Subgroups** page and click **New subgroup**. +1. In the group's dashboard expand the **New project** split button, select + **New subgroup** and click the **New subgroup** button. ![Subgroups page](img/create_subgroup_button.png) diff --git a/doc/user/project/repository/branches/index.md b/doc/user/project/repository/branches/index.md index 26c55891b3c..9d16a4c74f2 100644 --- a/doc/user/project/repository/branches/index.md +++ b/doc/user/project/repository/branches/index.md @@ -18,7 +18,7 @@ When you create a new [project](../../index.md), GitLab sets `master` as the def branch for your project. You can choose another branch to be your project's default under your project's **Settings > General**. 
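Tying the backup-upload hunks a few hunks above together: the `aws_signature_version => 2` workaround dropped from the Digital Ocean example still applies to the providers described under "Other S3 Providers". A hedged `/etc/gitlab/gitlab.rb` sketch, reusing the placeholder endpoint, region, credentials and bucket name from that example:

```ruby
# /etc/gitlab/gitlab.rb -- sketch only; substitute your provider's values.
gitlab_rails['backup_upload_connection'] = {
  'provider'              => 'AWS',
  'region'                => 'ams3',
  'aws_access_key_id'     => 'AKIAKIAKI',
  'aws_secret_access_key' => 'secret123',
  # Only needed for providers that reject uploads with "411 Length Required"
  # under the default signature version (see the fog-aws issue linked above).
  'aws_signature_version' => 2,
  'endpoint'              => 'https://ams3.digitaloceanspaces.com'
}
gitlab_rails['backup_upload_remote_directory'] = 'my.s3.bucket'
```

As the surrounding steps note, reconfigure GitLab afterwards for the change to take effect.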
-The default branch is the branched affected by the +The default branch is the branch affected by the [issue closing pattern](../../issues/automatic_issue_closing.md), which means that _an issue will be closed when a merge request is merged to the **default branch**_. diff --git a/features/steps/project/source/markdown_render.rb b/features/steps/project/source/markdown_render.rb index fc4ef26f6b6..db99c179439 100644 --- a/features/steps/project/source/markdown_render.rb +++ b/features/steps/project/source/markdown_render.rb @@ -193,7 +193,7 @@ class Spinach::Features::ProjectSourceMarkdownRender < Spinach::FeatureSteps end step 'The link with text "/ID" should have url "tree/markdownID"' do - find('a', text: /^\/#id$/)['href'] == current_host + project_tree_path(@project, "markdown") + '#id' + find('a', text: %r{^/#id$})['href'] == current_host + project_tree_path(@project, "markdown") + '#id' end step 'The link with text "README.mdID" '\ @@ -203,7 +203,7 @@ class Spinach::Features::ProjectSourceMarkdownRender < Spinach::FeatureSteps step 'The link with text "d/README.mdID" should have '\ 'url "blob/markdown/d/README.mdID"' do - find('a', text: /^d\/README.md#id$/)['href'] == current_host + project_blob_path(@project, "d/markdown/README.md") + '#id' + find('a', text: %r{^d/README.md#id$})['href'] == current_host + project_blob_path(@project, "d/markdown/README.md") + '#id' end step 'The link with text "ID" should have url "blob/markdown/README.mdID"' do @@ -212,7 +212,7 @@ class Spinach::Features::ProjectSourceMarkdownRender < Spinach::FeatureSteps end step 'The link with text "/ID" should have url "blob/markdown/README.mdID"' do - find('a', text: /^\/#id$/)['href'] == current_host + project_blob_path(@project, "markdown/README.md") + '#id' + find('a', text: %r{^/#id$})['href'] == current_host + project_blob_path(@project, "markdown/README.md") + '#id' end # Wiki diff --git a/lib/api/access_requests.rb b/lib/api/access_requests.rb index 374b611f55e..60ae5e6b9a2 100644 --- a/lib/api/access_requests.rb +++ b/lib/api/access_requests.rb @@ -24,7 +24,7 @@ module API access_requesters = AccessRequestsFinder.new(source).execute!(current_user) access_requesters = paginate(access_requesters.includes(:user)) - present access_requesters.map(&:user), with: Entities::AccessRequester, source: source + present access_requesters, with: Entities::AccessRequester end desc "Requests access for the authenticated user to a #{source_type}." do @@ -36,7 +36,7 @@ module API access_requester = source.request_access(current_user) if access_requester.persisted? - present access_requester.user, with: Entities::AccessRequester, access_requester: access_requester + present access_requester, with: Entities::AccessRequester else render_validation_error!(access_requester) end @@ -56,7 +56,7 @@ module API member = ::Members::ApproveAccessRequestService.new(source, current_user, declared_params).execute status :created - present member.user, with: Entities::Member, member: member + present member, with: Entities::Member end desc 'Denies an access request for the given user.' 
do diff --git a/lib/api/entities.rb b/lib/api/entities.rb index 7b9a80a234b..e13463ec66b 100644 --- a/lib/api/entities.rb +++ b/lib/api/entities.rb @@ -205,22 +205,15 @@ module API expose :build_artifacts_size, as: :job_artifacts_size end - class Member < UserBasic - expose :access_level do |user, options| - member = options[:member] || options[:source].members.find_by(user_id: user.id) - member.access_level - end - expose :expires_at do |user, options| - member = options[:member] || options[:source].members.find_by(user_id: user.id) - member.expires_at - end + class Member < Grape::Entity + expose :user, merge: true, using: UserBasic + expose :access_level + expose :expires_at end - class AccessRequester < UserBasic - expose :requested_at do |user, options| - access_requester = options[:access_requester] || options[:source].requesters.find_by(user_id: user.id) - access_requester.requested_at - end + class AccessRequester < Grape::Entity + expose :user, merge: true, using: UserBasic + expose :requested_at end class Group < Grape::Entity @@ -507,7 +500,15 @@ module API expose :work_in_progress?, as: :work_in_progress expose :milestone, using: Entities::Milestone expose :merge_when_pipeline_succeeds - expose :merge_status + + # Ideally we should deprecate `MergeRequest#merge_status` exposure and + # use `MergeRequest#mergeable?` instead (boolean). + # See https://gitlab.com/gitlab-org/gitlab-ce/issues/42344 for more + # information. + expose :merge_status do |merge_request| + merge_request.check_if_can_be_merged + merge_request.merge_status + end expose :diff_head_sha, as: :sha expose :merge_commit_sha expose :user_notes_count diff --git a/lib/api/members.rb b/lib/api/members.rb index 130c6d6da71..bc1de37284a 100644 --- a/lib/api/members.rb +++ b/lib/api/members.rb @@ -21,10 +21,11 @@ module API get ":id/members" do source = find_source(source_type, params[:id]) - users = source.users - users = users.merge(User.search(params[:query])) if params[:query].present? + members = source.members.where.not(user_id: nil).includes(:user) + members = members.joins(:user).merge(User.search(params[:query])) if params[:query].present? + members = paginate(members) - present paginate(users), with: Entities::Member, source: source + present members, with: Entities::Member end desc 'Gets a member of a group or project.' do @@ -39,7 +40,7 @@ module API members = source.members member = members.find_by!(user_id: params[:user_id]) - present member.user, with: Entities::Member, member: member + present member, with: Entities::Member end desc 'Adds a member to a group or project.' do @@ -62,7 +63,7 @@ module API if !member not_allowed! # This currently can only be reached in EE elsif member.persisted? && member.valid? - present member.user, with: Entities::Member, member: member + present member, with: Entities::Member else render_validation_error!(member) end @@ -83,7 +84,7 @@ module API member = source.members.find_by!(user_id: params.delete(:user_id)) if member.update_attributes(declared_params(include_missing: false)) - present member.user, with: Entities::Member, member: member + present member, with: Entities::Member else render_validation_error!(member) end diff --git a/lib/api/runner.rb b/lib/api/runner.rb index 80feb629d54..1f80646a2ea 100644 --- a/lib/api/runner.rb +++ b/lib/api/runner.rb @@ -215,9 +215,9 @@ module API job = authenticate_job! forbidden!('Job is not running!') unless job.running? 
- artifacts_upload_path = JobArtifactUploader.artifacts_upload_path - artifacts = uploaded_file(:file, artifacts_upload_path) - metadata = uploaded_file(:metadata, artifacts_upload_path) + workhorse_upload_path = JobArtifactUploader.workhorse_upload_path + artifacts = uploaded_file(:file, workhorse_upload_path) + metadata = uploaded_file(:metadata, workhorse_upload_path) bad_request!('Missing artifacts file!') unless artifacts file_to_large! unless artifacts.size < max_artifacts_size diff --git a/lib/api/templates.rb b/lib/api/templates.rb index 6550b331fb8..41862768a3f 100644 --- a/lib/api/templates.rb +++ b/lib/api/templates.rb @@ -17,15 +17,15 @@ module API } }.freeze PROJECT_TEMPLATE_REGEX = - /[\<\{\[] + %r{[\<\{\[] (project|description| one\sline\s.+\swhat\sit\sdoes\.) # matching the start and end is enough here - [\>\}\]]/xi.freeze + [\>\}\]]}xi.freeze YEAR_TEMPLATE_REGEX = /[<{\[](year|yyyy)[>}\]]/i.freeze FULLNAME_TEMPLATE_REGEX = - /[\<\{\[] + %r{[\<\{\[] (fullname|name\sof\s(author|copyright\sowner)) - [\>\}\]]/xi.freeze + [\>\}\]]}xi.freeze helpers do def parsed_license_template diff --git a/lib/api/v3/members.rb b/lib/api/v3/members.rb index 46145cac7a5..d7bde8ceb89 100644 --- a/lib/api/v3/members.rb +++ b/lib/api/v3/members.rb @@ -22,10 +22,11 @@ module API get ":id/members" do source = find_source(source_type, params[:id]) - users = source.users - users = users.merge(User.search(params[:query])) if params[:query].present? + members = source.members.where.not(user_id: nil).includes(:user) + members = members.joins(:user).merge(User.search(params[:query])) if params[:query].present? + members = paginate(members) - present paginate(users), with: ::API::Entities::Member, source: source + present members, with: ::API::Entities::Member end desc 'Gets a member of a group or project.' do @@ -40,7 +41,7 @@ module API members = source.members member = members.find_by!(user_id: params[:user_id]) - present member.user, with: ::API::Entities::Member, member: member + present member, with: ::API::Entities::Member end desc 'Adds a member to a group or project.' do @@ -69,7 +70,7 @@ module API end if member.persisted? && member.valid? - present member.user, with: ::API::Entities::Member, member: member + present member, with: ::API::Entities::Member else # This is to ensure back-compatibility but 400 behavior should be used # for all validation errors in 9.0! @@ -93,7 +94,7 @@ module API member = source.members.find_by!(user_id: params.delete(:user_id)) if member.update_attributes(declared_params(include_missing: false)) - present member.user, with: ::API::Entities::Member, member: member + present member, with: ::API::Entities::Member else # This is to ensure back-compatibility but 400 behavior should be used # for all validation errors in 9.0! 
@@ -125,7 +126,7 @@ module API else ::Members::DestroyService.new(source, current_user, declared_params).execute - present member.user, with: ::API::Entities::Member, member: member + present member, with: ::API::Entities::Member end end end diff --git a/lib/api/v3/projects.rb b/lib/api/v3/projects.rb index a7f0813bf74..c856ba99f09 100644 --- a/lib/api/v3/projects.rb +++ b/lib/api/v3/projects.rb @@ -173,7 +173,7 @@ module API use :sort_params use :pagination end - get "/search/:query", requirements: { query: /[^\/]+/ } do + get "/search/:query", requirements: { query: %r{[^/]+} } do search_service = Search::GlobalService.new(current_user, search: params[:query]).execute projects = search_service.objects('projects', params[:page], false) projects = projects.reorder(params[:order_by] => params[:sort]) diff --git a/lib/api/v3/templates.rb b/lib/api/v3/templates.rb index 7298203df10..b82b02b5f49 100644 --- a/lib/api/v3/templates.rb +++ b/lib/api/v3/templates.rb @@ -16,15 +16,15 @@ module API } }.freeze PROJECT_TEMPLATE_REGEX = - /[\<\{\[] + %r{[\<\{\[] (project|description| one\sline\s.+\swhat\sit\sdoes\.) # matching the start and end is enough here - [\>\}\]]/xi.freeze + [\>\}\]]}xi.freeze YEAR_TEMPLATE_REGEX = /[<{\[](year|yyyy)[>}\]]/i.freeze FULLNAME_TEMPLATE_REGEX = - /[\<\{\[] + %r{[\<\{\[] (fullname|name\sof\s(author|copyright\sowner)) - [\>\}\]]/xi.freeze + [\>\}\]]}xi.freeze DEPRECATION_MESSAGE = ' This endpoint is deprecated and has been removed in V4.'.freeze helpers do diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb index 7a582a20056..4383124d150 100644 --- a/lib/backup/artifacts.rb +++ b/lib/backup/artifacts.rb @@ -3,7 +3,7 @@ require 'backup/files' module Backup class Artifacts < Files def initialize - super('artifacts', LegacyArtifactUploader.local_store_path) + super('artifacts', JobArtifactUploader.root) end def create_files_dir diff --git a/lib/banzai/filter/emoji_filter.rb b/lib/banzai/filter/emoji_filter.rb index 6255a611dbe..b82c6ca6393 100644 --- a/lib/banzai/filter/emoji_filter.rb +++ b/lib/banzai/filter/emoji_filter.rb @@ -54,9 +54,9 @@ module Banzai # Build a regexp that matches all valid :emoji: names. def self.emoji_pattern @emoji_pattern ||= - /(?<=[^[:alnum:]:]|\n|^) + %r{(?<=[^[:alnum:]:]|\n|^) :(#{Gitlab::Emoji.emojis_names.map { |name| Regexp.escape(name) }.join('|')}): - (?=[^[:alnum:]:]|$)/x + (?=[^[:alnum:]:]|$)}x end # Build a regexp that matches all valid unicode emojis names. 
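Several hunks in this stretch (templates.rb, v3/projects.rb, emoji_filter.rb) make the same mechanical change: regexps whose pattern contains a literal `/` switch from slash delimiters to `%r{}` so the inner slashes no longer need escaping. A minimal illustration, using an example path string that is not taken from the code above:

```ruby
# Both literals match the same strings; %r{} merely avoids escaping the
# inner slashes, which is all the conversions above are doing.
slash_form   = /\A\/uploads\/[^\/]+\z/   # escaped inner slashes
percent_form = %r{\A/uploads/[^/]+\z}    # %r{} literal, no escaping needed

'/uploads/avatar.png' =~ slash_form    # => 0
'/uploads/avatar.png' =~ percent_form  # => 0
```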
diff --git a/lib/banzai/filter/gollum_tags_filter.rb b/lib/banzai/filter/gollum_tags_filter.rb index 2e259904673..c2b42673376 100644 --- a/lib/banzai/filter/gollum_tags_filter.rb +++ b/lib/banzai/filter/gollum_tags_filter.rb @@ -51,10 +51,10 @@ module Banzai # See https://github.com/gollum/gollum/wiki # # Rubular: http://rubular.com/r/7dQnE5CUCH - TAGS_PATTERN = %r{\[\[(.+?)\]\]}.freeze + TAGS_PATTERN = /\[\[(.+?)\]\]/.freeze # Pattern to match allowed image extensions - ALLOWED_IMAGE_EXTENSIONS = %r{.+(jpg|png|gif|svg|bmp)\z}i.freeze + ALLOWED_IMAGE_EXTENSIONS = /.+(jpg|png|gif|svg|bmp)\z/i.freeze def call search_text_nodes(doc).each do |node| diff --git a/lib/container_registry/registry.rb b/lib/container_registry/registry.rb index 63bce655f57..f90d711474a 100644 --- a/lib/container_registry/registry.rb +++ b/lib/container_registry/registry.rb @@ -11,7 +11,7 @@ module ContainerRegistry private def default_path - @uri.sub(/^https?:\/\//, '') + @uri.sub(%r{^https?://}, '') end end end diff --git a/lib/extracts_path.rb b/lib/extracts_path.rb index d8aca3304c5..a9b04c183ad 100644 --- a/lib/extracts_path.rb +++ b/lib/extracts_path.rb @@ -56,7 +56,7 @@ module ExtractsPath if valid_refs.length == 0 # No exact ref match, so just try our best - pair = id.match(/([^\/]+)(.*)/).captures + pair = id.match(%r{([^/]+)(.*)}).captures else # There is a distinct possibility that multiple refs prefix the ID. # Use the longest match to maximize the chance that we have the @@ -68,7 +68,7 @@ module ExtractsPath end # Remove ending slashes from path - pair[1].gsub!(/^\/|\/$/, '') + pair[1].gsub!(%r{^/|/$}, '') pair end diff --git a/lib/gitaly/server.rb b/lib/gitaly/server.rb new file mode 100644 index 00000000000..605e93022e7 --- /dev/null +++ b/lib/gitaly/server.rb @@ -0,0 +1,43 @@ +module Gitaly + class Server + def self.all + Gitlab.config.repositories.storages.keys.map { |s| Gitaly::Server.new(s) } + end + + attr_reader :storage + + def initialize(storage) + @storage = storage + end + + def server_version + info.server_version + end + + def git_binary_version + info.git_version + end + + def up_to_date? + server_version == Gitlab::GitalyClient.expected_server_version + end + + def address + Gitlab::GitalyClient.address(@storage) + rescue RuntimeError => e + "Error getting the address: #{e.message}" + end + + private + + def info + @info ||= + begin + Gitlab::GitalyClient::ServerService.new(@storage).info + rescue GRPC::Unavailable, GRPC::GRPC::DeadlineExceeded + # This will show the server as being out of date + Gitaly::ServerInfoResponse.new(git_version: '', server_version: '') + end + end + end +end diff --git a/lib/gitlab/background_migration/populate_untracked_uploads.rb b/lib/gitlab/background_migration/populate_untracked_uploads.rb index 81e95e5832d..8a8e770940e 100644 --- a/lib/gitlab/background_migration/populate_untracked_uploads.rb +++ b/lib/gitlab/background_migration/populate_untracked_uploads.rb @@ -12,7 +12,7 @@ module Gitlab # Ends with /:random_hex/:filename FILE_UPLOADER_PATH = %r{/\h+/[^/]+\z} - FULL_PATH_CAPTURE = %r{\A(.+)#{FILE_UPLOADER_PATH}} + FULL_PATH_CAPTURE = /\A(.+)#{FILE_UPLOADER_PATH}/ # These regex patterns are tested against a relative path, relative to # the upload directory. 
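The new `Gitaly::Server` class added a few hunks above (lib/gitaly/server.rb) exposes only a handful of readers, so a console sketch is enough to show how the `/admin/gitaly_servers` page introduced earlier in this changeset can drive it. This usage illustration is built only from the methods defined in that file; the printed values depend on the Gitaly servers actually running:

```ruby
# Iterate over every configured storage and report its Gitaly server.
Gitaly::Server.all.each do |server|
  puts "#{server.storage}: #{server.address} " \
       "(gitaly #{server.server_version}, git #{server.git_binary_version}, " \
       "up to date: #{server.up_to_date?})"
end
```

Note that `#info` already rescues `GRPC::Unavailable` and deadline errors, so an unreachable server reports empty versions and shows up as out of date rather than raising.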
@@ -143,7 +143,7 @@ module Gitlab end def absolute_path - File.join(CarrierWave.root, path) + File.join(Gitlab.config.uploads.storage_path, path) end end diff --git a/lib/gitlab/background_migration/prepare_untracked_uploads.rb b/lib/gitlab/background_migration/prepare_untracked_uploads.rb index 4e0121ca34d..a7a1bbe1752 100644 --- a/lib/gitlab/background_migration/prepare_untracked_uploads.rb +++ b/lib/gitlab/background_migration/prepare_untracked_uploads.rb @@ -11,9 +11,12 @@ module Gitlab FIND_BATCH_SIZE = 500 RELATIVE_UPLOAD_DIR = "uploads".freeze - ABSOLUTE_UPLOAD_DIR = "#{CarrierWave.root}/#{RELATIVE_UPLOAD_DIR}".freeze + ABSOLUTE_UPLOAD_DIR = File.join( + Gitlab.config.uploads.storage_path, + RELATIVE_UPLOAD_DIR + ) FOLLOW_UP_MIGRATION = 'PopulateUntrackedUploads'.freeze - START_WITH_CARRIERWAVE_ROOT_REGEX = %r{\A#{CarrierWave.root}/} + START_WITH_ROOT_REGEX = %r{\A#{Gitlab.config.uploads.storage_path}/} EXCLUDED_HASHED_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/@hashed/*".freeze EXCLUDED_TMP_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/tmp/*".freeze @@ -81,7 +84,7 @@ module Gitlab paths = [] stdout.each_line("\0") do |line| - paths << line.chomp("\0").sub(START_WITH_CARRIERWAVE_ROOT_REGEX, '') + paths << line.chomp("\0").sub(START_WITH_ROOT_REGEX, '') if paths.size >= batch_size yield(paths) diff --git a/lib/gitlab/ci/build/artifacts/metadata/entry.rb b/lib/gitlab/ci/build/artifacts/metadata/entry.rb index 5b2f09e03ea..428c0505808 100644 --- a/lib/gitlab/ci/build/artifacts/metadata/entry.rb +++ b/lib/gitlab/ci/build/artifacts/metadata/entry.rb @@ -97,7 +97,7 @@ module Gitlab end def total_size - descendant_pattern = %r{^#{Regexp.escape(@path.to_s)}} + descendant_pattern = /^#{Regexp.escape(@path.to_s)}/ entries.sum do |path, entry| (entry[:size] if path =~ descendant_pattern).to_i end diff --git a/lib/gitlab/dependency_linker/composer_json_linker.rb b/lib/gitlab/dependency_linker/composer_json_linker.rb index 0245bf4077a..cfd4ec15125 100644 --- a/lib/gitlab/dependency_linker/composer_json_linker.rb +++ b/lib/gitlab/dependency_linker/composer_json_linker.rb @@ -11,7 +11,7 @@ module Gitlab end def package_url(name) - "https://packagist.org/packages/#{name}" if name =~ %r{\A#{REPO_REGEX}\z} + "https://packagist.org/packages/#{name}" if name =~ /\A#{REPO_REGEX}\z/ end end end diff --git a/lib/gitlab/dependency_linker/gemfile_linker.rb b/lib/gitlab/dependency_linker/gemfile_linker.rb index d034ea67387..bfea836bcb2 100644 --- a/lib/gitlab/dependency_linker/gemfile_linker.rb +++ b/lib/gitlab/dependency_linker/gemfile_linker.rb @@ -15,7 +15,7 @@ module Gitlab link_regex(/(github:|:github\s*=>)\s*['"](?<name>[^'"]+)['"]/, &method(:github_url)) # Link `git: "https://gitlab.example.com/user/repo"` to https://gitlab.example.com/user/repo - link_regex(%r{(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]}, &:itself) + link_regex(/(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]/, &:itself) # Link `source "https://rubygems.org"` to https://rubygems.org link_method_call('source', URL_REGEX, &:itself) diff --git a/lib/gitlab/dependency_linker/podspec_linker.rb b/lib/gitlab/dependency_linker/podspec_linker.rb index a52c7a02439..924e55e9820 100644 --- a/lib/gitlab/dependency_linker/podspec_linker.rb +++ b/lib/gitlab/dependency_linker/podspec_linker.rb @@ -12,7 +12,7 @@ module Gitlab def link_dependencies link_method_call('homepage', URL_REGEX, &:itself) - link_regex(%r{(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]}, &:itself) + link_regex(/(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]/, &:itself) 
link_method_call('license', &method(:license_url)) link_regex(/license\s*=\s*\{\s*(type:|:type\s*=>)\s*#{STRING_REGEX}/, &method(:license_url)) diff --git a/lib/gitlab/ee_compat_check.rb b/lib/gitlab/ee_compat_check.rb index d3b49b1ec75..0fb71976883 100644 --- a/lib/gitlab/ee_compat_check.rb +++ b/lib/gitlab/ee_compat_check.rb @@ -5,7 +5,7 @@ module Gitlab DEFAULT_CE_PROJECT_URL = 'https://gitlab.com/gitlab-org/gitlab-ce'.freeze EE_REPO_URL = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze CHECK_DIR = Rails.root.join('ee_compat_check') - IGNORED_FILES_REGEX = /(VERSION|CHANGELOG\.md:\d+)/.freeze + IGNORED_FILES_REGEX = %r{VERSION|CHANGELOG\.md|db/schema\.rb}i.freeze PLEASE_READ_THIS_BANNER = %Q{ ============================================================ ===================== PLEASE READ THIS ===================== diff --git a/lib/gitlab/email/reply_parser.rb b/lib/gitlab/email/reply_parser.rb index 558df87f36d..01c28d051ee 100644 --- a/lib/gitlab/email/reply_parser.rb +++ b/lib/gitlab/email/reply_parser.rb @@ -43,7 +43,7 @@ module Gitlab return "" unless decoded # Certain trigger phrases that means we didn't parse correctly - if decoded =~ /(Content\-Type\:|multipart\/alternative|text\/plain)/ + if decoded =~ %r{(Content\-Type\:|multipart/alternative|text/plain)} return "" end diff --git a/lib/gitlab/file_detector.rb b/lib/gitlab/file_detector.rb index 0e9ef4f897c..cc2638172ec 100644 --- a/lib/gitlab/file_detector.rb +++ b/lib/gitlab/file_detector.rb @@ -6,14 +6,14 @@ module Gitlab module FileDetector PATTERNS = { # Project files - readme: /\Areadme[^\/]*\z/i, - changelog: /\A(changelog|history|changes|news)[^\/]*\z/i, - license: /\A(licen[sc]e|copying)(\.[^\/]+)?\z/i, - contributing: /\Acontributing[^\/]*\z/i, + readme: %r{\Areadme[^/]*\z}i, + changelog: %r{\A(changelog|history|changes|news)[^/]*\z}i, + license: %r{\A(licen[sc]e|copying)(\.[^/]+)?\z}i, + contributing: %r{\Acontributing[^/]*\z}i, version: 'version', avatar: /\Alogo\.(png|jpg|gif)\z/, - issue_template: /\A\.gitlab\/issue_templates\/[^\/]+\.md\z/, - merge_request_template: /\A\.gitlab\/merge_request_templates\/[^\/]+\.md\z/, + issue_template: %r{\A\.gitlab/issue_templates/[^/]+\.md\z}, + merge_request_template: %r{\A\.gitlab/merge_request_templates/[^/]+\.md\z}, # Configuration files gitignore: '.gitignore', @@ -22,17 +22,17 @@ module Gitlab route_map: '.gitlab/route-map.yml', # Dependency files - cartfile: /\ACartfile[^\/]*\z/, + cartfile: %r{\ACartfile[^/]*\z}, composer_json: 'composer.json', gemfile: /\A(Gemfile|gems\.rb)\z/, gemfile_lock: 'Gemfile.lock', - gemspec: /\A[^\/]*\.gemspec\z/, + gemspec: %r{\A[^/]*\.gemspec\z}, godeps_json: 'Godeps.json', package_json: 'package.json', podfile: 'Podfile', - podspec_json: /\A[^\/]*\.podspec\.json\z/, - podspec: /\A[^\/]*\.podspec\z/, - requirements_txt: /\A[^\/]*requirements\.txt\z/, + podspec_json: %r{\A[^/]*\.podspec\.json\z}, + podspec: %r{\A[^/]*\.podspec\z}, + requirements_txt: %r{\A[^/]*requirements\.txt\z}, yarn_lock: 'yarn.lock' }.freeze diff --git a/lib/gitlab/gfm/uploads_rewriter.rb b/lib/gitlab/gfm/uploads_rewriter.rb index 8fab5489616..3fdc3c27f73 100644 --- a/lib/gitlab/gfm/uploads_rewriter.rb +++ b/lib/gitlab/gfm/uploads_rewriter.rb @@ -27,7 +27,7 @@ module Gitlab with_link_in_tmp_dir(file.file) do |open_tmp_file| new_uploader.store!(open_tmp_file) end - new_uploader.to_markdown + new_uploader.markdown_link end end diff --git a/lib/gitlab/git.rb b/lib/gitlab/git.rb index 71647099f83..d4e893b881c 100644 --- a/lib/gitlab/git.rb +++ b/lib/gitlab/git.rb @@ -6,12 
+6,13 @@ module Gitlab CommandError = Class.new(StandardError) CommitError = Class.new(StandardError) + OSError = Class.new(StandardError) class << self include Gitlab::EncodingHelper def ref_name(ref) - encode!(ref).sub(/\Arefs\/(tags|heads|remotes)\//, '') + encode!(ref).sub(%r{\Arefs/(tags|heads|remotes)/}, '') end def branch_name(ref) diff --git a/lib/gitlab/git/blame.rb b/lib/gitlab/git/blame.rb index 31effdba292..6d6ed065f79 100644 --- a/lib/gitlab/git/blame.rb +++ b/lib/gitlab/git/blame.rb @@ -42,9 +42,7 @@ module Gitlab end def load_blame_by_shelling_out - cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{@repo.path} blame -p #{@sha} -- #{@path}) - # Read in binary mode to ensure ASCII-8BIT - IO.popen(cmd, 'rb') {|io| io.read } + @repo.shell_blame(@sha, @path) end def process_raw_blame(output) diff --git a/lib/gitlab/git/blob.rb b/lib/gitlab/git/blob.rb index 13120120223..4828301dbb9 100644 --- a/lib/gitlab/git/blob.rb +++ b/lib/gitlab/git/blob.rb @@ -107,7 +107,7 @@ module Gitlab def find_entry_by_path(repository, root_id, path) root_tree = repository.lookup(root_id) # Strip leading slashes - path[/^\/*/] = '' + path[%r{^/*}] = '' path_arr = path.split('/') entry = root_tree.find do |entry| @@ -140,7 +140,7 @@ module Gitlab def find_by_gitaly(repository, sha, path, limit: MAX_DATA_DISPLAY_SIZE) return unless path - path = path.sub(/\A\/*/, '') + path = path.sub(%r{\A/*}, '') path = '/' if path.empty? name = File.basename(path) diff --git a/lib/gitlab/git/diff.rb b/lib/gitlab/git/diff.rb index ca94b4baa59..a203587aec1 100644 --- a/lib/gitlab/git/diff.rb +++ b/lib/gitlab/git/diff.rb @@ -44,7 +44,7 @@ module Gitlab # branch1...branch2) From the git documentation: # "git diff A...B" is equivalent to "git diff # $(git-merge-base A B) B" - repo.merge_base_commit(head, base) + repo.merge_base(head, base) end options ||= {} diff --git a/lib/gitlab/git/operation_service.rb b/lib/gitlab/git/operation_service.rb index 3fb0e2eed93..280def182d5 100644 --- a/lib/gitlab/git/operation_service.rb +++ b/lib/gitlab/git/operation_service.rb @@ -131,7 +131,10 @@ module Gitlab oldrev = branch.target - if oldrev == repository.merge_base(newrev, branch.target) + merge_base = repository.merge_base(newrev, branch.target) + raise Gitlab::Git::Repository::InvalidRef unless merge_base + + if oldrev == merge_base oldrev else raise Gitlab::Git::CommitError.new('Branch diverged') diff --git a/lib/gitlab/git/path_helper.rb b/lib/gitlab/git/path_helper.rb index 42c80aabd0a..155cf52f050 100644 --- a/lib/gitlab/git/path_helper.rb +++ b/lib/gitlab/git/path_helper.rb @@ -6,7 +6,7 @@ module Gitlab class << self def normalize_path(filename) # Strip all leading slashes so that //foo -> foo - filename[/^\/*/] = '' + filename[%r{^/*}] = '' # Expand relative paths (e.g. foo/../bar) filename = Pathname.new(filename) diff --git a/lib/gitlab/git/popen.rb b/lib/gitlab/git/popen.rb index 1ccca13ce2f..e0bd2bbe47b 100644 --- a/lib/gitlab/git/popen.rb +++ b/lib/gitlab/git/popen.rb @@ -19,6 +19,8 @@ module Gitlab cmd_output = "" cmd_status = 0 Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr| + stdout.set_encoding(Encoding::ASCII_8BIT) + yield(stdin) if block_given? stdin.close diff --git a/lib/gitlab/git/ref.rb b/lib/gitlab/git/ref.rb index a3ba9475ad0..fa71a4e7ea7 100644 --- a/lib/gitlab/git/ref.rb +++ b/lib/gitlab/git/ref.rb @@ -23,7 +23,7 @@ module Gitlab # Ex. 
# Ref.extract_branch_name('refs/heads/master') #=> 'master' def self.extract_branch_name(str) - str.gsub(/\Arefs\/heads\//, '') + str.gsub(%r{\Arefs/heads/}, '') end # Gitaly: this method will probably be migrated indirectly via its call sites. diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb index 638d335b523..f28624ff37a 100644 --- a/lib/gitlab/git/repository.rb +++ b/lib/gitlab/git/repository.rb @@ -462,15 +462,18 @@ module Gitlab path: nil, follow: false, skip_merges: false, - disable_walk: false, after: nil, before: nil } options = default_options.merge(options) - options[:limit] ||= 0 options[:offset] ||= 0 + limit = options[:limit] + if limit == 0 || !limit.is_a?(Integer) + raise ArgumentError.new("invalid Repository#log limit: #{limit.inspect}") + end + gitaly_migrate(:find_commits) do |is_enabled| if is_enabled gitaly_commit_client.find_commits(options) @@ -490,11 +493,7 @@ module Gitlab return [] end - if log_using_shell?(options) - log_by_shell(sha, options) - else - log_by_walk(sha, options) - end + log_by_shell(sha, options) end def count_commits(options) @@ -547,29 +546,34 @@ module Gitlab end # Returns the SHA of the most recent common ancestor of +from+ and +to+ - def merge_base_commit(from, to) + def merge_base(from, to) gitaly_migrate(:merge_base) do |is_enabled| if is_enabled gitaly_repository_client.find_merge_base(from, to) else - rugged.merge_base(from, to) + rugged_merge_base(from, to) end end end - alias_method :merge_base, :merge_base_commit # Gitaly note: JV: check gitlab-ee before removing this method. def rugged_is_ancestor?(ancestor_id, descendant_id) return false if ancestor_id.nil? || descendant_id.nil? - merge_base_commit(ancestor_id, descendant_id) == ancestor_id + rugged_merge_base(ancestor_id, descendant_id) == ancestor_id rescue Rugged::OdbError false end # Returns true is +from+ is direct ancestor to +to+, otherwise false def ancestor?(from, to) - gitaly_commit_client.ancestor?(from, to) + Gitlab::GitalyClient.migrate(:is_ancestor) do |is_enabled| + if is_enabled + gitaly_commit_client.ancestor?(from, to) + else + rugged_is_ancestor?(from, to) + end + end end def merged_branch_names(branch_names = []) @@ -614,11 +618,11 @@ module Gitlab if is_enabled gitaly_ref_client.find_ref_name(sha, ref_path) else - args = %W(#{Gitlab.config.git.bin_path} for-each-ref --count=1 #{ref_path} --contains #{sha}) + args = %W(for-each-ref --count=1 #{ref_path} --contains #{sha}) # Not found -> ["", 0] # Found -> ["b8d95eb4969eefacb0a58f6a28f6803f8070e7b9 commit\trefs/environments/production/77\n", 0] - popen(args, @path).first.split.last + run_git(args).first.split.last end end end @@ -676,11 +680,7 @@ module Gitlab if is_enabled gitaly_commit_client.commit_count(ref) else - walker = Rugged::Walker.new(rugged) - walker.sorting(Rugged::SORT_TOPO | Rugged::SORT_REVERSE) - oid = rugged.rev_parse_oid(ref) - walker.push(oid) - walker.count + rugged_commit_count(ref) end end end @@ -883,17 +883,12 @@ module Gitlab end def delete_refs(*ref_names) - instructions = ref_names.map do |ref| - "delete #{ref}\x00\x00" - end - - command = %W[#{Gitlab.config.git.bin_path} update-ref --stdin -z] - message, status = popen(command, path) do |stdin| - stdin.write(instructions.join) - end - - unless status.zero? 
- raise GitError.new("Could not delete refs #{ref_names}: #{message}") + gitaly_migrate(:delete_refs) do |is_enabled| + if is_enabled + gitaly_delete_refs(*ref_names) + else + git_delete_refs(*ref_names) + end end end @@ -1102,10 +1097,14 @@ module Gitlab end def write_ref(ref_path, ref, old_ref: nil, shell: true) - if shell - shell_write_ref(ref_path, ref, old_ref) - else - rugged_write_ref(ref_path, ref) + ref_path = "#{Gitlab::Git::BRANCH_REF_PREFIX}#{ref_path}" unless ref_path.start_with?("refs/") || ref_path == "HEAD" + + gitaly_migrate(:write_ref) do |is_enabled| + if is_enabled + gitaly_repository_client.write_ref(ref_path, ref, old_ref, shell) + else + local_write_ref(ref_path, ref, old_ref: old_ref, shell: shell) + end end end @@ -1128,13 +1127,6 @@ module Gitlab end # Refactoring aid; allows us to copy code from app/models/repository.rb - def run_git_with_timeout(args, timeout, env: {}) - circuit_breaker.perform do - popen_with_timeout([Gitlab.config.git.bin_path, *args], timeout, path, env) - end - end - - # Refactoring aid; allows us to copy code from app/models/repository.rb def commit(ref = 'HEAD') Gitlab::Git::Commit.find(self, ref) end @@ -1314,7 +1306,15 @@ module Gitlab # rubocop:enable Metrics/ParameterLists def write_config(full_path:) - rugged.config['gitlab.fullpath'] = full_path if full_path.present? + return unless full_path.present? + + gitaly_migrate(:write_config) do |is_enabled| + if is_enabled + gitaly_repository_client.write_config(full_path: full_path) + else + rugged_write_config(full_path: full_path) + end + end end def gitaly_repository @@ -1389,8 +1389,18 @@ module Gitlab run_git(args).first.scrub.split(/^--$/) end + def can_be_merged?(source_sha, target_branch) + gitaly_migrate(:can_be_merged) do |is_enabled| + if is_enabled + gitaly_can_be_merged?(source_sha, find_branch(target_branch, true).target) + else + rugged_can_be_merged?(source_sha, target_branch) + end + end + end + def search_files_by_name(query, ref) - safe_query = Regexp.escape(query.sub(/^\/*/, "")) + safe_query = Regexp.escape(query.sub(%r{^/*}, "")) return [] if empty? || safe_query.blank? @@ -1409,8 +1419,45 @@ module Gitlab end end + def shell_blame(sha, path) + output, _status = run_git(%W(blame -p #{sha} -- #{path})) + output + end + + def can_be_merged?(source_sha, target_branch) + gitaly_migrate(:can_be_merged) do |is_enabled| + if is_enabled + gitaly_can_be_merged?(source_sha, find_branch(target_branch).target) + else + rugged_can_be_merged?(source_sha, target_branch) + end + end + end + + def last_commit_id_for_path(sha, path) + gitaly_migrate(:last_commit_for_path) do |is_enabled| + if is_enabled + last_commit_for_path_by_gitaly(sha, path).id + else + last_commit_id_for_path_by_shelling_out(sha, path) + end + end + end + private + def local_write_ref(ref_path, ref, old_ref: nil, shell: true) + if shell + shell_write_ref(ref_path, ref, old_ref) + else + rugged_write_ref(ref_path, ref) + end + end + + def rugged_write_config(full_path:) + rugged.config['gitlab.fullpath'] = full_path + end + def shell_write_ref(ref_path, ref, old_ref) raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ') raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00") @@ -1433,6 +1480,12 @@ module Gitlab def run_git(args, chdir: path, env: {}, nice: false, &block) cmd = [Gitlab.config.git.bin_path, *args] cmd.unshift("nice") if nice + + object_directories = alternate_object_directories + if object_directories.any? 
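
# The new write_ref first normalizes bare branch names into fully qualified
# refs. A standalone sketch of that step, assuming Gitlab::Git::BRANCH_REF_PREFIX
# is "refs/heads/" (the helper name below is illustrative only):
BRANCH_REF_PREFIX = 'refs/heads/'.freeze

def qualify_ref_path(ref_path)
  return ref_path if ref_path.start_with?('refs/') || ref_path == 'HEAD'

  "#{BRANCH_REF_PREFIX}#{ref_path}"
end

qualify_ref_path('master')           # => "refs/heads/master"
qualify_ref_path('refs/tags/v1.0.0') # => "refs/tags/v1.0.0"
qualify_ref_path('HEAD')             # => "HEAD"
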
+ env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = object_directories.join(File::PATH_SEPARATOR) + end + circuit_breaker.perform do popen(cmd, chdir, env, &block) end @@ -1446,6 +1499,12 @@ module Gitlab output end + def run_git_with_timeout(args, timeout, env: {}) + circuit_breaker.perform do + popen_with_timeout([Gitlab.config.git.bin_path, *args], timeout, path, env) + end + end + def fresh_worktree?(path) File.exist?(path) && !clean_stuck_worktree(path) end @@ -1460,7 +1519,7 @@ module Gitlab if sparse_checkout_files # Create worktree without checking out run_git!(base_args + ['--no-checkout', worktree_path], env: env) - worktree_git_path = run_git!(%w(rev-parse --git-dir), chdir: worktree_path) + worktree_git_path = run_git!(%w(rev-parse --git-dir), chdir: worktree_path).chomp configure_sparse_checkout(worktree_git_path, sparse_checkout_files) @@ -1593,24 +1652,6 @@ module Gitlab end end - def log_using_shell?(options) - options[:path].present? || - options[:disable_walk] || - options[:skip_merges] || - options[:after] || - options[:before] - end - - def log_by_walk(sha, options) - walk_options = { - show: sha, - sort: Rugged::SORT_NONE, - limit: options[:limit], - offset: options[:offset] - } - Rugged::Walker.walk(rugged, walk_options).to_a - end - # Gitaly note: JV: although #log_by_shell shells out to Git I think the # complexity is such that we should migrate it as Ruby before trying to # do it in Go. @@ -1624,7 +1665,7 @@ module Gitlab offset_in_ruby = use_follow_flag && options[:offset].present? limit += offset if offset_in_ruby - cmd = %W[#{Gitlab.config.git.bin_path} --git-dir=#{path} log] + cmd = %w[log] cmd << "--max-count=#{limit}" cmd << '--format=%H' cmd << "--skip=#{offset}" unless offset_in_ruby @@ -1640,7 +1681,7 @@ module Gitlab cmd += Array(options[:path]) end - raw_output = IO.popen(cmd) { |io| io.read } + raw_output, _status = run_git(cmd) lines = offset_in_ruby ? raw_output.lines.drop(offset) : raw_output.lines lines.map! { |c| Rugged::Commit.new(rugged, c.strip) } @@ -1678,18 +1719,23 @@ module Gitlab end def alternate_object_directories - relative_paths = Gitlab::Git::Env.all.values_at(*ALLOWED_OBJECT_RELATIVE_DIRECTORIES_VARIABLES).flatten.compact + relative_paths = relative_object_directories if relative_paths.any? relative_paths.map { |d| File.join(path, d) } else - Gitlab::Git::Env.all.values_at(*ALLOWED_OBJECT_DIRECTORIES_VARIABLES) - .flatten - .compact - .flat_map { |d| d.split(File::PATH_SEPARATOR) } + absolute_object_directories.flat_map { |d| d.split(File::PATH_SEPARATOR) } end end + def relative_object_directories + Gitlab::Git::Env.all.values_at(*ALLOWED_OBJECT_RELATIVE_DIRECTORIES_VARIABLES).flatten.compact + end + + def absolute_object_directories + Gitlab::Git::Env.all.values_at(*ALLOWED_OBJECT_DIRECTORIES_VARIABLES).flatten.compact + end + # Get the content of a blob for a given commit. If the blob is a commit # (for submodules) then return the blob's OID. 
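
# run_git now forwards alternate object directories to git through the standard
# GIT_ALTERNATE_OBJECT_DIRECTORIES variable, so objects outside the main object
# store stay visible without --git-dir juggling. In isolation (paths invented):
object_directories = %w[
  /repos/foo.git/objects/incoming-abc123
  /repos/foo.git/objects/tmp_objdir-def456
]

env = {}
env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = object_directories.join(File::PATH_SEPARATOR) if object_directories.any?

env
# => {"GIT_ALTERNATE_OBJECT_DIRECTORIES"=>
#     "/repos/foo.git/objects/incoming-abc123:/repos/foo.git/objects/tmp_objdir-def456"}
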
def blob_content(commit, blob_name) @@ -1833,13 +1879,13 @@ module Gitlab def count_commits_by_shelling_out(options) cmd = count_commits_shelling_command(options) - raw_output = IO.popen(cmd) { |io| io.read } + raw_output, _status = run_git(cmd) process_count_commits_raw_output(raw_output, options) end def count_commits_shelling_command(options) - cmd = %W[#{Gitlab.config.git.bin_path} --git-dir=#{path} rev-list] + cmd = %w[rev-list] cmd << "--after=#{options[:after].iso8601}" if options[:after] cmd << "--before=#{options[:before].iso8601}" if options[:before] cmd << "--max-count=#{options[:max_count]}" if options[:max_count] @@ -1884,20 +1930,17 @@ module Gitlab return [] end - cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{path} ls-tree) - cmd += %w(-r) - cmd += %w(--full-tree) - cmd += %w(--full-name) - cmd += %W(-- #{actual_ref}) + cmd = %W(ls-tree -r --full-tree --full-name -- #{actual_ref}) + raw_output, _status = run_git(cmd) - raw_output = IO.popen(cmd, &:read).split("\n").map do |f| + lines = raw_output.split("\n").map do |f| stuff, path = f.split("\t") _mode, type, _sha = stuff.split(" ") path if type == "blob" # Contain only blob type end - raw_output.compact + lines.compact end # Returns true if the given ref name exists @@ -1971,7 +2014,7 @@ module Gitlab target_commit = Gitlab::Git::Commit.find(self, rugged_ref.target) Gitlab::Git::Branch.new(self, rugged_ref.name, rugged_ref.target, target_commit) rescue Rugged::ReferenceError => e - raise InvalidRef.new("Branch #{ref} already exists") if e.to_s =~ /'refs\/heads\/#{ref}'/ + raise InvalidRef.new("Branch #{ref} already exists") if e.to_s =~ %r{'refs/heads/#{ref}'} raise InvalidRef.new("Invalid reference #{start_point}") end @@ -2144,7 +2187,7 @@ module Gitlab source_sha end - rescue Rugged::ReferenceError + rescue Rugged::ReferenceError, InvalidRef raise ArgumentError, 'Invalid merge source' end @@ -2156,6 +2199,24 @@ module Gitlab remote_update(remote_name, url: url) end + def git_delete_refs(*ref_names) + instructions = ref_names.map do |ref| + "delete #{ref}\x00\x00" + end + + message, status = run_git(%w[update-ref --stdin -z]) do |stdin| + stdin.write(instructions.join) + end + + unless status.zero? + raise GitError.new("Could not delete refs #{ref_names}: #{message}") + end + end + + def gitaly_delete_refs(*ref_names) + gitaly_ref_client.delete_refs(refs: ref_names) + end + def rugged_remove_remote(remote_name) # When a remote is deleted all its remote refs are deleted too, but in # the case of mirrors we map its refs (that would usualy go under @@ -2218,6 +2279,14 @@ module Gitlab run_git(['fetch', remote_name], env: env).last.zero? end + def gitaly_can_be_merged?(their_commit, our_commit) + !gitaly_conflicts_client(our_commit, their_commit).conflicts? + end + + def rugged_can_be_merged?(their_commit, our_commit) + !rugged.merge_commits(our_commit, their_commit).conflicts? + end + def gitlab_projects_error raise CommandError, @gitlab_projects.output end @@ -2241,6 +2310,39 @@ module Gitlab .commits_by_message(query, revision: ref, path: path, limit: limit, offset: offset) .map { |c| commit(c) } end + + def gitaly_can_be_merged?(their_commit, our_commit) + !gitaly_conflicts_client(our_commit, their_commit).conflicts? + end + + def rugged_can_be_merged?(their_commit, our_commit) + !rugged.merge_commits(our_commit, their_commit).conflicts? 
+ end + + def last_commit_for_path_by_gitaly(sha, path) + gitaly_commit_client.last_commit_for_path(sha, path) + end + + def last_commit_id_for_path_by_shelling_out(sha, path) + args = %W(rev-list --max-count=1 #{sha} -- #{path}) + run_git_with_timeout(args, Gitlab::Git::Popen::FAST_GIT_PROCESS_TIMEOUT).first.strip + end + + def rugged_merge_base(from, to) + rugged.merge_base(from, to) + rescue Rugged::ReferenceError + nil + end + + def rugged_commit_count(ref) + walker = Rugged::Walker.new(rugged) + walker.sorting(Rugged::SORT_TOPO | Rugged::SORT_REVERSE) + oid = rugged.rev_parse_oid(ref) + walker.push(oid) + walker.count + rescue Rugged::ReferenceError + 0 + end end end end diff --git a/lib/gitlab/git/repository_mirroring.rb b/lib/gitlab/git/repository_mirroring.rb index effb1f0ca19..dc424a433fb 100644 --- a/lib/gitlab/git/repository_mirroring.rb +++ b/lib/gitlab/git/repository_mirroring.rb @@ -43,7 +43,7 @@ module Gitlab branches = [] rugged.references.each("refs/remotes/#{remote_name}/*").map do |ref| - name = ref.name.sub(/\Arefs\/remotes\/#{remote_name}\//, '') + name = ref.name.sub(%r{\Arefs/remotes/#{remote_name}/}, '') begin target_commit = Gitlab::Git::Commit.find(self, ref.target) diff --git a/lib/gitlab/git/tree.rb b/lib/gitlab/git/tree.rb index 5cf336af3c6..ba6058fd3c9 100644 --- a/lib/gitlab/git/tree.rb +++ b/lib/gitlab/git/tree.rb @@ -83,6 +83,8 @@ module Gitlab commit_id: sha ) end + rescue Rugged::ReferenceError + [] end end diff --git a/lib/gitlab/git/wiki.rb b/lib/gitlab/git/wiki.rb index d4a53d32c28..ccdb8975342 100644 --- a/lib/gitlab/git/wiki.rb +++ b/lib/gitlab/git/wiki.rb @@ -117,6 +117,20 @@ module Gitlab page.url_path end + def page_formatted_data(title:, dir: nil, version: nil) + version = version&.id + + @repository.gitaly_migrate(:wiki_page_formatted_data) do |is_enabled| + if is_enabled + gitaly_wiki_client.get_formatted_data(title: title, dir: dir, version: version) + else + # We don't use #page because if wiki_find_page feature is enabled, we would + # get a page without formatted_data. + gollum_find_page(title: title, dir: dir, version: version)&.formatted_data + end + end + end + private # options: diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb index 6bd256f57c7..c5d3e944f7d 100644 --- a/lib/gitlab/gitaly_client.rb +++ b/lib/gitlab/gitaly_client.rb @@ -6,6 +6,7 @@ require 'grpc/health/v1/health_services_pb' module Gitlab module GitalyClient + include Gitlab::Metrics::Methods module MigrationStatus DISABLED = 1 OPT_IN = 2 @@ -33,8 +34,6 @@ module Gitlab CLIENT_NAME = (Sidekiq.server? ? 
'gitlab-sidekiq' : 'gitlab-web').freeze MUTEX = Mutex.new - METRICS_MUTEX = Mutex.new - private_constant :MUTEX, :METRICS_MUTEX class << self attr_accessor :query_time @@ -42,28 +41,14 @@ module Gitlab self.query_time = 0 - def self.migrate_histogram - @migrate_histogram ||= - METRICS_MUTEX.synchronize do - # If a thread was blocked on the mutex, the value was set already - return @migrate_histogram if @migrate_histogram - - Gitlab::Metrics.histogram(:gitaly_migrate_call_duration_seconds, - "Gitaly migration call execution timings", - gitaly_enabled: nil, feature: nil) - end + define_histogram :gitaly_migrate_call_duration_seconds do + docstring "Gitaly migration call execution timings" + base_labels gitaly_enabled: nil, feature: nil end - def self.gitaly_call_histogram - @gitaly_call_histogram ||= - METRICS_MUTEX.synchronize do - # If a thread was blocked on the mutex, the value was set already - return @gitaly_call_histogram if @gitaly_call_histogram - - Gitlab::Metrics.histogram(:gitaly_controller_action_duration_seconds, - "Gitaly endpoint histogram by controller and action combination", - Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil)) - end + define_histogram :gitaly_controller_action_duration_seconds do + docstring "Gitaly endpoint histogram by controller and action combination" + base_labels Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil) end def self.stub(name, storage) @@ -145,7 +130,7 @@ module Gitlab # Keep track, seperately, for the performance bar self.query_time += duration - gitaly_call_histogram.observe( + gitaly_controller_action_duration_seconds.observe( current_transaction_labels.merge(gitaly_service: service.to_s, rpc: rpc.to_s), duration) end @@ -247,7 +232,7 @@ module Gitlab yield is_enabled ensure total_time = Gitlab::Metrics::System.monotonic_time - start - migrate_histogram.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time) + gitaly_migrate_call_duration_seconds.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time) feature_stack.shift Thread.current[:gitaly_feature_stack] = nil if feature_stack.empty? end diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb index cadc7149301..5767f06b0ce 100644 --- a/lib/gitlab/gitaly_client/commit_service.rb +++ b/lib/gitlab/gitaly_client/commit_service.rb @@ -257,7 +257,7 @@ module Gitlab offset: options[:offset], follow: options[:follow], skip_merges: options[:skip_merges], - disable_walk: options[:disable_walk] + disable_walk: true # This option is deprecated. The 'walk' implementation is being removed. ) request.after = GitalyClient.timestamp(options[:after]) if options[:after] request.before = GitalyClient.timestamp(options[:before]) if options[:before] diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb index c2b4155e6a5..cd2734b5a07 100644 --- a/lib/gitlab/gitaly_client/operation_service.rb +++ b/lib/gitlab/gitaly_client/operation_service.rb @@ -103,7 +103,13 @@ module Gitlab request_enum.push(Gitaly::UserMergeBranchRequest.new(apply: true)) - branch_update = response_enum.next.branch_update + second_response = response_enum.next + + if second_response.pre_receive_error.present? + raise Gitlab::Git::HooksService::PreReceiveError, second_response.pre_receive_error + end + + branch_update = second_response.branch_update return if branch_update.nil? 
raise Gitlab::Git::CommitError.new('failed to apply merge to branch') unless branch_update.commit_id.present? diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb index f8e2a27f3fe..8b9a224b700 100644 --- a/lib/gitlab/gitaly_client/ref_service.rb +++ b/lib/gitlab/gitaly_client/ref_service.rb @@ -133,13 +133,16 @@ module Gitlab GitalyClient.call(@repository.storage, :ref_service, :delete_branch, request) end - def delete_refs(except_with_prefixes:) + def delete_refs(refs: [], except_with_prefixes: []) request = Gitaly::DeleteRefsRequest.new( repository: @gitaly_repo, - except_with_prefix: except_with_prefixes + refs: refs.map { |r| encode_binary(r) }, + except_with_prefix: except_with_prefixes.map { |r| encode_binary(r) } ) - GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request) + response = GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request) + + raise Gitlab::Git::Repository::GitError, response.git_error if response.git_error.present? end private diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb index b0dbaf11598..60706b4f0d8 100644 --- a/lib/gitlab/gitaly_client/repository_service.rb +++ b/lib/gitlab/gitaly_client/repository_service.rb @@ -203,6 +203,35 @@ module Gitlab timeout: GitalyClient.default_timeout ) end + + def write_ref(ref_path, ref, old_ref, shell) + request = Gitaly::WriteRefRequest.new( + repository: @gitaly_repo, + ref: ref_path.b, + revision: ref.b, + shell: shell + ) + request.old_revision = old_ref.b unless old_ref.nil? + + response = GitalyClient.call(@storage, :repository_service, :write_ref, request) + + raise Gitlab::Git::CommandError, encode!(response.error) if response.error.present? + + true + end + + def write_config(full_path:) + request = Gitaly::WriteConfigRequest.new(repository: @gitaly_repo, full_path: full_path) + response = GitalyClient.call( + @storage, + :repository_service, + :write_config, + request, + timeout: GitalyClient.fast_timeout + ) + + raise Gitlab::Git::OSError.new(response.error) unless response.error.empty? 
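
# write_ref converts the ref fields with String#b before placing them in the
# protobuf request, since ref names are byte strings that need not be valid UTF-8:
ref_path = "refs/heads/caf\u00e9"
ref_path.encoding     # => #<Encoding:UTF-8>
ref_path.b.encoding   # => #<Encoding:ASCII-8BIT>
ref_path.b.bytesize   # => 16, the bytes themselves are unchanged
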
+ end end end end diff --git a/lib/gitlab/gitaly_client/server_service.rb b/lib/gitlab/gitaly_client/server_service.rb new file mode 100644 index 00000000000..2e1076d1f66 --- /dev/null +++ b/lib/gitlab/gitaly_client/server_service.rb @@ -0,0 +1,16 @@ +module Gitlab + module GitalyClient + # Meant for extraction of server data, and later maybe to perform misc task + # + # Not meant for connection logic, look in Gitlab::GitalyClient + class ServerService + def initialize(storage) + @storage = storage + end + + def info + GitalyClient.call(@storage, :server_service, :server_info, Gitaly::ServerInfoRequest.new) + end + end + end +end diff --git a/lib/gitlab/gitaly_client/wiki_service.rb b/lib/gitlab/gitaly_client/wiki_service.rb index 5c5b170a3e0..8e87a8cc36f 100644 --- a/lib/gitlab/gitaly_client/wiki_service.rb +++ b/lib/gitlab/gitaly_client/wiki_service.rb @@ -127,6 +127,18 @@ module Gitlab wiki_file end + def get_formatted_data(title:, dir: nil, version: nil) + request = Gitaly::WikiGetFormattedDataRequest.new( + repository: @gitaly_repo, + title: encode_binary(title), + revision: encode_binary(version), + directory: encode_binary(dir) + ) + + response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_formatted_data, request) + response.reduce("") { |memo, msg| memo << msg.data } + end + private # If a block is given and the yielded value is true, iteration will be diff --git a/lib/gitlab/github_import/representation/diff_note.rb b/lib/gitlab/github_import/representation/diff_note.rb index bb7439a0641..be1334ca98a 100644 --- a/lib/gitlab/github_import/representation/diff_note.rb +++ b/lib/gitlab/github_import/representation/diff_note.rb @@ -13,7 +13,7 @@ module Gitlab :diff_hunk, :author, :note, :created_at, :updated_at, :github_id - NOTEABLE_ID_REGEX = /\/pull\/(?<iid>\d+)/i + NOTEABLE_ID_REGEX = %r{/pull/(?<iid>\d+)}i # Builds a diff note from a GitHub API response. # diff --git a/lib/gitlab/github_import/representation/note.rb b/lib/gitlab/github_import/representation/note.rb index a68bc4c002f..070e3b2db8d 100644 --- a/lib/gitlab/github_import/representation/note.rb +++ b/lib/gitlab/github_import/representation/note.rb @@ -12,7 +12,7 @@ module Gitlab expose_attribute :noteable_id, :noteable_type, :author, :note, :created_at, :updated_at, :github_id - NOTEABLE_TYPE_REGEX = /\/(?<type>(pull|issues))\/(?<iid>\d+)/i + NOTEABLE_TYPE_REGEX = %r{/(?<type>(pull|issues))/(?<iid>\d+)}i # Builds a note from a GitHub API response. 
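
# wiki_get_formatted_data is a streaming RPC; the client concatenates the data
# chunk of each message into one string. The same reduce in isolation, with
# OpenStruct standing in for the Gitaly response messages:
require 'ostruct'

response = [OpenStruct.new(data: '<h1>Home</h1>'), OpenStruct.new(data: '<p>hello</p>')]
response.reduce("") { |memo, msg| memo << msg.data }
# => "<h1>Home</h1><p>hello</p>"
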
# diff --git a/lib/gitlab/import_export/file_importer.rb b/lib/gitlab/import_export/file_importer.rb index 5c971564a73..0f4c3498036 100644 --- a/lib/gitlab/import_export/file_importer.rb +++ b/lib/gitlab/import_export/file_importer.rb @@ -59,7 +59,7 @@ module Gitlab end def extracted_files - Dir.glob("#{@shared.export_path}/**/*", File::FNM_DOTMATCH).reject { |f| f =~ /.*\/\.{1,2}$/ } + Dir.glob("#{@shared.export_path}/**/*", File::FNM_DOTMATCH).reject { |f| f =~ %r{.*/\.{1,2}$} } end end end diff --git a/lib/gitlab/import_export/relation_factory.rb b/lib/gitlab/import_export/relation_factory.rb index cb711a83433..759833a5ee5 100644 --- a/lib/gitlab/import_export/relation_factory.rb +++ b/lib/gitlab/import_export/relation_factory.rb @@ -139,13 +139,12 @@ module Gitlab end def setup_label - return unless @relation_hash['type'] == 'GroupLabel' - # If there's no group, move the label to a project label - if @relation_hash['group_id'] + if @relation_hash['type'] == 'GroupLabel' && @relation_hash['group_id'] @relation_hash['project_id'] = nil @relation_name = :group_label else + @relation_hash['group_id'] = nil @relation_hash['type'] = 'ProjectLabel' end end diff --git a/lib/gitlab/import_export/shared.rb b/lib/gitlab/import_export/shared.rb index d03cbc880fd..b34cafc6876 100644 --- a/lib/gitlab/import_export/shared.rb +++ b/lib/gitlab/import_export/shared.rb @@ -19,8 +19,13 @@ module Gitlab def error(error) error_out(error.message, caller[0].dup) @errors << error.message + # Debug: - Rails.logger.error(error.backtrace.join("\n")) + if error.backtrace + Rails.logger.error("Import/Export backtrace: #{error.backtrace.join("\n")}") + else + Rails.logger.error("No backtrace found") + end end private diff --git a/lib/gitlab/import_export/uploads_saver.rb b/lib/gitlab/import_export/uploads_saver.rb index 627a487d577..2f08dda55fd 100644 --- a/lib/gitlab/import_export/uploads_saver.rb +++ b/lib/gitlab/import_export/uploads_saver.rb @@ -17,15 +17,13 @@ module Gitlab false end - private + def uploads_path + FileUploader.absolute_base_dir(@project) + end def uploads_export_path File.join(@shared.export_path, 'uploads') end - - def uploads_path - FileUploader.dynamic_path_segment(@project) - end end end end diff --git a/lib/gitlab/metrics.rb b/lib/gitlab/metrics.rb index 4779755bb22..7d63ca5627d 100644 --- a/lib/gitlab/metrics.rb +++ b/lib/gitlab/metrics.rb @@ -1,7 +1,7 @@ module Gitlab module Metrics - extend Gitlab::Metrics::InfluxDb - extend Gitlab::Metrics::Prometheus + include Gitlab::Metrics::InfluxDb + include Gitlab::Metrics::Prometheus def self.enabled? influx_metrics_enabled? || prometheus_metrics_enabled? diff --git a/lib/gitlab/metrics/influx_db.rb b/lib/gitlab/metrics/influx_db.rb index ef44a13df51..66f30e3b397 100644 --- a/lib/gitlab/metrics/influx_db.rb +++ b/lib/gitlab/metrics/influx_db.rb @@ -1,179 +1,187 @@ module Gitlab module Metrics module InfluxDb - include Gitlab::CurrentSettings - extend self + extend ActiveSupport::Concern + include Gitlab::Metrics::Methods + + EXECUTION_MEASUREMENT_BUCKETS = [0.001, 0.01, 0.1, 1].freeze MUTEX = Mutex.new private_constant :MUTEX - def influx_metrics_enabled? - settings[:enabled] || false - end + class_methods do + def influx_metrics_enabled? 
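
# Behaviour of the revised setup_label in isolation: a GroupLabel that arrives
# without a group_id is now downgraded to a ProjectLabel (hash values invented):
relation_hash = { 'type' => 'GroupLabel', 'group_id' => nil, 'project_id' => 42 }

if relation_hash['type'] == 'GroupLabel' && relation_hash['group_id']
  relation_hash['project_id'] = nil
else
  relation_hash['group_id'] = nil
  relation_hash['type'] = 'ProjectLabel'
end

relation_hash
# => {"type"=>"ProjectLabel", "group_id"=>nil, "project_id"=>42}
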
+ settings[:enabled] || false + end - # Prometheus histogram buckets used for arbitrary code measurements - EXECUTION_MEASUREMENT_BUCKETS = [0.001, 0.002, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1].freeze - RAILS_ROOT = Rails.root.to_s - METRICS_ROOT = Rails.root.join('lib', 'gitlab', 'metrics').to_s - PATH_REGEX = /^#{RAILS_ROOT}\/?/ - - def settings - @settings ||= { - enabled: current_application_settings[:metrics_enabled], - pool_size: current_application_settings[:metrics_pool_size], - timeout: current_application_settings[:metrics_timeout], - method_call_threshold: current_application_settings[:metrics_method_call_threshold], - host: current_application_settings[:metrics_host], - port: current_application_settings[:metrics_port], - sample_interval: current_application_settings[:metrics_sample_interval] || 15, - packet_size: current_application_settings[:metrics_packet_size] || 1 - } - end + # Prometheus histogram buckets used for arbitrary code measurements + + def settings + @settings ||= begin + current_settings = Gitlab::CurrentSettings.current_application_settings + + { + enabled: current_settings[:metrics_enabled], + pool_size: current_settings[:metrics_pool_size], + timeout: current_settings[:metrics_timeout], + method_call_threshold: current_settings[:metrics_method_call_threshold], + host: current_settings[:metrics_host], + port: current_settings[:metrics_port], + sample_interval: current_settings[:metrics_sample_interval] || 15, + packet_size: current_settings[:metrics_packet_size] || 1 + } + end + end - def mri? - RUBY_ENGINE == 'ruby' - end + def mri? + RUBY_ENGINE == 'ruby' + end - def method_call_threshold - # This is memoized since this method is called for every instrumented - # method. Loading data from an external cache on every method call slows - # things down too much. - # in milliseconds - @method_call_threshold ||= settings[:method_call_threshold] - end + def method_call_threshold + # This is memoized since this method is called for every instrumented + # method. Loading data from an external cache on every method call slows + # things down too much. + # in milliseconds + @method_call_threshold ||= settings[:method_call_threshold] + end - def submit_metrics(metrics) - prepared = prepare_metrics(metrics) + def submit_metrics(metrics) + prepared = prepare_metrics(metrics) - pool&.with do |connection| - prepared.each_slice(settings[:packet_size]) do |slice| - begin - connection.write_points(slice) - rescue StandardError + pool&.with do |connection| + prepared.each_slice(settings[:packet_size]) do |slice| + begin + connection.write_points(slice) + rescue StandardError + end end end + rescue Errno::EADDRNOTAVAIL, SocketError => ex + Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.') + Gitlab::EnvironmentLogger.error(ex) end - rescue Errno::EADDRNOTAVAIL, SocketError => ex - Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.') - Gitlab::EnvironmentLogger.error(ex) - end - def prepare_metrics(metrics) - metrics.map do |hash| - new_hash = hash.symbolize_keys + def prepare_metrics(metrics) + metrics.map do |hash| + new_hash = hash.symbolize_keys - new_hash[:tags].each do |key, value| - if value.blank? - new_hash[:tags].delete(key) - else - new_hash[:tags][key] = escape_value(value) + new_hash[:tags].each do |key, value| + if value.blank? 
+ new_hash[:tags].delete(key) + else + new_hash[:tags][key] = escape_value(value) + end end + + new_hash end + end - new_hash + def escape_value(value) + value.to_s.gsub('=', '\\=') end - end - def escape_value(value) - value.to_s.gsub('=', '\\=') - end + # Measures the execution time of a block. + # + # Example: + # + # Gitlab::Metrics.measure(:find_by_username_duration) do + # User.find_by_username(some_username) + # end + # + # name - The name of the field to store the execution time in. + # + # Returns the value yielded by the supplied block. + def measure(name) + trans = current_transaction + + return yield unless trans + + real_start = Time.now.to_f + cpu_start = System.cpu_time + + retval = yield + + cpu_stop = System.cpu_time + real_stop = Time.now.to_f + + real_time = (real_stop - real_start) + cpu_time = cpu_stop - cpu_start + + real_duration_seconds = fetch_histogram("gitlab_#{name}_real_duration_seconds".to_sym) do + docstring "Measure #{name}" + base_labels Transaction::BASE_LABELS + buckets EXECUTION_MEASUREMENT_BUCKETS + end - # Measures the execution time of a block. - # - # Example: - # - # Gitlab::Metrics.measure(:find_by_username_duration) do - # User.find_by_username(some_username) - # end - # - # name - The name of the field to store the execution time in. - # - # Returns the value yielded by the supplied block. - def measure(name) - trans = current_transaction - - return yield unless trans - - real_start = Time.now.to_f - cpu_start = System.cpu_time - - retval = yield - - cpu_stop = System.cpu_time - real_stop = Time.now.to_f - - real_time = (real_stop - real_start) - cpu_time = cpu_stop - cpu_start - - Gitlab::Metrics.histogram("gitlab_#{name}_real_duration_seconds".to_sym, - "Measure #{name}", - Transaction::BASE_LABELS, - EXECUTION_MEASUREMENT_BUCKETS) - .observe(trans.labels, real_time) - - Gitlab::Metrics.histogram("gitlab_#{name}_cpu_duration_seconds".to_sym, - "Measure #{name}", - Transaction::BASE_LABELS, - EXECUTION_MEASUREMENT_BUCKETS) - .observe(trans.labels, cpu_time / 1000.0) - - # InfluxDB stores the _real_time time values as milliseconds - trans.increment("#{name}_real_time", real_time * 1000, false) - trans.increment("#{name}_cpu_time", cpu_time, false) - trans.increment("#{name}_call_count", 1, false) - - retval - end + real_duration_seconds.observe(trans.labels, real_time) - # Sets the action of the current transaction (if any) - # - # action - The name of the action. - def action=(action) - trans = current_transaction + cpu_duration_seconds = fetch_histogram("gitlab_#{name}_cpu_duration_seconds".to_sym) do + docstring "Measure #{name}" + base_labels Transaction::BASE_LABELS + buckets EXECUTION_MEASUREMENT_BUCKETS + with_feature "prometheus_metrics_measure_#{name}_cpu_duration" + end + cpu_duration_seconds.observe(trans.labels, cpu_time) - trans&.action = action - end + # InfluxDB stores the _real_time and _cpu_time time values as milliseconds + trans.increment("#{name}_real_time", real_time.in_milliseconds, false) + trans.increment("#{name}_cpu_time", cpu_time.in_milliseconds, false) + trans.increment("#{name}_call_count", 1, false) - # Tracks an event. - # - # See `Gitlab::Metrics::Transaction#add_event` for more details. - def add_event(*args) - trans = current_transaction + retval + end - trans&.add_event(*args) - end + # Sets the action of the current transaction (if any) + # + # action - The name of the action. + def action=(action) + trans = current_transaction - # Returns the prefix to use for the name of a series. 
- def series_prefix - @series_prefix ||= Sidekiq.server? ? 'sidekiq_' : 'rails_' - end + trans&.action = action + end - # Allow access from other metrics related middlewares - def current_transaction - Transaction.current - end + # Tracks an event. + # + # See `Gitlab::Metrics::Transaction#add_event` for more details. + def add_event(*args) + trans = current_transaction - # When enabled this should be set before being used as the usual pattern - # "@foo ||= bar" is _not_ thread-safe. - # rubocop:disable Gitlab/ModuleWithInstanceVariables - def pool - if influx_metrics_enabled? - if @pool.nil? - MUTEX.synchronize do - @pool ||= ConnectionPool.new(size: settings[:pool_size], timeout: settings[:timeout]) do - host = settings[:host] - port = settings[:port] - - InfluxDB::Client - .new(udp: { host: host, port: port }) + trans&.add_event(*args) + end + + # Returns the prefix to use for the name of a series. + def series_prefix + @series_prefix ||= Sidekiq.server? ? 'sidekiq_' : 'rails_' + end + + # Allow access from other metrics related middlewares + def current_transaction + Transaction.current + end + + # When enabled this should be set before being used as the usual pattern + # "@foo ||= bar" is _not_ thread-safe. + # rubocop:disable Gitlab/ModuleWithInstanceVariables + def pool + if influx_metrics_enabled? + if @pool.nil? + MUTEX.synchronize do + @pool ||= ConnectionPool.new(size: settings[:pool_size], timeout: settings[:timeout]) do + host = settings[:host] + port = settings[:port] + + InfluxDB::Client + .new(udp: { host: host, port: port }) + end end end - end - @pool + @pool + end end + # rubocop:enable Gitlab/ModuleWithInstanceVariables end - # rubocop:enable Gitlab/ModuleWithInstanceVariables end end end diff --git a/lib/gitlab/metrics/method_call.rb b/lib/gitlab/metrics/method_call.rb index c2f9db56824..b11520a79bb 100644 --- a/lib/gitlab/metrics/method_call.rb +++ b/lib/gitlab/metrics/method_call.rb @@ -4,26 +4,15 @@ module Gitlab module Metrics # Class for tracking timing information about method calls class MethodCall - @@measurement_enabled_cache = Concurrent::AtomicBoolean.new(false) - @@measurement_enabled_cache_expires_at = Concurrent::AtomicReference.new(Time.now.to_i) - MUTEX = Mutex.new + include Gitlab::Metrics::Methods BASE_LABELS = { module: nil, method: nil }.freeze attr_reader :real_time, :cpu_time, :call_count, :labels - def self.call_duration_histogram - return @call_duration_histogram if @call_duration_histogram - - MUTEX.synchronize do - @call_duration_histogram ||= Gitlab::Metrics.histogram( - :gitlab_method_call_duration_seconds, - 'Method calls real duration', - Transaction::BASE_LABELS.merge(BASE_LABELS), - [0.01, 0.05, 0.1, 0.5, 1]) - end - end - - def self.measurement_enabled_cache_expires_at - @@measurement_enabled_cache_expires_at + define_histogram :gitlab_method_call_duration_seconds do + docstring 'Method calls real duration' + base_labels Transaction::BASE_LABELS.merge(BASE_LABELS) + buckets [0.01, 0.05, 0.1, 0.5, 1] + with_feature :prometheus_metrics_method_instrumentation end # name - The full name of the method (including namespace) such as @@ -53,8 +42,8 @@ module Gitlab @cpu_time += cpu_time @call_count += 1 - if call_measurement_enabled? && above_threshold? - self.class.call_duration_histogram.observe(@transaction.labels.merge(labels), real_time) + if above_threshold? + self.class.gitlab_method_call_duration_seconds.observe(@transaction.labels.merge(labels), real_time) end retval @@ -78,17 +67,6 @@ module Gitlab def above_threshold? 
real_time.in_milliseconds >= Metrics.method_call_threshold end - - def call_measurement_enabled? - expires_at = @@measurement_enabled_cache_expires_at.value - if expires_at < Time.now.to_i - if @@measurement_enabled_cache_expires_at.compare_and_set(expires_at, 1.minute.from_now.to_i) - @@measurement_enabled_cache.value = Feature.get(:prometheus_metrics_method_instrumentation).enabled? - end - end - - @@measurement_enabled_cache.value - end end end end diff --git a/lib/gitlab/metrics/methods.rb b/lib/gitlab/metrics/methods.rb new file mode 100644 index 00000000000..cd7c1e507f7 --- /dev/null +++ b/lib/gitlab/metrics/methods.rb @@ -0,0 +1,129 @@ +# rubocop:disable Style/ClassVars + +module Gitlab + module Metrics + module Methods + extend ActiveSupport::Concern + + included do + @@_metric_provider_mutex ||= Mutex.new + @@_metrics_provider_cache = {} + end + + class_methods do + def reload_metric!(name) + @@_metrics_provider_cache.delete(name) + end + + private + + def define_metric(type, name, opts = {}, &block) + if respond_to?(name) + raise ArgumentError, "method #{name} already exists" + end + + define_singleton_method(name) do + # inlining fetch_metric method to avoid method call overhead when instrumenting hot spots + @@_metrics_provider_cache[name] || init_metric(type, name, opts, &block) + end + end + + def fetch_metric(type, name, opts = {}, &block) + @@_metrics_provider_cache[name] || init_metric(type, name, opts, &block) + end + + def init_metric(type, name, opts = {}, &block) + options = MetricOptions.new(opts) + options.evaluate(&block) + + if disabled_by_feature(options) + synchronized_cache_fill(name) { NullMetric.instance } + else + synchronized_cache_fill(name) { build_metric!(type, name, options) } + end + end + + def synchronized_cache_fill(key) + @@_metric_provider_mutex.synchronize do + @@_metrics_provider_cache[key] ||= yield + end + end + + def disabled_by_feature(options) + options.with_feature && !Feature.get(options.with_feature).enabled? 
+ end + + def build_metric!(type, name, options) + case type + when :gauge + Gitlab::Metrics.gauge(name, options.docstring, options.base_labels, options.multiprocess_mode) + when :counter + Gitlab::Metrics.counter(name, options.docstring, options.base_labels) + when :histogram + Gitlab::Metrics.histogram(name, options.docstring, options.base_labels, options.buckets) + when :summary + raise NotImplementedError, "summary metrics are not currently supported" + else + raise ArgumentError, "uknown metric type #{type}" + end + end + + # Fetch and/or initialize counter metric + # @param [Symbol] name + # @param [Hash] opts + def fetch_counter(name, opts = {}, &block) + fetch_metric(:counter, name, opts, &block) + end + + # Fetch and/or initialize gauge metric + # @param [Symbol] name + # @param [Hash] opts + def fetch_gauge(name, opts = {}, &block) + fetch_metric(:gauge, name, opts, &block) + end + + # Fetch and/or initialize histogram metric + # @param [Symbol] name + # @param [Hash] opts + def fetch_histogram(name, opts = {}, &block) + fetch_metric(:histogram, name, opts, &block) + end + + # Fetch and/or initialize summary metric + # @param [Symbol] name + # @param [Hash] opts + def fetch_summary(name, opts = {}, &block) + fetch_metric(:summary, name, opts, &block) + end + + # Define metric accessor method for a Counter + # @param [Symbol] name + # @param [Hash] opts + def define_counter(name, opts = {}, &block) + define_metric(:counter, name, opts, &block) + end + + # Define metric accessor method for a Gauge + # @param [Symbol] name + # @param [Hash] opts + def define_gauge(name, opts = {}, &block) + define_metric(:gauge, name, opts, &block) + end + + # Define metric accessor method for a Histogram + # @param [Symbol] name + # @param [Hash] opts + def define_histogram(name, opts = {}, &block) + define_metric(:histogram, name, opts, &block) + end + + # Define metric accessor method for a Summary + # @param [Symbol] name + # @param [Hash] opts + def define_summary(name, opts = {}, &block) + define_metric(:summary, name, opts, &block) + end + end + end + end +end diff --git a/lib/gitlab/metrics/methods/metric_options.rb b/lib/gitlab/metrics/methods/metric_options.rb new file mode 100644 index 00000000000..70e122d4e15 --- /dev/null +++ b/lib/gitlab/metrics/methods/metric_options.rb @@ -0,0 +1,61 @@ +module Gitlab + module Metrics + module Methods + class MetricOptions + SMALL_NETWORK_BUCKETS = [0.005, 0.01, 0.1, 1, 10].freeze + + def initialize(options = {}) + @multiprocess_mode = options[:multiprocess_mode] || :all + @buckets = options[:buckets] || SMALL_NETWORK_BUCKETS + @base_labels = options[:base_labels] || {} + @docstring = options[:docstring] + @with_feature = options[:with_feature] + end + + # Documentation describing metric in metrics endpoint '/-/metrics' + def docstring(docstring = nil) + @docstring = docstring unless docstring.nil? + + @docstring + end + + # Gauge aggregation mode for multiprocess metrics + # - :all (default) returns each gauge for every process + # - :livesum all process'es gauges summed up + # - :max maximum value of per process gauges + # - :min minimum value of per process gauges + def multiprocess_mode(mode = nil) + @multiprocess_mode = mode unless mode.nil? + + @multiprocess_mode + end + + # Measurement buckets for histograms + def buckets(buckets = nil) + @buckets = buckets unless buckets.nil? + + @buckets + end + + # Base labels are merged with per metric labels + def base_labels(base_labels = nil) + @base_labels = base_labels unless base_labels.nil? 
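
# A usage sketch of the new Gitlab::Metrics::Methods concern defined above:
# define_histogram adds a memoized class-level accessor whose docstring,
# base_labels, buckets and optional feature toggle come from the MetricOptions
# DSL. When the with_feature flag is off, the accessor hands back the shared
# NullMetric so observe calls become no-ops. Class and metric names below are
# invented for illustration.
class ExampleService
  include Gitlab::Metrics::Methods

  define_histogram :example_service_duration_seconds do
    docstring 'ExampleService execution timings'
    base_labels action: nil
    buckets [0.1, 0.5, 1, 5]
    with_feature :prometheus_metrics_example_service
  end

  def call
    start = Time.now.to_f
    # ... do the actual work here ...
  ensure
    self.class.example_service_duration_seconds
        .observe({ action: 'call' }, Time.now.to_f - start)
  end
end
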
+ + @base_labels + end + + # Use feature toggle to control whether certain metric is enabled/disabled + def with_feature(name = nil) + @with_feature = name unless name.nil? + + @with_feature + end + + def evaluate(&block) + instance_eval(&block) if block_given? + self + end + end + end + end +end diff --git a/lib/gitlab/metrics/null_metric.rb b/lib/gitlab/metrics/null_metric.rb index 3b5a2907195..aabada5c21a 100644 --- a/lib/gitlab/metrics/null_metric.rb +++ b/lib/gitlab/metrics/null_metric.rb @@ -2,6 +2,8 @@ module Gitlab module Metrics # Mocks ::Prometheus::Client::Metric and all derived metrics class NullMetric + include Singleton + def method_missing(name, *args, &block) nil end diff --git a/lib/gitlab/metrics/prometheus.rb b/lib/gitlab/metrics/prometheus.rb index b0b8e8436db..f07ea3560ff 100644 --- a/lib/gitlab/metrics/prometheus.rb +++ b/lib/gitlab/metrics/prometheus.rb @@ -3,73 +3,77 @@ require 'prometheus/client' module Gitlab module Metrics module Prometheus - include Gitlab::CurrentSettings - include Gitlab::Utils::StrongMemoize + extend ActiveSupport::Concern REGISTRY_MUTEX = Mutex.new PROVIDER_MUTEX = Mutex.new - def metrics_folder_present? - multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir + class_methods do + include Gitlab::Utils::StrongMemoize - multiprocess_files_dir && - ::Dir.exist?(multiprocess_files_dir) && - ::File.writable?(multiprocess_files_dir) - end + def metrics_folder_present? + multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir - def prometheus_metrics_enabled? - strong_memoize(:prometheus_metrics_enabled) do - prometheus_metrics_enabled_unmemoized + multiprocess_files_dir && + ::Dir.exist?(multiprocess_files_dir) && + ::File.writable?(multiprocess_files_dir) + end + + def prometheus_metrics_enabled? 
+ strong_memoize(:prometheus_metrics_enabled) do + prometheus_metrics_enabled_unmemoized + end end - end - def registry - strong_memoize(:registry) do - REGISTRY_MUTEX.synchronize do - strong_memoize(:registry) do - ::Prometheus::Client.registry + def registry + strong_memoize(:registry) do + REGISTRY_MUTEX.synchronize do + strong_memoize(:registry) do + ::Prometheus::Client.registry + end end end end - end - def counter(name, docstring, base_labels = {}) - safe_provide_metric(:counter, name, docstring, base_labels) - end + def counter(name, docstring, base_labels = {}) + safe_provide_metric(:counter, name, docstring, base_labels) + end - def summary(name, docstring, base_labels = {}) - safe_provide_metric(:summary, name, docstring, base_labels) - end + def summary(name, docstring, base_labels = {}) + safe_provide_metric(:summary, name, docstring, base_labels) + end - def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all) - safe_provide_metric(:gauge, name, docstring, base_labels, multiprocess_mode) - end + def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all) + safe_provide_metric(:gauge, name, docstring, base_labels, multiprocess_mode) + end - def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS) - safe_provide_metric(:histogram, name, docstring, base_labels, buckets) - end + def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS) + safe_provide_metric(:histogram, name, docstring, base_labels, buckets) + end - private + private - def safe_provide_metric(method, name, *args) - metric = provide_metric(name) - return metric if metric + def safe_provide_metric(method, name, *args) + metric = provide_metric(name) + return metric if metric - PROVIDER_MUTEX.synchronize do - provide_metric(name) || registry.method(method).call(name, *args) + PROVIDER_MUTEX.synchronize do + provide_metric(name) || registry.method(method).call(name, *args) + end end - end - def provide_metric(name) - if prometheus_metrics_enabled? - registry.get(name) - else - NullMetric.new + def provide_metric(name) + if prometheus_metrics_enabled? + registry.get(name) + else + NullMetric.instance + end end - end - def prometheus_metrics_enabled_unmemoized - metrics_folder_present? && current_application_settings[:prometheus_metrics_enabled] || false + def prometheus_metrics_enabled_unmemoized + metrics_folder_present? && + Gitlab::CurrentSettings.current_application_settings[:prometheus_metrics_enabled] || false + end end end end diff --git a/lib/gitlab/metrics/subscribers/action_view.rb b/lib/gitlab/metrics/subscribers/action_view.rb index 3da474fc1ec..b600e8a2a50 100644 --- a/lib/gitlab/metrics/subscribers/action_view.rb +++ b/lib/gitlab/metrics/subscribers/action_view.rb @@ -3,6 +3,14 @@ module Gitlab module Subscribers # Class for tracking the rendering timings of views. 
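
# NullMetric now mixes in Singleton, so the disabled-metrics path hands every
# caller the same shared no-op object instead of allocating a new one each time:
Gitlab::Metrics::NullMetric.instance.equal?(Gitlab::Metrics::NullMetric.instance) # => true
Gitlab::Metrics::NullMetric.instance.observe({}, 1.23)                            # => nil, method_missing swallows the call
# Gitlab::Metrics::NullMetric.new                                                 # no longer allowed; Singleton makes #new private
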
class ActionView < ActiveSupport::Subscriber + include Gitlab::Metrics::Methods + define_histogram :gitlab_view_rendering_duration_seconds do + docstring 'View rendering time' + base_labels Transaction::BASE_LABELS.merge({ path: nil }) + buckets [0.001, 0.01, 0.1, 1, 10.0] + with_feature :prometheus_metrics_view_instrumentation + end + attach_to :action_view SERIES = 'views'.freeze @@ -15,30 +23,18 @@ module Gitlab private - def metric_view_rendering_duration_seconds - @metric_view_rendering_duration_seconds ||= Gitlab::Metrics.histogram( - :gitlab_view_rendering_duration_seconds, - 'View rendering time', - Transaction::BASE_LABELS.merge({ path: nil }), - [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0] - ) - end - def track(event) values = values_for(event) tags = tags_for(event) - metric_view_rendering_duration_seconds.observe( - current_transaction.labels.merge(tags), - event.duration - ) + self.class.gitlab_view_rendering_duration_seconds.observe(current_transaction.labels.merge(tags), event.duration) current_transaction.increment(:view_duration, event.duration) current_transaction.add_metric(SERIES, values, tags) end def relative_path(path) - path.gsub(/^#{Rails.root.to_s}\/?/, '') + path.gsub(%r{^#{Rails.root.to_s}/?}, '') end def values_for(event) diff --git a/lib/gitlab/metrics/subscribers/active_record.rb b/lib/gitlab/metrics/subscribers/active_record.rb index ead1acb8d44..4b3e8d0a6a0 100644 --- a/lib/gitlab/metrics/subscribers/active_record.rb +++ b/lib/gitlab/metrics/subscribers/active_record.rb @@ -3,12 +3,13 @@ module Gitlab module Subscribers # Class for tracking the total query duration of a transaction. class ActiveRecord < ActiveSupport::Subscriber + include Gitlab::Metrics::Methods attach_to :active_record def sql(event) return unless current_transaction - metric_sql_duration_seconds.observe(current_transaction.labels, event.duration / 1000.0) + self.class.gitlab_sql_duration_seconds.observe(current_transaction.labels, event.duration / 1000.0) current_transaction.increment(:sql_duration, event.duration, false) current_transaction.increment(:sql_count, 1, false) @@ -16,17 +17,14 @@ module Gitlab private - def current_transaction - Transaction.current + define_histogram :gitlab_sql_duration_seconds do + docstring 'SQL time' + base_labels Transaction::BASE_LABELS + buckets [0.001, 0.01, 0.1, 1.0, 10.0] end - def metric_sql_duration_seconds - @metric_sql_duration_seconds ||= Gitlab::Metrics.histogram( - :gitlab_sql_duration_seconds, - 'SQL time', - Transaction::BASE_LABELS, - [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0] - ) + def current_transaction + Transaction.current end end end diff --git a/lib/gitlab/metrics/transaction.rb b/lib/gitlab/metrics/transaction.rb index e7975c023a9..45b9e14ba55 100644 --- a/lib/gitlab/metrics/transaction.rb +++ b/lib/gitlab/metrics/transaction.rb @@ -2,11 +2,12 @@ module Gitlab module Metrics # Class for storing metrics information of a single transaction. class Transaction + include Gitlab::Metrics::Methods + # base labels shared among all transactions BASE_LABELS = { controller: nil, action: nil }.freeze THREAD_KEY = :_gitlab_metrics_transaction - METRICS_MUTEX = Mutex.new # The series to store events (e.g. Git pushes) in. 
EVENT_SERIES = 'events'.freeze @@ -54,8 +55,8 @@ module Gitlab @memory_after = System.memory_usage @finished_at = System.monotonic_time - self.class.metric_transaction_duration_seconds.observe(labels, duration) - self.class.metric_transaction_allocated_memory_bytes.observe(labels, allocated_memory * 1024.0) + self.class.gitlab_transaction_duration_seconds.observe(labels, duration) + self.class.gitlab_transaction_allocated_memory_bytes.observe(labels, allocated_memory * 1024.0) Thread.current[THREAD_KEY] = nil end @@ -72,7 +73,7 @@ module Gitlab # event_name - The name of the event (e.g. "git_push"). # tags - A set of tags to attach to the event. def add_event(event_name, tags = {}) - self.class.metric_event_counter(event_name, tags).increment(tags.merge(labels)) + self.class.transaction_metric(event_name, :counter, prefix: 'event_', tags: tags).increment(tags.merge(labels)) @metrics << Metric.new(EVENT_SERIES, { count: 1 }, tags.merge(event: event_name), :event) end @@ -86,12 +87,12 @@ module Gitlab end def increment(name, value, use_prometheus = true) - self.class.metric_transaction_counter(name).increment(labels, value) if use_prometheus + self.class.transaction_metric(name, :counter).increment(labels, value) if use_prometheus @values[name] += value end def set(name, value, use_prometheus = true) - self.class.metric_transaction_gauge(name).set(labels, value) if use_prometheus + self.class.transaction_metric(name, :gauge).set(labels, value) if use_prometheus @values[name] = value end @@ -136,64 +137,28 @@ module Gitlab "#{labels[:controller]}##{labels[:action]}" if labels && !labels.empty? end - def self.metric_transaction_duration_seconds - return @metric_transaction_duration_seconds if @metric_transaction_duration_seconds - - METRICS_MUTEX.synchronize do - @metric_transaction_duration_seconds ||= Gitlab::Metrics.histogram( - :gitlab_transaction_duration_seconds, - 'Transaction duration', - BASE_LABELS, - [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0] - ) - end - end - - def self.metric_transaction_allocated_memory_bytes - return @metric_transaction_allocated_memory_bytes if @metric_transaction_allocated_memory_bytes - - METRICS_MUTEX.synchronize do - @metric_transaction_allocated_memory_bytes ||= Gitlab::Metrics.histogram( - :gitlab_transaction_allocated_memory_bytes, - 'Transaction allocated memory bytes', - BASE_LABELS, - [1000, 10000, 20000, 500000, 1000000, 2000000, 5000000, 10000000, 20000000, 100000000] - ) - end + define_histogram :gitlab_transaction_duration_seconds do + docstring 'Transaction duration' + base_labels BASE_LABELS + buckets [0.001, 0.01, 0.1, 1.0, 10.0] end - def self.metric_event_counter(event_name, tags) - return @metric_event_counters[event_name] if @metric_event_counters&.has_key?(event_name) - - METRICS_MUTEX.synchronize do - @metric_event_counters ||= {} - @metric_event_counters[event_name] ||= Gitlab::Metrics.counter( - "gitlab_transaction_event_#{event_name}_total".to_sym, - "Transaction event #{event_name} counter", - tags.merge(BASE_LABELS) - ) - end - end - - def self.metric_transaction_counter(name) - return @metric_transaction_counters[name] if @metric_transaction_counters&.has_key?(name) - - METRICS_MUTEX.synchronize do - @metric_transaction_counters ||= {} - @metric_transaction_counters[name] ||= Gitlab::Metrics.counter( - "gitlab_transaction_#{name}_total".to_sym, "Transaction #{name} counter", BASE_LABELS - ) - end + define_histogram :gitlab_transaction_allocated_memory_bytes do + docstring 'Transaction allocated memory bytes' + 
base_labels BASE_LABELS + buckets [100, 1000, 10000, 100000, 1000000, 10000000] + with_feature :prometheus_metrics_transaction_allocated_memory end - def self.metric_transaction_gauge(name) - return @metric_transaction_gauges[name] if @metric_transaction_gauges&.has_key?(name) + def self.transaction_metric(name, type, prefix: nil, tags: {}) + metric_name = "gitlab_transaction_#{prefix}#{name}_total".to_sym + fetch_metric(type, metric_name) do + docstring "Transaction #{prefix}#{name} #{type}" + base_labels tags.merge(BASE_LABELS) - METRICS_MUTEX.synchronize do - @metric_transaction_gauges ||= {} - @metric_transaction_gauges[name] ||= Gitlab::Metrics.gauge( - "gitlab_transaction_#{name}".to_sym, "Transaction gauge #{name}", BASE_LABELS, :livesum - ) + if type == :gauge + multiprocess_mode :livesum + end end end end diff --git a/lib/gitlab/middleware/go.rb b/lib/gitlab/middleware/go.rb index c6a56277922..afbc2600634 100644 --- a/lib/gitlab/middleware/go.rb +++ b/lib/gitlab/middleware/go.rb @@ -56,12 +56,12 @@ module Gitlab end def strip_url(url) - url.gsub(/\Ahttps?:\/\//, '') + url.gsub(%r{\Ahttps?://}, '') end def project_path(request) path_info = request.env["PATH_INFO"] - path_info.sub!(/^\//, '') + path_info.sub!(%r{^/}, '') project_path_match = "#{path_info}/".match(PROJECT_PATH_REGEX) return unless project_path_match diff --git a/lib/gitlab/middleware/static.rb b/lib/gitlab/middleware/static.rb index 85ffa8aca68..aa1e9dc0fdb 100644 --- a/lib/gitlab/middleware/static.rb +++ b/lib/gitlab/middleware/static.rb @@ -1,7 +1,7 @@ module Gitlab module Middleware class Static < ActionDispatch::Static - UPLOADS_REGEX = /\A\/uploads(\/|\z)/.freeze + UPLOADS_REGEX = %r{\A/uploads(/|\z)}.freeze def call(env) return @app.call(env) if env['PATH_INFO'] =~ UPLOADS_REGEX diff --git a/lib/gitlab/o_auth/user.rb b/lib/gitlab/o_auth/user.rb index fff9360ea27..e40a001d20c 100644 --- a/lib/gitlab/o_auth/user.rb +++ b/lib/gitlab/o_auth/user.rb @@ -55,7 +55,7 @@ module Gitlab user ||= find_or_build_ldap_user if auto_link_ldap_user? user ||= build_new_user if signup_enabled? - user.external = true if external_provider? && user + user.external = true if external_provider? && user&.new_record? user end diff --git a/lib/gitlab/popen.rb b/lib/gitlab/popen.rb index 4bc5cda8cb5..b9832a724c4 100644 --- a/lib/gitlab/popen.rb +++ b/lib/gitlab/popen.rb @@ -5,7 +5,17 @@ module Gitlab module Popen extend self - def popen(cmd, path = nil, vars = {}) + Result = Struct.new(:cmd, :stdout, :stderr, :status, :duration) + + # Returns [stdout + stderr, status] + def popen(cmd, path = nil, vars = {}, &block) + result = popen_with_detail(cmd, path, vars, &block) + + [result.stdout << result.stderr, result.status&.exitstatus] + end + + # Returns Result + def popen_with_detail(cmd, path = nil, vars = {}) unless cmd.is_a?(Array) raise "System commands must be given as an array of strings" end @@ -18,18 +28,21 @@ module Gitlab FileUtils.mkdir_p(path) end - cmd_output = "" - cmd_status = 0 + cmd_stdout = '' + cmd_stderr = '' + cmd_status = nil + start = Time.now + Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr| yield(stdin) if block_given? 
stdin.close - cmd_output << stdout.read - cmd_output << stderr.read - cmd_status = wait_thr.value.exitstatus + cmd_stdout = stdout.read + cmd_stderr = stderr.read + cmd_status = wait_thr.value end - [cmd_output, cmd_status] + Result.new(cmd, cmd_stdout, cmd_stderr, cmd_status, Time.now - start) end end end diff --git a/lib/gitlab/popen/runner.rb b/lib/gitlab/popen/runner.rb new file mode 100644 index 00000000000..f44035a48bb --- /dev/null +++ b/lib/gitlab/popen/runner.rb @@ -0,0 +1,46 @@ +module Gitlab + module Popen + class Runner + attr_reader :results + + def initialize + @results = [] + end + + def run(commands, &block) + commands.each do |cmd| + # yield doesn't support blocks, so we need to use a block variable + block.call(cmd) do # rubocop:disable Performance/RedundantBlockCall + cmd_result = Gitlab::Popen.popen_with_detail(cmd) + + results << cmd_result + + cmd_result + end + end + end + + def all_success_and_clean? + all_success? && all_stderr_empty? + end + + def all_success? + results.all? { |result| result.status.success? } + end + + def all_stderr_empty? + results.all? { |result| result.stderr.empty? } + end + + def failed_results + results.reject { |result| result.status.success? } + end + + def warned_results + results.select do |result| + result.status.success? && !result.stderr.empty? + end + end + end + end +end diff --git a/lib/gitlab/quick_actions/spend_time_and_date_separator.rb b/lib/gitlab/quick_actions/spend_time_and_date_separator.rb index 3f52402b31f..7328c517a30 100644 --- a/lib/gitlab/quick_actions/spend_time_and_date_separator.rb +++ b/lib/gitlab/quick_actions/spend_time_and_date_separator.rb @@ -9,7 +9,7 @@ module Gitlab # if date doesn't present return time with current date # in other cases return nil class SpendTimeAndDateSeparator - DATE_REGEX = /(\d{2,4}[\/\-.]\d{1,2}[\/\-.]\d{1,2})/ + DATE_REGEX = %r{(\d{2,4}[/\-.]\d{1,2}[/\-.]\d{1,2})} def initialize(spend_command_arg) @spend_arg = spend_command_arg diff --git a/lib/gitlab/redis/cache.rb b/lib/gitlab/redis/cache.rb index 9bf019b72e6..a991933e910 100644 --- a/lib/gitlab/redis/cache.rb +++ b/lib/gitlab/redis/cache.rb @@ -1,5 +1,5 @@ # please require all dependencies below: -require_relative 'wrapper' unless defined?(::Gitlab::Redis::Wrapper) +require_relative 'wrapper' unless defined?(::Rails) && ::Rails.root.present? 
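
# Usage sketch of the reworked Popen API above (commands are illustrative):
result = Gitlab::Popen.popen_with_detail(%w[git --version])
result.stdout            # captured standard output
result.stderr            # captured standard error, now kept separate
result.status.exitstatus # => 0 on success (status is a Process::Status)
result.duration          # wall-clock seconds as a Float

# The historical interface is preserved on top of it:
output, exit_code = Gitlab::Popen.popen(%w[git --version])

# Gitlab::Popen::Runner batches several commands; the caller's block must call
# the inner block it receives for each command to actually run:
runner = Gitlab::Popen::Runner.new
runner.run([%w[git --version], %w[git bogus-subcommand]]) { |cmd, &run| run.call }
runner.all_success?              # => false, the second command exits non-zero
runner.failed_results.map(&:cmd) # => [["git", "bogus-subcommand"]]
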
module Gitlab module Redis diff --git a/lib/gitlab/repo_path.rb b/lib/gitlab/repo_path.rb index 3591fa9145e..79265cf952d 100644 --- a/lib/gitlab/repo_path.rb +++ b/lib/gitlab/repo_path.rb @@ -30,7 +30,7 @@ module Gitlab raise NotFoundError.new("No known storage path matches #{repo_path.inspect}") end - result.sub(/\A\/*/, '') + result.sub(%r{\A/*}, '') end def self.find_project(project_path) diff --git a/lib/gitlab/seeder.rb b/lib/gitlab/seeder.rb index 94a481a0f2e..98f005cb61b 100644 --- a/lib/gitlab/seeder.rb +++ b/lib/gitlab/seeder.rb @@ -5,9 +5,15 @@ module DeliverNever end end +module MuteNotifications + def new_note(note) + end +end + module Gitlab class Seeder def self.quiet + mute_notifications mute_mailer SeedFu.quiet = true @@ -18,6 +24,10 @@ module Gitlab puts "\nOK".color(:green) end + def self.mute_notifications + NotificationService.prepend(MuteNotifications) + end + def self.mute_mailer ActionMailer::MessageDelivery.prepend(DeliverNever) end diff --git a/lib/gitlab/setup_helper.rb b/lib/gitlab/setup_helper.rb index d01213bb6e0..e90a90508a2 100644 --- a/lib/gitlab/setup_helper.rb +++ b/lib/gitlab/setup_helper.rb @@ -31,7 +31,7 @@ module Gitlab storages << { name: 'test_second_storage', path: Rails.root.join('tmp', 'tests', 'second_storage').to_s } end - config = { socket_path: address.sub(%r{\Aunix:}, ''), storage: storages } + config = { socket_path: address.sub(/\Aunix:/, ''), storage: storages } config[:auth] = { token: 'secret' } if Rails.env.test? config[:'gitaly-ruby'] = { dir: File.join(gitaly_dir, 'ruby') } if gitaly_ruby config[:'gitlab-shell'] = { dir: Gitlab.config.gitlab_shell.path } diff --git a/lib/gitlab/sherlock/file_sample.rb b/lib/gitlab/sherlock/file_sample.rb index 8a3e1a5e5bf..89072b01f2e 100644 --- a/lib/gitlab/sherlock/file_sample.rb +++ b/lib/gitlab/sherlock/file_sample.rb @@ -16,7 +16,7 @@ module Gitlab end def relative_path - @relative_path ||= @file.gsub(/^#{Rails.root.to_s}\/?/, '') + @relative_path ||= @file.gsub(%r{^#{Rails.root.to_s}/?}, '') end def to_param diff --git a/lib/gitlab/sherlock/middleware.rb b/lib/gitlab/sherlock/middleware.rb index 687332fc5fc..4c88e33699a 100644 --- a/lib/gitlab/sherlock/middleware.rb +++ b/lib/gitlab/sherlock/middleware.rb @@ -2,7 +2,7 @@ module Gitlab module Sherlock # Rack middleware used for tracking request metrics. class Middleware - CONTENT_TYPES = /text\/html|application\/json/i + CONTENT_TYPES = %r{text/html|application/json}i IGNORE_PATHS = %r{^/sherlock} diff --git a/lib/gitlab/sherlock/query.rb b/lib/gitlab/sherlock/query.rb index 948bf5e6528..02ddc3f47eb 100644 --- a/lib/gitlab/sherlock/query.rb +++ b/lib/gitlab/sherlock/query.rb @@ -4,7 +4,7 @@ module Gitlab attr_reader :id, :query, :started_at, :finished_at, :backtrace # SQL identifiers that should be prefixed with newlines. - PREFIX_NEWLINE = / + PREFIX_NEWLINE = %r{ \s+(FROM |(LEFT|RIGHT)?INNER\s+JOIN |(LEFT|RIGHT)?OUTER\s+JOIN @@ -13,7 +13,7 @@ module Gitlab |GROUP\s+BY |ORDER\s+BY |LIMIT - |OFFSET)\s+/ix # Vim indent breaks when this is on a newline :< + |OFFSET)\s+}ix # Vim indent breaks when this is on a newline :< # Creates a new Query using a String and a separate Array of bindings. 
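The `MuteNotifications` prepend added to `lib/gitlab/seeder.rb` above relies on `Module#prepend` placing the module ahead of the class in the ancestor chain, so its empty `new_note` shadows `NotificationService#new_note` for the duration of the seed run, in the same way `DeliverNever` already silences mailers. A self-contained illustration of the mechanism (the class and module names below are illustrative, not GitLab's):

```ruby
class Notifier
  def new_note(note)
    "delivering #{note}" # the real work
  end
end

module MuteForSeeding
  def new_note(note)
    # no-op: a prepended module takes precedence, so the original method is never reached
  end
end

Notifier.prepend(MuteForSeeding)
Notifier.new.new_note('hello') # => nil
```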
# diff --git a/lib/tasks/gitlab/task_helpers.rb b/lib/gitlab/task_helpers.rb index c1182af1014..34bee6fecbe 100644 --- a/lib/tasks/gitlab/task_helpers.rb +++ b/lib/gitlab/task_helpers.rb @@ -1,6 +1,7 @@ require 'rainbow/ext/string' require 'gitlab/utils/strong_memoize' +# rubocop:disable Rails/Output module Gitlab TaskFailedError = Class.new(StandardError) TaskAbortedByUserError = Class.new(StandardError) @@ -96,11 +97,9 @@ module Gitlab end def gid_for(group_name) - begin - Etc.getgrnam(group_name).gid - rescue ArgumentError # no group - "group #{group_name} doesn't exist" - end + Etc.getgrnam(group_name).gid + rescue ArgumentError # no group + "group #{group_name} doesn't exist" end def gitlab_user diff --git a/lib/gitlab/upgrader.rb b/lib/gitlab/upgrader.rb index 3b64cb32afa..024be6aca44 100644 --- a/lib/gitlab/upgrader.rb +++ b/lib/gitlab/upgrader.rb @@ -1,6 +1,3 @@ -require_relative "popen" -require_relative "version_info" - module Gitlab class Upgrader def execute @@ -52,7 +49,7 @@ module Gitlab def fetch_git_tags remote_tags, _ = Gitlab::Popen.popen(%W(#{Gitlab.config.git.bin_path} ls-remote --tags https://gitlab.com/gitlab-org/gitlab-ce.git)) - remote_tags.split("\n").grep(/tags\/v#{current_version.major}/) + remote_tags.split("\n").grep(%r{tags/v#{current_version.major}}) end def update_commands diff --git a/lib/gitlab/uploads_transfer.rb b/lib/gitlab/uploads_transfer.rb index b5f41240529..7d7400bdabf 100644 --- a/lib/gitlab/uploads_transfer.rb +++ b/lib/gitlab/uploads_transfer.rb @@ -1,7 +1,7 @@ module Gitlab class UploadsTransfer < ProjectTransfer def root_dir - File.join(CarrierWave.root, FileUploader.base_dir) + FileUploader.root end end end diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb index 633da44b22d..b3f8b0d174d 100644 --- a/lib/gitlab/workhorse.rb +++ b/lib/gitlab/workhorse.rb @@ -55,14 +55,14 @@ module Gitlab def lfs_upload_ok(oid, size) { - StoreLFSPath: "#{Gitlab.config.lfs.storage_path}/tmp/upload", + StoreLFSPath: LfsObjectUploader.workhorse_upload_path, LfsOid: oid, LfsSize: size } end def artifact_upload_ok - { TempPath: JobArtifactUploader.artifacts_upload_path } + { TempPath: JobArtifactUploader.workhorse_upload_path } end def send_git_blob(repository, blob) @@ -147,8 +147,11 @@ module Gitlab end def send_artifacts_entry(build, entry) + file = build.artifacts_file + archive = file.file_storage? ? file.path : file.url + params = { - 'Archive' => build.artifacts_file.path, + 'Archive' => archive, 'Entry' => Base64.encode64(entry.to_s) } diff --git a/lib/support/nginx/gitlab b/lib/support/nginx/gitlab index 54f51d9d633..0e27a28ea6e 100644 --- a/lib/support/nginx/gitlab +++ b/lib/support/nginx/gitlab @@ -17,6 +17,8 @@ ## See installation.md#using-https for additional HTTPS configuration details. upstream gitlab-workhorse { + # Gitlab socket file, + # for Omnibus this would be: unix:/var/opt/gitlab/gitlab-workhorse/socket server unix:/home/git/gitlab/tmp/sockets/gitlab-workhorse.socket fail_timeout=0; } @@ -110,6 +112,8 @@ server { error_page 502 /502.html; error_page 503 /503.html; location ~ ^/(404|422|500|502|503)\.html$ { + # Location to the Gitlab's public directory, + # for Omnibus this would be: /opt/gitlab/embedded/service/gitlab-rails/public. 
root /home/git/gitlab/public; internal; } diff --git a/lib/support/nginx/gitlab-ssl b/lib/support/nginx/gitlab-ssl index ed8131ef24f..8218d68f9ba 100644 --- a/lib/support/nginx/gitlab-ssl +++ b/lib/support/nginx/gitlab-ssl @@ -21,6 +21,8 @@ ## See installation.md#using-https for additional HTTPS configuration details. upstream gitlab-workhorse { + # Gitlab socket file, + # for Omnibus this would be: unix:/var/opt/gitlab/gitlab-workhorse/socket server unix:/home/git/gitlab/tmp/sockets/gitlab-workhorse.socket fail_timeout=0; } @@ -160,6 +162,8 @@ server { error_page 502 /502.html; error_page 503 /503.html; location ~ ^/(404|422|500|502|503)\.html$ { + # Location to the Gitlab's public directory, + # for Omnibus this would be: /opt/gitlab/embedded/service/gitlab-rails/public root /home/git/gitlab/public; internal; } diff --git a/lib/system_check/app/git_version_check.rb b/lib/system_check/app/git_version_check.rb index 6ee8c8874ec..44ec888c197 100644 --- a/lib/system_check/app/git_version_check.rb +++ b/lib/system_check/app/git_version_check.rb @@ -5,7 +5,7 @@ module SystemCheck set_check_pass -> { "yes (#{self.current_version})" } def self.required_version - @required_version ||= Gitlab::VersionInfo.new(2, 7, 3) + @required_version ||= Gitlab::VersionInfo.new(2, 9, 5) end def self.current_version diff --git a/lib/system_check/helpers.rb b/lib/system_check/helpers.rb index c42ae4fe4c4..914ed794601 100644 --- a/lib/system_check/helpers.rb +++ b/lib/system_check/helpers.rb @@ -1,5 +1,3 @@ -require 'tasks/gitlab/task_helpers' - module SystemCheck module Helpers include ::Gitlab::TaskHelpers diff --git a/lib/tasks/flay.rake b/lib/tasks/flay.rake index 7ad2b2e4d39..b1e012e70c5 100644 --- a/lib/tasks/flay.rake +++ b/lib/tasks/flay.rake @@ -1,6 +1,6 @@ desc 'Code duplication analyze via flay' task :flay do - output = `bundle exec flay --mass 35 app/ lib/gitlab/` + output = `bundle exec flay --mass 35 app/ lib/gitlab/ 2> #{File::NULL}` if output.include? "Similar code found" puts output diff --git a/lib/tasks/gitlab/backup.rake b/lib/tasks/gitlab/backup.rake index 2383bcf954b..24e37f6c6cc 100644 --- a/lib/tasks/gitlab/backup.rake +++ b/lib/tasks/gitlab/backup.rake @@ -4,7 +4,7 @@ namespace :gitlab do namespace :backup do # Create backup of GitLab system desc "GitLab | Create a backup of the GitLab system" - task create: :environment do + task create: :gitlab_environment do warn_user_is_not_gitlab configure_cron_mode @@ -25,7 +25,7 @@ namespace :gitlab do # Restore backup of GitLab system desc 'GitLab | Restore a previously created backup' - task restore: :environment do + task restore: :gitlab_environment do warn_user_is_not_gitlab configure_cron_mode @@ -73,7 +73,7 @@ namespace :gitlab do end namespace :repo do - task create: :environment do + task create: :gitlab_environment do $progress.puts "Dumping repositories ...".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("repositories") @@ -84,7 +84,7 @@ namespace :gitlab do end end - task restore: :environment do + task restore: :gitlab_environment do $progress.puts "Restoring repositories ...".color(:blue) Backup::Repository.new.restore $progress.puts "done".color(:green) @@ -92,7 +92,7 @@ namespace :gitlab do end namespace :db do - task create: :environment do + task create: :gitlab_environment do $progress.puts "Dumping database ... 
".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("db") @@ -103,7 +103,7 @@ namespace :gitlab do end end - task restore: :environment do + task restore: :gitlab_environment do $progress.puts "Restoring database ... ".color(:blue) Backup::Database.new.restore $progress.puts "done".color(:green) @@ -111,7 +111,7 @@ namespace :gitlab do end namespace :builds do - task create: :environment do + task create: :gitlab_environment do $progress.puts "Dumping builds ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("builds") @@ -122,7 +122,7 @@ namespace :gitlab do end end - task restore: :environment do + task restore: :gitlab_environment do $progress.puts "Restoring builds ... ".color(:blue) Backup::Builds.new.restore $progress.puts "done".color(:green) @@ -130,7 +130,7 @@ namespace :gitlab do end namespace :uploads do - task create: :environment do + task create: :gitlab_environment do $progress.puts "Dumping uploads ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("uploads") @@ -141,7 +141,7 @@ namespace :gitlab do end end - task restore: :environment do + task restore: :gitlab_environment do $progress.puts "Restoring uploads ... ".color(:blue) Backup::Uploads.new.restore $progress.puts "done".color(:green) @@ -149,7 +149,7 @@ namespace :gitlab do end namespace :artifacts do - task create: :environment do + task create: :gitlab_environment do $progress.puts "Dumping artifacts ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("artifacts") @@ -160,7 +160,7 @@ namespace :gitlab do end end - task restore: :environment do + task restore: :gitlab_environment do $progress.puts "Restoring artifacts ... ".color(:blue) Backup::Artifacts.new.restore $progress.puts "done".color(:green) @@ -168,7 +168,7 @@ namespace :gitlab do end namespace :pages do - task create: :environment do + task create: :gitlab_environment do $progress.puts "Dumping pages ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("pages") @@ -179,7 +179,7 @@ namespace :gitlab do end end - task restore: :environment do + task restore: :gitlab_environment do $progress.puts "Restoring pages ... ".color(:blue) Backup::Pages.new.restore $progress.puts "done".color(:green) @@ -187,7 +187,7 @@ namespace :gitlab do end namespace :lfs do - task create: :environment do + task create: :gitlab_environment do $progress.puts "Dumping lfs objects ... ".color(:blue) if ENV["SKIP"] && ENV["SKIP"].include?("lfs") @@ -198,7 +198,7 @@ namespace :gitlab do end end - task restore: :environment do + task restore: :gitlab_environment do $progress.puts "Restoring lfs objects ... ".color(:blue) Backup::Lfs.new.restore $progress.puts "done".color(:green) @@ -206,7 +206,7 @@ namespace :gitlab do end namespace :registry do - task create: :environment do + task create: :gitlab_environment do $progress.puts "Dumping container registry images ... ".color(:blue) if Gitlab.config.registry.enabled @@ -221,7 +221,7 @@ namespace :gitlab do end end - task restore: :environment do + task restore: :gitlab_environment do $progress.puts "Restoring container registry images ... 
".color(:blue) if Gitlab.config.registry.enabled diff --git a/lib/tasks/gitlab/check.rake b/lib/tasks/gitlab/check.rake index a584eb97cf5..e05a3aad824 100644 --- a/lib/tasks/gitlab/check.rake +++ b/lib/tasks/gitlab/check.rake @@ -1,7 +1,3 @@ -# Temporary hack, until we migrate all checks to SystemCheck format -require 'system_check' -require 'system_check/helpers' - namespace :gitlab do desc 'GitLab | Check the configuration of GitLab and its environment' task check: %w{gitlab:gitlab_shell:check @@ -12,7 +8,7 @@ namespace :gitlab do namespace :app do desc 'GitLab | Check the configuration of the GitLab Rails app' - task check: :environment do + task check: :gitlab_environment do warn_user_is_not_gitlab checks = [ @@ -43,7 +39,7 @@ namespace :gitlab do namespace :gitlab_shell do desc "GitLab | Check the configuration of GitLab Shell" - task check: :environment do + task check: :gitlab_environment do warn_user_is_not_gitlab start_checking "GitLab Shell" @@ -251,7 +247,7 @@ namespace :gitlab do namespace :sidekiq do desc "GitLab | Check the configuration of Sidekiq" - task check: :environment do + task check: :gitlab_environment do warn_user_is_not_gitlab start_checking "Sidekiq" @@ -310,7 +306,7 @@ namespace :gitlab do namespace :incoming_email do desc "GitLab | Check the configuration of Reply by email" - task check: :environment do + task check: :gitlab_environment do warn_user_is_not_gitlab if Gitlab.config.incoming_email.enabled @@ -333,7 +329,7 @@ namespace :gitlab do end namespace :ldap do - task :check, [:limit] => :environment do |_, args| + task :check, [:limit] => :gitlab_environment do |_, args| # Only show up to 100 results because LDAP directories can be very big. # This setting only affects the `rake gitlab:check` script. args.with_defaults(limit: 100) @@ -389,7 +385,7 @@ namespace :gitlab do namespace :repo do desc "GitLab | Check the integrity of the repositories managed by GitLab" - task check: :environment do + task check: :gitlab_environment do puts "This task is deprecated. Please use gitlab:git:fsck instead".color(:red) Rake::Task["gitlab:git:fsck"].execute end @@ -397,7 +393,7 @@ namespace :gitlab do namespace :orphans do desc 'Gitlab | Check for orphaned namespaces and repositories' - task check: :environment do + task check: :gitlab_environment do warn_user_is_not_gitlab checks = [ SystemCheck::Orphans::NamespaceCheck, @@ -408,7 +404,7 @@ namespace :gitlab do end desc 'GitLab | Check for orphaned namespaces in the repositories path' - task check_namespaces: :environment do + task check_namespaces: :gitlab_environment do warn_user_is_not_gitlab checks = [SystemCheck::Orphans::NamespaceCheck] @@ -416,7 +412,7 @@ namespace :gitlab do end desc 'GitLab | Check for orphaned repositories in the repositories path' - task check_repositories: :environment do + task check_repositories: :gitlab_environment do warn_user_is_not_gitlab checks = [SystemCheck::Orphans::RepositoryCheck] @@ -426,7 +422,7 @@ namespace :gitlab do namespace :user do desc "GitLab | Check the integrity of a specific user's repositories" - task :check_repos, [:username] => :environment do |t, args| + task :check_repos, [:username] => :gitlab_environment do |t, args| username = args[:username] || prompt("Check repository integrity for username? 
".color(:blue)) user = User.find_by(username: username) if user diff --git a/lib/tasks/gitlab/cleanup.rake b/lib/tasks/gitlab/cleanup.rake index ab601b0d66b..5a53eac0897 100644 --- a/lib/tasks/gitlab/cleanup.rake +++ b/lib/tasks/gitlab/cleanup.rake @@ -5,7 +5,7 @@ namespace :gitlab do HASHED_REPOSITORY_NAME = '@hashed'.freeze desc "GitLab | Cleanup | Clean namespaces" - task dirs: :environment do + task dirs: :gitlab_environment do warn_user_is_not_gitlab remove_flag = ENV['REMOVE'] @@ -49,7 +49,7 @@ namespace :gitlab do end desc "GitLab | Cleanup | Clean repositories" - task repos: :environment do + task repos: :gitlab_environment do warn_user_is_not_gitlab move_suffix = "+orphaned+#{Time.now.to_i}" @@ -78,7 +78,7 @@ namespace :gitlab do end desc "GitLab | Cleanup | Block users that have been removed in LDAP" - task block_removed_ldap_users: :environment do + task block_removed_ldap_users: :gitlab_environment do warn_user_is_not_gitlab block_flag = ENV['BLOCK'] @@ -109,7 +109,7 @@ namespace :gitlab do # released. So likely this should only be run once on gitlab.com # Faulty refs are moved so they are kept around, else some features break. desc 'GitLab | Cleanup | Remove faulty deployment refs' - task move_faulty_deployment_refs: :environment do + task move_faulty_deployment_refs: :gitlab_environment do projects = Project.where(id: Deployment.select(:project_id).distinct) projects.find_each do |project| diff --git a/lib/tasks/gitlab/git.rake b/lib/tasks/gitlab/git.rake index 3f5dd2ae3b3..cb4f7e5c8a8 100644 --- a/lib/tasks/gitlab/git.rake +++ b/lib/tasks/gitlab/git.rake @@ -1,7 +1,7 @@ namespace :gitlab do namespace :git do desc "GitLab | Git | Repack" - task repack: :environment do + task repack: :gitlab_environment do failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} repack -a --quiet), "Repacking repo") if failures.empty? puts "Done".color(:green) @@ -11,7 +11,7 @@ namespace :gitlab do end desc "GitLab | Git | Run garbage collection on all repos" - task gc: :environment do + task gc: :gitlab_environment do failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} gc --auto --quiet), "Garbage Collecting") if failures.empty? puts "Done".color(:green) @@ -21,7 +21,7 @@ namespace :gitlab do end desc "GitLab | Git | Prune all repos" - task prune: :environment do + task prune: :gitlab_environment do failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} prune), "Git Prune") if failures.empty? puts "Done".color(:green) @@ -31,7 +31,7 @@ namespace :gitlab do end desc 'GitLab | Git | Check all repos integrity' - task fsck: :environment do + task fsck: :gitlab_environment do failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} fsck --name-objects --no-progress), "Checking integrity") do |repo| check_config_lock(repo) check_ref_locks(repo) diff --git a/lib/tasks/gitlab/gitaly.rake b/lib/tasks/gitlab/gitaly.rake index a2e68c0471b..107ff1d8aeb 100644 --- a/lib/tasks/gitlab/gitaly.rake +++ b/lib/tasks/gitlab/gitaly.rake @@ -1,7 +1,7 @@ namespace :gitlab do namespace :gitaly do desc "GitLab | Install or upgrade gitaly" - task :install, [:dir, :repo] => :environment do |t, args| + task :install, [:dir, :repo] => :gitlab_environment do |t, args| require 'toml' warn_user_is_not_gitlab @@ -21,7 +21,11 @@ namespace :gitlab do _, status = Gitlab::Popen.popen(%w[which gmake]) command << (status.zero? ? 'gmake' : 'make') - command << 'BUNDLE_FLAGS=--no-deployment' if Rails.env.test? + if Rails.env.test? 
+ command.push( + 'BUNDLE_FLAGS=--no-deployment', + "BUNDLE_PATH=#{Bundler.bundle_path}") + end Gitlab::SetupHelper.create_gitaly_configuration(args.dir) Dir.chdir(args.dir) do diff --git a/lib/tasks/gitlab/helpers.rake b/lib/tasks/gitlab/helpers.rake index b0a24790c4a..14d1125a03d 100644 --- a/lib/tasks/gitlab/helpers.rake +++ b/lib/tasks/gitlab/helpers.rake @@ -1,8 +1,6 @@ -require 'tasks/gitlab/task_helpers' - # Prevent StateMachine warnings from outputting during a cron task StateMachines::Machine.ignore_method_conflicts = true if ENV['CRON'] -namespace :gitlab do +task gitlab_environment: :environment do extend SystemCheck::Helpers end diff --git a/lib/tasks/gitlab/info.rake b/lib/tasks/gitlab/info.rake index e9fb6a008b0..45e9a1a1c72 100644 --- a/lib/tasks/gitlab/info.rake +++ b/lib/tasks/gitlab/info.rake @@ -1,7 +1,7 @@ namespace :gitlab do namespace :env do desc "GitLab | Show information about GitLab and its environment" - task info: :environment do + task info: :gitlab_environment do # check if there is an RVM environment rvm_version = run_and_match(%w(rvm --version), /[\d\.]+/).try(:to_s) # check Ruby version diff --git a/lib/tasks/gitlab/setup.rake b/lib/tasks/gitlab/setup.rake index 05fcb8e3da5..1d903c81358 100644 --- a/lib/tasks/gitlab/setup.rake +++ b/lib/tasks/gitlab/setup.rake @@ -1,6 +1,6 @@ namespace :gitlab do desc "GitLab | Setup production application" - task setup: :environment do + task setup: :gitlab_environment do setup_db end diff --git a/lib/tasks/gitlab/shell.rake b/lib/tasks/gitlab/shell.rake index 12ae4199b69..844664b12d4 100644 --- a/lib/tasks/gitlab/shell.rake +++ b/lib/tasks/gitlab/shell.rake @@ -1,7 +1,7 @@ namespace :gitlab do namespace :shell do desc "GitLab | Install or upgrade gitlab-shell" - task :install, [:repo] => :environment do |t, args| + task :install, [:repo] => :gitlab_environment do |t, args| warn_user_is_not_gitlab default_version = Gitlab::Shell.version_required @@ -58,12 +58,12 @@ namespace :gitlab do end desc "GitLab | Setup gitlab-shell" - task setup: :environment do + task setup: :gitlab_environment do setup end desc "GitLab | Build missing projects" - task build_missing_projects: :environment do + task build_missing_projects: :gitlab_environment do Project.find_each(batch_size: 1000) do |project| path_to_repo = project.repository.path_to_repo if File.exist?(path_to_repo) @@ -80,7 +80,7 @@ namespace :gitlab do end desc 'Create or repair repository hooks symlink' - task create_hooks: :environment do + task create_hooks: :gitlab_environment do warn_user_is_not_gitlab puts 'Creating/Repairing hooks symlinks for all repositories' diff --git a/lib/tasks/gitlab/workhorse.rake b/lib/tasks/gitlab/workhorse.rake index 308ffb0e284..b917a293095 100644 --- a/lib/tasks/gitlab/workhorse.rake +++ b/lib/tasks/gitlab/workhorse.rake @@ -1,7 +1,7 @@ namespace :gitlab do namespace :workhorse do desc "GitLab | Install or upgrade gitlab-workhorse" - task :install, [:dir, :repo] => :environment do |t, args| + task :install, [:dir, :repo] => :gitlab_environment do |t, args| warn_user_is_not_gitlab unless args.dir.present? diff --git a/lib/tasks/haml-lint.rake b/lib/tasks/haml-lint.rake index ad2d034b0b4..5c0cc4990fc 100644 --- a/lib/tasks/haml-lint.rake +++ b/lib/tasks/haml-lint.rake @@ -2,5 +2,14 @@ unless Rails.env.production? 
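All of the `task foo: :environment` → `task foo: :gitlab_environment` switches in the rake files above hang off the new definition in `lib/tasks/gitlab/helpers.rake`: `gitlab_environment` depends on `:environment` and extends the task context with `SystemCheck::Helpers`, so helpers such as `warn_user_is_not_gitlab` are available without requiring `tasks/gitlab/task_helpers` in every file. A hedged sketch of a task opting into the new prerequisite (the task itself is hypothetical, not part of this diff):

```ruby
namespace :gitlab do
  desc 'GitLab | Example of a task using the shared helpers'
  task example_check: :gitlab_environment do
    # SystemCheck::Helpers methods come in via the gitlab_environment prerequisite
    warn_user_is_not_gitlab
    puts 'Done'.color(:green) # rainbow's String#color, loaded by the task helpers
  end
end
```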
require 'haml_lint/rake_task' require 'haml_lint/inline_javascript' + # Workaround for warnings from parser/current + # TODO: Remove this after we update parser gem + task :haml_lint do + require 'parser' + def Parser.warn(*args) + puts(*args) # static-analysis ignores stdout if status is 0 + end + end + HamlLint::RakeTask.new end diff --git a/lib/tasks/migrate/setup_postgresql.rake b/lib/tasks/migrate/setup_postgresql.rake index c996537cfbe..31cbd651edb 100644 --- a/lib/tasks/migrate/setup_postgresql.rake +++ b/lib/tasks/migrate/setup_postgresql.rake @@ -1,16 +1,14 @@ -require Rails.root.join('lib/gitlab/database') -require Rails.root.join('lib/gitlab/database/migration_helpers') -require Rails.root.join('db/migrate/20151007120511_namespaces_projects_path_lower_indexes') -require Rails.root.join('db/migrate/20151008110232_add_users_lower_username_email_indexes') -require Rails.root.join('db/migrate/20161212142807_add_lower_path_index_to_routes') -require Rails.root.join('db/migrate/20170317203554_index_routes_path_for_like') -require Rails.root.join('db/migrate/20170724214302_add_lower_path_index_to_redirect_routes') -require Rails.root.join('db/migrate/20170503185032_index_redirect_routes_path_for_like') -require Rails.root.join('db/migrate/20171220191323_add_index_on_namespaces_lower_name.rb') -require Rails.root.join('db/migrate/20180113220114_rework_redirect_routes_indexes.rb') - desc 'GitLab | Sets up PostgreSQL' task setup_postgresql: :environment do + require Rails.root.join('db/migrate/20151007120511_namespaces_projects_path_lower_indexes') + require Rails.root.join('db/migrate/20151008110232_add_users_lower_username_email_indexes') + require Rails.root.join('db/migrate/20161212142807_add_lower_path_index_to_routes') + require Rails.root.join('db/migrate/20170317203554_index_routes_path_for_like') + require Rails.root.join('db/migrate/20170724214302_add_lower_path_index_to_redirect_routes') + require Rails.root.join('db/migrate/20170503185032_index_redirect_routes_path_for_like') + require Rails.root.join('db/migrate/20171220191323_add_index_on_namespaces_lower_name.rb') + require Rails.root.join('db/migrate/20180113220114_rework_redirect_routes_indexes.rb') + NamespacesProjectsPathLowerIndexes.new.up AddUsersLowerUsernameEmailIndexes.new.up AddLowerPathIndexToRoutes.new.up diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 74d76caf47d..94458d60e01 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -8,8 +8,8 @@ msgid "" msgstr "" "Project-Id-Version: gitlab 1.0.0\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2017-12-05 20:31+0100\n" -"PO-Revision-Date: 2017-12-05 20:31+0100\n" +"POT-Creation-Date: 2018-01-30 14:59+0100\n" +"PO-Revision-Date: 2018-01-30 14:59+0100\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language-Team: LANGUAGE <LL@li.org>\n" "Language: \n" @@ -23,17 +23,27 @@ msgid_plural "%d commits" msgstr[0] "" msgstr[1] "" +msgid "%d issue" +msgid_plural "%d issues" +msgstr[0] "" +msgstr[1] "" + msgid "%d layer" msgid_plural "%d layers" msgstr[0] "" msgstr[1] "" +msgid "%d merge request" +msgid_plural "%d merge requests" +msgstr[0] "" +msgstr[1] "" + msgid "%s additional commit has been omitted to prevent performance issues." msgid_plural "%s additional commits have been omitted to prevent performance issues." 
msgstr[0] "" msgstr[1] "" -msgid "%{commit_author_link} committed %{commit_timeago}" +msgid "%{commit_author_link} authored %{commit_timeago}" msgstr "" msgid "%{count} participant" @@ -47,9 +57,6 @@ msgstr "" msgid "%{number_of_failures} of %{maximum_failures} failures. GitLab will allow access on the next attempt." msgstr "" -msgid "%{number_of_failures} of %{maximum_failures} failures. GitLab will block access for %{number_of_seconds} seconds." -msgstr "" - msgid "%{number_of_failures} of %{maximum_failures} failures. GitLab will not retry automatically. Reset storage information when the problem is resolved." msgstr "" @@ -117,6 +124,21 @@ msgstr "" msgid "Add new directory" msgstr "" +msgid "AdminArea|Stop all jobs" +msgstr "" + +msgid "AdminArea|Stop all jobs?" +msgstr "" + +msgid "AdminArea|Stop jobs" +msgstr "" + +msgid "AdminArea|Stopping jobs failed" +msgstr "" + +msgid "AdminArea|You’re about to stop all jobs. This will halt all current jobs that are running." +msgstr "" + msgid "AdminHealthPageLink|health page" msgstr "" @@ -126,12 +148,18 @@ msgstr "" msgid "All" msgstr "" +msgid "An error occurred previewing the blob" +msgstr "" + msgid "An error occurred when toggling the notification subscription" msgstr "" msgid "An error occurred while fetching sidebar data" msgstr "" +msgid "An error occurred while retrieving diff" +msgstr "" + msgid "An error occurred. Please try again." msgstr "" @@ -156,9 +184,6 @@ msgstr "" msgid "Are you sure you want to discard your changes?" msgstr "" -msgid "Are you sure you want to leave this group?" -msgstr "" - msgid "Are you sure you want to reset registration token?" msgstr "" @@ -186,13 +211,13 @@ msgstr "" msgid "Author" msgstr "" -msgid "Auto Review Apps and Auto Deploy need a domain name and a %{kubernetes} to work correctly." +msgid "Auto Review Apps and Auto Deploy need a %{kubernetes} to work correctly." msgstr "" -msgid "Auto Review Apps and Auto Deploy need a domain name to work correctly." +msgid "Auto Review Apps and Auto Deploy need a domain name and a %{kubernetes} to work correctly." msgstr "" -msgid "Auto Review Apps and Auto Deploy need a %{kubernetes} to work correctly." +msgid "Auto Review Apps and Auto Deploy need a domain name to work correctly." msgstr "" msgid "AutoDevOps|Auto DevOps (Beta)" @@ -216,6 +241,9 @@ msgstr "" msgid "Available" msgstr "" +msgid "Avatar will be removed. Are you sure?" +msgstr "" + msgid "Branch" msgid_plural "Branches" msgstr[0] "" @@ -359,15 +387,24 @@ msgstr "" msgid "ChangeTypeAction|Revert" msgstr "" +msgid "ChangeTypeAction|This will create a new commit in order to revert the existing changes." +msgstr "" + msgid "Changelog" msgstr "" +msgid "Changes are shown as if the <b>source</b> revision was being merged into the <b>target</b> revision." +msgstr "" + msgid "Charts" msgstr "" msgid "Chat" msgstr "" +msgid "Check interval" +msgstr "" + msgid "Checking %{text} availability…" msgstr "" @@ -380,6 +417,15 @@ msgstr "" msgid "Cherry-pick this merge request" msgstr "" +msgid "Choose File ..." +msgstr "" + +msgid "Choose a branch/tag (e.g. %{master}) or enter a commit (e.g. %{sha}) to see what's changed or to create a merge request." +msgstr "" + +msgid "Choose file..." 
+msgstr "" + msgid "CiStatusLabel|canceled" msgstr "" @@ -437,6 +483,9 @@ msgstr "" msgid "CircuitBreakerApiLink|circuitbreaker api" msgstr "" +msgid "Click to expand text" +msgstr "" + msgid "Clone repository" msgstr "" @@ -446,27 +495,24 @@ msgstr "" msgid "ClusterIntegration|%{appList} was successfully installed on your cluster" msgstr "" -msgid "ClusterIntegration|%{boldNotice} This will add some extra resources like a load balancer, which incur additional costs. See %{pricingLink}" -msgstr "" - msgid "ClusterIntegration|API URL" msgstr "" -msgid "ClusterIntegration|Active" -msgstr "" - msgid "ClusterIntegration|Add an existing cluster" msgstr "" msgid "ClusterIntegration|Add cluster" msgstr "" -msgid "ClusterIntegration|All" +msgid "ClusterIntegration|Advanced options on this cluster's integration" msgstr "" msgid "ClusterIntegration|Applications" msgstr "" +msgid "ClusterIntegration|Are you sure you want to remove this cluster's integration? This will not delete your actual cluster." +msgstr "" + msgid "ClusterIntegration|CA Certificate" msgstr "" @@ -476,6 +522,9 @@ msgstr "" msgid "ClusterIntegration|Choose how to set up cluster integration" msgstr "" +msgid "ClusterIntegration|Choose which of your project's environments will use this cluster." +msgstr "" + msgid "ClusterIntegration|Cluster" msgstr "" @@ -506,6 +555,12 @@ msgstr "" msgid "ClusterIntegration|Clusters allow you to use review apps, deploy your applications, run your pipelines, and much more in an easy way. %{link_to_help_page}" msgstr "" +msgid "ClusterIntegration|Clusters can be used to deploy applications and to provide Review Apps for this project" +msgstr "" + +msgid "ClusterIntegration|Control how your cluster integrates with GitLab" +msgstr "" + msgid "ClusterIntegration|Copy API URL" msgstr "" @@ -518,7 +573,7 @@ msgstr "" msgid "ClusterIntegration|Copy cluster name" msgstr "" -msgid "ClusterIntegration|Create a new cluster on Google Engine right from GitLab" +msgid "ClusterIntegration|Create a new cluster on Google Kubernetes Engine right from GitLab" msgstr "" msgid "ClusterIntegration|Create cluster" @@ -530,19 +585,16 @@ msgstr "" msgid "ClusterIntegration|Create on GKE" msgstr "" -msgid "ClusterIntegration|Enable cluster integration" -msgstr "" - msgid "ClusterIntegration|Enter the details for an existing Kubernetes cluster" msgstr "" msgid "ClusterIntegration|Enter the details for your cluster" msgstr "" -msgid "ClusterIntegration|Environment pattern" +msgid "ClusterIntegration|Environment scope" msgstr "" -msgid "ClusterIntegration|GKE pricing" +msgid "ClusterIntegration|GitLab Integration" msgstr "" msgid "ClusterIntegration|GitLab Runner" @@ -560,18 +612,12 @@ msgstr "" msgid "ClusterIntegration|Helm Tiller" msgstr "" -msgid "ClusterIntegration|Inactive" -msgstr "" - msgid "ClusterIntegration|Ingress" msgstr "" msgid "ClusterIntegration|Install" msgstr "" -msgid "ClusterIntegration|Install applications on your cluster. 
Read more about %{helpLink}" -msgstr "" - msgid "ClusterIntegration|Installed" msgstr "" @@ -581,27 +627,27 @@ msgstr "" msgid "ClusterIntegration|Integrate cluster automation" msgstr "" +msgid "ClusterIntegration|Integration status" +msgstr "" + msgid "ClusterIntegration|Learn more about %{link_to_documentation}" msgstr "" msgid "ClusterIntegration|Learn more about Clusters" msgstr "" -msgid "ClusterIntegration|Machine type" +msgid "ClusterIntegration|Learn more about environments" msgstr "" -msgid "ClusterIntegration|Make sure your account %{link_to_requirements} to create clusters" +msgid "ClusterIntegration|Machine type" msgstr "" -msgid "ClusterIntegration|Manage cluster integration on your GitLab project" +msgid "ClusterIntegration|Make sure your account %{link_to_requirements} to create clusters" msgstr "" msgid "ClusterIntegration|Manage your cluster by visiting %{link_gke}" msgstr "" -msgid "ClusterIntegration|Multiple clusters are available in GitLab Entreprise Edition Premium and Ultimate" -msgstr "" - msgid "ClusterIntegration|Note:" msgstr "" @@ -614,12 +660,6 @@ msgstr "" msgid "ClusterIntegration|Please make sure that your Google account meets the following requirements:" msgstr "" -msgid "ClusterIntegration|Problem setting up the cluster" -msgstr "" - -msgid "ClusterIntegration|Problem setting up the clusters list" -msgstr "" - msgid "ClusterIntegration|Project ID" msgstr "" @@ -629,6 +669,9 @@ msgstr "" msgid "ClusterIntegration|Project namespace (optional, unique)" msgstr "" +msgid "ClusterIntegration|Prometheus" +msgstr "" + msgid "ClusterIntegration|Read our %{link_to_help_page} on cluster integration." msgstr "" @@ -638,7 +681,7 @@ msgstr "" msgid "ClusterIntegration|Remove integration" msgstr "" -msgid "ClusterIntegration|Removing cluster integration will remove the cluster configuration you have added to this project. It will not delete your cluster on Google Kubernetes Engine." +msgid "ClusterIntegration|Remove this cluster's configuration from this project. This will not delete your actual cluster." msgstr "" msgid "ClusterIntegration|Request to begin installing failed" @@ -674,9 +717,6 @@ msgstr "" msgid "ClusterIntegration|Something went wrong while installing %{title}" msgstr "" -msgid "ClusterIntegration|There are no clusters to show" -msgstr "" - msgid "ClusterIntegration|This account must have permissions to create a cluster in the %{link_to_container_project} specified below" msgstr "" @@ -698,6 +738,9 @@ msgstr "" msgid "ClusterIntegration|access to Google Kubernetes Engine" msgstr "" +msgid "ClusterIntegration|check the pricing here" +msgstr "" + msgid "ClusterIntegration|cluster" msgstr "" @@ -745,15 +788,42 @@ msgstr "" msgid "Commits feed" msgstr "" +msgid "Commits|An error occurred while fetching merge requests data." +msgstr "" + msgid "Commits|History" msgstr "" +msgid "Commits|No related merge requests found" +msgstr "" + msgid "Committed by" msgstr "" msgid "Compare" msgstr "" +msgid "Compare Git revisions" +msgstr "" + +msgid "Compare Revisions" +msgstr "" + +msgid "CompareBranches|%{source_branch} and %{target_branch} are the same." +msgstr "" + +msgid "CompareBranches|Compare" +msgstr "" + +msgid "CompareBranches|Source" +msgstr "" + +msgid "CompareBranches|Target" +msgstr "" + +msgid "CompareBranches|There isn't anything to compare." 
+msgstr "" + msgid "Container Registry" msgstr "" @@ -805,6 +875,9 @@ msgstr "" msgid "Contributors" msgstr "" +msgid "ContributorsPage|%{startDate} – %{endDate}" +msgstr "" + msgid "ContributorsPage|Building repository graph." msgstr "" @@ -874,9 +947,6 @@ msgstr "" msgid "Cycle Analytics" msgstr "" -msgid "Cycle Analytics gives an overview of how much time it takes to go from idea to production in your project." -msgstr "" - msgid "CycleAnalyticsStage|Code" msgstr "" @@ -930,6 +1000,9 @@ msgstr "" msgid "Details" msgstr "" +msgid "Diffs|No file name available" +msgstr "" + msgid "Directory name" msgstr "" @@ -996,9 +1069,6 @@ msgstr "" msgid "Environments|Environments" msgstr "" -msgid "Environments|Environments are places where code gets deployed, such as staging or production." -msgstr "" - msgid "Environments|Job" msgstr "" @@ -1029,6 +1099,9 @@ msgstr "" msgid "Environments|You don't have any environments right now." msgstr "" +msgid "Error fetching refs" +msgstr "" + msgid "Error occurred when toggling the notification subscription" msgstr "" @@ -1121,12 +1194,21 @@ msgstr "" msgid "GPG Keys" msgstr "" +msgid "Generate a default set of labels" +msgstr "" + msgid "Git storage health information has been reset" msgstr "" +msgid "Git version" +msgstr "" + msgid "GitLab Runner section" msgstr "" +msgid "Gitaly Servers" +msgstr "" + msgid "Go to your fork" msgstr "" @@ -1172,10 +1254,7 @@ msgstr "" msgid "GroupsEmptyState|You can manage your group member’s permissions and access to each project in the group." msgstr "" -msgid "GroupsTreeRole|as" -msgstr "" - -msgid "GroupsTree|Are you sure you want to leave the \"${this.group.fullName}\" group?" +msgid "GroupsTree|Are you sure you want to leave the \"${group.fullName}\" group?" msgstr "" msgid "GroupsTree|Create a project in this group." @@ -1223,6 +1302,11 @@ msgstr "" msgid "HealthCheck|Unhealthy" msgstr "" +msgid "Hide value" +msgid_plural "Hide values" +msgstr[0] "" +msgstr[1] "" + msgid "History" msgstr "" @@ -1235,6 +1319,9 @@ msgstr "" msgid "Install a Runner compatible with GitLab CI" msgstr "" +msgid "Interested parties can even contribute by pushing commits if they want to." +msgstr "" + msgid "Internal - The group and any internal projects can be viewed by any logged in user." msgstr "" @@ -1256,6 +1343,9 @@ msgstr "" msgid "Issues" msgstr "" +msgid "Issues can be bugs, tasks or ideas to be discussed. Also, issues are searchable and filterable." +msgstr "" + msgid "Jan" msgstr "" @@ -1283,6 +1373,9 @@ msgstr "" msgid "Labels" msgstr "" +msgid "Labels can be applied to issues and merge requests to categorize them." +msgstr "" + msgid "Last %d day" msgid_plural "Last %d days" msgstr[0] "" @@ -1327,10 +1420,8 @@ msgstr "" msgid "Leave project" msgstr "" -msgid "Limited to showing %d event at most" -msgid_plural "Limited to showing %d events at most" -msgstr[0] "" -msgstr[1] "" +msgid "Loading the GitLab IDE..." +msgstr "" msgid "Lock" msgstr "" @@ -1368,9 +1459,27 @@ msgstr "" msgid "Merge request" msgstr "" +msgid "Merge requests are a place to propose changes you've made to a project and discuss those changes with others" +msgstr "" + +msgid "Merged" +msgstr "" + msgid "Messages" msgstr "" +msgid "Milestones|Delete milestone" +msgstr "" + +msgid "Milestones|Delete milestone %{milestoneTitle}?" 
+msgstr "" + +msgid "Milestones|Failed to delete milestone %{milestoneTitle}" +msgstr "" + +msgid "Milestones|Milestone %{milestoneTitle} was not found" +msgstr "" + msgid "MissingSSHKeyWarningLink|add an SSH key" msgstr "" @@ -1409,6 +1518,9 @@ msgstr "" msgid "New issue" msgstr "" +msgid "New label" +msgstr "" + msgid "New merge request" msgstr "" @@ -1427,7 +1539,10 @@ msgstr "" msgid "New tag" msgstr "" -msgid "No container images stored for this project. Add one by following the instructions above." +msgid "No connection could be made to a Gitaly Server, please check your logs!" +msgstr "" + +msgid "No file chosen" msgstr "" msgid "No repository" @@ -1505,6 +1620,12 @@ msgstr "" msgid "Notifications" msgstr "" +msgid "Notifications off" +msgstr "" + +msgid "Notifications on" +msgstr "" + msgid "Nov" msgstr "" @@ -1514,9 +1635,6 @@ msgstr "" msgid "Number of access attempts" msgstr "" -msgid "Number of failures before backing off" -msgstr "" - msgid "Oct" msgstr "" @@ -1559,9 +1677,6 @@ msgstr "" msgid "Password" msgstr "" -msgid "People without permission will never get a notification and won\\'t be able to comment." -msgstr "" - msgid "Pipeline" msgstr "" @@ -1646,6 +1761,12 @@ msgstr "" msgid "Pipelines for last year" msgstr "" +msgid "Pipelines|Build with confidence" +msgstr "" + +msgid "Pipelines|Get started with Pipelines" +msgstr "" + msgid "Pipeline|all" msgstr "" @@ -1658,6 +1779,15 @@ msgstr "" msgid "Pipeline|with stages" msgstr "" +msgid "Play" +msgstr "" + +msgid "Please <a href=%{link_to_billing} target=\"_blank\" rel=\"noopener noreferrer\">enable billing for one of your projects to be able to create a cluster</a>, then try again." +msgstr "" + +msgid "Please solve the reCAPTCHA" +msgstr "" + msgid "Preferences" msgstr "" @@ -1721,6 +1851,15 @@ msgstr "" msgid "Project access must be granted explicitly to each user." msgstr "" +msgid "Project avatar" +msgstr "" + +msgid "Project avatar in repository: %{link}" +msgstr "" + +msgid "Project cache successfully reset." +msgstr "" + msgid "Project details" msgstr "" @@ -1760,12 +1899,6 @@ msgstr "" msgid "ProjectNetworkGraph|Graph" msgstr "" -msgid "ProjectSettings|Immediately run a pipeline on the default branch" -msgstr "" - -msgid "ProjectSettings|Problem setting up the CI/CD settings JavaScript" -msgstr "" - msgid "Projects" msgstr "" @@ -1844,6 +1977,9 @@ msgstr "" msgid "RefSwitcher|Tags" msgstr "" +msgid "Register / Sign In" +msgstr "" + msgid "Related Commits" msgstr "" @@ -1865,6 +2001,9 @@ msgstr "" msgid "Remind later" msgstr "" +msgid "Remove avatar" +msgstr "" + msgid "Remove project" msgstr "" @@ -1883,6 +2022,11 @@ msgstr "" msgid "Reset runners registration token" msgstr "" +msgid "Reveal value" +msgid_plural "Reveal values" +msgstr[0] "" +msgstr[1] "" + msgid "Revert this commit" msgstr "" @@ -1892,9 +2036,6 @@ msgstr "" msgid "SSH Keys" msgstr "" -msgid "Save" -msgstr "" - msgid "Save changes" msgstr "" @@ -1916,9 +2057,6 @@ msgstr "" msgid "Seconds before reseting failure information" msgstr "" -msgid "Seconds to wait after a storage failure" -msgstr "" - msgid "Seconds to wait for a storage access attempt" msgstr "" @@ -1928,6 +2066,9 @@ msgstr "" msgid "Select a timezone" msgstr "" +msgid "Select branch/tag" +msgstr "" + msgid "Select target branch" msgstr "" @@ -1937,6 +2078,9 @@ msgstr "" msgid "September" msgstr "" +msgid "Server version" +msgstr "" + msgid "Service Templates" msgstr "" @@ -1975,7 +2119,7 @@ msgstr "" msgid "Something went wrong on our end." 
msgstr "" -msgid "Something went wrong trying to change the locked state of this ${this.issuableDisplayName}" +msgid "Something went wrong when toggling the button" msgstr "" msgid "Something went wrong while fetching the projects." @@ -1984,6 +2128,9 @@ msgstr "" msgid "Something went wrong while fetching the registry list." msgstr "" +msgid "Something went wrong. Please try again." +msgstr "" + msgid "Sort by" msgstr "" @@ -2104,10 +2251,10 @@ msgstr "" msgid "Stopped" msgstr "" -msgid "Subgroups" +msgid "Storage" msgstr "" -msgid "Subscribe" +msgid "Subgroups" msgstr "" msgid "Switch branch/tag" @@ -2199,7 +2346,10 @@ msgstr "" msgid "Team" msgstr "" -msgid "The circuitbreaker backoff threshold should be lower than the failure count threshold" +msgid "The Issue Tracker is the place to add things that need to be improved or solved in a project" +msgstr "" + +msgid "The Issue Tracker is the place to add things that need to be improved or solved in a project. You can register or sign in to create issues for this project." msgstr "" msgid "The coding stage shows the time from the first commit to creating the merge request. The data will automatically be added here once you create your first merge request." @@ -2214,10 +2364,10 @@ msgstr "" msgid "The issue stage shows the time it takes from creating an issue to assigning the issue to a milestone, or add the issue to a list on your Issue Board. Begin creating issues to see data for this stage." msgstr "" -msgid "The number of attempts GitLab will make to access a storage." +msgid "The maximum file size allowed is 200KB." msgstr "" -msgid "The number of failures after which GitLab will start temporarily disabling access to a storage shard on a host" +msgid "The number of attempts GitLab will make to access a storage." msgstr "" msgid "The number of failures of after which GitLab will completely prevent access to the storage. The number of failures can be reset in the admin interface: %{link_to_health_page} or using the %{api_documentation_link}." @@ -2226,9 +2376,6 @@ msgstr "" msgid "The phase of the development lifecycle." msgstr "" -msgid "The pipelines schedule runs pipelines in the future, repeatedly, for specific branches or tags. Those scheduled pipelines will inherit limited project access based on their associated user." -msgstr "" - msgid "The planning stage shows the time from the previous step to pushing your first commit. This time will be added automatically once you push your first commit." msgstr "" @@ -2259,16 +2406,25 @@ msgstr "" msgid "The time in seconds GitLab will try to access storage. After this time a timeout error will be raised." msgstr "" +msgid "The time in seconds between storage checks. When a previous check did complete yet, GitLab will skip a check." +msgstr "" + msgid "The time taken by each data entry gathered by that stage." msgstr "" msgid "The value lying at the midpoint of a series of observed values. E.g., between 3, 5, 9, the median is 5. Between 3, 5, 7, 8, the median is (5+7)/2 = 6." msgstr "" +msgid "There are no issues to show" +msgstr "" + +msgid "There are no merge requests to show" +msgstr "" + msgid "There are problems accessing Git storage: " msgstr "" -msgid "This branch has changed since you started editing. Would you like to create a new branch?" +msgid "This directory" msgstr "" msgid "This is a confidential issue." @@ -2283,12 +2439,36 @@ msgstr "" msgid "This issue is locked." msgstr "" +msgid "This job depends on a user to trigger its process. 
Often they are used to deploy code to production environments" +msgstr "" + +msgid "This job depends on upstream jobs that need to succeed in order for this job to be triggered" +msgstr "" + +msgid "This job has not been triggered yet" +msgstr "" + +msgid "This job has not started yet" +msgstr "" + +msgid "This job is in pending state and is waiting to be picked by a runner" +msgstr "" + +msgid "This job requires a manual action" +msgstr "" + msgid "This means you can not push code until you create an empty repository or import existing one." msgstr "" msgid "This merge request is locked." msgstr "" +msgid "This project" +msgstr "" + +msgid "This repository" +msgstr "" + msgid "Time before an issue gets scheduled" msgstr "" @@ -2437,6 +2617,12 @@ msgstr[1] "" msgid "Time|s" msgstr "" +msgid "ToggleButton|Toggle Status: OFF" +msgstr "" + +msgid "ToggleButton|Toggle Status: ON" +msgstr "" + msgid "Total Time" msgstr "" @@ -2446,6 +2632,12 @@ msgstr "" msgid "Total test time for all commits/merges" msgstr "" +msgid "Trigger this manual action" +msgstr "" + +msgid "Unable to reset project cache." +msgstr "" + msgid "Unlock" msgstr "" @@ -2455,7 +2647,7 @@ msgstr "" msgid "Unstar" msgstr "" -msgid "Unsubscribe" +msgid "Up to date" msgstr "" msgid "Upload New File" @@ -2464,6 +2656,9 @@ msgstr "" msgid "Upload file" msgstr "" +msgid "Upload new avatar" +msgstr "" + msgid "UploadLink|click to upload" msgstr "" @@ -2497,10 +2692,13 @@ msgstr "" msgid "Want to see the data? Please ask an administrator for access." msgstr "" +msgid "We could not verify that one of your projects on GCP has billing enabled. Please try again." +msgstr "" + msgid "We don't have enough data to show this stage." msgstr "" -msgid "When access to a storage fails. GitLab will prevent access to the storage for the time specified here. This allows the filesystem to recover. Repositories on failing shards are temporarly unavailable" +msgid "We want to be sure it is you, please confirm you are not a robot." msgstr "" msgid "Wiki" @@ -2623,9 +2821,15 @@ msgstr "" msgid "You are on a read-only GitLab instance." msgstr "" +msgid "You can also star a label to make it a priority label." +msgstr "" + msgid "You can only add files when you are on a branch" msgstr "" +msgid "You can only edit files when you are on a branch" +msgstr "" + msgid "You cannot write to this read-only GitLab instance." msgstr "" @@ -2662,6 +2866,9 @@ msgstr "" msgid "You won't be able to pull or push project code via SSH until you add an SSH key to your profile" msgstr "" +msgid "You'll need to use different branch names to get a valid comparison." +msgstr "" + msgid "Your comment will not be visible to the public." msgstr "" @@ -2682,12 +2889,113 @@ msgid_plural "days" msgstr[0] "" msgstr[1] "" +msgid "merge request" +msgid_plural "merge requests" +msgstr[0] "" +msgstr[1] "" + +msgid "mrWidget|Cancel automatic merge" +msgstr "" + +msgid "mrWidget|Checking ability to merge automatically" +msgstr "" + +msgid "mrWidget|Cherry-pick" +msgstr "" + +msgid "mrWidget|Cherry-pick this merge request in a new merge request" +msgstr "" + +msgid "mrWidget|Closed by" +msgstr "" + +msgid "mrWidget|Merge" +msgstr "" + +msgid "mrWidget|Merge failed." 
+msgstr "" + +msgid "mrWidget|Merge locally" +msgstr "" + +msgid "mrWidget|Merged by" +msgstr "" + +msgid "mrWidget|Refresh" +msgstr "" + +msgid "mrWidget|Refresh now" +msgstr "" + +msgid "mrWidget|Refreshing now" +msgstr "" + +msgid "mrWidget|Remove Source Branch" +msgstr "" + +msgid "mrWidget|Remove source branch" +msgstr "" + +msgid "mrWidget|Resolve conflicts" +msgstr "" + +msgid "mrWidget|Revert" +msgstr "" + +msgid "mrWidget|Revert this merge request in a new merge request" +msgstr "" + +msgid "mrWidget|Set by" +msgstr "" + +msgid "mrWidget|The changes were merged into" +msgstr "" + +msgid "mrWidget|The changes were not merged into" +msgstr "" + +msgid "mrWidget|The changes will be merged into" +msgstr "" + +msgid "mrWidget|The source branch has been removed" +msgstr "" + +msgid "mrWidget|The source branch is being removed" +msgstr "" + +msgid "mrWidget|The source branch will be removed" +msgstr "" + +msgid "mrWidget|The source branch will not be removed" +msgstr "" + +msgid "mrWidget|There are merge conflicts" +msgstr "" + +msgid "mrWidget|This merge request failed to be merged automatically" +msgstr "" + +msgid "mrWidget|This merge request is in the process of being merged" +msgstr "" + +msgid "mrWidget|This project is archived, write access has been disabled" +msgstr "" + +msgid "mrWidget|You can remove source branch now" +msgstr "" + +msgid "mrWidget|to be merged automatically when the pipeline succeeds" +msgstr "" + msgid "new merge request" msgstr "" msgid "notification emails" msgstr "" +msgid "or" +msgstr "" + msgid "parent" msgid_plural "parents" msgstr[0] "" diff --git a/qa/README.md b/qa/README.md index 3c1b61900d9..b937dc4c7a0 100644 --- a/qa/README.md +++ b/qa/README.md @@ -34,6 +34,9 @@ You can use GitLab QA to exercise tests on any live instance! For example, the following call would login to a local [GDK] instance and run all specs in `qa/specs/features`: +First, `cd` into the `$gdk/gitlab/qa` directory. +The `bin/qa` script expects you to be in the `qa` folder of the app. 
+ ``` bin/qa Test::Instance http://localhost:3000 ``` @@ -27,7 +27,9 @@ module QA module Resource autoload :Sandbox, 'qa/factory/resource/sandbox' autoload :Group, 'qa/factory/resource/group' + autoload :Issue, 'qa/factory/resource/issue' autoload :Project, 'qa/factory/resource/project' + autoload :MergeRequest, 'qa/factory/resource/merge_request' autoload :DeployKey, 'qa/factory/resource/deploy_key' autoload :SecretVariable, 'qa/factory/resource/secret_variable' autoload :Runner, 'qa/factory/resource/runner' @@ -107,9 +109,17 @@ module QA module Project autoload :New, 'qa/page/project/new' autoload :Show, 'qa/page/project/show' + autoload :Activity, 'qa/page/project/activity' + + module Pipeline + autoload :Index, 'qa/page/project/pipeline/index' + autoload :Show, 'qa/page/project/pipeline/show' + end module Settings autoload :Common, 'qa/page/project/settings/common' + autoload :Advanced, 'qa/page/project/settings/advanced' + autoload :Main, 'qa/page/project/settings/main' autoload :Repository, 'qa/page/project/settings/repository' autoload :CICD, 'qa/page/project/settings/ci_cd' autoload :DeployKeys, 'qa/page/project/settings/deploy_keys' @@ -117,9 +127,10 @@ module QA autoload :Runners, 'qa/page/project/settings/runners' end - module Pipeline - autoload :Index, 'qa/page/project/pipeline/index' - autoload :Show, 'qa/page/project/pipeline/show' + module Issue + autoload :New, 'qa/page/project/issue/new' + autoload :Show, 'qa/page/project/issue/show' + autoload :Index, 'qa/page/project/issue/index' end end @@ -127,6 +138,10 @@ module QA autoload :PersonalAccessTokens, 'qa/page/profile/personal_access_tokens' end + module MergeRequest + autoload :New, 'qa/page/merge_request/new' + end + module Admin autoload :Settings, 'qa/page/admin/settings' end @@ -135,6 +150,13 @@ module QA autoload :Main, 'qa/page/mattermost/main' autoload :Login, 'qa/page/mattermost/login' end + + ## + # Classes describing components that are used by several pages. + # + module Component + autoload :Dropzone, 'qa/page/component/dropzone' + end end ## diff --git a/qa/qa/factory/dependency.rb b/qa/qa/factory/dependency.rb index d0e85a68237..fc5dc82ce29 100644 --- a/qa/qa/factory/dependency.rb +++ b/qa/qa/factory/dependency.rb @@ -16,20 +16,21 @@ module QA def build! return if overridden? - Builder.new(@signature).fabricate!.tap do |product| + Builder.new(@signature, @factory).fabricate!.tap do |product| @factory.public_send("#{@name}=", product) end end class Builder - def initialize(signature) + def initialize(signature, caller_factory) @factory = signature.factory @block = signature.block + @caller_factory = caller_factory end def fabricate! @factory.fabricate! do |factory| - @block&.call(factory) + @block&.call(factory, @caller_factory) end end end diff --git a/qa/qa/factory/resource/issue.rb b/qa/qa/factory/resource/issue.rb new file mode 100644 index 00000000000..95f48e20b3e --- /dev/null +++ b/qa/qa/factory/resource/issue.rb @@ -0,0 +1,32 @@ +module QA + module Factory + module Resource + class Issue < Factory::Base + attr_writer :title, :description, :project + + dependency Factory::Resource::Project, as: :project do |project| + project.name = 'project-for-issues' + project.description = 'project for adding issues' + end + + product :title do + Page::Project::Issue::Show.act { issue_title } + end + + def fabricate! + project.visit! 
+ + Page::Project::Show.act do + go_to_new_issue + end + + Page::Project::Issue::New.perform do |page| + page.add_title(@title) + page.add_description(@description) + page.create_new_issue + end + end + end + end + end +end diff --git a/qa/qa/factory/resource/merge_request.rb b/qa/qa/factory/resource/merge_request.rb new file mode 100644 index 00000000000..ce04e904aaf --- /dev/null +++ b/qa/qa/factory/resource/merge_request.rb @@ -0,0 +1,49 @@ +require 'securerandom' + +module QA + module Factory + module Resource + class MergeRequest < Factory::Base + attr_accessor :title, + :description, + :source_branch, + :target_branch + + dependency Factory::Resource::Project, as: :project do |project| + project.name = 'project-with-merge-request' + end + + dependency Factory::Repository::Push, as: :target do |push, factory| + push.project = factory.project + push.branch_name = "master:#{factory.target_branch}" + end + + dependency Factory::Repository::Push, as: :source do |push, factory| + push.project = factory.project + push.branch_name = "#{factory.target_branch}:#{factory.source_branch}" + push.file_name = "added_file.txt" + push.file_content = "File Added" + end + + def initialize + @title = 'QA test - merge request' + @description = 'This is a test merge request' + @source_branch = "qa-test-feature-#{SecureRandom.hex(8)}" + @target_branch = "master" + end + + def fabricate! + project.visit! + + Page::Project::Show.act { new_merge_request } + + Page::MergeRequest::New.perform do |page| + page.fill_title(@title) + page.fill_description(@description) + page.create_merge_request + end + end + end + end + end +end diff --git a/qa/qa/page/admin/settings.rb b/qa/qa/page/admin/settings.rb index 1904732aee6..1f646103e7f 100644 --- a/qa/qa/page/admin/settings.rb +++ b/qa/qa/page/admin/settings.rb @@ -2,12 +2,13 @@ module QA module Page module Admin class Settings < Page::Base - ## - # TODO, define all selectors required by this page object - # - # See gitlab-org/gitlab-qa#154 - # - view 'app/views/admin/application_settings/show.html.haml' + view 'app/views/admin/application_settings/_form.html.haml' do + element :form_actions, '.form-actions' + element :submit, "submit 'Save'" + element :repository_storage, '%legend Repository Storage' + element :hashed_storage, + 'Create new projects using hashed storage paths' + end def enable_hashed_storage scroll_to 'legend', text: 'Repository Storage' diff --git a/qa/qa/page/base.rb b/qa/qa/page/base.rb index 81ba80cdbaf..5c3af4b9115 100644 --- a/qa/qa/page/base.rb +++ b/qa/qa/page/base.rb @@ -13,16 +13,18 @@ module QA visit current_url end - def wait(css = '.application', time: 60) - Time.now.tap do |start| - while Time.now - start < time - break if page.has_css?(css, wait: 5) + def wait(max: 60, time: 1, reload: true) + start = Time.now - refresh - end + while Time.now - start < max + return true if yield + + sleep(time) + + refresh if reload end - yield if block_given? + false end def scroll_to(selector, text: nil) @@ -40,14 +42,35 @@ module QA page.within(selector) { yield } if block_given? end - def click_element(name) - find_element(name).click + # Returns true if successfully GETs the given URL + # Useful because `page.status_code` is unsupported by our driver, and + # we don't have access to the `response` to use `have_http_status`. 
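The new `QA::Factory::Resource::Issue` and `QA::Factory::Resource::MergeRequest` above declare their prerequisites via `dependency` blocks (the merge request pushes a target branch and a source branch before opening the form). A hedged sketch of how a spec would typically drive them, assuming the class-level `fabricate!` entry point on `Factory::Base` yields the factory instance the same way the dependency builder above uses it; the attribute values are illustrative:

```ruby
QA::Factory::Resource::MergeRequest.fabricate! do |merge_request|
  merge_request.title = 'QA test - merge request'
  merge_request.description = 'This is a test merge request'
end

QA::Factory::Resource::Issue.fabricate! do |issue|
  issue.title = 'QA test - issue'
  issue.description = 'This is a test issue'
end
```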
+ def asset_exists?(url) + page.execute_script <<~JS + xhr = new XMLHttpRequest(); + xhr.open('GET', '#{url}', true); + xhr.send(); + JS + + return false unless wait(time: 0.5, max: 60, reload: false) do + page.evaluate_script('xhr.readyState == XMLHttpRequest.DONE') + end + + page.evaluate_script('xhr.status') == 200 end def find_element(name) find(element_selector_css(name)) end + def click_element(name) + find_element(name).click + end + + def fill_element(name, content) + find_element(name).set(content) + end + def within_element(name) page.within(element_selector_css(name)) do yield diff --git a/qa/qa/page/component/dropzone.rb b/qa/qa/page/component/dropzone.rb new file mode 100644 index 00000000000..15bdc742fda --- /dev/null +++ b/qa/qa/page/component/dropzone.rb @@ -0,0 +1,31 @@ +module QA + module Page + module Component + class Dropzone + attr_reader :page, :container + + # page - A QA::Page::Base object + # container - CSS selector of the comment textarea's container + def initialize(page, container) + @page = page + @container = container + end + + # Not tested and not expected to work with multiple dropzones + # instantiated on one page because there is no distinguishing + # attribute per dropzone file field. + def attach_file(attachment) + filename = File.basename(attachment) + + field_style = { visibility: 'visible', height: '', width: '' } + page.attach_file(attachment, class: 'dz-hidden-input', make_visible: field_style) + + # Wait for link to be appended to dropzone text + page.wait(reload: false) do + page.find("#{container} textarea").value.match(filename) + end + end + end + end + end +end diff --git a/qa/qa/page/dashboard/projects.rb b/qa/qa/page/dashboard/projects.rb index 71255b18362..73942cb856a 100644 --- a/qa/qa/page/dashboard/projects.rb +++ b/qa/qa/page/dashboard/projects.rb @@ -3,10 +3,21 @@ module QA module Dashboard class Projects < Page::Base view 'app/views/dashboard/projects/index.html.haml' + view 'app/views/shared/projects/_search_form.html.haml' do + element :form_filter_by_name, /form_tag.+id: 'project-filter-form'/ + end def go_to_project(name) + filter_by_name(name) + find_link(text: name).click end + + def filter_by_name(name) + page.within('form#project-filter-form') do + fill_in :name, with: name + end + end end end end diff --git a/qa/qa/page/group/show.rb b/qa/qa/page/group/show.rb index 37ed3b35bce..d215518d316 100644 --- a/qa/qa/page/group/show.rb +++ b/qa/qa/page/group/show.rb @@ -2,12 +2,20 @@ module QA module Page module Group class Show < Page::Base - ## - # TODO, define all selectors required by this page object - # - # See gitlab-org/gitlab-qa#154 - # - view 'app/views/groups/show.html.haml' + view 'app/views/groups/show.html.haml' do + element :new_project_or_subgroup_dropdown, '.new-project-subgroup' + element :new_project_or_subgroup_dropdown_toggle, '.dropdown-toggle' + element :new_project_option, /%li.*data:.*value: "new-project"/ + element :new_project_button, /%input.*data:.*action: "new-project"/ + element :new_subgroup_option, /%li.*data:.*value: "new-subgroup"/ + + # data-value and data-action get modified by JS for subgroup + element :new_subgroup_button, /%input.*\.js-new-group-child/ + end + + view 'app/assets/javascripts/groups/constants.js' do + element :no_result_text, 'Sorry, no groups or projects matched your search' + end def go_to_subgroup(name) click_link name @@ -20,26 +28,41 @@ module QA def has_subgroup?(name) filter_by_name(name) - page.has_link?(name) + wait(reload: false) do + return false if 
page.has_content?('Sorry, no groups or projects matched your search') + + page.has_link?(name) + end end def go_to_new_subgroup - within '.new-project-subgroup' do - find('.dropdown-toggle').click - find("li[data-value='new-subgroup']").click - end + click_new('subgroup') find("input[data-action='new-subgroup']").click end def go_to_new_project - within '.new-project-subgroup' do - find('.dropdown-toggle').click - find("li[data-value='new-project']").click - end + click_new('project') find("input[data-action='new-project']").click end + + private + + def click_new(kind) + within '.new-project-subgroup' do + css = "li[data-value='new-#{kind}']" + + # May need to click again because it is possible to click the button quicker than the JS is bound + wait(reload: false) do + find('.dropdown-toggle').click + + page.has_css?(css) + end + + find(css).click + end + end end end end diff --git a/qa/qa/page/main/login.rb b/qa/qa/page/main/login.rb index 9cff2c5c317..95880475ffa 100644 --- a/qa/qa/page/main/login.rb +++ b/qa/qa/page/main/login.rb @@ -10,12 +10,14 @@ module QA view 'app/views/devise/sessions/_new_base.html.haml' do element :login_field, 'text_field :login' - element :passowrd_field, 'password_field :password' + element :password_field, 'password_field :password' element :sign_in_button, 'submit "Sign in"' end def initialize - wait('.application', time: 500) + wait(max: 500) do + page.has_css?('.application') + end end def sign_in_using_credentials diff --git a/qa/qa/page/menu/admin.rb b/qa/qa/page/menu/admin.rb index 40da4a53e8a..573b98f7386 100644 --- a/qa/qa/page/menu/admin.rb +++ b/qa/qa/page/menu/admin.rb @@ -2,15 +2,8 @@ module QA module Page module Menu class Admin < Page::Base - ## - # TODO, define all selectors required by this page object - # - # See gitlab-org/gitlab-qa#154 - # - view 'app/views/admin/dashboard/index.html.haml' - - def go_to_license - click_link 'License' + view 'app/views/layouts/nav/sidebar/_admin.html.haml' do + element :settings, "_('Settings')" end def go_to_settings diff --git a/qa/qa/page/menu/side.rb b/qa/qa/page/menu/side.rb index 239f2872228..7e028add2ef 100644 --- a/qa/qa/page/menu/side.rb +++ b/qa/qa/page/menu/side.rb @@ -4,9 +4,13 @@ module QA class Side < Page::Base view 'app/views/layouts/nav/sidebar/_project.html.haml' do element :settings_item + element :settings_link, 'link_to edit_project_path' element :repository_link, "title: 'Repository'" element :pipelines_settings_link, "title: 'CI / CD'" + element :issues_link, /link_to.*shortcuts-issues/ + element :issues_link_text, "Issues" element :top_level_items, '.sidebar-top-level-items' + element :activity_link, "title: 'Activity'" end view 'app/assets/javascripts/fly_out_nav.js' do @@ -35,6 +39,18 @@ module QA end end + def go_to_settings + within_sidebar do + click_on 'Settings' + end + end + + def click_issues + within_sidebar do + click_link('Issues') + end + end + private def hover_settings @@ -51,6 +67,12 @@ module QA end end + def go_to_activity + within_sidebar do + click_on 'Activity' + end + end + def within_submenu page.within('.fly-out-list') do yield diff --git a/qa/qa/page/merge_request/new.rb b/qa/qa/page/merge_request/new.rb new file mode 100644 index 00000000000..ec94ff4ac98 --- /dev/null +++ b/qa/qa/page/merge_request/new.rb @@ -0,0 +1,31 @@ +module QA + module Page + module MergeRequest + class New < Page::Base + view 'app/views/shared/issuable/_form.html.haml' do + element :issuable_create_button + end + + view 'app/views/shared/issuable/form/_title.html.haml' do + element 
:issuable_form_title + end + + view 'app/views/shared/form_elements/_description.html.haml' do + element :issuable_form_description + end + + def create_merge_request + click_element :issuable_create_button + end + + def fill_title(title) + fill_element :issuable_form_title, title + end + + def fill_description(description) + fill_element :issuable_form_description, description + end + end + end + end +end diff --git a/qa/qa/page/project/activity.rb b/qa/qa/page/project/activity.rb new file mode 100644 index 00000000000..0196922c889 --- /dev/null +++ b/qa/qa/page/project/activity.rb @@ -0,0 +1,15 @@ +module QA + module Page + module Project + class Activity < Page::Base + view 'app/views/shared/_event_filter.html.haml' do + element :push_events, "event_filter_link EventFilter.push, _('Push events')" + end + + def go_to_push_events + click_on 'Push events' + end + end + end + end +end diff --git a/qa/qa/page/project/issue/index.rb b/qa/qa/page/project/issue/index.rb new file mode 100644 index 00000000000..b5903f536a4 --- /dev/null +++ b/qa/qa/page/project/issue/index.rb @@ -0,0 +1,17 @@ +module QA + module Page + module Project + module Issue + class Index < Page::Base + view 'app/views/projects/issues/_issue.html.haml' do + element :issue_link, 'link_to issue.title' + end + + def go_to_issue(title) + click_link(title) + end + end + end + end + end +end diff --git a/qa/qa/page/project/issue/new.rb b/qa/qa/page/project/issue/new.rb new file mode 100644 index 00000000000..7fc581da1ed --- /dev/null +++ b/qa/qa/page/project/issue/new.rb @@ -0,0 +1,33 @@ +module QA + module Page + module Project + module Issue + class New < Page::Base + view 'app/views/shared/issuable/_form.html.haml' do + element :submit_issue_button, 'form.submit "Submit' + end + + view 'app/views/shared/issuable/form/_title.html.haml' do + element :issue_title_textbox, 'form.text_field :title' + end + + view 'app/views/shared/form_elements/_description.html.haml' do + element :issue_description_textarea, "render 'projects/zen', f: form, attr: :description" + end + + def add_title(title) + fill_in 'issue_title', with: title + end + + def add_description(description) + fill_in 'issue_description', with: description + end + + def create_new_issue + click_on 'Submit issue' + end + end + end + end + end +end diff --git a/qa/qa/page/project/issue/show.rb b/qa/qa/page/project/issue/show.rb new file mode 100644 index 00000000000..5bc0598a524 --- /dev/null +++ b/qa/qa/page/project/issue/show.rb @@ -0,0 +1,40 @@ +module QA + module Page + module Project + module Issue + class Show < Page::Base + view 'app/views/projects/issues/show.html.haml' do + element :issue_details, '.issue-details' + element :title, '.title' + end + + view 'app/views/shared/notes/_form.html.haml' do + element :new_note_form, 'new-note' + element :new_note_form, 'attr: :note' + end + + view 'app/views/shared/notes/_comment_button.html.haml' do + element :comment_button, '%strong Comment' + end + + def issue_title + find('.issue-details .title').text + end + + # Adds a comment to an issue + # attachment option should be an absolute path + def comment(text, attachment: nil) + fill_in(with: text, name: 'note[note]') + + unless attachment.nil? 
+ QA::Page::Component::Dropzone.new(self, '.new-note') + .attach_file(attachment) + end + + click_on 'Comment' + end + end + end + end + end +end diff --git a/qa/qa/page/project/new.rb b/qa/qa/page/project/new.rb index 9b1438f76d5..186a4724326 100644 --- a/qa/qa/page/project/new.rb +++ b/qa/qa/page/project/new.rb @@ -4,7 +4,7 @@ module QA class New < Page::Base view 'app/views/projects/_new_project_fields.html.haml' do element :project_namespace_select - element :project_namespace_field, 'select :namespace_id' + element :project_namespace_field, /select :namespace_id.*class: 'select2/ element :project_path, 'text_field :path' element :project_description, 'text_area :description' element :project_create_button, "submit 'Create project'" @@ -13,7 +13,7 @@ module QA def choose_test_namespace click_element :project_namespace_select - first('li', text: Runtime::Namespace.path).click + find('ul.select2-result-sub > li', text: Runtime::Namespace.path).click end def choose_name(name) diff --git a/qa/qa/page/project/settings/advanced.rb b/qa/qa/page/project/settings/advanced.rb new file mode 100644 index 00000000000..5ef00504fdf --- /dev/null +++ b/qa/qa/page/project/settings/advanced.rb @@ -0,0 +1,33 @@ +module QA + module Page + module Project + module Settings + class Advanced < Page::Base + view 'app/views/projects/edit.html.haml' do + element :project_path_field, 'f.text_field :path' + element :project_name_field, 'f.text_field :name' + element :rename_project_button, "f.submit 'Rename project'" + end + + def rename_to(path) + fill_project_name(path) + fill_project_path(path) + rename_project! + end + + def fill_project_path(path) + fill_in :project_path, with: path + end + + def fill_project_name(name) + fill_in :project_name, with: name + end + + def rename_project! + click_on 'Rename project' + end + end + end + end + end +end diff --git a/qa/qa/page/project/settings/common.rb b/qa/qa/page/project/settings/common.rb index 0a2a3488b9a..319cb1045b6 100644 --- a/qa/qa/page/project/settings/common.rb +++ b/qa/qa/page/project/settings/common.rb @@ -3,20 +3,28 @@ module QA module Project module Settings module Common - def expand(element_name) - page.within('#content-body') do - click_element(element_name) - - yield + def self.included(base) + base.class_eval do + view 'app/views/projects/edit.html.haml' do + element :advanced_settings_expand, "= expanded ? 'Collapse' : 'Expand'" + end end end + # Click the Expand button present in the specified section + # + # @param [String] name present in the container in the DOM def expand_section(name) page.within('#content-body') do page.within('section', text: name) do - click_button 'Expand' unless first('button', text: 'Collapse') + # Because it is possible to click the button before the JS toggle code is bound + wait(reload: false) do + click_button 'Expand' unless first('button', text: 'Collapse') + + page.has_content?('Collapse') + end - yield + yield if block_given? 
end end end diff --git a/qa/qa/page/project/settings/main.rb b/qa/qa/page/project/settings/main.rb new file mode 100644 index 00000000000..5d743f4c9c8 --- /dev/null +++ b/qa/qa/page/project/settings/main.rb @@ -0,0 +1,21 @@ +module QA + module Page + module Project + module Settings + class Main < Page::Base + include Common + + view 'app/views/projects/edit.html.haml' do + element :advanced_settings_section, 'Advanced settings' + end + + def expand_advanced_settings(&block) + expand_section('Advanced settings') do + Advanced.perform(&block) + end + end + end + end + end + end +end diff --git a/qa/qa/page/project/settings/repository.rb b/qa/qa/page/project/settings/repository.rb index 6cc68358c8c..22362164a1a 100644 --- a/qa/qa/page/project/settings/repository.rb +++ b/qa/qa/page/project/settings/repository.rb @@ -6,11 +6,11 @@ module QA include Common view 'app/views/projects/deploy_keys/_index.html.haml' do - element :expand_deploy_keys + element :deploy_keys_section, 'Deploy Keys' end def expand_deploy_keys(&block) - expand(:expand_deploy_keys) do + expand_section('Deploy Keys') do DeployKeys.perform(&block) end end diff --git a/qa/qa/page/project/show.rb b/qa/qa/page/project/show.rb index 5e66e40a0b5..553d35f9579 100644 --- a/qa/qa/page/project/show.rb +++ b/qa/qa/page/project/show.rb @@ -3,23 +3,35 @@ module QA module Project class Show < Page::Base view 'app/views/shared/_clone_panel.html.haml' do + element :clone_holder, '.git-clone-holder' element :clone_dropdown element :clone_options_dropdown, '.clone-options-dropdown' + element :project_repository_location, 'text_field_tag :project_clone' end - view 'app/views/shared/_clone_panel.html.haml' do - element :project_repository_location, 'text_field_tag :project_clone' + view 'app/views/projects/_last_push.html.haml' do + element :create_merge_request end view 'app/views/projects/_home_panel.html.haml' do element :project_name end + view 'app/views/layouts/header/_new_dropdown.haml' do + element :new_menu_toggle + element :new_issue_link, "link_to 'New issue', new_project_issue_path(@project)" + end + def choose_repository_clone_http - click_element :clone_dropdown + wait(reload: false) do + click_element :clone_dropdown + + page.within('.clone-options-dropdown') do + click_link('HTTP') + end - page.within('.clone-options-dropdown') do - click_link('HTTP') + # Ensure git clone textbox was updated to http URI + page.has_css?('.git-clone-holder input#project_clone[value*="http"]') end end @@ -31,10 +43,20 @@ module QA find('.qa-project-name').text end + def new_merge_request + click_element :create_merge_request + end + def wait_for_push sleep 5 refresh end + + def go_to_new_issue + click_element :new_menu_toggle + + click_link 'New issue' + end end end end diff --git a/qa/qa/runtime/browser.rb b/qa/qa/runtime/browser.rb index 7b1be3d5ef3..a12d95683af 100644 --- a/qa/qa/runtime/browser.rb +++ b/qa/qa/runtime/browser.rb @@ -23,11 +23,11 @@ module QA # In case of an address that is a symbol we will try to guess address # based on `Runtime::Scenario#something_address`. 
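      #   e.g. `Runtime::Browser.visit(:gitlab, Page::Main::Login)` resolves the
      #   address from `Runtime::Scenario` rather than taking a literal URL.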
# - def visit(address, page, &block) + def visit(address, page = nil, &block) Browser::Session.new(address, page).perform(&block) end - def self.visit(address, page, &block) + def self.visit(address, page = nil, &block) new.visit(address, page, &block) end @@ -84,7 +84,7 @@ module QA config.javascript_driver = :chrome config.default_max_wait_time = 10 # https://github.com/mattheworiordan/capybara-screenshot/issues/164 - config.save_path = 'tmp' + config.save_path = File.expand_path('../../tmp', __dir__) end end diff --git a/qa/qa/specs/features/merge_request/create_spec.rb b/qa/qa/specs/features/merge_request/create_spec.rb new file mode 100644 index 00000000000..fbf9a4d17e5 --- /dev/null +++ b/qa/qa/specs/features/merge_request/create_spec.rb @@ -0,0 +1,17 @@ +module QA + feature 'creates a merge request', :core do + scenario 'user creates a new merge request' do + Runtime::Browser.visit(:gitlab, Page::Main::Login) + Page::Main::Login.act { sign_in_using_credentials } + + Factory::Resource::MergeRequest.fabricate! do |merge_request| + merge_request.title = 'This is a merge request' + merge_request.description = 'Great feature' + end + + expect(page).to have_content('This is a merge request') + expect(page).to have_content('Great feature') + expect(page).to have_content('Opened less than a minute ago') + end + end +end diff --git a/qa/qa/specs/features/project/activity_spec.rb b/qa/qa/specs/features/project/activity_spec.rb new file mode 100644 index 00000000000..ba94ce8cf28 --- /dev/null +++ b/qa/qa/specs/features/project/activity_spec.rb @@ -0,0 +1,20 @@ +module QA + feature 'activity page', :core do + scenario 'push creates an event in the activity page' do + Runtime::Browser.visit(:gitlab, Page::Main::Login) + Page::Main::Login.act { sign_in_using_credentials } + + Factory::Repository::Push.fabricate! do |push| + push.file_name = 'README.md' + push.file_content = '# This is a test project' + push.commit_message = 'Add README.md' + end + + Page::Menu::Side.act { go_to_activity } + + Page::Project::Activity.act { go_to_push_events } + + expect(page).to have_content('pushed new branch master') + end + end +end diff --git a/qa/qa/specs/features/project/create_issue_spec.rb b/qa/qa/specs/features/project/create_issue_spec.rb new file mode 100644 index 00000000000..b73f108c2d9 --- /dev/null +++ b/qa/qa/specs/features/project/create_issue_spec.rb @@ -0,0 +1,18 @@ +module QA + feature 'creates issue', :core do + let(:issue_title) { 'issue title' } + + scenario 'user creates issue' do + Runtime::Browser.visit(:gitlab, Page::Main::Login) + Page::Main::Login.act { sign_in_using_credentials } + + Factory::Resource::Issue.fabricate! do |issue| + issue.title = issue_title + end + + Page::Menu::Side.act { click_issues } + + expect(page).to have_content(issue_title) + end + end +end diff --git a/qa/spec/factory/dependency_spec.rb b/qa/spec/factory/dependency_spec.rb index 32405415126..8aaa6665a18 100644 --- a/qa/spec/factory/dependency_spec.rb +++ b/qa/spec/factory/dependency_spec.rb @@ -54,6 +54,19 @@ describe QA::Factory::Dependency do expect(factory).to have_received(:mydep=).with(dependency) end + + context 'when receives a caller factory as block argument' do + let(:dependency) { QA::Factory::Base } + + it 'calls given block with dependency factory and caller factory' do + allow_any_instance_of(QA::Factory::Base).to receive(:fabricate!).and_return(factory) + allow(QA::Factory::Product).to receive(:populate!).and_return(spy('any')) + + subject.build! 
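+          # build! should invoke the dependency block with the fabricated
+          # dependency first and the caller factory second.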
+ + expect(block).to have_received(:call).with(an_instance_of(QA::Factory::Base), factory) + end + end end end end diff --git a/qa/spec/fixtures/banana_sample.gif b/qa/spec/fixtures/banana_sample.gif Binary files differnew file mode 100644 index 00000000000..1322ac92d14 --- /dev/null +++ b/qa/spec/fixtures/banana_sample.gif diff --git a/qa/spec/runtime/rsa_key.rb b/qa/spec/runtime/rsa_key.rb index ff277b9077b..6d7ab4dcd2e 100644 --- a/qa/spec/runtime/rsa_key.rb +++ b/qa/spec/runtime/rsa_key.rb @@ -3,7 +3,7 @@ describe QA::Runtime::RSAKey do subject { described_class.new.public_key } it 'generates a public RSA key' do - expect(subject).to match(/\Assh\-rsa AAAA[0-9A-Za-z+\/]+={0,3}\z/) + expect(subject).to match(%r{\Assh\-rsa AAAA[0-9A-Za-z+/]+={0,3}\z}) end end end diff --git a/rubocop/cop/migration/update_column_in_batches.rb b/rubocop/cop/migration/update_column_in_batches.rb index db2b5564297..5461abc5ee0 100644 --- a/rubocop/cop/migration/update_column_in_batches.rb +++ b/rubocop/cop/migration/update_column_in_batches.rb @@ -29,7 +29,7 @@ module RuboCop path = Pathname.new(source_name).relative_path_from(rails_root) dirname = File.dirname(path) .sub(%r{\Adb/(migrate|post_migrate)}, 'spec/migrations') - filename = File.basename(source_name, '.rb').sub(%r{\A\d+_}, '') + filename = File.basename(source_name, '.rb').sub(/\A\d+_/, '') File.join(dirname, "#{filename}_spec.rb") end diff --git a/scripts/lint-rugged b/scripts/lint-rugged index 3f8fcb558e3..cabd083e9f9 100755 --- a/scripts/lint-rugged +++ b/scripts/lint-rugged @@ -1,7 +1,7 @@ #!/usr/bin/env ruby ALLOWED = [ - # Can be deleted (?) once rugged is no longer used in production. Doesn't make Rugged calls. + # Can be fixed once Rugged is no longer used in production. Doesn't make Rugged calls. 
'config/initializers/8_metrics.rb', # Can be deleted once wiki's are fully (mandatory) migrated @@ -13,14 +13,12 @@ ALLOWED = [ # Needs to be migrated, https://gitlab.com/gitlab-org/gitaly/issues/954 'lib/tasks/gitlab/cleanup.rake', - # https://gitlab.com/gitlab-org/gitaly/issues/961 - 'app/models/repository.rb', - # The only place where Rugged code is still allowed in production 'lib/gitlab/git/' ].freeze rugged_lines = IO.popen(%w[git grep -i -n rugged -- app config lib], &:read).lines +rugged_lines = rugged_lines.select { |l| /^[^:]*\.rb:/ =~ l } rugged_lines = rugged_lines.reject { |l| l.start_with?(*ALLOWED) } rugged_lines = rugged_lines.reject do |line| code, _comment = line.split('# ', 2) diff --git a/scripts/static-analysis b/scripts/static-analysis index 96d08287ded..bdb88f3cb57 100755 --- a/scripts/static-analysis +++ b/scripts/static-analysis @@ -1,6 +1,29 @@ #!/usr/bin/env ruby -require ::File.expand_path('../lib/gitlab/popen', __dir__) +# We don't have auto-loading here +require_relative '../lib/gitlab/popen' +require_relative '../lib/gitlab/popen/runner' + +def emit_warnings(static_analysis) + static_analysis.warned_results.each do |result| + puts + puts "**** #{result.cmd.join(' ')} had the following warnings:" + puts + puts result.stderr + puts + end +end + +def emit_errors(static_analysis) + static_analysis.failed_results.each do |result| + puts + puts "**** #{result.cmd.join(' ')} failed with the following error:" + puts + puts result.stdout + puts result.stderr + puts + end +end tasks = [ %w[bundle exec rake config_lint], @@ -12,23 +35,20 @@ tasks = [ %w[bundle exec rubocop --parallel], %w[bundle exec rake gettext:lint], %w[bundle exec rake lint:static_verification], - %w[scripts/lint-changelog-yaml], %w[scripts/lint-conflicts.sh], %w[scripts/lint-rugged] ] -failed_tasks = tasks.reduce({}) do |failures, task| - start = Time.now - puts - puts "$ #{task.join(' ')}" +static_analysis = Gitlab::Popen::Runner.new - output, status = Gitlab::Popen.popen(task) - puts "==> Finished in #{Time.now - start} seconds" +static_analysis.run(tasks) do |cmd, &run| puts + puts "$ #{cmd.join(' ')}" - failures[task.join(' ')] = output unless status.zero? + result = run.call - failures + puts "==> Finished in #{result.duration} seconds" + puts end puts @@ -36,17 +56,20 @@ puts '===================================================' puts puts -if failed_tasks.empty? +if static_analysis.all_success_and_clean? puts 'All static analyses passed successfully.' +elsif static_analysis.all_success? 
+ puts 'All static analyses passed successfully, but we have warnings:' + puts + + emit_warnings(static_analysis) + + exit 2 else puts 'Some static analyses failed:' - failed_tasks.each do |failed_task, output| - puts - puts "**** #{failed_task} failed with the following error:" - puts - puts output - end + emit_warnings(static_analysis) + emit_errors(static_analysis) exit 1 end diff --git a/spec/controllers/admin/gitaly_servers_controller_spec.rb b/spec/controllers/admin/gitaly_servers_controller_spec.rb new file mode 100644 index 00000000000..b7378aa37d0 --- /dev/null +++ b/spec/controllers/admin/gitaly_servers_controller_spec.rb @@ -0,0 +1,15 @@ +require 'spec_helper' + +describe Admin::GitalyServersController do + describe '#index' do + before do + sign_in(create(:admin)) + end + + it 'shows the gitaly servers page' do + get :index + + expect(response).to have_gitlab_http_status(200) + end + end +end diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb index 67a11e56e94..6a1869d1a48 100644 --- a/spec/controllers/groups/uploads_controller_spec.rb +++ b/spec/controllers/groups/uploads_controller_spec.rb @@ -6,5 +6,7 @@ describe Groups::UploadsController do { group_id: model } end - it_behaves_like 'handle uploads' + it_behaves_like 'handle uploads' do + let(:uploader_class) { NamespaceFileUploader } + end end diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb index a9cfd964dd5..492fed42d31 100644 --- a/spec/controllers/groups_controller_spec.rb +++ b/spec/controllers/groups_controller_spec.rb @@ -85,6 +85,30 @@ describe GroupsController do end end + describe 'GET #activity' do + render_views + + before do + sign_in(user) + project + end + + context 'as json' do + it 'includes all projects in event feed' do + 3.times do + project = create(:project, group: group) + create(:event, project: project) + end + + get :activity, id: group.to_param, format: :json + + expect(response).to have_gitlab_http_status(200) + expect(json_response['count']).to eq(3) + expect(assigns(:projects).limit_value).to be_nil + end + end + end + describe 'POST #create' do context 'when creating subgroups', :nested_groups do [true, false].each do |can_create_group_status| diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb index 12cb7b2647f..25a2e13fe1a 100644 --- a/spec/controllers/projects/artifacts_controller_spec.rb +++ b/spec/controllers/projects/artifacts_controller_spec.rb @@ -145,8 +145,7 @@ describe Projects::ArtifactsController do context 'when using local file storage' do it_behaves_like 'a valid file' do let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } - let(:store) { ObjectStoreUploader::LOCAL_STORE } - let(:archive_path) { JobArtifactUploader.local_store_path } + let(:archive_path) { JobArtifactUploader.root } end end end diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb index 4a2998b4ccd..9656e7f7e74 100644 --- a/spec/controllers/projects/issues_controller_spec.rb +++ b/spec/controllers/projects/issues_controller_spec.rb @@ -102,6 +102,18 @@ describe Projects::IssuesController do expect(response).to redirect_to(namespace_project_issues_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope])) end + + it 'does not use pagination if disabled' do + allow(controller).to 
receive(:pagination_disabled?).and_return(true) + + get :index, + namespace_id: project.namespace.to_param, + project_id: project, + page: (last_page + 1).to_param + + expect(response).to have_gitlab_http_status(200) + expect(assigns(:issues).size).to eq(2) + end end end diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb index e6a4e7c8257..db595430979 100644 --- a/spec/controllers/projects/jobs_controller_spec.rb +++ b/spec/controllers/projects/jobs_controller_spec.rb @@ -137,8 +137,8 @@ describe Projects::JobsController do it 'exposes needed information' do expect(response).to have_gitlab_http_status(:ok) - expect(json_response['raw_path']).to match(/jobs\/\d+\/raw\z/) - expect(json_response.dig('merge_request', 'path')).to match(/merge_requests\/\d+\z/) + expect(json_response['raw_path']).to match(%r{jobs/\d+/raw\z}) + expect(json_response.dig('merge_request', 'path')).to match(%r{merge_requests/\d+\z}) expect(json_response['new_issue_path']) .to include('/issues/new') end diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb index 3a0c3faa7b4..b7df42168e0 100644 --- a/spec/controllers/projects/raw_controller_spec.rb +++ b/spec/controllers/projects/raw_controller_spec.rb @@ -53,7 +53,7 @@ describe Projects::RawController do end it 'serves the file' do - expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment') + expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment') get(:show, namespace_id: public_project.namespace.to_param, project_id: public_project, diff --git a/spec/controllers/projects/todos_controller_spec.rb b/spec/controllers/projects/todos_controller_spec.rb index e2524be7724..1ce7e84bef9 100644 --- a/spec/controllers/projects/todos_controller_spec.rb +++ b/spec/controllers/projects/todos_controller_spec.rb @@ -36,7 +36,7 @@ describe Projects::TodosController do expect(response).to have_gitlab_http_status(200) expect(json_response['count']).to eq 1 - expect(json_response['delete_path']).to match(/\/dashboard\/todos\/\d{1}/) + expect(json_response['delete_path']).to match(%r{/dashboard/todos/\d{1}}) end end @@ -104,7 +104,7 @@ describe Projects::TodosController do expect(response).to have_gitlab_http_status(200) expect(json_response['count']).to eq 1 - expect(json_response['delete_path']).to match(/\/dashboard\/todos\/\d{1}/) + expect(json_response['delete_path']).to match(%r{/dashboard/todos/\d{1}}) end end diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb index b1f601a19e5..376b229ffc9 100644 --- a/spec/controllers/uploads_controller_spec.rb +++ b/spec/controllers/uploads_controller_spec.rb @@ -180,6 +180,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png' + response end end @@ -196,6 +197,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png' + response end end @@ -220,6 +222,7 @@ describe UploadsController do it_behaves_like 'content not 
cached without revalidation' do subject do get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png' + response end end @@ -239,6 +242,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png' + response end end @@ -291,6 +295,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png' + response end end @@ -322,6 +327,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png' + response end end @@ -341,6 +347,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png' + response end end @@ -384,6 +391,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png' + response end end @@ -420,6 +428,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png' + response end end @@ -439,6 +448,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png' + response end end @@ -491,6 +501,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png' + response end end @@ -522,6 +533,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png' + response end end @@ -541,6 +553,7 @@ describe UploadsController do it_behaves_like 'content not cached without revalidation' do subject do get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png' + response end end diff --git a/spec/factories/commits.rb b/spec/factories/commits.rb index 84a8bc56640..d5d819d862a 100644 --- a/spec/factories/commits.rb +++ b/spec/factories/commits.rb @@ -23,7 +23,7 @@ FactoryBot.define do end after(:build) do |commit, evaluator| - allow(commit).to receive(:author).and_return(evaluator.author || build(:author)) + allow(commit).to receive(:author).and_return(evaluator.author || build_stubbed(:author)) end trait :without_author do diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb index 9d7d5e56611..cac56695319 100644 --- a/spec/factories/deployments.rb +++ b/spec/factories/deployments.rb @@ -3,13 +3,14 @@ FactoryBot.define do sha '97de212e80737a608d939f648d959671fb0a0142' ref 'master' tag false - user + user nil project nil deployable factory: :ci_build environment factory: :environment after(:build) do |deployment, evaluator| deployment.project ||= deployment.environment.project + deployment.user ||= deployment.project.creator unless deployment.project.repository_exists? 
allow(deployment.project.repository).to receive(:create_ref) diff --git a/spec/factories/events.rb b/spec/factories/events.rb index ed275243ac9..5798b81ecad 100644 --- a/spec/factories/events.rb +++ b/spec/factories/events.rb @@ -1,7 +1,7 @@ FactoryBot.define do factory :event do project - author factory: :user + author(factory: :user) { project.creator } action Event::JOINED trait(:created) { action Event::CREATED } diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb index 1512f5a0e58..8c531cf5909 100644 --- a/spec/factories/groups.rb +++ b/spec/factories/groups.rb @@ -18,7 +18,7 @@ FactoryBot.define do end trait :with_avatar do - avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) } + avatar { fixture_file_upload('spec/fixtures/dk.png') } end trait :access_requestable do diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb index 71dc169c6a2..998080a3dd5 100644 --- a/spec/factories/issues.rb +++ b/spec/factories/issues.rb @@ -1,8 +1,8 @@ FactoryBot.define do factory :issue do title { generate(:title) } - author project + author { project.creator } trait :confidential do confidential true diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb index 40558c88d15..d26cb0c3417 100644 --- a/spec/factories/merge_requests.rb +++ b/spec/factories/merge_requests.rb @@ -1,9 +1,9 @@ FactoryBot.define do factory :merge_request do title { generate(:title) } - author association :source_project, :repository, factory: :project target_project { source_project } + author { source_project.creator } # $ git log --pretty=oneline feature..master # 5937ac0a7beb003549fc5fd26fc247adbce4a52e Add submodule from gitlab.com diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb index 707ecbd6be5..3f4e408b3a6 100644 --- a/spec/factories/notes.rb +++ b/spec/factories/notes.rb @@ -6,7 +6,7 @@ FactoryBot.define do factory :note do project note { generate(:title) } - author + author { project&.creator || create(:user) } on_issue factory :note_on_commit, traits: [:on_commit] @@ -122,11 +122,11 @@ FactoryBot.define do end trait :with_attachment do - attachment { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") } + attachment { fixture_file_upload(Rails.root.join( "spec/fixtures/dk.png"), "image/png") } end trait :with_svg_attachment do - attachment { fixture_file_upload(Rails.root + "spec/fixtures/unsanitized.svg", "image/svg+xml") } + attachment { fixture_file_upload(Rails.root.join("spec/fixtures/unsanitized.svg"), "image/svg+xml") } end transient do diff --git a/spec/factories/project_wikis.rb b/spec/factories/project_wikis.rb index 89d8248f9f4..db2eb4fc863 100644 --- a/spec/factories/project_wikis.rb +++ b/spec/factories/project_wikis.rb @@ -3,7 +3,7 @@ FactoryBot.define do skip_create project - user factory: :user + user { project.creator } initialize_with { new(project, user) } end end diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb index d0f3911f730..16d328a5bc2 100644 --- a/spec/factories/projects.rb +++ b/spec/factories/projects.rb @@ -90,7 +90,7 @@ FactoryBot.define do end trait :with_avatar do - avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) } + avatar { fixture_file_upload('spec/fixtures/dk.png') } end trait :broken_storage do diff --git a/spec/factories/sent_notifications.rb b/spec/factories/sent_notifications.rb index 80872067233..b0174dd06b7 100644 --- a/spec/factories/sent_notifications.rb +++ b/spec/factories/sent_notifications.rb @@ -1,7 +1,7 @@ FactoryBot.define do factory 
:sent_notification do project - recipient factory: :user + recipient { project.creator } noteable { create(:issue, project: project) } reply_key { SentNotification.reply_key } end diff --git a/spec/factories/snippets.rb b/spec/factories/snippets.rb index 2ab9a56d255..dc12b562108 100644 --- a/spec/factories/snippets.rb +++ b/spec/factories/snippets.rb @@ -21,6 +21,7 @@ FactoryBot.define do factory :project_snippet, parent: :snippet, class: :ProjectSnippet do project + author { project.creator } end factory :personal_snippet, parent: :snippet, class: :PersonalSnippet do diff --git a/spec/factories/subscriptions.rb b/spec/factories/subscriptions.rb index a4bc4e87b0a..8f7ab74ec70 100644 --- a/spec/factories/subscriptions.rb +++ b/spec/factories/subscriptions.rb @@ -1,7 +1,7 @@ FactoryBot.define do factory :subscription do - user project + user { project.creator } subscribable factory: :issue end end diff --git a/spec/factories/timelogs.rb b/spec/factories/timelogs.rb index af34b0681e2..b45f06b9a0a 100644 --- a/spec/factories/timelogs.rb +++ b/spec/factories/timelogs.rb @@ -3,7 +3,7 @@ FactoryBot.define do factory :timelog do time_spent 3600 - user issue + user { issue.project.creator } end end diff --git a/spec/factories/todos.rb b/spec/factories/todos.rb index 6a6de665dd1..94f8caedfa6 100644 --- a/spec/factories/todos.rb +++ b/spec/factories/todos.rb @@ -1,8 +1,8 @@ FactoryBot.define do factory :todo do project - author - user + author { project.creator } + user { project.creator } target factory: :issue action { Todo::ASSIGNED } diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb index c39500faea1..c8cfe251d27 100644 --- a/spec/factories/uploads.rb +++ b/spec/factories/uploads.rb @@ -1,24 +1,42 @@ FactoryBot.define do factory :upload do model { build(:project) } - path { "uploads/-/system/project/avatar/avatar.jpg" } size 100.kilobytes uploader "AvatarUploader" - trait :personal_snippet do + # we should build a mount agnostic upload by default + transient do + mounted_as :avatar + secret SecureRandom.hex + end + + # this needs to comply with RecordsUpload::Concern#upload_path + path { File.join("uploads/-/system", model.class.to_s.underscore, mounted_as.to_s, 'avatar.jpg') } + + trait :personal_snippet_upload do model { build(:personal_snippet) } + path { File.join(secret, 'myfile.jpg') } uploader "PersonalFileUploader" end trait :issuable_upload do - path { "#{SecureRandom.hex}/myfile.jpg" } + path { File.join(secret, 'myfile.jpg') } uploader "FileUploader" end trait :namespace_upload do - path { "#{SecureRandom.hex}/myfile.jpg" } model { build(:group) } + path { File.join(secret, 'myfile.jpg') } uploader "NamespaceFileUploader" end + + trait :attachment_upload do + transient do + mounted_as :attachment + end + + model { build(:note) } + uploader "AttachmentUploader" + end end end diff --git a/spec/factories/users.rb b/spec/factories/users.rb index e62e0b263ca..769fd656e7a 100644 --- a/spec/factories/users.rb +++ b/spec/factories/users.rb @@ -38,7 +38,7 @@ FactoryBot.define do end trait :with_avatar do - avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) } + avatar { fixture_file_upload('spec/fixtures/dk.png') } end trait :two_factor_via_otp do diff --git a/spec/features/atom/users_spec.rb b/spec/features/atom/users_spec.rb index 782f42aab04..2d074c115dd 100644 --- a/spec/features/atom/users_spec.rb +++ b/spec/features/atom/users_spec.rb @@ -64,7 +64,7 @@ describe "User Feed" do end it 'has XHTML summaries in issue descriptions' do - expect(body).to match /<hr ?\/>/ 
+ expect(body).to match %r{<hr ?/>} end it 'has XHTML summaries in notes' do @@ -72,7 +72,7 @@ describe "User Feed" do end it 'has XHTML summaries in merge request descriptions' do - expect(body).to match /Here is the fix: <a[^>]*><img[^>]*\/><\/a>/ + expect(body).to match %r{Here is the fix: <a[^>]*><img[^>]*/></a>} end it 'has push event commit ID' do diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb index a28b8905b65..62a2ec55b00 100644 --- a/spec/features/commits_spec.rb +++ b/spec/features/commits_spec.rb @@ -194,7 +194,7 @@ describe 'Commits' do end it 'includes the committed_date for each commit' do - commits = project.repository.commits(branch_name) + commits = project.repository.commits(branch_name, limit: 40) commits.each do |commit| expect(page).to have_content("authored #{commit.authored_date.strftime("%b %d, %Y")}") diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb index 991d360ccaf..744041ac425 100644 --- a/spec/features/dashboard/merge_requests_spec.rb +++ b/spec/features/dashboard/merge_requests_spec.rb @@ -44,36 +44,38 @@ feature 'Dashboard Merge Requests' do context 'merge requests exist' do let!(:assigned_merge_request) do - create(:merge_request, assignee: current_user, target_project: project, source_project: project) + create(:merge_request, + assignee: current_user, + source_project: project, + author: create(:user)) end let!(:assigned_merge_request_from_fork) do create(:merge_request, source_branch: 'markdown', assignee: current_user, - target_project: public_project, source_project: forked_project - ) + target_project: public_project, source_project: forked_project, + author: create(:user)) end let!(:authored_merge_request) do create(:merge_request, - source_branch: 'markdown', author: current_user, - target_project: project, source_project: project - ) + source_branch: 'markdown', + source_project: project, + author: current_user) end let!(:authored_merge_request_from_fork) do create(:merge_request, source_branch: 'feature_conflict', author: current_user, - target_project: public_project, source_project: forked_project - ) + target_project: public_project, source_project: forked_project) end let!(:other_merge_request) do create(:merge_request, source_branch: 'fix', - target_project: project, source_project: project - ) + source_project: project, + author: create(:user)) end before do diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb index 1dd7547a7fc..31862b2e8f4 100644 --- a/spec/features/expand_collapse_diffs_spec.rb +++ b/spec/features/expand_collapse_diffs_spec.rb @@ -112,13 +112,6 @@ feature 'Expand and collapse diffs', :js do wait_for_requests end - it 'makes a request to get the content' do - ajax_uris = evaluate_script('ajaxUris') - - expect(ajax_uris).not_to be_empty - expect(ajax_uris.first).to include('large_diff.md') - end - it 'shows the diff content' do expect(large_diff).to have_selector('.code') expect(large_diff).not_to have_selector('.nothing-here-block') diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb index cdf7aceb13c..450bc0ff8cf 100644 --- a/spec/features/groups/issues_spec.rb +++ b/spec/features/groups/issues_spec.rb @@ -3,40 +3,61 @@ require 'spec_helper' feature 'Group issues page' do include FilteredSearchHelpers - let(:path) { issues_group_path(group) } - let(:issuable) { create(:issue, project: project, title: "this is my created issuable")} + context 'with shared 
examples' do + let(:path) { issues_group_path(group) } + let(:issuable) { create(:issue, project: project, title: "this is my created issuable")} - include_examples 'project features apply to issuables', Issue + include_examples 'project features apply to issuables', Issue - context 'rss feed' do - let(:access_level) { ProjectFeature::ENABLED } + context 'rss feed' do + let(:access_level) { ProjectFeature::ENABLED } - context 'when signed in' do - let(:user) { user_in_group } + context 'when signed in' do + let(:user) { user_in_group } - it_behaves_like "it has an RSS button with current_user's RSS token" - it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token" - end + it_behaves_like "it has an RSS button with current_user's RSS token" + it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token" + end - context 'when signed out' do - let(:user) { nil } + context 'when signed out' do + let(:user) { nil } - it_behaves_like "it has an RSS button without an RSS token" - it_behaves_like "an autodiscoverable RSS feed without an RSS token" + it_behaves_like "it has an RSS button without an RSS token" + it_behaves_like "an autodiscoverable RSS feed without an RSS token" + end end - end - context 'assignee', :js do - let(:access_level) { ProjectFeature::ENABLED } - let(:user) { user_in_group } - let(:user2) { user_outside_group } - let(:path) { issues_group_path(group) } + context 'assignee', :js do + let(:access_level) { ProjectFeature::ENABLED } + let(:user) { user_in_group } + let(:user2) { user_outside_group } + let(:path) { issues_group_path(group) } + + it 'filters by only group users' do + filtered_search.set('assignee:') - it 'filters by only group users' do - filtered_search.set('assignee:') + expect(find('#js-dropdown-assignee .filter-dropdown')).to have_content(user.name) + expect(find('#js-dropdown-assignee .filter-dropdown')).not_to have_content(user2.name) + end + end + end - expect(find('#js-dropdown-assignee .filter-dropdown')).to have_content(user.name) - expect(find('#js-dropdown-assignee .filter-dropdown')).not_to have_content(user2.name) + context 'issues list', :nested_groups do + let(:group) { create(:group)} + let(:subgroup) { create(:group, parent: group) } + let(:project) { create(:project, :public, group: group)} + let(:subgroup_project) { create(:project, :public, group: subgroup)} + let!(:issue) { create(:issue, project: project, title: 'root group issue') } + let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') } + + it 'returns all group and subgroup issues' do + visit issues_group_path(group) + + page.within('.issuable-list') do + expect(page).to have_selector('li.issue', count: 2) + expect(page).to have_content('root group issue') + expect(page).to have_content('subgroup issue') + end end end end diff --git a/spec/features/merge_request/user_awards_emoji_spec.rb b/spec/features/merge_request/user_awards_emoji_spec.rb index 15a0878fb16..2f24cfbd9e3 100644 --- a/spec/features/merge_request/user_awards_emoji_spec.rb +++ b/spec/features/merge_request/user_awards_emoji_spec.rb @@ -3,7 +3,7 @@ require 'rails_helper' describe 'Merge request > User awards emoji', :js do let(:project) { create(:project, :public, :repository) } let(:user) { project.creator } - let(:merge_request) { create(:merge_request, source_project: project) } + let(:merge_request) { create(:merge_request, source_project: project, author: create(:user)) } describe 'logged in' do before do diff --git 
a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb index 61861d33952..19995559fae 100644 --- a/spec/features/merge_request/user_resolves_conflicts_spec.rb +++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb @@ -100,12 +100,12 @@ describe 'Merge request > User resolves conflicts', :js do end it 'shows a link to the conflict resolution page' do - expect(page).to have_link('conflicts', href: /\/conflicts\Z/) + expect(page).to have_link('conflicts', href: %r{/conflicts\Z}) end context 'in Inline view mode' do before do - click_link('conflicts', href: /\/conflicts\Z/) + click_link('conflicts', href: %r{/conflicts\Z}) end include_examples "conflicts are resolved in Interactive mode" @@ -114,7 +114,7 @@ describe 'Merge request > User resolves conflicts', :js do context 'in Parallel view mode' do before do - click_link('conflicts', href: /\/conflicts\Z/) + click_link('conflicts', href: %r{/conflicts\Z}) click_button 'Side-by-side' end @@ -128,7 +128,7 @@ describe 'Merge request > User resolves conflicts', :js do before do visit project_merge_request_path(project, merge_request) - click_link('conflicts', href: /\/conflicts\Z/) + click_link('conflicts', href: %r{/conflicts\Z}) end it 'conflicts can not be resolved in Interactive mode' do @@ -181,7 +181,7 @@ describe 'Merge request > User resolves conflicts', :js do end it 'does not show a link to the conflict resolution page' do - expect(page).not_to have_link('conflicts', href: /\/conflicts\Z/) + expect(page).not_to have_link('conflicts', href: %r{/conflicts\Z}) end it 'shows an error if the conflicts page is visited directly' do diff --git a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb index fb73ab05f87..dbca279569a 100644 --- a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb +++ b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb @@ -61,7 +61,7 @@ describe 'Merge request > User selects branches for new MR', :js do fill_in "merge_request_title", with: "Orphaned MR test" click_button "Submit merge request" - click_link "Check out branch" + click_button "Check out branch" expect(page).to have_content 'git checkout -b orphaned-branch origin/orphaned-branch' end diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb index 69e4c9f04a1..89d3bd24b89 100644 --- a/spec/features/projects/blobs/edit_spec.rb +++ b/spec/features/projects/blobs/edit_spec.rb @@ -17,12 +17,15 @@ feature 'Editing file blob', :js do sign_in(user) end - def edit_and_commit + def edit_and_commit(commit_changes: true) wait_for_requests find('.js-edit-blob').click find('#editor') execute_script('ace.edit("editor").setValue("class NextFeature\nend\n")') - click_button 'Commit changes' + + if commit_changes + click_button 'Commit changes' + end end context 'from MR diff' do @@ -39,13 +42,26 @@ feature 'Editing file blob', :js do context 'from blob file path' do before do visit project_blob_path(project, tree_join(branch, file_path)) - edit_and_commit end it 'updates content' do + edit_and_commit + expect(page).to have_content 'successfully committed' expect(page).to have_content 'NextFeature' end + + it 'previews content' do + edit_and_commit(commit_changes: false) + click_link 'Preview changes' + wait_for_requests + + old_line_count = page.all('.line_holder.old').size + new_line_count = page.all('.line_holder.new').size + + 
expect(old_line_count).to be > 0 + expect(new_line_count).to be > 0 + end end end diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb index 8953b30bebf..94bde723e2f 100644 --- a/spec/features/projects/clusters/gcp_spec.rb +++ b/spec/features/projects/clusters/gcp_spec.rb @@ -95,7 +95,7 @@ feature 'Gcp Cluster', :js do context 'when user disables the cluster' do before do - page.find(:css, '.js-toggle-cluster').click + page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click page.within('#cluster-integration') { click_button 'Save changes' } end diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb index a519b9f9c7e..b9ab434c259 100644 --- a/spec/features/projects/clusters/user_spec.rb +++ b/spec/features/projects/clusters/user_spec.rb @@ -62,7 +62,7 @@ feature 'User Cluster', :js do context 'when user disables the cluster' do before do - page.find(:css, '.js-toggle-cluster').click + page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click fill_in 'cluster_name', with: 'dev-cluster' page.within('#cluster-integration') { click_button 'Save changes' } end diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb index eae2910a8f6..497a50bebe4 100644 --- a/spec/features/projects/clusters_spec.rb +++ b/spec/features/projects/clusters_spec.rb @@ -37,13 +37,13 @@ feature 'Clusters', :js do context 'inline update of cluster' do it 'user can update cluster' do - expect(page).to have_selector('.js-toggle-cluster-list') + expect(page).to have_selector('.js-project-feature-toggle') end context 'with sucessfull request' do it 'user sees updated cluster' do expect do - page.find('.js-toggle-cluster-list').click + page.find('.js-project-feature-toggle').click wait_for_requests end.to change { cluster.reload.enabled } @@ -57,7 +57,7 @@ feature 'Clusters', :js do expect_any_instance_of(Clusters::UpdateService).to receive(:execute).and_call_original allow_any_instance_of(Clusters::Cluster).to receive(:valid?) 
{ false } - page.find('.js-toggle-cluster-list').click + page.find('.js-project-feature-toggle').click expect(page).to have_content('Something went wrong on our end.') expect(page).to have_selector('.is-checked') diff --git a/spec/features/projects/members/share_with_group_spec.rb b/spec/features/projects/members/share_with_group_spec.rb index 3198798306c..4cf48098401 100644 --- a/spec/features/projects/members/share_with_group_spec.rb +++ b/spec/features/projects/members/share_with_group_spec.rb @@ -122,7 +122,7 @@ feature 'Project > Members > Share with Group', :js do select2 group.id, from: '#link_group_id' fill_in 'expires_at_groups', with: (Time.now + 4.5.days).strftime('%Y-%m-%d') - page.find('body').click + click_on 'share-with-group-tab' find('.btn-create').click end diff --git a/spec/finders/group_projects_finder_spec.rb b/spec/finders/group_projects_finder_spec.rb index 27a09d7c6f5..be80ee7d767 100644 --- a/spec/finders/group_projects_finder_spec.rb +++ b/spec/finders/group_projects_finder_spec.rb @@ -2,6 +2,7 @@ require 'spec_helper' describe GroupProjectsFinder do let(:group) { create(:group) } + let(:subgroup) { create(:group, parent: group) } let(:current_user) { create(:user) } let(:options) { {} } @@ -12,6 +13,8 @@ describe GroupProjectsFinder do let!(:shared_project_1) { create(:project, :public, path: '3') } let!(:shared_project_2) { create(:project, :private, path: '4') } let!(:shared_project_3) { create(:project, :internal, path: '5') } + let!(:subgroup_project) { create(:project, :public, path: '6', group: subgroup) } + let!(:subgroup_private_project) { create(:project, :private, path: '7', group: subgroup) } before do shared_project_1.project_group_links.create(group_access: Gitlab::Access::MASTER, group: group) @@ -35,11 +38,31 @@ describe GroupProjectsFinder do context "only owned" do let(:options) { { only_owned: true } } - it { is_expected.to match_array([private_project, public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([private_project, public_project, subgroup_project, subgroup_private_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to match_array([private_project, public_project]) } + end end context "all" do - it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, private_project, public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, private_project, public_project, subgroup_project, subgroup_private_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, private_project, public_project]) } + end end end @@ -71,9 +94,20 @@ describe GroupProjectsFinder do context "without external user" do before do private_project.add_master(current_user) + subgroup_private_project.add_master(current_user) end - it { is_expected.to match_array([private_project, public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([private_project, public_project, subgroup_project, subgroup_private_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to match_array([private_project, public_project]) } + end end context 
"with external user" do @@ -81,12 +115,32 @@ describe GroupProjectsFinder do current_user.update_attributes(external: true) end - it { is_expected.to eq([public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([public_project, subgroup_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to eq([public_project]) } + end end end context "all" do - it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, public_project, subgroup_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to match_array([shared_project_3, shared_project_2, shared_project_1, public_project]) } + end end end @@ -100,7 +154,17 @@ describe GroupProjectsFinder do context "only owned" do let(:options) { { only_owned: true } } - it { is_expected.to eq([public_project]) } + context 'with subgroups projects', :nested_groups do + before do + options[:include_subgroups] = true + end + + it { is_expected.to match_array([public_project, subgroup_project]) } + end + + context 'without subgroups projects' do + it { is_expected.to eq([public_project]) } + end end end end diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb index 47fd98234f9..abb7631d7d7 100644 --- a/spec/finders/issues_finder_spec.rb +++ b/spec/finders/issues_finder_spec.rb @@ -3,13 +3,17 @@ require 'spec_helper' describe IssuesFinder do set(:user) { create(:user) } set(:user2) { create(:user) } - set(:project1) { create(:project) } + set(:group) { create(:group) } + set(:subgroup) { create(:group, parent: group) } + set(:project1) { create(:project, group: group) } set(:project2) { create(:project) } + set(:project3) { create(:project, group: subgroup) } set(:milestone) { create(:milestone, project: project1) } set(:label) { create(:label, project: project2) } set(:issue1) { create(:issue, author: user, assignees: [user], project: project1, milestone: milestone, title: 'gitlab', created_at: 1.week.ago) } set(:issue2) { create(:issue, author: user, assignees: [user], project: project2, description: 'gitlab') } set(:issue3) { create(:issue, author: user2, assignees: [user2], project: project2, title: 'tanuki', description: 'tanuki', created_at: 1.week.from_now) } + set(:issue4) { create(:issue, project: project3) } set(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: issue1) } set(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: issue2) } set(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: issue3) } @@ -25,10 +29,12 @@ describe IssuesFinder do project1.add_master(user) project2.add_developer(user) project2.add_developer(user2) + project3.add_developer(user) issue1 issue2 issue3 + issue4 award_emoji1 award_emoji2 @@ -39,7 +45,7 @@ describe IssuesFinder do let(:scope) { 'all' } it 'returns all issues' do - expect(issues).to contain_exactly(issue1, issue2, issue3) + expect(issues).to contain_exactly(issue1, issue2, issue3, issue4) end context 'filtering by assignee ID' do @@ -50,6 +56,26 @@ describe IssuesFinder do end end + context 'filtering by group_id' do + let(:params) { { group_id: group.id } } + + context 'when include_subgroup 
param not set' do + it 'returns all group issues' do + expect(issues).to contain_exactly(issue1) + end + end + + context 'when include_subgroup param is true', :nested_groups do + before do + params[:include_subgroups] = true + end + + it 'returns all group and subgroup issues' do + expect(issues).to contain_exactly(issue1, issue4) + end + end + end + context 'filtering by author ID' do let(:params) { { author_id: user2.id } } @@ -87,7 +113,7 @@ describe IssuesFinder do let(:params) { { milestone_title: Milestone::None.title } } it 'returns issues with no milestone' do - expect(issues).to contain_exactly(issue2, issue3) + expect(issues).to contain_exactly(issue2, issue3, issue4) end end @@ -185,7 +211,7 @@ describe IssuesFinder do let(:params) { { label_name: Label::None.title } } it 'returns issues with no labels' do - expect(issues).to contain_exactly(issue1, issue3) + expect(issues).to contain_exactly(issue1, issue3, issue4) end end @@ -210,7 +236,7 @@ describe IssuesFinder do let(:params) { { state: 'opened' } } it 'returns only opened issues' do - expect(issues).to contain_exactly(issue1, issue2, issue3) + expect(issues).to contain_exactly(issue1, issue2, issue3, issue4) end end @@ -226,7 +252,7 @@ describe IssuesFinder do let(:params) { { state: 'all' } } it 'returns all issues' do - expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue) + expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4) end end @@ -234,7 +260,7 @@ describe IssuesFinder do let(:params) { { state: 'invalid_state' } } it 'returns all issues' do - expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue) + expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4) end end end @@ -338,7 +364,7 @@ describe IssuesFinder do end it "doesn't return issues if feature disabled" do - [project1, project2].each do |project| + [project1, project2, project3].each do |project| project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED) end @@ -351,7 +377,7 @@ describe IssuesFinder do it 'returns the number of rows for the default state' do finder = described_class.new(user) - expect(finder.row_count).to eq(3) + expect(finder.row_count).to eq(4) end it 'returns the number of rows for a given state' do diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb index 687ffaec7cc..9385c892c9e 100644 --- a/spec/finders/merge_requests_finder_spec.rb +++ b/spec/finders/merge_requests_finder_spec.rb @@ -6,31 +6,36 @@ describe MergeRequestsFinder do let(:user) { create :user } let(:user2) { create :user } - let(:project1) { create(:project, :public) } + let(:group) { create(:group) } + let(:subgroup) { create(:group, parent: group) } + let(:project1) { create(:project, :public, group: group) } let(:project2) { fork_project(project1, user) } let(:project3) do p = fork_project(project1, user) p.update!(archived: true) p end + let(:project4) { create(:project, :public, group: subgroup) } let!(:merge_request1) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1) } let!(:merge_request2) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1, state: 'closed') } let!(:merge_request3) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project2) } let!(:merge_request4) { create(:merge_request, :simple, author: user, source_project: project3, target_project: project3) } + 
let!(:merge_request5) { create(:merge_request, :simple, author: user, source_project: project4, target_project: project4) } before do project1.add_master(user) project2.add_developer(user) project3.add_developer(user) project2.add_developer(user2) + project4.add_developer(user) end describe "#execute" do it 'filters by scope' do params = { scope: 'authored', state: 'opened' } merge_requests = described_class.new(user, params).execute - expect(merge_requests.size).to eq(3) + expect(merge_requests.size).to eq(4) end it 'filters by project' do @@ -39,10 +44,26 @@ describe MergeRequestsFinder do expect(merge_requests.size).to eq(1) end + it 'filters by group' do + params = { group_id: group.id } + + merge_requests = described_class.new(user, params).execute + + expect(merge_requests.size).to eq(2) + end + + it 'filters by group including subgroups', :nested_groups do + params = { group_id: group.id, include_subgroups: true } + + merge_requests = described_class.new(user, params).execute + + expect(merge_requests.size).to eq(3) + end + it 'filters by non_archived' do params = { non_archived: true } merge_requests = described_class.new(user, params).execute - expect(merge_requests.size).to eq(3) + expect(merge_requests.size).to eq(4) end it 'filters by iid' do @@ -73,14 +94,14 @@ describe MergeRequestsFinder do end context 'with created_after and created_before params' do - let(:project4) { create(:project, forked_from_project: project1) } + let(:new_project) { create(:project, forked_from_project: project1) } let!(:new_merge_request) do create(:merge_request, :simple, author: user, created_at: 1.week.from_now, - source_project: project4, + source_project: new_project, target_project: project1) end @@ -89,12 +110,12 @@ describe MergeRequestsFinder do :simple, author: user, created_at: 1.week.ago, - source_project: project4, - target_project: project4) + source_project: new_project, + target_project: new_project) end before do - project4.add_master(user) + new_project.add_master(user) end it 'filters by created_after' do @@ -106,7 +127,7 @@ describe MergeRequestsFinder do end it 'filters by created_before' do - params = { project_id: project4.id, created_before: old_merge_request.created_at + 1.second } + params = { project_id: new_project.id, created_before: old_merge_request.created_at + 1.second } merge_requests = described_class.new(user, params).execute @@ -119,7 +140,7 @@ describe MergeRequestsFinder do it 'returns the number of rows for the default state' do finder = described_class.new(user) - expect(finder.row_count).to eq(3) + expect(finder.row_count).to eq(4) end it 'returns the number of rows for a given state' do diff --git a/spec/fixtures/api/schemas/public_api/v4/user/basic.json b/spec/fixtures/api/schemas/public_api/v4/user/basic.json index bf330d8278c..2d815be32a6 100644 --- a/spec/fixtures/api/schemas/public_api/v4/user/basic.json +++ b/spec/fixtures/api/schemas/public_api/v4/user/basic.json @@ -2,12 +2,16 @@ "type": ["object", "null"], "required": [ "id", + "name", + "username", "state", "avatar_url", "web_url" ], "properties": { "id": { "type": "integer" }, + "name": { "type": "string" }, + "username": { "type": "string" }, "state": { "type": "string" }, "avatar_url": { "type": "string" }, "web_url": { "type": "string" } diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb index da0343588ef..f7a4a7afced 100644 --- a/spec/helpers/application_helper_spec.rb +++ b/spec/helpers/application_helper_spec.rb @@ -100,7 +100,7 @@ describe 
ApplicationHelper do end it 'returns a generic avatar' do - expect(helper.gravatar_icon(user_email)).to match('no_avatar.png') + expect(helper.gravatar_icon(user_email)).to match_asset_path('no_avatar.png') end end @@ -110,7 +110,7 @@ describe ApplicationHelper do end it 'returns a generic avatar when email is blank' do - expect(helper.gravatar_icon('')).to match('no_avatar.png') + expect(helper.gravatar_icon('')).to match_asset_path('no_avatar.png') end it 'returns a valid Gravatar URL' do diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb index 32432ee1e81..5f608fe18d9 100644 --- a/spec/helpers/groups_helper_spec.rb +++ b/spec/helpers/groups_helper_spec.rb @@ -105,7 +105,7 @@ describe GroupsHelper do it 'outputs the groups in the correct order' do expect(helper.group_title(very_deep_nested_group)) - .to match(/<li style="text-indent: 16px;"><a.*>#{deep_nested_group.name}.*<\/li>.*<a.*>#{very_deep_nested_group.name}<\/a>/m) + .to match(%r{<li style="text-indent: 16px;"><a.*>#{deep_nested_group.name}.*</li>.*<a.*>#{very_deep_nested_group.name}</a>}m) end end @@ -120,7 +120,7 @@ describe GroupsHelper do let(:possible_help_texts) do { default_help: "This setting will be applied to all subgroups unless overridden by a group owner", - ancestor_locked_but_you_can_override: /This setting is applied on <a .+>.+<\/a>\. You can override the setting or .+/, + ancestor_locked_but_you_can_override: %r{This setting is applied on <a .+>.+</a>\. You can override the setting or .+}, ancestor_locked_so_ask_the_owner: /This setting is applied on .+\. To share projects in this group with another group, ask the owner to override the setting or remove the share with group lock from .+/, ancestor_locked_and_has_been_overridden: /This setting is applied on .+ and has been overridden on this subgroup/ } diff --git a/spec/helpers/labels_helper_spec.rb b/spec/helpers/labels_helper_spec.rb index 0286d36952c..619baa78bfa 100644 --- a/spec/helpers/labels_helper_spec.rb +++ b/spec/helpers/labels_helper_spec.rb @@ -104,7 +104,7 @@ describe LabelsHelper do context 'with a tooltip argument' do context 'set to false' do it 'does not include the has-tooltip class' do - expect(link_to_label(label, tooltip: false)).not_to match %r{has-tooltip} + expect(link_to_label(label, tooltip: false)).not_to match /has-tooltip/ end end end diff --git a/spec/helpers/version_check_helper_spec.rb b/spec/helpers/version_check_helper_spec.rb index fa8cfda3b86..b5b15726816 100644 --- a/spec/helpers/version_check_helper_spec.rb +++ b/spec/helpers/version_check_helper_spec.rb @@ -27,7 +27,7 @@ describe VersionCheckHelper do end it 'should have a VersionCheck url as the src' do - expect(@image_tag).to match(/src="https:\/\/version\.host\.com\/check\.svg\?gitlab_info=xxx"/) + expect(@image_tag).to match(%r{src="https://version\.host\.com/check\.svg\?gitlab_info=xxx"}) end end end diff --git a/spec/initializers/grape_route_helpers_fix_spec.rb b/spec/initializers/grape_route_helpers_fix_spec.rb new file mode 100644 index 00000000000..2cf5924128f --- /dev/null +++ b/spec/initializers/grape_route_helpers_fix_spec.rb @@ -0,0 +1,14 @@ +require 'spec_helper' +require_relative '../../config/initializers/grape_route_helpers_fix' + +describe 'route shadowing' do + include GrapeRouteHelpers::NamedRouteMatcher + + it 'does not occur' do + path = api_v4_projects_merge_requests_path(id: 1) + expect(path).to eq('/api/v4/projects/1/merge_requests') + + path = api_v4_projects_merge_requests_path(id: 1, merge_request_iid: 3) + 
expect(path).to eq('/api/v4/projects/1/merge_requests/3') + end +end diff --git a/spec/javascripts/api_spec.js b/spec/javascripts/api_spec.js index cc5fa42aafe..cf3a76d0d2e 100644 --- a/spec/javascripts/api_spec.js +++ b/spec/javascripts/api_spec.js @@ -1,3 +1,5 @@ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import Api from '~/api'; describe('Api', () => { @@ -7,20 +9,17 @@ describe('Api', () => { api_version: dummyApiVersion, relative_url_root: dummyUrlRoot, }; - const dummyResponse = 'hello from outer space!'; - const sendDummyResponse = () => { - const deferred = $.Deferred(); - deferred.resolve(dummyResponse); - return deferred.promise(); - }; let originalGon; + let mock; beforeEach(() => { + mock = new MockAdapter(axios); originalGon = window.gon; window.gon = Object.assign({}, dummyGon); }); afterEach(() => { + mock.restore(); window.gon = originalGon; }); @@ -38,15 +37,13 @@ describe('Api', () => { describe('group', () => { it('fetches a group', (done) => { const groupId = '123456'; - const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}.json`; - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - expect(request.dataType).toEqual('json'); - return sendDummyResponse(); + const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}`; + mock.onGet(expectedUrl).reply(200, { + name: 'test', }); Api.group(groupId, (response) => { - expect(response).toBe(dummyResponse); + expect(response.name).toBe('test'); done(); }); }); @@ -57,19 +54,13 @@ describe('Api', () => { const query = 'dummy query'; const options = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups.json`; - const expectedData = Object.assign({ - search: query, - per_page: 20, - }, options); - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - expect(request.dataType).toEqual('json'); - expect(request.data).toEqual(expectedData); - return sendDummyResponse(); - }); + mock.onGet(expectedUrl).reply(200, [{ + name: 'test', + }]); Api.groups(query, options, (response) => { - expect(response).toBe(dummyResponse); + expect(response.length).toBe(1); + expect(response[0].name).toBe('test'); done(); }); }); @@ -79,19 +70,13 @@ describe('Api', () => { it('fetches namespaces', (done) => { const query = 'dummy query'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/namespaces.json`; - const expectedData = { - search: query, - per_page: 20, - }; - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - expect(request.dataType).toEqual('json'); - expect(request.data).toEqual(expectedData); - return sendDummyResponse(); - }); + mock.onGet(expectedUrl).reply(200, [{ + name: 'test', + }]); Api.namespaces(query, (response) => { - expect(response).toBe(dummyResponse); + expect(response.length).toBe(1); + expect(response[0].name).toBe('test'); done(); }); }); @@ -103,21 +88,13 @@ describe('Api', () => { const options = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`; window.gon.current_user_id = 1; - const expectedData = Object.assign({ - search: query, - per_page: 20, - membership: true, - simple: true, - }, options); - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - expect(request.dataType).toEqual('json'); - expect(request.data).toEqual(expectedData); - return sendDummyResponse(); - }); 
+ mock.onGet(expectedUrl).reply(200, [{ + name: 'test', + }]); Api.projects(query, options, (response) => { - expect(response).toBe(dummyResponse); + expect(response.length).toBe(1); + expect(response[0].name).toBe('test'); done(); }); }); @@ -126,20 +103,13 @@ describe('Api', () => { const query = 'dummy query'; const options = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`; - const expectedData = Object.assign({ - search: query, - per_page: 20, - simple: true, - }, options); - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - expect(request.dataType).toEqual('json'); - expect(request.data).toEqual(expectedData); - return sendDummyResponse(); - }); + mock.onGet(expectedUrl).reply(200, [{ + name: 'test', + }]); Api.projects(query, options, (response) => { - expect(response).toBe(dummyResponse); + expect(response.length).toBe(1); + expect(response[0].name).toBe('test'); done(); }); }); @@ -154,16 +124,16 @@ describe('Api', () => { const expectedData = { label: labelData, }; - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - expect(request.dataType).toEqual('json'); - expect(request.type).toEqual('POST'); - expect(request.data).toEqual(expectedData); - return sendDummyResponse(); + mock.onPost(expectedUrl).reply((config) => { + expect(config.data).toBe(JSON.stringify(expectedData)); + + return [200, { + name: 'test', + }]; }); Api.newLabel(namespace, project, labelData, (response) => { - expect(response).toBe(dummyResponse); + expect(response.name).toBe('test'); done(); }); }); @@ -174,19 +144,13 @@ describe('Api', () => { const groupId = '123456'; const query = 'dummy query'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`; - const expectedData = { - search: query, - per_page: 20, - }; - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - expect(request.dataType).toEqual('json'); - expect(request.data).toEqual(expectedData); - return sendDummyResponse(); - }); + mock.onGet(expectedUrl).reply(200, [{ + name: 'test', + }]); Api.groupProjects(groupId, query, (response) => { - expect(response).toBe(dummyResponse); + expect(response.length).toBe(1); + expect(response[0].name).toBe('test'); done(); }); }); @@ -197,14 +161,10 @@ describe('Api', () => { const licenseKey = "driver's license"; const data = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/licenses/${licenseKey}`; - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - expect(request.data).toEqual(data); - return sendDummyResponse(); - }); + mock.onGet(expectedUrl).reply(200, 'test'); Api.licenseText(licenseKey, data, (response) => { - expect(response).toBe(dummyResponse); + expect(response).toBe('test'); done(); }); }); @@ -214,13 +174,10 @@ describe('Api', () => { it('fetches a gitignore text', (done) => { const gitignoreKey = 'ignore git'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitignores/${gitignoreKey}`; - spyOn(jQuery, 'get').and.callFake((url, callback) => { - expect(url).toEqual(expectedUrl); - callback(dummyResponse); - }); + mock.onGet(expectedUrl).reply(200, 'test'); Api.gitignoreText(gitignoreKey, (response) => { - expect(response).toBe(dummyResponse); + expect(response).toBe('test'); done(); }); }); @@ -230,13 +187,10 @@ describe('Api', () => { it('fetches a .gitlab-ci.yml', (done) 
=> { const gitlabCiYmlKey = 'Y CI ML'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitlab_ci_ymls/${gitlabCiYmlKey}`; - spyOn(jQuery, 'get').and.callFake((url, callback) => { - expect(url).toEqual(expectedUrl); - callback(dummyResponse); - }); + mock.onGet(expectedUrl).reply(200, 'test'); Api.gitlabCiYml(gitlabCiYmlKey, (response) => { - expect(response).toBe(dummyResponse); + expect(response).toBe('test'); done(); }); }); @@ -246,13 +200,10 @@ describe('Api', () => { it('fetches a Dockerfile', (done) => { const dockerfileYmlKey = 'a giant whale'; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/dockerfiles/${dockerfileYmlKey}`; - spyOn(jQuery, 'get').and.callFake((url, callback) => { - expect(url).toEqual(expectedUrl); - callback(dummyResponse); - }); + mock.onGet(expectedUrl).reply(200, 'test'); Api.dockerfileYml(dockerfileYmlKey, (response) => { - expect(response).toBe(dummyResponse); + expect(response).toBe('test'); done(); }); }); @@ -265,14 +216,10 @@ describe('Api', () => { const templateKey = ' template #%?.key '; const templateType = 'template type'; const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/templates/${templateType}/${encodeURIComponent(templateKey)}`; - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - return sendDummyResponse(); - }); + mock.onGet(expectedUrl).reply(200, 'test'); Api.issueTemplate(namespace, project, templateKey, templateType, (error, response) => { - expect(error).toBe(null); - expect(response).toBe(dummyResponse); + expect(response).toBe('test'); done(); }); }); @@ -283,20 +230,14 @@ describe('Api', () => { const query = 'dummy query'; const options = { unused: 'option' }; const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users.json`; - const expectedData = Object.assign({ - search: query, - per_page: 20, - }, options); - spyOn(jQuery, 'ajax').and.callFake((request) => { - expect(request.url).toEqual(expectedUrl); - expect(request.dataType).toEqual('json'); - expect(request.data).toEqual(expectedData); - return sendDummyResponse(); - }); + mock.onGet(expectedUrl).reply(200, [{ + name: 'test', + }]); Api.users(query, options) - .then((response) => { - expect(response).toBe(dummyResponse); + .then(({ data }) => { + expect(data.length).toBe(1); + expect(data[0].name).toBe('test'); }) .then(done) .catch(done.fail); diff --git a/spec/javascripts/blob/viewer/index_spec.js b/spec/javascripts/blob/viewer/index_spec.js index cfa6650d85f..892411a6a40 100644 --- a/spec/javascripts/blob/viewer/index_spec.js +++ b/spec/javascripts/blob/viewer/index_spec.js @@ -1,28 +1,35 @@ /* eslint-disable no-new */ +import MockAdapter from 'axios-mock-adapter'; import BlobViewer from '~/blob/viewer/index'; +import axios from '~/lib/utils/axios_utils'; describe('Blob viewer', () => { let blob; + let mock; + preloadFixtures('snippets/show.html.raw'); beforeEach(() => { + mock = new MockAdapter(axios); + loadFixtures('snippets/show.html.raw'); $('#modal-upload-blob').remove(); blob = new BlobViewer(); - spyOn($, 'ajax').and.callFake(() => { - const d = $.Deferred(); - - d.resolve({ - html: '<div>testing</div>', - }); + mock.onGet('http://test.host/snippets/1.json?viewer=rich').reply(200, { + html: '<div>testing</div>', + }); - return d.promise(); + mock.onGet('http://test.host/snippets/1.json?viewer=simple').reply(200, { + html: '<div>testing</div>', }); + + spyOn(axios, 'get').and.callThrough(); }); afterEach(() => { + mock.restore(); location.hash = ''; }); @@ -30,7 
+37,6 @@ describe('Blob viewer', () => { document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); setTimeout(() => { - expect($.ajax).toHaveBeenCalled(); expect( document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]') .classList.contains('hidden'), @@ -46,7 +52,6 @@ describe('Blob viewer', () => { new BlobViewer(); setTimeout(() => { - expect($.ajax).toHaveBeenCalled(); expect( document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]') .classList.contains('hidden'), @@ -64,12 +69,8 @@ describe('Blob viewer', () => { }); asyncClick() + .then(() => asyncClick()) .then(() => { - expect($.ajax).toHaveBeenCalled(); - return asyncClick(); - }) - .then(() => { - expect($.ajax.calls.count()).toBe(1); expect( document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'), ).toBe('true'); @@ -122,7 +123,6 @@ describe('Blob viewer', () => { document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); setTimeout(() => { - expect($.ajax).toHaveBeenCalled(); expect( copyButton.classList.contains('disabled'), ).toBeFalsy(); @@ -135,8 +135,6 @@ describe('Blob viewer', () => { document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click(); setTimeout(() => { - expect($.ajax).toHaveBeenCalled(); - expect( copyButton.getAttribute('data-original-title'), ).toBe('Copy source to clipboard'); @@ -171,14 +169,14 @@ describe('Blob viewer', () => { it('sends AJAX request when switching to simple view', () => { blob.switchToViewer('simple'); - expect($.ajax).toHaveBeenCalled(); + expect(axios.get).toHaveBeenCalled(); }); it('does not send AJAX request when switching to rich view', () => { blob.switchToViewer('simple'); blob.switchToViewer('rich'); - expect($.ajax.calls.count()).toBe(1); + expect(axios.get.calls.count()).toBe(1); }); }); }); diff --git a/spec/javascripts/clusters/clusters_bundle_spec.js b/spec/javascripts/clusters/clusters_bundle_spec.js index f5be9ea0fb2..7b38f6b7855 100644 --- a/spec/javascripts/clusters/clusters_bundle_spec.js +++ b/spec/javascripts/clusters/clusters_bundle_spec.js @@ -23,16 +23,24 @@ describe('Clusters', () => { }); describe('toggle', () => { - it('should update the button and the input field on click', () => { - cluster.toggleButton.click(); + it('should update the button and the input field on click', (done) => { + const toggleButton = document.querySelector('.js-cluster-enable-toggle-area .js-project-feature-toggle'); + const toggleInput = document.querySelector('.js-cluster-enable-toggle-area .js-project-feature-toggle-input'); - expect( - cluster.toggleButton.classList, - ).not.toContain('is-checked'); + toggleButton.click(); - expect( - cluster.toggleInput.getAttribute('value'), - ).toEqual('false'); + getSetTimeoutPromise() + .then(() => { + expect( + toggleButton.classList, + ).not.toContain('is-checked'); + + expect( + toggleInput.getAttribute('value'), + ).toEqual('false'); + }) + .then(done) + .catch(done.fail); }); }); diff --git a/spec/javascripts/clusters/clusters_index_spec.js b/spec/javascripts/clusters/clusters_index_spec.js deleted file mode 100644 index 0a8b63ed5b4..00000000000 --- a/spec/javascripts/clusters/clusters_index_spec.js +++ /dev/null @@ -1,58 +0,0 @@ -import MockAdapter from 'axios-mock-adapter'; -import axios from '~/lib/utils/axios_utils'; -import setClusterTableToggles from '~/clusters/clusters_index'; -import { setTimeout } from 'core-js/library/web/timers'; - -describe('Clusters table', () => { - 
preloadFixtures('clusters/index_cluster.html.raw'); - let mock; - - beforeEach(() => { - loadFixtures('clusters/index_cluster.html.raw'); - mock = new MockAdapter(axios); - setClusterTableToggles(); - }); - - describe('update cluster', () => { - it('renders loading state while request is made', () => { - const button = document.querySelector('.js-toggle-cluster-list'); - - button.click(); - - expect(button.classList).toContain('is-loading'); - expect(button.getAttribute('disabled')).toEqual('true'); - }); - - afterEach(() => { - mock.restore(); - }); - - it('shows updated state after sucessfull request', (done) => { - mock.onPut().reply(200, {}, {}); - const button = document.querySelector('.js-toggle-cluster-list'); - button.click(); - - expect(button.classList).toContain('is-loading'); - - setTimeout(() => { - expect(button.classList).not.toContain('is-loading'); - expect(button.classList).not.toContain('is-checked'); - done(); - }, 0); - }); - - it('shows inital state after failed request', (done) => { - mock.onPut().reply(500, {}, {}); - const button = document.querySelector('.js-toggle-cluster-list'); - - button.click(); - expect(button.classList).toContain('is-loading'); - - setTimeout(() => { - expect(button.classList).not.toContain('is-loading'); - expect(button.classList).toContain('is-checked'); - done(); - }, 0); - }); - }); -}); diff --git a/spec/javascripts/commit/commit_pipeline_status_component_spec.js b/spec/javascripts/commit/commit_pipeline_status_component_spec.js new file mode 100644 index 00000000000..90f290e845e --- /dev/null +++ b/spec/javascripts/commit/commit_pipeline_status_component_spec.js @@ -0,0 +1,104 @@ +import Vue from 'vue'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; +import commitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue'; +import mountComponent from '../helpers/vue_mount_component_helper'; + +describe('Commit pipeline status component', () => { + let vm; + let Component; + let mock; + const mockCiStatus = { + details_path: '/root/hello-world/pipelines/1', + favicon: 'canceled.ico', + group: 'canceled', + has_details: true, + icon: 'status_canceled', + label: 'canceled', + text: 'canceled', + }; + + beforeEach(() => { + Component = Vue.extend(commitPipelineStatus); + }); + + describe('While polling pipeline data succesfully', () => { + beforeEach(() => { + mock = new MockAdapter(axios); + mock.onGet('/dummy/endpoint').reply(() => { + const res = Promise.resolve([200, { + pipelines: [ + { + details: { + status: mockCiStatus, + }, + }, + ], + }]); + return res; + }); + vm = mountComponent(Component, { + endpoint: '/dummy/endpoint', + }); + }); + + afterEach(() => { + vm.poll.stop(); + vm.$destroy(); + mock.restore(); + }); + + it('shows the loading icon when polling is starting', (done) => { + expect(vm.$el.querySelector('.loading-container')).not.toBe(null); + setTimeout(() => { + expect(vm.$el.querySelector('.loading-container')).toBe(null); + done(); + }); + }); + + it('contains a ciStatus when the polling is succesful ', (done) => { + setTimeout(() => { + expect(vm.ciStatus).toEqual(mockCiStatus); + done(); + }); + }); + + it('contains a ci-status icon when polling is succesful', (done) => { + setTimeout(() => { + expect(vm.$el.querySelector('.ci-status-icon')).not.toBe(null); + expect(vm.$el.querySelector('.ci-status-icon').classList).toContain(`ci-status-icon-${mockCiStatus.group}`); + done(); + }); + }); + }); + + describe('When polling data was not 
succesful', () => { + beforeEach(() => { + mock = new MockAdapter(axios); + mock.onGet('/dummy/endpoint').reply(() => { + const res = Promise.reject([502, { }]); + return res; + }); + vm = new Component({ + props: { + endpoint: '/dummy/endpoint', + }, + }); + }); + + afterEach(() => { + vm.poll.stop(); + vm.$destroy(); + mock.restore(); + }); + + it('calls an errorCallback', (done) => { + spyOn(vm, 'errorCallback').and.callThrough(); + vm.$mount(); + setTimeout(() => { + expect(vm.errorCallback.calls.count()).toEqual(1); + done(); + }); + }); + }); +}); diff --git a/spec/javascripts/commits_spec.js b/spec/javascripts/commits_spec.js index d0176520440..44ec9e4eabf 100644 --- a/spec/javascripts/commits_spec.js +++ b/spec/javascripts/commits_spec.js @@ -1,4 +1,6 @@ import 'vendor/jquery.endless-scroll'; +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import CommitsList from '~/commits'; describe('Commits List', () => { @@ -43,30 +45,47 @@ describe('Commits List', () => { describe('on entering input', () => { let ajaxSpy; + let mock; beforeEach(() => { CommitsList.init(25); CommitsList.searchField.val(''); spyOn(history, 'replaceState').and.stub(); - ajaxSpy = spyOn(jQuery, 'ajax').and.callFake((req) => { - req.success({ - data: '<li>Result</li>', - }); + mock = new MockAdapter(axios); + + mock.onGet('/h5bp/html5-boilerplate/commits/master').reply(200, { + html: '<li>Result</li>', }); + + ajaxSpy = spyOn(axios, 'get').and.callThrough(); + }); + + afterEach(() => { + mock.restore(); }); - it('should save the last search string', () => { + it('should save the last search string', (done) => { CommitsList.searchField.val('GitLab'); - CommitsList.filterResults(); - expect(ajaxSpy).toHaveBeenCalled(); - expect(CommitsList.lastSearch).toEqual('GitLab'); + CommitsList.filterResults() + .then(() => { + expect(ajaxSpy).toHaveBeenCalled(); + expect(CommitsList.lastSearch).toEqual('GitLab'); + + done(); + }) + .catch(done.fail); }); - it('should not make ajax call if the input does not change', () => { - CommitsList.filterResults(); - expect(ajaxSpy).not.toHaveBeenCalled(); - expect(CommitsList.lastSearch).toEqual(''); + it('should not make ajax call if the input does not change', (done) => { + CommitsList.filterResults() + .then(() => { + expect(ajaxSpy).not.toHaveBeenCalled(); + expect(CommitsList.lastSearch).toEqual(''); + + done(); + }) + .catch(done.fail); }); }); }); diff --git a/spec/javascripts/create_item_dropdown_spec.js b/spec/javascripts/create_item_dropdown_spec.js index c8b00a4f553..143137c23ec 100644 --- a/spec/javascripts/create_item_dropdown_spec.js +++ b/spec/javascripts/create_item_dropdown_spec.js @@ -18,54 +18,67 @@ describe('CreateItemDropdown', () => { preloadFixtures('static/create_item_dropdown.html.raw'); let $wrapperEl; + let createItemDropdown; + + function createItemAndClearInput(text) { + // Filter for the new item + $wrapperEl.find('.dropdown-input-field') + .val(text) + .trigger('input'); + + // Create the new item + const $createButton = $wrapperEl.find('.js-dropdown-create-new-item'); + $createButton.click(); + + // Clear out the filter + $wrapperEl.find('.dropdown-input-field') + .val('') + .trigger('input'); + } beforeEach(() => { loadFixtures('static/create_item_dropdown.html.raw'); $wrapperEl = $('.js-create-item-dropdown-fixture-root'); - - // eslint-disable-next-line no-new - new CreateItemDropdown({ - $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'), - defaultToggleLabel: 'All variables', - fieldName: 
'variable[environment]', - getData: (term, callback) => { - callback(DROPDOWN_ITEM_DATA); - }, - }); }); afterEach(() => { $wrapperEl.remove(); }); - it('should have a dropdown item for each piece of data', () => { - // Get the data in the dropdown - $('.js-dropdown-menu-toggle').click(); + describe('items', () => { + beforeEach(() => { + createItemDropdown = new CreateItemDropdown({ + $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'), + defaultToggleLabel: 'All variables', + fieldName: 'variable[environment]', + getData: (term, callback) => { + callback(DROPDOWN_ITEM_DATA); + }, + }); + }); + + it('should have a dropdown item for each piece of data', () => { + // Get the data in the dropdown + $('.js-dropdown-menu-toggle').click(); - const $itemEls = $wrapperEl.find('.js-dropdown-content a'); - expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length); + const $itemEls = $wrapperEl.find('.js-dropdown-content a'); + expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length); + }); }); describe('created items', () => { const NEW_ITEM_TEXT = 'foobarbaz'; - function createItemAndClearInput(text) { - // Filter for the new item - $wrapperEl.find('.dropdown-input-field') - .val(text) - .trigger('input'); - - // Create the new item - const $createButton = $wrapperEl.find('.js-dropdown-create-new-item'); - $createButton.click(); - - // Clear out the filter - $wrapperEl.find('.dropdown-input-field') - .val('') - .trigger('input'); - } - beforeEach(() => { + createItemDropdown = new CreateItemDropdown({ + $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'), + defaultToggleLabel: 'All variables', + fieldName: 'variable[environment]', + getData: (term, callback) => { + callback(DROPDOWN_ITEM_DATA); + }, + }); + // Open the dropdown $('.js-dropdown-menu-toggle').click(); @@ -103,4 +116,68 @@ describe('CreateItemDropdown', () => { expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length); }); }); + + describe('clearDropdown()', () => { + beforeEach(() => { + createItemDropdown = new CreateItemDropdown({ + $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'), + defaultToggleLabel: 'All variables', + fieldName: 'variable[environment]', + getData: (term, callback) => { + callback(DROPDOWN_ITEM_DATA); + }, + }); + }); + + it('should clear all data and filter input', () => { + const filterInput = $wrapperEl.find('.dropdown-input-field'); + + // Get the data in the dropdown + $('.js-dropdown-menu-toggle').click(); + + // Filter for an item + filterInput + .val('one') + .trigger('input'); + + const $itemElsAfterFilter = $wrapperEl.find('.js-dropdown-content a'); + expect($itemElsAfterFilter.length).toEqual(1); + + createItemDropdown.clearDropdown(); + + const $itemElsAfterClear = $wrapperEl.find('.js-dropdown-content a'); + expect($itemElsAfterClear.length).toEqual(0); + expect(filterInput.val()).toEqual(''); + }); + }); + + describe('createNewItemFromValue option', () => { + beforeEach(() => { + createItemDropdown = new CreateItemDropdown({ + $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'), + defaultToggleLabel: 'All variables', + fieldName: 'variable[environment]', + getData: (term, callback) => { + callback(DROPDOWN_ITEM_DATA); + }, + createNewItemFromValue: newValue => ({ + title: `${newValue}-title`, + id: `${newValue}-id`, + text: `${newValue}-text`, + }), + }); + }); + + it('all items go through createNewItemFromValue', () => { + // Get the data in the dropdown + $('.js-dropdown-menu-toggle').click(); + + createItemAndClearInput('new-item'); + + const $itemEls = 
$wrapperEl.find('.js-dropdown-content a'); + expect($itemEls.length).toEqual(1 + DROPDOWN_ITEM_DATA.length); + expect($($itemEls[3]).text()).toEqual('new-item-text'); + expect($wrapperEl.find('.dropdown-toggle-text').text()).toEqual('new-item-title'); + }); + }); }); diff --git a/spec/javascripts/fixtures/clusters.rb b/spec/javascripts/fixtures/clusters.rb index d26ea3febe8..8e74c4f859c 100644 --- a/spec/javascripts/fixtures/clusters.rb +++ b/spec/javascripts/fixtures/clusters.rb @@ -31,19 +31,4 @@ describe Projects::ClustersController, '(JavaScript fixtures)', type: :controlle expect(response).to be_success store_frontend_fixture(response, example.description) end - - context 'rendering non-empty state' do - before do - cluster - end - - it 'clusters/index_cluster.html.raw' do |example| - get :index, - namespace_id: namespace, - project_id: project - - expect(response).to be_success - store_frontend_fixture(response, example.description) - end - end end diff --git a/spec/javascripts/integrations/integration_settings_form_spec.js b/spec/javascripts/integrations/integration_settings_form_spec.js index 9033eb9ce02..d0fba908e34 100644 --- a/spec/javascripts/integrations/integration_settings_form_spec.js +++ b/spec/javascripts/integrations/integration_settings_form_spec.js @@ -1,3 +1,5 @@ +import MockAdaptor from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import IntegrationSettingsForm from '~/integrations/integration_settings_form'; describe('IntegrationSettingsForm', () => { @@ -109,91 +111,117 @@ describe('IntegrationSettingsForm', () => { describe('testSettings', () => { let integrationSettingsForm; let formData; + let mock; beforeEach(() => { + mock = new MockAdaptor(axios); + + spyOn(axios, 'put').and.callThrough(); + integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form'); formData = integrationSettingsForm.$form.serialize(); }); - it('should make an ajax request with provided `formData`', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + afterEach(() => { + mock.restore(); + }); - integrationSettingsForm.testSettings(formData); + it('should make an ajax request with provided `formData`', (done) => { + integrationSettingsForm.testSettings(formData) + .then(() => { + expect(axios.put).toHaveBeenCalledWith(integrationSettingsForm.testEndPoint, formData); - expect($.ajax).toHaveBeenCalledWith({ - type: 'PUT', - url: integrationSettingsForm.testEndPoint, - data: formData, - }); + done(); + }) + .catch(done.fail); }); - it('should show error Flash with `Save anyway` action if ajax request responds with error in test', () => { + it('should show error Flash with `Save anyway` action if ajax request responds with error in test', (done) => { const errorMessage = 'Test failed.'; - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); - - integrationSettingsForm.testSettings(formData); + mock.onPut(integrationSettingsForm.testEndPoint).reply(200, { + error: true, + message: errorMessage, + service_response: 'some error', + }); - deferred.resolve({ error: true, message: errorMessage, service_response: 'some error' }); + integrationSettingsForm.testSettings(formData) + .then(() => { + const $flashContainer = $('.flash-container'); + expect($flashContainer.find('.flash-text').text().trim()).toEqual('Test failed. 
some error'); + expect($flashContainer.find('.flash-action')).toBeDefined(); + expect($flashContainer.find('.flash-action').text().trim()).toEqual('Save anyway'); - const $flashContainer = $('.flash-container'); - expect($flashContainer.find('.flash-text').text().trim()).toEqual('Test failed. some error'); - expect($flashContainer.find('.flash-action')).toBeDefined(); - expect($flashContainer.find('.flash-action').text().trim()).toEqual('Save anyway'); + done(); + }) + .catch(done.fail); }); - it('should submit form if ajax request responds without any error in test', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + it('should submit form if ajax request responds without any error in test', (done) => { + spyOn(integrationSettingsForm.$form, 'submit'); - integrationSettingsForm.testSettings(formData); + mock.onPut(integrationSettingsForm.testEndPoint).reply(200, { + error: false, + }); - spyOn(integrationSettingsForm.$form, 'submit'); - deferred.resolve({ error: false }); + integrationSettingsForm.testSettings(formData) + .then(() => { + expect(integrationSettingsForm.$form.submit).toHaveBeenCalled(); - expect(integrationSettingsForm.$form.submit).toHaveBeenCalled(); + done(); + }) + .catch(done.fail); }); - it('should submit form when clicked on `Save anyway` action of error Flash', () => { - const errorMessage = 'Test failed.'; - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); + it('should submit form when clicked on `Save anyway` action of error Flash', (done) => { + spyOn(integrationSettingsForm.$form, 'submit'); - integrationSettingsForm.testSettings(formData); + const errorMessage = 'Test failed.'; + mock.onPut(integrationSettingsForm.testEndPoint).reply(200, { + error: true, + message: errorMessage, + }); - deferred.resolve({ error: true, message: errorMessage }); + integrationSettingsForm.testSettings(formData) + .then(() => { + const $flashAction = $('.flash-container .flash-action'); + expect($flashAction).toBeDefined(); - const $flashAction = $('.flash-container .flash-action'); - expect($flashAction).toBeDefined(); + $flashAction.get(0).click(); + }) + .then(() => { + expect(integrationSettingsForm.$form.submit).toHaveBeenCalled(); - spyOn(integrationSettingsForm.$form, 'submit'); - $flashAction.get(0).click(); - expect(integrationSettingsForm.$form.submit).toHaveBeenCalled(); + done(); + }) + .catch(done.fail); }); - it('should show error Flash if ajax request failed', () => { + it('should show error Flash if ajax request failed', (done) => { const errorMessage = 'Something went wrong on our end.'; - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); - integrationSettingsForm.testSettings(formData); + mock.onPut(integrationSettingsForm.testEndPoint).networkError(); - deferred.reject(); + integrationSettingsForm.testSettings(formData) + .then(() => { + expect($('.flash-container .flash-text').text().trim()).toEqual(errorMessage); - expect($('.flash-container .flash-text').text().trim()).toEqual(errorMessage); + done(); + }) + .catch(done.fail); }); - it('should always call `toggleSubmitBtnState` with `false` once request is completed', () => { - const deferred = $.Deferred(); - spyOn($, 'ajax').and.returnValue(deferred.promise()); - - integrationSettingsForm.testSettings(formData); + it('should always call `toggleSubmitBtnState` with `false` once request is completed', (done) => { + mock.onPut(integrationSettingsForm.testEndPoint).networkError(); 
spyOn(integrationSettingsForm, 'toggleSubmitBtnState'); - deferred.reject(); - expect(integrationSettingsForm.toggleSubmitBtnState).toHaveBeenCalledWith(false); + integrationSettingsForm.testSettings(formData) + .then(() => { + expect(integrationSettingsForm.toggleSubmitBtnState).toHaveBeenCalledWith(false); + + done(); + }) + .catch(done.fail); }); }); }); diff --git a/spec/javascripts/issuable_spec.js b/spec/javascripts/issuable_spec.js index 5a9112716f4..d53ffecbd35 100644 --- a/spec/javascripts/issuable_spec.js +++ b/spec/javascripts/issuable_spec.js @@ -1,3 +1,5 @@ +import MockAdaptor from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import IssuableIndex from '~/issuable_index'; describe('Issuable', () => { @@ -19,6 +21,8 @@ describe('Issuable', () => { }); describe('resetIncomingEmailToken', () => { + let mock; + beforeEach(() => { const element = document.createElement('a'); element.classList.add('incoming-email-token-reset'); @@ -30,14 +34,28 @@ describe('Issuable', () => { document.body.appendChild(input); Issuable = new IssuableIndex('issue_'); + + mock = new MockAdaptor(axios); + + mock.onPut('foo').reply(200, { + new_address: 'testing123', + }); }); - it('should send request to reset email token', () => { - spyOn(jQuery, 'ajax').and.callThrough(); + afterEach(() => { + mock.restore(); + }); + + it('should send request to reset email token', (done) => { + spyOn(axios, 'put').and.callThrough(); document.querySelector('.incoming-email-token-reset').click(); - expect(jQuery.ajax).toHaveBeenCalled(); - expect(jQuery.ajax.calls.argsFor(0)[0].url).toEqual('foo'); + setTimeout(() => { + expect(axios.put).toHaveBeenCalledWith('foo'); + expect($('#issuable_email').val()).toBe('testing123'); + + done(); + }); }); }); }); diff --git a/spec/javascripts/job_spec.js b/spec/javascripts/job_spec.js index feb341d22e6..0452934ea9e 100644 --- a/spec/javascripts/job_spec.js +++ b/spec/javascripts/job_spec.js @@ -58,8 +58,7 @@ describe('Job', () => { it('updates the build trace on an interval', function () { const deferred1 = $.Deferred(); const deferred2 = $.Deferred(); - const deferred3 = $.Deferred(); - spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise(), deferred3.promise()); + spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise()); spyOn(urlUtils, 'visitUrl'); deferred1.resolve({ @@ -70,9 +69,7 @@ describe('Job', () => { complete: false, }); - deferred2.resolve(); - - deferred3.resolve({ + deferred2.resolve({ html: '<span>More</span>', status: 'running', state: 'finalstate', @@ -94,9 +91,8 @@ describe('Job', () => { it('replaces the entire build trace', () => { const deferred1 = $.Deferred(); const deferred2 = $.Deferred(); - const deferred3 = $.Deferred(); - spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise(), deferred3.promise()); + spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise()); spyOn(urlUtils, 'visitUrl'); @@ -107,9 +103,7 @@ describe('Job', () => { complete: false, }); - deferred2.resolve(); - - deferred3.resolve({ + deferred2.resolve({ html: '<span>Different</span>', status: 'running', append: false, @@ -170,9 +164,8 @@ describe('Job', () => { it('shows incremented size', () => { const deferred1 = $.Deferred(); const deferred2 = $.Deferred(); - const deferred3 = $.Deferred(); - spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise(), deferred3.promise()); + spyOn($, 'ajax').and.returnValues(deferred1.promise(), deferred2.promise()); spyOn(urlUtils, 
'visitUrl'); @@ -184,8 +177,6 @@ describe('Job', () => { total: 100, }); - deferred2.resolve(); - this.job = new Job(); expect( @@ -194,7 +185,7 @@ describe('Job', () => { jasmine.clock().tick(4001); - deferred3.resolve({ + deferred2.resolve({ html: '<span>Update</span>', status: 'success', append: true, diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js index 0a9d815f469..1052b4e7c20 100644 --- a/spec/javascripts/lib/utils/common_utils_spec.js +++ b/spec/javascripts/lib/utils/common_utils_spec.js @@ -1,6 +1,8 @@ /* eslint-disable promise/catch-or-return */ import * as commonUtils from '~/lib/utils/common_utils'; +import axios from '~/lib/utils/axios_utils'; +import MockAdapter from 'axios-mock-adapter'; describe('common_utils', () => { describe('parseUrl', () => { @@ -413,37 +415,48 @@ describe('common_utils', () => { describe('setCiStatusFavicon', () => { const BUILD_URL = `${gl.TEST_HOST}/frontend-fixtures/builds-project/-/jobs/1/status.json`; + let mock; beforeEach(() => { const favicon = document.createElement('link'); favicon.setAttribute('id', 'favicon'); document.body.appendChild(favicon); + mock = new MockAdapter(axios); }); afterEach(() => { + mock.restore(); document.body.removeChild(document.getElementById('favicon')); }); - it('should reset favicon in case of error', () => { - const favicon = document.getElementById('favicon'); - spyOn($, 'ajax').and.callFake(function (options) { - options.error(); - expect(favicon.getAttribute('href')).toEqual('null'); - }); + it('should reset favicon in case of error', (done) => { + mock.onGet(BUILD_URL).networkError(); - commonUtils.setCiStatusFavicon(BUILD_URL); + commonUtils.setCiStatusFavicon(BUILD_URL) + .then(() => { + const favicon = document.getElementById('favicon'); + expect(favicon.getAttribute('href')).toEqual('null'); + done(); + }) + // Error is already caught in catch() block of setCiStatusFavicon, + // It won't throw another error for us to catch + .catch(done.fail); }); - it('should set page favicon to CI status favicon based on provided status', () => { + it('should set page favicon to CI status favicon based on provided status', (done) => { const FAVICON_PATH = '//icon_status_success'; - const favicon = document.getElementById('favicon'); - spyOn($, 'ajax').and.callFake(function (options) { - options.success({ favicon: FAVICON_PATH }); - expect(favicon.getAttribute('href')).toEqual(FAVICON_PATH); + mock.onGet(BUILD_URL).reply(200, { + favicon: FAVICON_PATH, }); - commonUtils.setCiStatusFavicon(BUILD_URL); + commonUtils.setCiStatusFavicon(BUILD_URL) + .then(() => { + const favicon = document.getElementById('favicon'); + expect(favicon.getAttribute('href')).toEqual(FAVICON_PATH); + done(); + }) + .catch(done.fail); }); }); diff --git a/spec/javascripts/lib/utils/users_cache_spec.js b/spec/javascripts/lib/utils/users_cache_spec.js index ec6ea35952b..50371c8c5f6 100644 --- a/spec/javascripts/lib/utils/users_cache_spec.js +++ b/spec/javascripts/lib/utils/users_cache_spec.js @@ -92,7 +92,9 @@ describe('UsersCache', () => { apiSpy = (query, options) => { expect(query).toBe(''); expect(options).toEqual({ username: dummyUsername }); - return Promise.resolve([dummyUser]); + return Promise.resolve({ + data: [dummyUser], + }); }; UsersCache.retrieve(dummyUsername) diff --git a/spec/javascripts/mini_pipeline_graph_dropdown_spec.js b/spec/javascripts/mini_pipeline_graph_dropdown_spec.js index 481b46c3ac6..6fa6f44f953 100644 --- 
a/spec/javascripts/mini_pipeline_graph_dropdown_spec.js +++ b/spec/javascripts/mini_pipeline_graph_dropdown_spec.js @@ -1,7 +1,9 @@ /* eslint-disable no-new */ +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import MiniPipelineGraph from '~/mini_pipeline_graph_dropdown'; -import '~/flash'; +import timeoutPromise from './helpers/set_timeout_promise_helper'; describe('Mini Pipeline Graph Dropdown', () => { preloadFixtures('static/mini_dropdown_graph.html.raw'); @@ -27,6 +29,16 @@ describe('Mini Pipeline Graph Dropdown', () => { }); describe('When dropdown is clicked', () => { + let mock; + + beforeEach(() => { + mock = new MockAdapter(axios); + }); + + afterEach(() => { + mock.restore(); + }); + it('should call getBuildsList', () => { const getBuildsListSpy = spyOn( MiniPipelineGraph.prototype, @@ -41,46 +53,55 @@ describe('Mini Pipeline Graph Dropdown', () => { }); it('should make a request to the endpoint provided in the html', () => { - const ajaxSpy = spyOn($, 'ajax').and.callFake(function () {}); + const ajaxSpy = spyOn(axios, 'get').and.callThrough(); + + mock.onGet('foobar').reply(200, { + html: '', + }); new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); document.querySelector('.js-builds-dropdown-button').click(); - expect(ajaxSpy.calls.allArgs()[0][0].url).toEqual('foobar'); + expect(ajaxSpy.calls.allArgs()[0][0]).toEqual('foobar'); }); - it('should not close when user uses cmd/ctrl + click', () => { - spyOn($, 'ajax').and.callFake(function (params) { - params.success({ - html: `<li> - <a class="mini-pipeline-graph-dropdown-item" href="#"> - <span class="ci-status-icon ci-status-icon-failed"></span> - <span class="ci-build-text">build</span> - </a> - <a class="ci-action-icon-wrapper js-ci-action-icon" href="#"></a> - </li>`, - }); + it('should not close when user uses cmd/ctrl + click', (done) => { + mock.onGet('foobar').reply(200, { + html: `<li> + <a class="mini-pipeline-graph-dropdown-item" href="#"> + <span class="ci-status-icon ci-status-icon-failed"></span> + <span class="ci-build-text">build</span> + </a> + <a class="ci-action-icon-wrapper js-ci-action-icon" href="#"></a> + </li>`, }); new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); document.querySelector('.js-builds-dropdown-button').click(); - document.querySelector('a.mini-pipeline-graph-dropdown-item').click(); - - expect($('.js-builds-dropdown-list').is(':visible')).toEqual(true); + timeoutPromise() + .then(() => { + document.querySelector('a.mini-pipeline-graph-dropdown-item').click(); + }) + .then(timeoutPromise) + .then(() => { + expect($('.js-builds-dropdown-list').is(':visible')).toEqual(true); + }) + .then(done) + .catch(done.fail); }); - }); - it('should close the dropdown when request returns an error', (done) => { - spyOn($, 'ajax').and.callFake(options => options.error()); + it('should close the dropdown when request returns an error', (done) => { + mock.onGet('foobar').networkError(); - new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); + new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents(); - document.querySelector('.js-builds-dropdown-button').click(); + document.querySelector('.js-builds-dropdown-button').click(); - setTimeout(() => { - expect($('.js-builds-dropdown-tests .dropdown').hasClass('open')).toEqual(false); - done(); - }, 0); + setTimeout(() => { + expect($('.js-builds-dropdown-tests .dropdown').hasClass('open')).toEqual(false); + done(); 
+ }); + }); }); }); diff --git a/spec/javascripts/pager_spec.js b/spec/javascripts/pager_spec.js index 2fd87754238..b09494f0b77 100644 --- a/spec/javascripts/pager_spec.js +++ b/spec/javascripts/pager_spec.js @@ -1,5 +1,6 @@ /* global fixture */ - +import MockAdapter from 'axios-mock-adapter'; +import axios from '~/lib/utils/axios_utils'; import * as utils from '~/lib/utils/url_utility'; import Pager from '~/pager'; @@ -9,7 +10,6 @@ describe('pager', () => { beforeEach(() => { setFixtures('<div class="content_list"></div><div class="loading"></div>'); - spyOn($, 'ajax'); }); afterEach(() => { @@ -47,39 +47,90 @@ describe('pager', () => { }); describe('getOld', () => { + const urlRegex = /(.*)some_list(.*)$/; + let mock; + + function mockSuccess() { + mock.onGet(urlRegex).reply(200, { + count: 0, + html: '', + }); + } + + function mockError() { + mock.onGet(urlRegex).networkError(); + } + beforeEach(() => { setFixtures('<div class="content_list" data-href="/some_list"></div><div class="loading"></div>'); + spyOn(axios, 'get').and.callThrough(); + + mock = new MockAdapter(axios); + Pager.init(); }); - it('shows loader while loading next page', () => { + afterEach(() => { + mock.restore(); + }); + + it('shows loader while loading next page', (done) => { + mockSuccess(); + spyOn(Pager.loading, 'show'); Pager.getOld(); - expect(Pager.loading.show).toHaveBeenCalled(); + + setTimeout(() => { + expect(Pager.loading.show).toHaveBeenCalled(); + + done(); + }); }); - it('hides loader on success', () => { - spyOn($, 'ajax').and.callFake(options => options.success({})); + it('hides loader on success', (done) => { + mockSuccess(); + spyOn(Pager.loading, 'hide'); Pager.getOld(); - expect(Pager.loading.hide).toHaveBeenCalled(); + + setTimeout(() => { + expect(Pager.loading.hide).toHaveBeenCalled(); + + done(); + }); }); - it('hides loader on error', () => { - spyOn($, 'ajax').and.callFake(options => options.error()); + it('hides loader on error', (done) => { + mockError(); + spyOn(Pager.loading, 'hide'); Pager.getOld(); - expect(Pager.loading.hide).toHaveBeenCalled(); + + setTimeout(() => { + expect(Pager.loading.hide).toHaveBeenCalled(); + + done(); + }); }); - it('sends request to url with offset and limit params', () => { - spyOn($, 'ajax'); + it('sends request to url with offset and limit params', (done) => { Pager.offset = 100; Pager.limit = 20; Pager.getOld(); - const [{ data, url }] = $.ajax.calls.argsFor(0); - expect(data).toBe('limit=20&offset=100'); - expect(url).toBe('/some_list'); + + setTimeout(() => { + const [url, params] = axios.get.calls.argsFor(0); + + expect(params).toEqual({ + params: { + limit: 20, + offset: 100, + }, + }); + expect(url).toBe('/some_list'); + + done(); + }); }); }); }); diff --git a/spec/javascripts/pipelines/pipelines_table_row_spec.js b/spec/javascripts/pipelines/pipelines_table_row_spec.js index b3cbf9aba48..de744739e42 100644 --- a/spec/javascripts/pipelines/pipelines_table_row_spec.js +++ b/spec/javascripts/pipelines/pipelines_table_row_spec.js @@ -26,8 +26,8 @@ describe('Pipelines Table Row', () => { const pipelines = getJSONFixture(jsonFixtureName).pipelines; pipeline = pipelines.find(p => p.user !== null && p.commit !== null); - pipelineWithoutAuthor = pipelines.find(p => p.user == null && p.commit !== null); - pipelineWithoutCommit = pipelines.find(p => p.user == null && p.commit == null); + pipelineWithoutAuthor = pipelines.find(p => p.user === null && p.commit !== null); + pipelineWithoutCommit = pipelines.find(p => p.user === null && p.commit === null); 
}); afterEach(() => { diff --git a/spec/javascripts/repo/components/new_dropdown/modal_spec.js b/spec/javascripts/repo/components/new_dropdown/modal_spec.js index 233cca06ed0..8bbc3100357 100644 --- a/spec/javascripts/repo/components/new_dropdown/modal_spec.js +++ b/spec/javascripts/repo/components/new_dropdown/modal_spec.js @@ -18,8 +18,10 @@ describe('new file modal component', () => { })); spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({ - commit: { - id: '123branch', + data: { + commit: { + id: '123branch', + }, }, })); diff --git a/spec/javascripts/repo/components/new_dropdown/upload_spec.js b/spec/javascripts/repo/components/new_dropdown/upload_spec.js index 788c08e5279..667112ab21a 100644 --- a/spec/javascripts/repo/components/new_dropdown/upload_spec.js +++ b/spec/javascripts/repo/components/new_dropdown/upload_spec.js @@ -17,8 +17,10 @@ describe('new dropdown upload', () => { })); spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({ - commit: { - id: '123branch', + data: { + commit: { + id: '123branch', + }, }, })); diff --git a/spec/javascripts/repo/components/repo_commit_section_spec.js b/spec/javascripts/repo/components/repo_commit_section_spec.js index 676ac09f2c9..93e94b4f24c 100644 --- a/spec/javascripts/repo/components/repo_commit_section_spec.js +++ b/spec/javascripts/repo/components/repo_commit_section_spec.js @@ -87,8 +87,10 @@ describe('RepoCommitSection', () => { changedFiles = JSON.parse(JSON.stringify(vm.$store.getters.changedFiles)); spyOn(service, 'commit').and.returnValue(Promise.resolve({ - short_id: '1', - stats: {}, + data: { + short_id: '1', + stats: {}, + }, })); }); diff --git a/spec/javascripts/repo/stores/actions_spec.js b/spec/javascripts/repo/stores/actions_spec.js index 8d830c67290..f678967b092 100644 --- a/spec/javascripts/repo/stores/actions_spec.js +++ b/spec/javascripts/repo/stores/actions_spec.js @@ -178,7 +178,9 @@ describe('Multi-file store actions', () => { it('calls service', (done) => { spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({ - commit: { id: '123' }, + data: { + commit: { id: '123' }, + }, })); store.dispatch('checkCommitStatus') @@ -192,7 +194,9 @@ describe('Multi-file store actions', () => { it('returns true if current ref does not equal returned ID', (done) => { spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({ - commit: { id: '123' }, + data: { + commit: { id: '123' }, + }, })); store.dispatch('checkCommitStatus') @@ -206,7 +210,9 @@ describe('Multi-file store actions', () => { it('returns false if current ref equals returned ID', (done) => { spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({ - commit: { id: '1' }, + data: { + commit: { id: '1' }, + }, })); store.dispatch('checkCommitStatus') @@ -250,13 +256,15 @@ describe('Multi-file store actions', () => { describe('success', () => { beforeEach(() => { spyOn(service, 'commit').and.returnValue(Promise.resolve({ - id: '123456', - short_id: '123', - message: 'test message', - committed_date: 'date', - stats: { - additions: '1', - deletions: '2', + data: { + id: '123456', + short_id: '123', + message: 'test message', + committed_date: 'date', + stats: { + additions: '1', + deletions: '2', + }, }, })); }); @@ -324,7 +332,9 @@ describe('Multi-file store actions', () => { describe('failed', () => { beforeEach(() => { spyOn(service, 'commit').and.returnValue(Promise.resolve({ - message: 'failed message', + data: { + message: 'failed message', + }, })); }); diff --git a/spec/javascripts/toggle_buttons_spec.js 
b/spec/javascripts/toggle_buttons_spec.js new file mode 100644 index 00000000000..205e396d682 --- /dev/null +++ b/spec/javascripts/toggle_buttons_spec.js @@ -0,0 +1,120 @@ +import setupToggleButtons from '~/toggle_buttons'; +import getSetTimeoutPromise from './helpers/set_timeout_promise_helper'; + +function generateMarkup(isChecked = true) { + return ` + <button type="button" class="${isChecked ? 'is-checked' : ''} js-project-feature-toggle"> + <input type="hidden" class="js-project-feature-toggle-input" value="${isChecked}" /> + </button> + `; +} + +function setupFixture(isChecked, clickCallback) { + const wrapper = document.createElement('div'); + wrapper.innerHTML = generateMarkup(isChecked); + + setupToggleButtons(wrapper, clickCallback); + + return wrapper; +} + +describe('ToggleButtons', () => { + describe('when input value is true', () => { + it('should initialize as checked', () => { + const wrapper = setupFixture(true); + + expect(wrapper.querySelector('.js-project-feature-toggle').classList.contains('is-checked')).toEqual(true); + expect(wrapper.querySelector('.js-project-feature-toggle-input').value).toEqual('true'); + }); + + it('should toggle to unchecked when clicked', (done) => { + const wrapper = setupFixture(true); + const toggleButton = wrapper.querySelector('.js-project-feature-toggle'); + + toggleButton.click(); + + getSetTimeoutPromise() + .then(() => { + expect(toggleButton.classList.contains('is-checked')).toEqual(false); + expect(wrapper.querySelector('.js-project-feature-toggle-input').value).toEqual('false'); + }) + .then(done) + .catch(done.fail); + }); + }); + + describe('when input value is false', () => { + it('should initialize as unchecked', () => { + const wrapper = setupFixture(false); + + expect(wrapper.querySelector('.js-project-feature-toggle').classList.contains('is-checked')).toEqual(false); + expect(wrapper.querySelector('.js-project-feature-toggle-input').value).toEqual('false'); + }); + + it('should toggle to checked when clicked', (done) => { + const wrapper = setupFixture(false); + const toggleButton = wrapper.querySelector('.js-project-feature-toggle'); + + toggleButton.click(); + + getSetTimeoutPromise() + .then(() => { + expect(toggleButton.classList.contains('is-checked')).toEqual(true); + expect(wrapper.querySelector('.js-project-feature-toggle-input').value).toEqual('true'); + }) + .then(done) + .catch(done.fail); + }); + }); + + it('should emit `trigger-change` event', (done) => { + const changeSpy = jasmine.createSpy('changeEventHandler'); + const wrapper = setupFixture(false); + const toggleButton = wrapper.querySelector('.js-project-feature-toggle'); + const input = wrapper.querySelector('.js-project-feature-toggle-input'); + + $(input).on('trigger-change', changeSpy); + + toggleButton.click(); + + getSetTimeoutPromise() + .then(() => { + expect(changeSpy).toHaveBeenCalled(); + }) + .then(done) + .catch(done.fail); + }); + + describe('clickCallback', () => { + it('should show loading indicator while waiting', (done) => { + const isChecked = true; + const clickCallback = (newValue, toggleButton) => { + const input = toggleButton.querySelector('.js-project-feature-toggle-input'); + + expect(newValue).toEqual(false); + + // Check for the loading state + expect(toggleButton.classList.contains('is-checked')).toEqual(false); + expect(toggleButton.classList.contains('is-loading')).toEqual(true); + expect(toggleButton.disabled).toEqual(true); + expect(input.value).toEqual('true'); + + // After the callback finishes, check that the loading 
state is gone + getSetTimeoutPromise() + .then(() => { + expect(toggleButton.classList.contains('is-checked')).toEqual(false); + expect(toggleButton.classList.contains('is-loading')).toEqual(false); + expect(toggleButton.disabled).toEqual(false); + expect(input.value).toEqual('false'); + }) + .then(done) + .catch(done.fail); + }; + + const wrapper = setupFixture(isChecked, clickCallback); + const toggleButton = wrapper.querySelector('.js-project-feature-toggle'); + + toggleButton.click(); + }); + }); +}); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js index a750bc78f36..f14d5f6f76c 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_spec.js @@ -1,39 +1,39 @@ import Vue from 'vue'; -import authorComponent from '~/vue_merge_request_widget/components/mr_widget_author'; - -const author = { - webUrl: 'http://foo.bar', - avatarUrl: 'http://gravatar.com/foo', - name: 'fatihacet', -}; -const createComponent = () => { - const Component = Vue.extend(authorComponent); - - return new Component({ - el: document.createElement('div'), - propsData: { author }, - }); -}; +import authorComponent from '~/vue_merge_request_widget/components/mr_widget_author.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('MRWidgetAuthor', () => { - describe('props', () => { - it('should have props', () => { - const authorProp = authorComponent.props.author; + let vm; + + beforeEach(() => { + const Component = Vue.extend(authorComponent); + + vm = mountComponent(Component, { + author: { + name: 'Administrator', + username: 'root', + webUrl: 'http://localhost:3000/root', + avatarUrl: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + }, - expect(authorProp).toBeDefined(); - expect(authorProp.type instanceof Object).toBeTruthy(); - expect(authorProp.required).toBeTruthy(); }); }); - describe('template', () => { - it('should have correct elements', () => { - const el = createComponent().$el; + afterEach(() => { + vm.$destroy(); + }); - expect(el.tagName).toEqual('A'); - expect(el.getAttribute('href')).toEqual(author.webUrl); - expect(el.querySelector('img').getAttribute('src')).toEqual(author.avatarUrl); - expect(el.querySelector('.author').innerText.trim()).toEqual(author.name); - }); + it('renders link with the author web url', () => { + expect(vm.$el.getAttribute('href')).toEqual('http://localhost:3000/root'); + }); + + it('renders image with avatar url', () => { + expect( + vm.$el.querySelector('img').getAttribute('src'), + ).toEqual('http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon'); + }); + + it('renders author name', () => { + expect(vm.$el.textContent.trim()).toEqual('Administrator'); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js index 515ddcbb875..8c55622b15e 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_author_time_spec.js @@ -1,61 +1,40 @@ import Vue from 'vue'; -import authorTimeComponent from '~/vue_merge_request_widget/components/mr_widget_author_time'; - -const props = { - actionText: 'Merged by', - author: { - webUrl: 'http://foo.bar', - avatarUrl: 'http://gravatar.com/foo', - name: 'fatihacet', - }, - 
dateTitle: '2017-03-23T23:02:00.807Z', - dateReadable: '12 hours ago', -}; -const createComponent = () => { - const Component = Vue.extend(authorTimeComponent); - - return new Component({ - el: document.createElement('div'), - propsData: props, - }); -}; +import authorTimeComponent from '~/vue_merge_request_widget/components/mr_widget_author_time.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('MRWidgetAuthorTime', () => { - describe('props', () => { - it('should have props', () => { - const { actionText, author, dateTitle, dateReadable } = authorTimeComponent.props; - const ActionTextClass = actionText.type; - const DateTitleClass = dateTitle.type; - const DateReadableClass = dateReadable.type; - - expect(new ActionTextClass() instanceof String).toBeTruthy(); - expect(actionText.required).toBeTruthy(); - - expect(author.type instanceof Object).toBeTruthy(); - expect(author.required).toBeTruthy(); - - expect(new DateTitleClass() instanceof String).toBeTruthy(); - expect(dateTitle.required).toBeTruthy(); - - expect(new DateReadableClass() instanceof String).toBeTruthy(); - expect(dateReadable.required).toBeTruthy(); + let vm; + + beforeEach(() => { + const Component = Vue.extend(authorTimeComponent); + + vm = mountComponent(Component, { + actionText: 'Merged by', + author: { + name: 'Administrator', + username: 'root', + webUrl: 'http://localhost:3000/root', + avatarUrl: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + }, + dateTitle: '2017-03-23T23:02:00.807Z', + dateReadable: '12 hours ago', }); }); - describe('components', () => { - it('should have components', () => { - expect(authorTimeComponent.components['mr-widget-author']).toBeDefined(); - }); + afterEach(() => { + vm.$destroy(); + }); + + it('renders provided action text', () => { + expect(vm.$el.textContent).toContain('Merged by'); }); - describe('template', () => { - it('should have correct elements', () => { - const el = createComponent().$el; + it('renders author', () => { + expect(vm.$el.textContent).toContain('Administrator'); + }); - expect(el.tagName).toEqual('H4'); - expect(el.querySelector('a').getAttribute('href')).toEqual(props.author.webUrl); - expect(el.querySelector('time').innerText).toContain(props.dateReadable); - expect(el.querySelector('time').getAttribute('title')).toEqual(props.dateTitle); - }); + it('renders provided time', () => { + expect(vm.$el.querySelector('time').getAttribute('title')).toEqual('2017-03-23T23:02:00.807Z'); + expect(vm.$el.querySelector('time').textContent.trim()).toEqual('12 hours ago'); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js index 06f89fabf42..13e5595bbfc 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_header_spec.js @@ -1,104 +1,219 @@ import Vue from 'vue'; -import headerComponent from '~/vue_merge_request_widget/components/mr_widget_header'; - -const createComponent = (mr) => { - const Component = Vue.extend(headerComponent); - return new Component({ - el: document.createElement('div'), - propsData: { mr }, - }); -}; +import headerComponent from '~/vue_merge_request_widget/components/mr_widget_header.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('MRWidgetHeader', () => { - describe('props', () => { - it('should have props', () => { - const { mr } = 
headerComponent.props; + let vm; + let Component; - expect(mr.type instanceof Object).toBeTruthy(); - expect(mr.required).toBeTruthy(); - }); + beforeEach(() => { + Component = Vue.extend(headerComponent); + }); + + afterEach(() => { + vm.$destroy(); }); describe('computed', () => { - let vm; - beforeEach(() => { - vm = createComponent({ - divergedCommitsCount: 12, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: '/foo/bar/mr-widget-refactor', - targetBranch: 'master', + describe('shouldShowCommitsBehindText', () => { + it('return true when there are divergedCommitsCount', () => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + } }); + + expect(vm.shouldShowCommitsBehindText).toEqual(true); + }); + + it('returns false where there are no divergedComits count', () => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 0, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + } }); + expect(vm.shouldShowCommitsBehindText).toEqual(false); }); }); - it('shouldShowCommitsBehindText', () => { - expect(vm.shouldShowCommitsBehindText).toBeTruthy(); + describe('commitsText', () => { + it('returns singular when there is one commit', () => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 1, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + } }); - vm.mr.divergedCommitsCount = 0; - expect(vm.shouldShowCommitsBehindText).toBeFalsy(); - }); + expect(vm.commitsText).toEqual('1 commit behind'); + }); - it('commitsText', () => { - expect(vm.commitsText).toEqual('commits'); + it('returns plural when there is more than one commit', () => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 2, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>', + targetBranch: 'master', + } }); - vm.mr.divergedCommitsCount = 1; - expect(vm.commitsText).toEqual('commit'); + expect(vm.commitsText).toEqual('2 commits behind'); + }); }); }); describe('template', () => { - let vm; - let el; - const sourceBranchPath = '/foo/bar/mr-widget-refactor'; - const mr = { - divergedCommitsCount: 12, - sourceBranch: 'mr-widget-refactor', - sourceBranchLink: `<a href="${sourceBranchPath}">mr-widget-refactor</a>`, - targetBranchPath: 'foo/bar/commits-path', - targetBranchTreePath: 'foo/bar/tree/path', - targetBranch: 'master', - isOpen: true, - emailPatchesPath: '/mr/email-patches', - plainDiffPath: '/mr/plainDiffPath', - }; - - beforeEach(() => { - vm = createComponent(mr); - el = vm.$el; + describe('common elements', () => { + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); + + it('renders source branch link', () => { + expect( + vm.$el.querySelector('.js-source-branch').innerHTML, + ).toEqual('<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>'); + }); + + it('renders clipboard button', () => { + 
expect(vm.$el.querySelector('.btn-clipboard')).not.toEqual(null); + }); + + it('renders target branch', () => { + expect(vm.$el.querySelector('.js-target-branch').textContent.trim()).toEqual('master'); + }); + }); + + describe('with an open merge request', () => { + afterEach(() => { + vm.$destroy(); + }); + + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); + + it('renders checkout branch button with modal trigger', () => { + const button = vm.$el.querySelector('.js-check-out-branch'); + + expect(button.textContent.trim()).toEqual('Check out branch'); + expect(button.getAttribute('data-target')).toEqual('#modal_merge_info'); + expect(button.getAttribute('data-toggle')).toEqual('modal'); + }); + + it('renders download dropdown with links', () => { + expect( + vm.$el.querySelector('.js-download-email-patches').textContent.trim(), + ).toEqual('Email patches'); + + expect( + vm.$el.querySelector('.js-download-email-patches').getAttribute('href'), + ).toEqual('/mr/email-patches'); + + expect( + vm.$el.querySelector('.js-download-plain-diff').textContent.trim(), + ).toEqual('Plain diff'); + + expect( + vm.$el.querySelector('.js-download-plain-diff').getAttribute('href'), + ).toEqual('/mr/plainDiffPath'); + }); }); - it('should render template elements correctly', () => { - expect(el.classList.contains('mr-source-target')).toBeTruthy(); - const sourceBranchLink = el.querySelectorAll('.label-branch')[0]; - const targetBranchLink = el.querySelectorAll('.label-branch')[1]; - const commitsCount = el.querySelector('.diverged-commits-count'); - - expect(sourceBranchLink.textContent).toContain(mr.sourceBranch); - expect(targetBranchLink.textContent).toContain(mr.targetBranch); - expect(sourceBranchLink.querySelector('a').getAttribute('href')).toEqual(sourceBranchPath); - expect(targetBranchLink.querySelector('a').getAttribute('href')).toEqual(mr.targetBranchTreePath); - expect(commitsCount.textContent).toContain('12 commits behind'); - expect(commitsCount.querySelector('a').getAttribute('href')).toEqual(mr.targetBranchPath); - - expect(el.textContent).toContain('Check out branch'); - expect(el.querySelectorAll('.dropdown li a')[0].getAttribute('href')).toEqual(mr.emailPatchesPath); - expect(el.querySelectorAll('.dropdown li a')[1].getAttribute('href')).toEqual(mr.plainDiffPath); + describe('with a closed merge request', () => { + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: false, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); + + it('does not render checkout branch button with modal trigger', () => { + const button = vm.$el.querySelector('.js-check-out-branch'); + + expect(button).toEqual(null); + }); + + it('does not render download dropdown with links', () => { + expect( + vm.$el.querySelector('.js-download-email-patches'), + 
).toEqual(null); + + expect( + vm.$el.querySelector('.js-download-plain-diff'), + ).toEqual(null); + }); }); - it('should not have right action links if the MR state is not open', (done) => { - vm.mr.isOpen = false; - Vue.nextTick(() => { - expect(el.textContent).not.toContain('Check out branch'); - expect(el.querySelectorAll('.dropdown li a').length).toEqual(0); - done(); + describe('without diverged commits', () => { + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 0, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); + + it('does not render diverged commits info', () => { + expect(vm.$el.querySelector('.diverged-commits-count')).toEqual(null); }); }); - it('should not render diverged commits count if the MR has no diverged commits', (done) => { - vm.mr.divergedCommitsCount = null; - Vue.nextTick(() => { - expect(el.textContent).not.toContain('commits behind'); - expect(el.querySelectorAll('.diverged-commits-count').length).toEqual(0); - done(); + describe('with diverged commits', () => { + beforeEach(() => { + vm = mountComponent(Component, { mr: { + divergedCommitsCount: 12, + sourceBranch: 'mr-widget-refactor', + sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>', + sourceBranchRemoved: false, + targetBranchPath: 'foo/bar/commits-path', + targetBranchTreePath: 'foo/bar/tree/path', + targetBranch: 'master', + isOpen: true, + emailPatchesPath: '/mr/email-patches', + plainDiffPath: '/mr/plainDiffPath', + } }); + }); + + it('renders diverged commits info', () => { + expect(vm.$el.querySelector('.diverged-commits-count').textContent.trim()).toEqual('(12 commits behind)'); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js index 4da4fc82c26..cc43639f576 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_merge_help_spec.js @@ -1,51 +1,56 @@ import Vue from 'vue'; -import mergeHelpComponent from '~/vue_merge_request_widget/components/mr_widget_merge_help'; - -const props = { - missingBranch: 'this-is-not-the-branch-you-are-looking-for', -}; -const text = `If the ${props.missingBranch} branch exists in your local repository`; - -const createComponent = () => { - const Component = Vue.extend(mergeHelpComponent); - return new Component({ - el: document.createElement('div'), - propsData: props, - }); -}; +import mergeHelpComponent from '~/vue_merge_request_widget/components/mr_widget_merge_help.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('MRWidgetMergeHelp', () => { - describe('props', () => { - it('should have props', () => { - const { missingBranch } = mergeHelpComponent.props; - const MissingBranchTypeClass = missingBranch.type; - - expect(new MissingBranchTypeClass() instanceof String).toBeTruthy(); - expect(missingBranch.required).toBeFalsy(); - expect(missingBranch.default).toEqual(''); - }); + let vm; + let Component; + + beforeEach(() => { + Component = Vue.extend(mergeHelpComponent); }); - describe('template', () => { - let vm; - let el; + afterEach(() => 
{ + vm.$destroy(); + }); + describe('with missing branch', () => { beforeEach(() => { - vm = createComponent(); - el = vm.$el; + vm = mountComponent(Component, { + missingBranch: 'this-is-not-the-branch-you-are-looking-for', + }); }); - it('should have the correct elements', () => { - expect(el.classList.contains('mr-widget-help')).toBeTruthy(); - expect(el.textContent).toContain(text); + it('renders missing branch information', () => { + expect( + vm.$el.textContent.trim().replace(/[\r\n]+/g, ' ').replace(/\s\s+/g, ' '), + ).toEqual( + 'If the this-is-not-the-branch-you-are-looking-for branch exists in your local repository, you can merge this merge request manually using the command line', + ); }); - it('should not show missing branch name if missingBranch props is not provided', (done) => { - vm.missingBranch = null; - Vue.nextTick(() => { - expect(el.textContent).not.toContain(text); - done(); - }); + it('renders button to open help modal', () => { + expect(vm.$el.querySelector('.js-open-modal-help').getAttribute('data-target')).toEqual('#modal_merge_info'); + expect(vm.$el.querySelector('.js-open-modal-help').getAttribute('data-toggle')).toEqual('modal'); + }); + }); + + describe('without missing branch', () => { + beforeEach(() => { + vm = mountComponent(Component); + }); + + it('renders information about how to merge manually', () => { + expect( + vm.$el.textContent.trim().replace(/[\r\n]+/g, ' ').replace(/\s\s+/g, ' '), + ).toEqual( + 'You can merge this merge request manually using the command line', + ); + }); + + it('renders element to open a modal', () => { + expect(vm.$el.querySelector('.js-open-modal-help').getAttribute('data-target')).toEqual('#modal_merge_info'); + expect(vm.$el.querySelector('.js-open-modal-help').getAttribute('data-toggle')).toEqual('modal'); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js index f86fb6a0b4b..637bf483deb 100644 --- a/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js +++ b/spec/javascripts/vue_mr_widget/components/mr_widget_related_links_spec.js @@ -1,117 +1,82 @@ import Vue from 'vue'; -import relatedLinksComponent from '~/vue_merge_request_widget/components/mr_widget_related_links'; +import relatedLinksComponent from '~/vue_merge_request_widget/components/mr_widget_related_links.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; -const createComponent = (data) => { - const Component = Vue.extend(relatedLinksComponent); +describe('MRWidgetRelatedLinks', () => { + let vm; - return new Component({ - el: document.createElement('div'), - propsData: data, - }); -}; + const createComponent = (data) => { + const Component = Vue.extend(relatedLinksComponent); -describe('MRWidgetRelatedLinks', () => { - describe('props', () => { - it('should have props', () => { - const { relatedLinks } = relatedLinksComponent.props; + return mountComponent(Component, data); + }; - expect(relatedLinks).toBeDefined(); - expect(relatedLinks.type instanceof Object).toBeTruthy(); - expect(relatedLinks.required).toBeTruthy(); - }); + afterEach(() => { + vm.$destroy(); }); describe('computed', () => { - const data = { - relatedLinks: { - closing: '/foo', - mentioned: '/foo', - assignToMe: '/foo', - }, - }; - - describe('hasLinks', () => { - it('should return correct value when we have links reference', () => { - const vm = createComponent(data); - expect(vm.hasLinks).toBeTruthy(); - - 
vm.relatedLinks.closing = null; - expect(vm.hasLinks).toBeTruthy(); - - vm.relatedLinks.mentioned = null; - expect(vm.hasLinks).toBeTruthy(); - - vm.relatedLinks.assignToMe = null; - expect(vm.hasLinks).toBeFalsy(); - }); - }); - describe('closesText', () => { - it('returns correct text for open merge request', () => { - data.state = 'open'; - const vm = createComponent(data); + it('returns Closes text for open merge request', () => { + vm = createComponent({ state: 'open', relatedLinks: {} }); expect(vm.closesText).toEqual('Closes'); }); it('returns correct text for closed merge request', () => { - data.state = 'closed'; - const vm = createComponent(data); + vm = createComponent({ state: 'closed', relatedLinks: {} }); expect(vm.closesText).toEqual('Did not close'); }); it('returns correct tense for merged request', () => { - data.state = 'merged'; - const vm = createComponent(data); + vm = createComponent({ state: 'merged', relatedLinks: {} }); expect(vm.closesText).toEqual('Closed'); }); }); }); - describe('template', () => { - it('should have only have closing issues text', () => { - const vm = createComponent({ - relatedLinks: { - closing: '<a href="#">#23</a> and <a>#42</a>', - }, - }); - const content = vm.$el.textContent.replace(/\n(\s)+/g, ' ').trim(); - - expect(content).toContain('Closes #23 and #42'); - expect(content).not.toContain('Mentions'); + it('should have only have closing issues text', () => { + vm = createComponent({ + relatedLinks: { + closing: '<a href="#">#23</a> and <a>#42</a>', + }, }); + const content = vm.$el.textContent.replace(/\n(\s)+/g, ' ').trim(); - it('should have only have mentioned issues text', () => { - const vm = createComponent({ - relatedLinks: { - mentioned: '<a href="#">#7</a>', - }, - }); + expect(content).toContain('Closes #23 and #42'); + expect(content).not.toContain('Mentions'); + }); - expect(vm.$el.innerText).toContain('Mentions #7'); - expect(vm.$el.innerText).not.toContain('Closes'); + it('should have only have mentioned issues text', () => { + vm = createComponent({ + relatedLinks: { + mentioned: '<a href="#">#7</a>', + }, }); - it('should have closing and mentioned issues at the same time', () => { - const vm = createComponent({ - relatedLinks: { - closing: '<a href="#">#7</a>', - mentioned: '<a href="#">#23</a> and <a>#42</a>', - }, - }); - const content = vm.$el.textContent.replace(/\n(\s)+/g, ' ').trim(); + expect(vm.$el.innerText).toContain('Mentions #7'); + expect(vm.$el.innerText).not.toContain('Closes'); + }); - expect(content).toContain('Closes #7'); - expect(content).toContain('Mentions #23 and #42'); + it('should have closing and mentioned issues at the same time', () => { + vm = createComponent({ + relatedLinks: { + closing: '<a href="#">#7</a>', + mentioned: '<a href="#">#23</a> and <a>#42</a>', + }, }); + const content = vm.$el.textContent.replace(/\n(\s)+/g, ' ').trim(); - it('should have assing issues link', () => { - const vm = createComponent({ - relatedLinks: { - assignToMe: '<a href="#">Assign yourself to these issues</a>', - }, - }); + expect(content).toContain('Closes #7'); + expect(content).toContain('Mentions #23 and #42'); + }); - expect(vm.$el.innerText).toContain('Assign yourself to these issues'); + it('should have assing issues link', () => { + vm = createComponent({ + relatedLinks: { + assignToMe: '<a href="#">Assign yourself to these issues</a>', + }, }); + + expect(vm.$el.innerText).toContain('Assign yourself to these issues'); }); }); diff --git 
a/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js index cef365eec8a..a57b9811e08 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js @@ -1,45 +1,37 @@ import Vue from 'vue'; -import failedToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_failed_to_merge'; +import failedToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.vue'; import eventHub from '~/vue_merge_request_widget/event_hub'; - -const mr = { - mergeError: 'Merge error happened.', -}; -const createComponent = () => { - const Component = Vue.extend(failedToMergeComponent); - return new Component({ - el: document.createElement('div'), - propsData: { mr }, - }); -}; +import mountComponent from '../../../helpers/vue_mount_component_helper'; describe('MRWidgetFailedToMerge', () => { - describe('data', () => { - it('should have default data', () => { - const data = failedToMergeComponent.data(); + let Component; + let vm; + + beforeEach(() => { + Component = Vue.extend(failedToMergeComponent); + spyOn(eventHub, '$emit'); + vm = mountComponent(Component, { mr: { + mergeError: 'Merge error happened.', + } }); + }); - expect(data.timer).toEqual(10); - expect(data.isRefreshing).toBeFalsy(); - }); + afterEach(() => { + vm.$destroy(); }); describe('computed', () => { describe('timerText', () => { it('should return correct timer text', () => { - const vm = createComponent(); - expect(vm.timerText).toEqual('10 seconds'); + expect(vm.timerText).toEqual('Refreshing in 10 seconds to show the updated status...'); vm.timer = 1; - expect(vm.timerText).toEqual('a second'); + expect(vm.timerText).toEqual('Refreshing in a second to show the updated status...'); }); }); }); describe('created', () => { it('should disable polling', () => { - spyOn(eventHub, '$emit'); - createComponent(); - expect(eventHub.$emit).toHaveBeenCalledWith('DisablePolling'); }); }); @@ -47,13 +39,10 @@ describe('MRWidgetFailedToMerge', () => { describe('methods', () => { describe('refresh', () => { it('should emit event to request component refresh', () => { - spyOn(eventHub, '$emit'); - const vm = createComponent(); - - expect(vm.isRefreshing).toBeFalsy(); + expect(vm.isRefreshing).toEqual(false); vm.refresh(); - expect(vm.isRefreshing).toBeTruthy(); + expect(vm.isRefreshing).toEqual(true); expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested'); expect(eventHub.$emit).toHaveBeenCalledWith('EnablePolling'); }); @@ -61,12 +50,11 @@ describe('MRWidgetFailedToMerge', () => { describe('updateTimer', () => { it('should update timer and emit event when timer end', () => { - const vm = createComponent(); spyOn(vm, 'refresh'); expect(vm.timer).toEqual(10); - for (let i = 0; i < 10; i++) { // eslint-disable-line + for (let i = 0; i < 10; i += 1) { expect(vm.timer).toEqual(10 - i); vm.updateTimer(); } @@ -76,47 +64,54 @@ describe('MRWidgetFailedToMerge', () => { }); }); - describe('template', () => { - let vm; - let el; + describe('while it is refreshing', () => { + it('renders Refresing now', (done) => { + vm.isRefreshing = true; - beforeEach(() => { - vm = createComponent(); - el = vm.$el; + Vue.nextTick(() => { + expect(vm.$el.querySelector('.js-refresh-label').textContent.trim()).toEqual('Refreshing now'); + done(); + }); }); + }); - it('should have 
correct elements', (done) => { - expect(el.classList.contains('mr-widget-body')).toBeTruthy(); - expect(el.innerText).toContain('Merge error happened.'); - expect(el.innerText).toContain('Refreshing in 10 seconds'); - expect(el.innerText).not.toContain('Merge failed.'); - expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy(); - expect(el.querySelector('button').innerText).toContain('Merge'); - expect(el.querySelector('.js-refresh-button').innerText).toContain('Refresh now'); - expect(el.querySelector('.js-refresh-label')).toEqual(null); - expect(el.innerText).not.toContain('Refreshing now'); - setTimeout(() => { - expect(el.innerText).toContain('Refreshing in 9 seconds'); - done(); - }, 1010); + describe('while it is not regresing', () => { + it('renders warning icon and disabled merge button', () => { + expect(vm.$el.querySelector('.js-ci-status-icon-warning')).not.toBeNull(); + expect(vm.$el.querySelector('.js-disabled-merge-button').getAttribute('disabled')).toEqual('disabled'); + }); + + it('renders given error', () => { + expect(vm.$el.querySelector('.has-error-message').textContent.trim()).toEqual('Merge error happened..'); }); - it('should just generic merge failed message if merge_error is not available', (done) => { - vm.mr.mergeError = null; + it('renders refresh button', () => { + expect(vm.$el.querySelector('.js-refresh-button').textContent.trim()).toEqual('Refresh now'); + }); - Vue.nextTick(() => { - expect(el.innerText).toContain('Merge failed.'); - expect(el.innerText).not.toContain('Merge error happened.'); - done(); - }); + it('renders remaining time', () => { + expect( + vm.$el.querySelector('.has-custom-error').textContent.trim(), + ).toEqual('Refreshing in 10 seconds to show the updated status...'); + }); + }); + + it('should just generic merge failed message if merge_error is not available', (done) => { + vm.mr.mergeError = null; + + Vue.nextTick(() => { + expect(vm.$el.innerText).toContain('Merge failed.'); + expect(vm.$el.innerText).not.toContain('Merge error happened.'); + done(); }); + }); - it('should show refresh label when refresh requested', () => { - vm.refresh(); - Vue.nextTick(() => { - expect(el.innerText).not.toContain('Merge failed. Refreshing'); - expect(el.innerText).toContain('Refreshing now'); - }); + it('should show refresh label when refresh requested', (done) => { + vm.refresh(); + Vue.nextTick(() => { + expect(vm.$el.innerText).not.toContain('Merge failed. 
Refreshing'); + expect(vm.$el.innerText).toContain('Refreshing now'); + done(); }); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js index 5f4df15bcd6..df56c4e2c5c 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js @@ -1,77 +1,50 @@ import Vue from 'vue'; -import mwpsComponent from '~/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds'; +import mwpsComponent from '~/vue_merge_request_widget/components/states/mr_widget_merge_when_pipeline_succeeds.vue'; import eventHub from '~/vue_merge_request_widget/event_hub'; - -const targetBranchPath = '/foo/bar'; -const targetBranch = 'foo'; -const sha = '1EA2EZ34'; - -const createComponent = () => { - const Component = Vue.extend(mwpsComponent); - const mr = { - shouldRemoveSourceBranch: false, - canRemoveSourceBranch: true, - canCancelAutomaticMerge: true, - mergeUserId: 1, - currentUserId: 1, - setToMWPSBy: {}, - sha, - targetBranchPath, - targetBranch, - }; - - const service = { - cancelAutomaticMerge() {}, - mergeResource: { - save() {}, - }, - }; - - return new Component({ - el: document.createElement('div'), - propsData: { mr, service }, - }); -}; +import mountComponent from '../../../helpers/vue_mount_component_helper'; describe('MRWidgetMergeWhenPipelineSucceeds', () => { - describe('props', () => { - it('should have props', () => { - const { mr, service } = mwpsComponent.props; - - expect(mr.type instanceof Object).toBeTruthy(); - expect(mr.required).toBeTruthy(); - - expect(service.type instanceof Object).toBeTruthy(); - expect(service.required).toBeTruthy(); + let vm; + const targetBranchPath = '/foo/bar'; + const targetBranch = 'foo'; + const sha = '1EA2EZ34'; + + beforeEach(() => { + const Component = Vue.extend(mwpsComponent); + spyOn(eventHub, '$emit'); + + vm = mountComponent(Component, { + mr: { + shouldRemoveSourceBranch: false, + canRemoveSourceBranch: true, + canCancelAutomaticMerge: true, + mergeUserId: 1, + currentUserId: 1, + setToMWPSBy: {}, + sha, + targetBranchPath, + targetBranch, + }, + service: { + cancelAutomaticMerge() {}, + mergeResource: { + save() {}, + }, + }, }); }); - describe('components', () => { - it('should have components added', () => { - expect(mwpsComponent.components['mr-widget-author']).toBeDefined(); - }); - }); - - describe('data', () => { - it('should have default data', () => { - const data = mwpsComponent.data(); - - expect(data.isCancellingAutoMerge).toBeFalsy(); - expect(data.isRemovingSourceBranch).toBeFalsy(); - }); + afterEach(() => { + vm.$destroy(); }); describe('computed', () => { describe('canRemoveSourceBranch', () => { it('should return true when user is able to remove source branch', () => { - const vm = createComponent(); - expect(vm.canRemoveSourceBranch).toBeTruthy(); }); it('should return false when user id is not the same with who set the MWPS', () => { - const vm = createComponent(); - vm.mr.mergeUserId = 2; expect(vm.canRemoveSourceBranch).toBeFalsy(); @@ -83,15 +56,11 @@ describe('MRWidgetMergeWhenPipelineSucceeds', () => { }); it('should return false when shouldRemoveSourceBranch set to false', () => { - const vm = createComponent(); - vm.mr.shouldRemoveSourceBranch = true; expect(vm.canRemoveSourceBranch).toBeFalsy(); }); 
it('should return false if user is not able to remove the source branch', () => { - const vm = createComponent(); - vm.mr.canRemoveSourceBranch = false; expect(vm.canRemoveSourceBranch).toBeFalsy(); }); @@ -101,11 +70,9 @@ describe('MRWidgetMergeWhenPipelineSucceeds', () => { describe('methods', () => { describe('cancelAutomaticMerge', () => { it('should set flag and call service then tell main component to update the widget with data', (done) => { - const vm = createComponent(); const mrObj = { is_new_mr_data: true, }; - spyOn(eventHub, '$emit'); spyOn(vm.service, 'cancelAutomaticMerge').and.returnValue(new Promise((resolve) => { resolve({ data: mrObj, @@ -123,8 +90,6 @@ describe('MRWidgetMergeWhenPipelineSucceeds', () => { describe('removeSourceBranch', () => { it('should set flag and call service then request main component to update the widget', (done) => { - const vm = createComponent(); - spyOn(eventHub, '$emit'); spyOn(vm.service.mergeResource, 'save').and.returnValue(new Promise((resolve) => { resolve({ data: { @@ -148,31 +113,23 @@ describe('MRWidgetMergeWhenPipelineSucceeds', () => { }); describe('template', () => { - let vm; - let el; - - beforeEach(() => { - vm = createComponent(); - el = vm.$el; - }); - it('should have correct elements', () => { - expect(el.classList.contains('mr-widget-body')).toBeTruthy(); - expect(el.innerText).toContain('to be merged automatically when the pipeline succeeds'); - expect(el.innerText).toContain('The changes will be merged into'); - expect(el.innerText).toContain(targetBranch); - expect(el.innerText).toContain('The source branch will not be removed'); - expect(el.querySelector('.js-cancel-auto-merge').innerText).toContain('Cancel automatic merge'); - expect(el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeFalsy(); - expect(el.querySelector('.js-remove-source-branch').innerText).toContain('Remove source branch'); - expect(el.querySelector('.js-remove-source-branch').getAttribute('disabled')).toBeFalsy(); + expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy(); + expect(vm.$el.innerText).toContain('to be merged automatically when the pipeline succeeds'); + expect(vm.$el.innerText).toContain('The changes will be merged into'); + expect(vm.$el.innerText).toContain(targetBranch); + expect(vm.$el.innerText).toContain('The source branch will not be removed'); + expect(vm.$el.querySelector('.js-cancel-auto-merge').innerText).toContain('Cancel automatic merge'); + expect(vm.$el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeFalsy(); + expect(vm.$el.querySelector('.js-remove-source-branch').innerText).toContain('Remove source branch'); + expect(vm.$el.querySelector('.js-remove-source-branch').getAttribute('disabled')).toBeFalsy(); }); it('should disable cancel auto merge button when the action is in progress', (done) => { vm.isCancellingAutoMerge = true; Vue.nextTick(() => { - expect(el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeTruthy(); + expect(vm.$el.querySelector('.js-cancel-auto-merge').getAttribute('disabled')).toBeTruthy(); done(); }); }); @@ -181,7 +138,7 @@ describe('MRWidgetMergeWhenPipelineSucceeds', () => { vm.mr.shouldRemoveSourceBranch = true; Vue.nextTick(() => { - const normalizedText = el.innerText.replace(/\s+/g, ' '); + const normalizedText = vm.$el.innerText.replace(/\s+/g, ' '); expect(normalizedText).toContain('The source branch will be removed'); expect(normalizedText).not.toContain('The source branch will not be removed'); done(); @@ -192,7 +149,7 
@@ describe('MRWidgetMergeWhenPipelineSucceeds', () => { vm.mr.currentUserId = 4; Vue.nextTick(() => { - expect(el.querySelector('.js-remove-source-branch')).toEqual(null); + expect(vm.$el.querySelector('.js-remove-source-branch')).toEqual(null); done(); }); }); @@ -201,7 +158,7 @@ describe('MRWidgetMergeWhenPipelineSucceeds', () => { vm.isRemovingSourceBranch = true; Vue.nextTick(() => { - expect(el.querySelector('.js-remove-source-branch').getAttribute('disabled')).toBeTruthy(); + expect(vm.$el.querySelector('.js-remove-source-branch').getAttribute('disabled')).toBeTruthy(); done(); }); }); diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js index 2dc3b72ea40..43a989393ba 100644 --- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js +++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js @@ -1,108 +1,99 @@ import Vue from 'vue'; -import mergedComponent from '~/vue_merge_request_widget/components/states/mr_widget_merged'; +import mergedComponent from '~/vue_merge_request_widget/components/states/mr_widget_merged.vue'; import eventHub from '~/vue_merge_request_widget/event_hub'; - -const targetBranch = 'foo'; - -const createComponent = () => { - const Component = Vue.extend(mergedComponent); - const mr = { - isRemovingSourceBranch: false, - cherryPickInForkPath: false, - canCherryPickInCurrentMR: true, - revertInForkPath: false, - canRevertInCurrentMR: true, - canRemoveSourceBranch: true, - sourceBranchRemoved: true, - metrics: { - mergedBy: {}, - mergedAt: 'mergedUpdatedAt', - readableMergedAt: '', - closedBy: {}, - closedAt: 'mergedUpdatedAt', - readableClosedAt: '', - }, - updatedAt: 'mrUpdatedAt', - targetBranch, - }; - - const service = { - removeSourceBranch() {}, - }; - - return new Component({ - el: document.createElement('div'), - propsData: { mr, service }, - }); -}; +import mountComponent from '../../../helpers/vue_mount_component_helper'; describe('MRWidgetMerged', () => { - describe('props', () => { - it('should have props', () => { - const { mr, service } = mergedComponent.props; - - expect(mr.type instanceof Object).toBeTruthy(); - expect(mr.required).toBeTruthy(); - - expect(service.type instanceof Object).toBeTruthy(); - expect(service.required).toBeTruthy(); - }); - }); - - describe('components', () => { - it('should have components added', () => { - expect(mergedComponent.components['mr-widget-author-and-time']).toBeDefined(); - }); + let vm; + const targetBranch = 'foo'; + + beforeEach(() => { + const Component = Vue.extend(mergedComponent); + const mr = { + isRemovingSourceBranch: false, + cherryPickInForkPath: false, + canCherryPickInCurrentMR: true, + revertInForkPath: false, + canRevertInCurrentMR: true, + canRemoveSourceBranch: true, + sourceBranchRemoved: true, + metrics: { + mergedBy: { + name: 'Administrator', + username: 'root', + webUrl: 'http://localhost:3000/root', + avatarUrl: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + }, + mergedAt: 'Jan 24, 2018 1:02pm GMT+0000', + readableMergedAt: '', + closedBy: {}, + closedAt: 'Jan 24, 2018 1:02pm GMT+0000', + readableClosedAt: '', + }, + updatedAt: 'mergedUpdatedAt', + targetBranch, + }; + + const service = { + removeSourceBranch() {}, + }; + + spyOn(eventHub, '$emit'); + + vm = mountComponent(Component, { mr, service }); }); - describe('data', () => { - it('should have default data', () => { - const data = 
mergedComponent.data(); - - expect(data.isMakingRequest).toBeFalsy(); - }); + afterEach(() => { + vm.$destroy(); }); describe('computed', () => { describe('shouldShowRemoveSourceBranch', () => { - it('should correct value when fields changed', () => { - const vm = createComponent(); + it('returns true when sourceBranchRemoved is false', () => { vm.mr.sourceBranchRemoved = false; - expect(vm.shouldShowRemoveSourceBranch).toBeTruthy(); + expect(vm.shouldShowRemoveSourceBranch).toEqual(true); + }); + it('returns false wehn sourceBranchRemoved is true', () => { vm.mr.sourceBranchRemoved = true; - expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + expect(vm.shouldShowRemoveSourceBranch).toEqual(false); + }); + it('returns false when canRemoveSourceBranch is false', () => { vm.mr.sourceBranchRemoved = false; vm.mr.canRemoveSourceBranch = false; - expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + expect(vm.shouldShowRemoveSourceBranch).toEqual(false); + }); + it('returns false when is making request', () => { vm.mr.canRemoveSourceBranch = true; vm.isMakingRequest = true; - expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + expect(vm.shouldShowRemoveSourceBranch).toEqual(false); + }); + it('returns true when all are true', () => { vm.mr.isRemovingSourceBranch = true; vm.mr.canRemoveSourceBranch = true; vm.isMakingRequest = true; - expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + expect(vm.shouldShowRemoveSourceBranch).toEqual(false); }); }); + describe('shouldShowSourceBranchRemoving', () => { it('should correct value when fields changed', () => { - const vm = createComponent(); vm.mr.sourceBranchRemoved = false; - expect(vm.shouldShowSourceBranchRemoving).toBeFalsy(); + expect(vm.shouldShowSourceBranchRemoving).toEqual(false); vm.mr.sourceBranchRemoved = true; - expect(vm.shouldShowRemoveSourceBranch).toBeFalsy(); + expect(vm.shouldShowRemoveSourceBranch).toEqual(false); vm.mr.sourceBranchRemoved = false; vm.isMakingRequest = true; - expect(vm.shouldShowSourceBranchRemoving).toBeTruthy(); + expect(vm.shouldShowSourceBranchRemoving).toEqual(true); vm.isMakingRequest = false; vm.mr.isRemovingSourceBranch = true; - expect(vm.shouldShowSourceBranchRemoving).toBeTruthy(); + expect(vm.shouldShowSourceBranchRemoving).toEqual(true); }); }); }); @@ -110,8 +101,6 @@ describe('MRWidgetMerged', () => { describe('methods', () => { describe('removeSourceBranch', () => { it('should set flag and call service then request main component to update the widget', (done) => { - const vm = createComponent(); - spyOn(eventHub, '$emit'); spyOn(vm.service, 'removeSourceBranch').and.returnValue(new Promise((resolve) => { resolve({ data: { @@ -123,7 +112,7 @@ describe('MRWidgetMerged', () => { vm.removeSourceBranch(); setTimeout(() => { const args = eventHub.$emit.calls.argsFor(0); - expect(vm.isMakingRequest).toBeTruthy(); + expect(vm.isMakingRequest).toEqual(true); expect(args[0]).toEqual('MRWidgetUpdateRequested'); expect(args[1]).not.toThrow(); done(); @@ -132,53 +121,50 @@ describe('MRWidgetMerged', () => { }); }); - describe('template', () => { - let vm; - let el; + it('has merged by information', () => { + expect(vm.$el.textContent).toContain('Merged by'); + expect(vm.$el.textContent).toContain('Administrator'); + }); - beforeEach(() => { - vm = createComponent(); - el = vm.$el; - }); + it('renders branch information', () => { + expect(vm.$el.textContent).toContain('The changes were merged into'); + expect(vm.$el.textContent).toContain(targetBranch); + }); - it('should have correct elements', () => 
{ - expect(el.classList.contains('mr-widget-body')).toBeTruthy(); - expect(el.querySelector('.js-mr-widget-author')).toBeDefined(); - expect(el.innerText).toContain('The changes were merged into'); - expect(el.innerText).toContain(targetBranch); - expect(el.innerText).toContain('The source branch has been removed'); - expect(el.innerText).toContain('Revert'); - expect(el.innerText).toContain('Cherry-pick'); - expect(el.innerText).not.toContain('You can remove source branch now'); - expect(el.innerText).not.toContain('The source branch is being removed'); - }); + it('renders information about branch being removed', () => { + expect(vm.$el.textContent).toContain('The source branch has been removed'); + }); - it('should not show source branch removed text', (done) => { - vm.mr.sourceBranchRemoved = false; + it('shows revert and cherry-pick buttons', () => { + expect(vm.$el.textContent).toContain('Revert'); + expect(vm.$el.textContent).toContain('Cherry-pick'); + }); - Vue.nextTick(() => { - expect(el.innerText).toContain('You can remove source branch now'); - expect(el.innerText).not.toContain('The source branch has been removed'); - done(); - }); + it('should not show source branch removed text', (done) => { + vm.mr.sourceBranchRemoved = false; + + Vue.nextTick(() => { + expect(vm.$el.innerText).toContain('You can remove source branch now'); + expect(vm.$el.innerText).not.toContain('The source branch has been removed'); + done(); }); + }); - it('should show source branch removing text', (done) => { - vm.mr.isRemovingSourceBranch = true; - vm.mr.sourceBranchRemoved = false; + it('should show source branch removing text', (done) => { + vm.mr.isRemovingSourceBranch = true; + vm.mr.sourceBranchRemoved = false; - Vue.nextTick(() => { - expect(el.innerText).toContain('The source branch is being removed'); - expect(el.innerText).not.toContain('You can remove source branch now'); - expect(el.innerText).not.toContain('The source branch has been removed'); - done(); - }); + Vue.nextTick(() => { + expect(vm.$el.innerText).toContain('The source branch is being removed'); + expect(vm.$el.innerText).not.toContain('You can remove source branch now'); + expect(vm.$el.innerText).not.toContain('The source branch has been removed'); + done(); }); + }); - it('should use mergedEvent updatedAt as tooltip title', () => { - expect( - el.querySelector('time').getAttribute('title'), - ).toBe('mergedUpdatedAt'); - }); + it('should use mergedEvent mergedAt as tooltip title', () => { + expect( + vm.$el.querySelector('time').getAttribute('title'), + ).toBe('Jan 24, 2018 1:02pm GMT+0000'); }); }); diff --git a/spec/javascripts/vue_shared/components/confirmation_input_spec.js b/spec/javascripts/vue_shared/components/confirmation_input_spec.js new file mode 100644 index 00000000000..a6a12614e77 --- /dev/null +++ b/spec/javascripts/vue_shared/components/confirmation_input_spec.js @@ -0,0 +1,63 @@ +import Vue from 'vue'; +import confirmationInput from '~/vue_shared/components/confirmation_input.vue'; +import mountComponent from '../../helpers/vue_mount_component_helper'; + +describe('Confirmation input component', () => { + const Component = Vue.extend(confirmationInput); + const props = { + inputId: 'dummy-id', + confirmationKey: 'confirmation-key', + confirmationValue: 'confirmation-value', + }; + let vm; + + afterEach(() => { + vm.$destroy(); + }); + + describe('props', () => { + beforeEach(() => { + vm = mountComponent(Component, props); + }); + + it('sets id of the input field to inputId', () => { + 
expect(vm.$refs.enteredValue.id).toBe(props.inputId); + }); + + it('sets name of the input field to confirmationKey', () => { + expect(vm.$refs.enteredValue.name).toBe(props.confirmationKey); + }); + }); + + describe('computed', () => { + describe('inputLabel', () => { + it('escapes confirmationValue by default', () => { + vm = mountComponent(Component, { ...props, confirmationValue: 'n<e></e>ds escap"ng' }); + expect(vm.inputLabel).toBe('Type <code>n<e></e>ds escap"ng</code> to confirm:'); + }); + + it('does not escape confirmationValue if escapeValue is false', () => { + vm = mountComponent(Component, { ...props, confirmationValue: 'n<e></e>ds escap"ng', shouldEscapeConfirmationValue: false }); + expect(vm.inputLabel).toBe('Type <code>n<e></e>ds escap"ng</code> to confirm:'); + }); + }); + }); + + describe('methods', () => { + describe('hasCorrectValue', () => { + beforeEach(() => { + vm = mountComponent(Component, props); + }); + + it('returns false if entered value is incorrect', () => { + vm.$refs.enteredValue.value = 'incorrect'; + expect(vm.hasCorrectValue()).toBe(false); + }); + + it('returns true if entered value is correct', () => { + vm.$refs.enteredValue.value = props.confirmationValue; + expect(vm.hasCorrectValue()).toBe(true); + }); + }); + }); +}); diff --git a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb b/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb index 935146c17fc..a41a28a56f1 100644 --- a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb @@ -53,7 +53,7 @@ describe Banzai::Filter::CommitRangeReferenceFilter do doc = reference_filter("See (#{reference}.)") exp = Regexp.escape(range.reference_link_text) - expect(doc.to_html).to match(/\(<a.+>#{exp}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{exp}</a>\.\)}) end it 'ignores invalid commit IDs' do @@ -222,7 +222,7 @@ describe Banzai::Filter::CommitRangeReferenceFilter do doc = reference_filter("Fixed (#{reference}.)") exp = Regexp.escape(range.reference_link_text(project)) - expect(doc.to_html).to match(/\(<a.+>#{exp}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{exp}</a>\.\)}) end it 'ignores invalid commit IDs on the referenced project' do diff --git a/spec/lib/banzai/filter/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/commit_reference_filter_spec.rb index 080a5f57da9..35f8792ff35 100644 --- a/spec/lib/banzai/filter/commit_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/commit_reference_filter_spec.rb @@ -42,7 +42,7 @@ describe Banzai::Filter::CommitReferenceFilter do it 'links with adjacent text' do doc = reference_filter("See (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{commit.short_id}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{commit.short_id}</a>\.\)}) end it 'ignores invalid commit IDs' do @@ -199,12 +199,12 @@ describe Banzai::Filter::CommitReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Fixed (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{commit.reference_link_text(project)}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{commit.reference_link_text(project)}</a>\.\)}) end it 'ignores invalid commit IDs on the referenced project' do act = "Committed #{invalidate_reference(reference)}" - expect(reference_filter(act).to_html).to match(/<a.+>#{Regexp.escape(invalidate_reference(reference))}<\/a>/) + expect(reference_filter(act).to_html).to 
match(%r{<a.+>#{Regexp.escape(invalidate_reference(reference))}</a>}) end end end diff --git a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb index a0d391d981c..d9018a7e4fe 100644 --- a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb @@ -49,7 +49,7 @@ describe Banzai::Filter::ExternalIssueReferenceFilter do it 'links with adjacent text' do doc = filter("Issue (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{reference}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{reference}</a>\.\)}) end it 'includes a title attribute' do diff --git a/spec/lib/banzai/filter/image_link_filter_spec.rb b/spec/lib/banzai/filter/image_link_filter_spec.rb index 51920869545..c84b98eb225 100644 --- a/spec/lib/banzai/filter/image_link_filter_spec.rb +++ b/spec/lib/banzai/filter/image_link_filter_spec.rb @@ -14,7 +14,7 @@ describe Banzai::Filter::ImageLinkFilter do it 'does not wrap a duplicate link' do doc = filter(%Q(<a href="/whatever">#{image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg')}</a>)) - expect(doc.to_html).to match /^<a href="\/whatever"><img[^>]*><\/a>$/ + expect(doc.to_html).to match %r{^<a href="/whatever"><img[^>]*></a>$} end it 'works with external images' do @@ -24,6 +24,6 @@ describe Banzai::Filter::ImageLinkFilter do it 'works with inline images' do doc = filter(%Q(<p>test #{image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg')} inline</p>)) - expect(doc.to_html).to match /^<p>test <a[^>]*><img[^>]*><\/a> inline<\/p>$/ + expect(doc.to_html).to match %r{^<p>test <a[^>]*><img[^>]*></a> inline</p>$} end end diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/issue_reference_filter_spec.rb index 3a5f52ea23f..905fbb9434b 100644 --- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/issue_reference_filter_spec.rb @@ -288,7 +288,7 @@ describe Banzai::Filter::IssueReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Fixed (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(issue.to_reference(project))} \(comment 123\)<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(issue.to_reference(project))} \(comment 123\)</a>\.\)}) end it 'includes default classes' do @@ -317,7 +317,7 @@ describe Banzai::Filter::IssueReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Fixed (#{reference_link}.)") - expect(doc.to_html).to match(/\(<a.+>Reference<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>Reference</a>\.\)}) end it 'includes default classes' do @@ -346,7 +346,7 @@ describe Banzai::Filter::IssueReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Fixed (#{reference_link}.)") - expect(doc.to_html).to match(/\(<a.+>Reference<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>Reference</a>\.\)}) end it 'includes default classes' do diff --git a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb index 158844e25ae..eeb82822f68 100644 --- a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb @@ -42,7 +42,7 @@ describe Banzai::Filter::MergeRequestReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Merge (#{reference}.)") - expect(doc.to_html).to 
match(/\(<a.+>#{Regexp.escape(reference)}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(reference)}</a>\.\)}) end it 'ignores invalid merge IDs' do @@ -211,7 +211,7 @@ describe Banzai::Filter::MergeRequestReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Merge (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(merge.to_reference(project))} \(diffs, comment 123\)<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(merge.to_reference(project))} \(diffs, comment 123\)</a>\.\)}) end end diff --git a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/snippet_reference_filter_spec.rb index 3a07a6dc179..e068e02d4fc 100644 --- a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/snippet_reference_filter_spec.rb @@ -28,7 +28,7 @@ describe Banzai::Filter::SnippetReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Snippet (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(reference)}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(reference)}</a>\.\)}) end it 'ignores invalid snippet IDs' do @@ -192,13 +192,13 @@ describe Banzai::Filter::SnippetReferenceFilter do it 'links with adjacent text' do doc = reference_filter("See (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{Regexp.escape(snippet.to_reference(project))}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(snippet.to_reference(project))}</a>\.\)}) end it 'ignores invalid snippet IDs on the referenced project' do act = "See #{invalidate_reference(reference)}" - expect(reference_filter(act).to_html).to match(/<a.+>#{Regexp.escape(invalidate_reference(reference))}<\/a>/) + expect(reference_filter(act).to_html).to match(%r{<a.+>#{Regexp.escape(invalidate_reference(reference))}</a>}) end end diff --git a/spec/lib/banzai/filter/user_reference_filter_spec.rb b/spec/lib/banzai/filter/user_reference_filter_spec.rb index c76adc262fc..2f86a046d28 100644 --- a/spec/lib/banzai/filter/user_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/user_reference_filter_spec.rb @@ -146,7 +146,7 @@ describe Banzai::Filter::UserReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Mention me (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>#{reference}<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>#{reference}</a>\.\)}) end it 'includes default classes' do @@ -172,7 +172,7 @@ describe Banzai::Filter::UserReferenceFilter do it 'links with adjacent text' do doc = reference_filter("Mention me (#{reference}.)") - expect(doc.to_html).to match(/\(<a.+>User<\/a>\.\)/) + expect(doc.to_html).to match(%r{\(<a.+>User</a>\.\)}) end it 'includes a data-user attribute' do diff --git a/spec/lib/gitaly/server_spec.rb b/spec/lib/gitaly/server_spec.rb new file mode 100644 index 00000000000..ed5d56e91d4 --- /dev/null +++ b/spec/lib/gitaly/server_spec.rb @@ -0,0 +1,30 @@ +require 'spec_helper' + +describe Gitaly::Server do + describe '.all' do + let(:storages) { Gitlab.config.repositories.storages } + + it 'includes all storages' do + expect(storages.count).to eq(described_class.all.count) + expect(storages.keys).to eq(described_class.all.map(&:storage)) + end + end + + subject { described_class.all.first } + + it { is_expected.to respond_to(:server_version) } + it { is_expected.to respond_to(:git_binary_version) } + it { is_expected.to respond_to(:up_to_date?) 
} + it { is_expected.to respond_to(:address) } + + describe 'request memoization' do + context 'when requesting multiple properties', :request_store do + it 'uses memoization for the info request' do + expect do + subject.server_version + subject.up_to_date? + end.to change { Gitlab::GitalyClient.get_request_count }.by(1) + end + end + end +end diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb index 8bb9ebe0419..370c2490b97 100644 --- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb +++ b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb @@ -23,6 +23,27 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do end end + # E.g. The installation is in use at the time of migration, and someone has + # just uploaded a file + shared_examples 'does not add files in /uploads/tmp' do + let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') } + + before do + FileUtils.mkdir(File.dirname(tmp_file)) + FileUtils.touch(tmp_file) + end + + after do + FileUtils.rm(tmp_file) + end + + it 'does not add files from /uploads/tmp' do + described_class.new.perform + + expect(untracked_files_for_uploads.count).to eq(5) + end + end + it 'ensures the untracked_files_for_uploads table exists' do expect do described_class.new.perform @@ -109,24 +130,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do end end - # E.g. The installation is in use at the time of migration, and someone has - # just uploaded a file context 'when there are files in /uploads/tmp' do - let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') } - - before do - FileUtils.touch(tmp_file) - end - - after do - FileUtils.rm(tmp_file) - end - - it 'does not add files from /uploads/tmp' do - described_class.new.perform - - expect(untracked_files_for_uploads.count).to eq(5) - end + it_behaves_like 'does not add files in /uploads/tmp' end end end @@ -197,24 +202,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do end end - # E.g. 
The installation is in use at the time of migration, and someone has - # just uploaded a file context 'when there are files in /uploads/tmp' do - let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') } - - before do - FileUtils.touch(tmp_file) - end - - after do - FileUtils.rm(tmp_file) - end - - it 'does not add files from /uploads/tmp' do - described_class.new.perform - - expect(untracked_files_for_uploads.count).to eq(5) - end + it_behaves_like 'does not add files in /uploads/tmp' end end end diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb index 39e3b875c49..326ed2f2ecf 100644 --- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb +++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb @@ -17,7 +17,7 @@ describe Gitlab::Gfm::UploadsRewriter do end let(:text) do - "Text and #{image_uploader.to_markdown} and #{zip_uploader.to_markdown}" + "Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}" end describe '#rewrite' do diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 3db04d99855..96a442f782f 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -2,6 +2,7 @@ require "spec_helper" describe Gitlab::Git::Repository, seed_helper: true do include Gitlab::EncodingHelper + using RSpec::Parameterized::TableSyntax shared_examples 'wrapping gRPC errors' do |gitaly_client_class, gitaly_client_method| it 'wraps gRPC not found error' do @@ -19,6 +20,7 @@ describe Gitlab::Git::Repository, seed_helper: true do let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') } let(:storage_path) { TestEnv.repos_path } + let(:user) { build(:user) } describe '.create_hooks' do let(:repo_path) { File.join(storage_path, 'hook-test.git') } @@ -248,7 +250,7 @@ describe Gitlab::Git::Repository, seed_helper: true do end shared_examples 'archive check' do |extenstion| - it { expect(metadata['ArchivePath']).to match(/tmp\/gitlab-git-test.git\/gitlab-git-test-master-#{SeedRepo::LastCommit::ID}/) } + it { expect(metadata['ArchivePath']).to match(%r{tmp/gitlab-git-test.git/gitlab-git-test-master-#{SeedRepo::LastCommit::ID}}) } it { expect(metadata['ArchivePath']).to end_with extenstion } end @@ -442,6 +444,7 @@ describe Gitlab::Git::Repository, seed_helper: true do shared_examples 'simple commit counting' do it { expect(repository.commit_count("master")).to eq(25) } it { expect(repository.commit_count("feature")).to eq(9) } + it { expect(repository.commit_count("does-not-exist")).to eq(0) } end context 'when Gitaly commit_count feature is enabled' do @@ -560,35 +563,39 @@ describe Gitlab::Git::Repository, seed_helper: true do end describe '#delete_refs' do - before(:all) do - @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') - end + shared_examples 'deleting refs' do + let(:repo) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') } - it 'deletes the ref' do - @repo.delete_refs('refs/heads/feature') + after do + ensure_seeds + end - expect(@repo.rugged.references['refs/heads/feature']).to be_nil - end + it 'deletes the ref' do + repo.delete_refs('refs/heads/feature') + + expect(repo.rugged.references['refs/heads/feature']).to be_nil + end - it 'deletes all refs' do - refs = %w[refs/heads/wip refs/tags/v1.1.0] - @repo.delete_refs(*refs) + it 'deletes all refs' do + refs = %w[refs/heads/wip refs/tags/v1.1.0] + repo.delete_refs(*refs) - refs.each do |ref| - 
expect(@repo.rugged.references[ref]).to be_nil + refs.each do |ref| + expect(repo.rugged.references[ref]).to be_nil + end end - end - it 'raises an error if it failed' do - expect(@repo).to receive(:popen).and_return(['Error', 1]) + it 'raises an error if it failed' do + expect { repo.delete_refs('refs\heads\fix') }.to raise_error(Gitlab::Git::Repository::GitError) + end + end - expect do - @repo.delete_refs('refs/heads/fix') - end.to raise_error(Gitlab::Git::Repository::GitError) + context 'when Gitaly delete_refs feature is enabled' do + it_behaves_like 'deleting refs' end - after(:all) do - ensure_seeds + context 'when Gitaly delete_refs feature is disabled', :disable_gitaly do + it_behaves_like 'deleting refs' end end @@ -687,7 +694,6 @@ describe Gitlab::Git::Repository, seed_helper: true do describe '#remote_tags' do let(:remote_name) { 'upstream' } let(:target_commit_id) { SeedRepo::Commit::ID } - let(:user) { create(:user) } let(:tag_name) { 'v0.0.1' } let(:tag_message) { 'My tag' } let(:remote_repository) do @@ -899,44 +905,6 @@ describe Gitlab::Git::Repository, seed_helper: true do end end - context "compare results between log_by_walk and log_by_shell" do - let(:options) { { ref: "master" } } - let(:commits_by_walk) { repository.log(options).map(&:id) } - let(:commits_by_shell) { repository.log(options.merge({ disable_walk: true })).map(&:id) } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - - context "with limit" do - let(:options) { { ref: "master", limit: 1 } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - end - - context "with offset" do - let(:options) { { ref: "master", offset: 1 } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - end - - context "with skip_merges" do - let(:options) { { ref: "master", skip_merges: true } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - end - - context "with path" do - let(:options) { { ref: "master", path: "encoding" } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - - context "with follow" do - let(:options) { { ref: "master", path: "encoding", follow: true } } - - it { expect(commits_by_walk).to eq(commits_by_shell) } - end - end - end - context "where provides 'after' timestamp" do options = { after: Time.iso8601('2014-03-03T20:15:01+00:00') } @@ -981,6 +949,16 @@ describe Gitlab::Git::Repository, seed_helper: true do end end end + + context 'limit validation' do + where(:limit) do + [0, nil, '', 'foo'] + end + + with_them do + it { expect { repository.log(limit: limit) }.to raise_error(ArgumentError) } + end + end end describe "#rugged_commits_between" do @@ -1022,6 +1000,29 @@ describe Gitlab::Git::Repository, seed_helper: true do it { is_expected.to eq(17) } end + describe '#merge_base' do + shared_examples '#merge_base' do + where(:from, :to, :result) do + '570e7b2abdd848b95f2f578043fc23bd6f6fd24d' | '40f4a7a617393735a95a0bb67b08385bc1e7c66d' | '570e7b2abdd848b95f2f578043fc23bd6f6fd24d' + '40f4a7a617393735a95a0bb67b08385bc1e7c66d' | '570e7b2abdd848b95f2f578043fc23bd6f6fd24d' | '570e7b2abdd848b95f2f578043fc23bd6f6fd24d' + '40f4a7a617393735a95a0bb67b08385bc1e7c66d' | 'foobar' | nil + 'foobar' | '40f4a7a617393735a95a0bb67b08385bc1e7c66d' | nil + end + + with_them do + it { expect(repository.merge_base(from, to)).to eq(result) } + end + end + + context 'with gitaly' do + it_behaves_like '#merge_base' + end + + context 'without gitaly', :skip_gitaly_mock do + it_behaves_like '#merge_base' + end + end + describe '#count_commits' do shared_examples 'extended commit 
counting' do context 'with after timestamp' do @@ -1710,7 +1711,6 @@ describe Gitlab::Git::Repository, seed_helper: true do shared_examples "user deleting a branch" do let(:project) { create(:project, :repository) } let(:repository) { project.repository.raw } - let(:user) { create(:user) } let(:branch_name) { "to-be-deleted-soon" } before do @@ -1751,12 +1751,49 @@ describe Gitlab::Git::Repository, seed_helper: true do end end + describe '#write_config' do + before do + repository.rugged.config["gitlab.fullpath"] = repository.path + end + + shared_examples 'writing repo config' do + context 'is given a path' do + it 'writes it to disk' do + repository.write_config(full_path: "not-the/real-path.git") + + config = File.read(File.join(repository.path, "config")) + + expect(config).to include("[gitlab]") + expect(config).to include("fullpath = not-the/real-path.git") + end + end + + context 'it is given an empty path' do + it 'does not write it to disk' do + repository.write_config(full_path: "") + + config = File.read(File.join(repository.path, "config")) + + expect(config).to include("[gitlab]") + expect(config).to include("fullpath = #{repository.path}") + end + end + end + + context "when gitaly_write_config is enabled" do + it_behaves_like "writing repo config" + end + + context "when gitaly_write_config is disabled", :disable_gitaly do + it_behaves_like "writing repo config" + end + end + describe '#merge' do let(:repository) do Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') end let(:source_sha) { '913c66a37b4a45b9769037c55c2d238bd0942d2e' } - let(:user) { build(:user) } let(:target_branch) { 'test-merge-target-branch' } before do @@ -1809,7 +1846,6 @@ describe Gitlab::Git::Repository, seed_helper: true do end let(:branch_head) { '6d394385cf567f80a8fd85055db1ab4c5295806f' } let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' } - let(:user) { build(:user) } let(:target_branch) { 'test-ff-target-branch' } before do @@ -2128,6 +2164,47 @@ describe Gitlab::Git::Repository, seed_helper: true do expect { subject }.to raise_error(Gitlab::Git::CommandError, 'error') end end + + describe '#squash' do + let(:squash_id) { '1' } + let(:branch_name) { 'fix' } + let(:start_sha) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' } + let(:end_sha) { '12d65c8dd2b2676fa3ac47d955accc085a37a9c1' } + + subject do + opts = { + branch: branch_name, + start_sha: start_sha, + end_sha: end_sha, + author: user, + message: 'Squash commit message' + } + + repository.squash(user, squash_id, opts) + end + + context 'sparse checkout' do + let(:expected_files) { %w(files files/js files/js/application.js) } + + before do + allow(repository).to receive(:with_worktree).and_wrap_original do |m, *args| + m.call(*args) do + worktree_path = args[0] + files_pattern = File.join(worktree_path, '**', '*') + expected = expected_files.map do |path| + File.expand_path(path, worktree_path) + end + + expect(Dir[files_pattern]).to eq(expected) + end + end + end + + it 'checkouts only the files in the diff' do + subject + end + end + end end def create_remote_branch(repository, remote_name, branch_name, source_branch_name) diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb index 86f7bcb8e38..001e406a930 100644 --- a/spec/lib/gitlab/git/tree_spec.rb +++ b/spec/lib/gitlab/git/tree_spec.rb @@ -80,22 +80,18 @@ describe Gitlab::Git::Tree, seed_helper: true do end describe '#where' do - context 'with gitaly disabled' do - before do - allow(Gitlab::GitalyClient).to 
receive(:feature_enabled?).and_return(false) - end - - it 'calls #tree_entries_from_rugged' do - expect(described_class).to receive(:tree_entries_from_rugged) - - described_class.where(repository, SeedRepo::Commit::ID, '/') + shared_examples '#where' do + it 'returns an empty array when called with an invalid ref' do + expect(described_class.where(repository, 'foobar-does-not-exist')).to eq([]) end end - it 'gets the tree entries from GitalyClient' do - expect_any_instance_of(Gitlab::GitalyClient::CommitService).to receive(:tree_entries) + context 'with gitaly' do + it_behaves_like '#where' + end - described_class.where(repository, SeedRepo::Commit::ID, '/') + context 'without gitaly', :skip_gitaly_mock do + it_behaves_like '#where' end end end diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb index 951e146a30a..257e4c50f2d 100644 --- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb @@ -112,7 +112,7 @@ describe Gitlab::GitalyClient::RefService do expect_any_instance_of(Gitaly::RefService::Stub) .to receive(:delete_refs) .with(gitaly_request_with_params(except_with_prefix: prefixes), kind_of(Hash)) - .and_return(double('delete_refs_response')) + .and_return(double('delete_refs_response', git_error: "")) client.delete_refs(except_with_prefixes: prefixes) end diff --git a/spec/lib/gitlab/import_export/project.json b/spec/lib/gitlab/import_export/project.json index 4cf33778d15..b6c1f0c81cb 100644 --- a/spec/lib/gitlab/import_export/project.json +++ b/spec/lib/gitlab/import_export/project.json @@ -7096,7 +7096,7 @@ "project_id": 5, "created_at": "2016-06-14T15:01:51.232Z", "updated_at": "2016-06-14T15:01:51.232Z", - "active": false, + "active": true, "properties": { }, diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb index 9dfd879a1bc..d076007e4bc 100644 --- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb @@ -236,12 +236,14 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do labels = project.issues.first.labels expect(labels.where(type: "ProjectLabel").count).to eq(results.fetch(:first_issue_labels, 0)) + expect(labels.where(type: "ProjectLabel").where.not(group_id: nil).count).to eq(0) end end shared_examples 'restores group correctly' do |**results| it 'has group label' do expect(project.group.labels.size).to eq(results.fetch(:labels, 0)) + expect(project.group.labels.where(type: "GroupLabel").where.not(project_id: nil).count).to eq(0) end it 'has group milestone' do diff --git a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb index 08e5bbbd400..5804c45871e 100644 --- a/spec/lib/gitlab/import_export/project_tree_saver_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_saver_spec.rb @@ -164,6 +164,10 @@ describe Gitlab::ImportExport::ProjectTreeSaver do expect(saved_project_json['services'].first['type']).to eq('CustomIssueTrackerService') end + it 'saves the properties for a service' do + expect(saved_project_json['services'].first['properties']).to eq('one' => 'value') + end + it 'has project feature' do project_feature = saved_project_json['project_feature'] expect(project_feature).not_to be_empty @@ -279,7 +283,7 @@ describe Gitlab::ImportExport::ProjectTreeSaver do commit_id: ci_build.pipeline.sha) create(:event, :created, 
target: milestone, project: project, author: user) - create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker') + create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' }) create(:project_custom_attribute, project: project) create(:project_custom_attribute, project: project) diff --git a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb index 63992ea8ab8..a685521cbf0 100644 --- a/spec/lib/gitlab/import_export/uploads_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/uploads_restorer_spec.rb @@ -4,7 +4,6 @@ describe Gitlab::ImportExport::UploadsRestorer do describe 'bundle a project Git repo' do let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" } let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) } - let(:uploads_path) { FileUploader.dynamic_path_segment(project) } before do allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path) @@ -26,9 +25,9 @@ describe Gitlab::ImportExport::UploadsRestorer do end it 'copies the uploads to the project path' do - restorer.restore + subject.restore - uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) } + uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) } expect(uploads).to include('dummy.txt') end @@ -44,9 +43,9 @@ describe Gitlab::ImportExport::UploadsRestorer do end it 'copies the uploads to the project path' do - restorer.restore + subject.restore - uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) } + uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) } expect(uploads).to include('dummy.txt') end diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb index e8948de1f3a..959779523f4 100644 --- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb +++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb @@ -30,7 +30,7 @@ describe Gitlab::ImportExport::UploadsSaver do it 'copies the uploads to the export path' do saver.save - uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) } + uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) } expect(uploads).to include('banana_sample.gif') end @@ -52,7 +52,7 @@ describe Gitlab::ImportExport::UploadsSaver do it 'copies the uploads to the export path' do saver.save - uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) } + uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) } expect(uploads).to include('banana_sample.gif') end diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb index 41a9d1d9c90..d9379cfe674 100644 --- a/spec/lib/gitlab/metrics/method_call_spec.rb +++ b/spec/lib/gitlab/metrics/method_call_spec.rb @@ -5,6 +5,10 @@ describe Gitlab::Metrics::MethodCall do let(:method_call) { described_class.new('Foo#bar', :Foo, '#bar', transaction) } describe '#measure' do + after do + described_class.reload_metric!(:gitlab_method_call_duration_seconds) + end + it 'measures the performance of the supplied block' do method_call.measure { 'foo' } @@ -20,8 +24,6 @@ describe Gitlab::Metrics::MethodCall do context 'prometheus 
instrumentation is enabled' do before do - allow(Feature.get(:prometheus_metrics_method_instrumentation)).to receive(:enabled?).and_call_original - described_class.measurement_enabled_cache_expires_at.value = Time.now.to_i - 1 Feature.get(:prometheus_metrics_method_instrumentation).enable end @@ -31,30 +33,12 @@ describe Gitlab::Metrics::MethodCall do end end - it 'caches subsequent invocations of feature check' do - 10.times do - method_call.measure { 'foo' } - end - - expect(Feature.get(:prometheus_metrics_method_instrumentation)).to have_received(:enabled?).once - end - - it 'expires feature check cache after 1 minute' do - method_call.measure { 'foo' } - - Timecop.travel(1.minute.from_now) do - method_call.measure { 'foo' } - end - - Timecop.travel(1.minute.from_now + 1.second) do - method_call.measure { 'foo' } - end - - expect(Feature.get(:prometheus_metrics_method_instrumentation)).to have_received(:enabled?).twice + it 'metric is not a NullMetric' do + expect(described_class).not_to be_instance_of(Gitlab::Metrics::NullMetric) end it 'observes the performance of the supplied block' do - expect(described_class.call_duration_histogram) + expect(described_class.gitlab_method_call_duration_seconds) .to receive(:observe) .with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric)) @@ -64,14 +48,12 @@ describe Gitlab::Metrics::MethodCall do context 'prometheus instrumentation is disabled' do before do - described_class.measurement_enabled_cache_expires_at.value = Time.now.to_i - 1 - Feature.get(:prometheus_metrics_method_instrumentation).disable end - it 'does not observe the performance' do - expect(described_class.call_duration_histogram) - .not_to receive(:observe) + it 'observes using NullMetric' do + expect(described_class.gitlab_method_call_duration_seconds).to be_instance_of(Gitlab::Metrics::NullMetric) + expect(described_class.gitlab_method_call_duration_seconds).to receive(:observe) method_call.measure { 'foo' } end @@ -81,12 +63,10 @@ describe Gitlab::Metrics::MethodCall do context 'when measurement is below threshold' do before do allow(method_call).to receive(:above_threshold?).and_return(false) - - Feature.get(:prometheus_metrics_method_instrumentation).enable end it 'does not observe the performance' do - expect(described_class.call_duration_histogram) + expect(described_class.gitlab_method_call_duration_seconds) .not_to receive(:observe) method_call.measure { 'foo' } @@ -96,7 +76,7 @@ describe Gitlab::Metrics::MethodCall do describe '#to_metric' do it 'returns a Metric instance' do - expect(method_call).to receive(:real_time).and_return(4.0001) + expect(method_call).to receive(:real_time).and_return(4.0001).twice expect(method_call).to receive(:cpu_time).and_return(3.0001) method_call.measure { 'foo' } diff --git a/spec/lib/gitlab/metrics/methods_spec.rb b/spec/lib/gitlab/metrics/methods_spec.rb new file mode 100644 index 00000000000..9d41ed2442b --- /dev/null +++ b/spec/lib/gitlab/metrics/methods_spec.rb @@ -0,0 +1,137 @@ +require 'spec_helper' + +describe Gitlab::Metrics::Methods do + subject { Class.new { include Gitlab::Metrics::Methods } } + + shared_context 'metric' do |metric_type, *args| + let(:docstring) { 'description' } + let(:metric_name) { :sample_metric } + + describe "#define_#{metric_type}" do + define_method(:call_define_metric_method) do |**args| + subject.__send__("define_#{metric_type}", metric_name, **args) + end + + context 'metrics access method not defined' do + it "defines metrics accessing method" do + expect(subject).not_to 
respond_to(metric_name) + + call_define_metric_method(docstring: docstring) + + expect(subject).to respond_to(metric_name) + end + end + + context 'metrics access method defined' do + before do + call_define_metric_method(docstring: docstring) + end + + it 'raises error when trying to redefine method' do + expect { call_define_metric_method(docstring: docstring) }.to raise_error(ArgumentError) + end + + context 'metric is not cached' do + it 'calls fetch_metric' do + expect(subject).to receive(:init_metric).with(metric_type, metric_name, docstring: docstring) + + subject.public_send(metric_name) + end + end + + context 'metric is cached' do + before do + subject.public_send(metric_name) + end + + it 'returns cached metric' do + expect(subject).not_to receive(:init_metric) + + subject.public_send(metric_name) + end + end + end + end + + describe "#fetch_#{metric_type}" do + let(:null_metric) { Gitlab::Metrics::NullMetric.instance } + + define_method(:call_fetch_metric_method) do |**args| + subject.__send__("fetch_#{metric_type}", metric_name, **args) + end + + context "when #{metric_type} is not cached" do + it 'initializes counter metric' do + allow(Gitlab::Metrics).to receive(metric_type).and_return(null_metric) + + call_fetch_metric_method(docstring: docstring) + + expect(Gitlab::Metrics).to have_received(metric_type).with(metric_name, docstring, *args) + end + end + + context "when #{metric_type} is cached" do + before do + call_fetch_metric_method(docstring: docstring) + end + + it 'uses class metric cache' do + expect(Gitlab::Metrics).not_to receive(metric_type) + + call_fetch_metric_method(docstring: docstring) + end + + context 'when metric is reloaded' do + before do + subject.reload_metric!(metric_name) + end + + it "initializes #{metric_type} metric" do + allow(Gitlab::Metrics).to receive(metric_type).and_return(null_metric) + + call_fetch_metric_method(docstring: docstring) + + expect(Gitlab::Metrics).to have_received(metric_type).with(metric_name, docstring, *args) + end + end + end + + context 'when metric is configured with feature' do + let(:feature_name) { :some_metric_feature } + let(:metric) { call_fetch_metric_method(docstring: docstring, with_feature: feature_name) } + + context 'when feature is enabled' do + before do + Feature.get(feature_name).enable + end + + it "initializes #{metric_type} metric" do + allow(Gitlab::Metrics).to receive(metric_type).and_return(null_metric) + + metric + + expect(Gitlab::Metrics).to have_received(metric_type).with(metric_name, docstring, *args) + end + end + + context 'when feature is disabled' do + before do + Feature.get(feature_name).disable + end + + it "returns NullMetric" do + allow(Gitlab::Metrics).to receive(metric_type) + + expect(metric).to be_instance_of(Gitlab::Metrics::NullMetric) + + expect(Gitlab::Metrics).not_to have_received(metric_type) + end + end + end + end + end + + include_examples 'metric', :counter, {} + include_examples 'metric', :gauge, {}, :all + include_examples 'metric', :histogram, {}, [0.005, 0.01, 0.1, 1, 10] +end diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb index 375cbf8a9ca..54781dd52fc 100644 --- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb +++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb @@ -2,6 +2,11 @@ require 'spec_helper' describe Gitlab::Metrics::Samplers::RubySampler do let(:sampler) { described_class.new(5) } + let(:null_metric) { double('null_metric', set: nil, observe: nil) } + + before do + 
allow(Gitlab::Metrics::NullMetric).to receive(:instance).and_return(null_metric) + end after do Allocations.stop if Gitlab::Metrics.mri? @@ -17,12 +22,9 @@ describe Gitlab::Metrics::Samplers::RubySampler do end it 'adds a metric containing the memory usage' do - expect(Gitlab::Metrics::System).to receive(:memory_usage) - .and_return(9000) + expect(Gitlab::Metrics::System).to receive(:memory_usage).and_return(9000) - expect(sampler.metrics[:memory_usage]).to receive(:set) - .with({}, 9000) - .and_call_original + expect(sampler.metrics[:memory_usage]).to receive(:set).with({}, 9000) sampler.sample end @@ -31,9 +33,7 @@ describe Gitlab::Metrics::Samplers::RubySampler do expect(Gitlab::Metrics::System).to receive(:file_descriptor_count) .and_return(4) - expect(sampler.metrics[:file_descriptors]).to receive(:set) - .with({}, 4) - .and_call_original + expect(sampler.metrics[:file_descriptors]).to receive(:set).with({}, 4) sampler.sample end @@ -49,16 +49,14 @@ describe Gitlab::Metrics::Samplers::RubySampler do it 'adds a metric containing garbage collection time statistics' do expect(GC::Profiler).to receive(:total_time).and_return(0.24) - expect(sampler.metrics[:total_time]).to receive(:set) - .with({}, 240) - .and_call_original + expect(sampler.metrics[:total_time]).to receive(:set).with({}, 240) sampler.sample end it 'adds a metric containing garbage collection statistics' do GC.stat.keys.each do |key| - expect(sampler.metrics[key]).to receive(:set).with({}, anything).and_call_original + expect(sampler.metrics[key]).to receive(:set).with({}, anything) end sampler.sample diff --git a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb index eca75a4fac1..9f3af1acef7 100644 --- a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb @@ -32,7 +32,7 @@ describe Gitlab::Metrics::Subscribers::ActionView do end it 'observes view rendering time' do - expect(subscriber.send(:metric_view_rendering_duration_seconds)) + expect(described_class.gitlab_view_rendering_duration_seconds) .to receive(:observe) .with({ view: 'app/views/x.html.haml' }, 2.1) diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb index 9b3698fb4a8..4e7bd433a9c 100644 --- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb @@ -25,7 +25,7 @@ describe Gitlab::Metrics::Subscribers::ActiveRecord do expect(subscriber).to receive(:current_transaction) .at_least(:once) .and_return(transaction) - expect(subscriber.send(:metric_sql_duration_seconds)).to receive(:observe).with({}, 0.002) + expect(described_class.send(:gitlab_sql_duration_seconds)).to receive(:observe).with({}, 0.002) subscriber.sql(event) end diff --git a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb index 58e28592cf9..6795c1ab56b 100644 --- a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb @@ -144,7 +144,10 @@ describe Gitlab::Metrics::Subscribers::RailsCache do end context 'with a transaction' do + let(:metric_cache_misses_total) { double('metric_cache_misses_total', increment: nil) } + before do + allow(subscriber).to receive(:metric_cache_misses_total).and_return(metric_cache_misses_total) allow(subscriber).to receive(:current_transaction) .and_return(transaction) end 
@@ -157,9 +160,9 @@ describe Gitlab::Metrics::Subscribers::RailsCache do end it 'increments the cache_read_miss total' do - expect(subscriber.send(:metric_cache_misses_total)).to receive(:increment).with({}) - subscriber.cache_generate(event) + + expect(metric_cache_misses_total).to have_received(:increment).with({}) end end end diff --git a/spec/lib/gitlab/metrics_spec.rb b/spec/lib/gitlab/metrics_spec.rb index 1619fbd88b1..9e405e9f736 100644 --- a/spec/lib/gitlab/metrics_spec.rb +++ b/spec/lib/gitlab/metrics_spec.rb @@ -20,7 +20,7 @@ describe Gitlab::Metrics do context 'prometheus metrics enabled in config' do before do - allow(described_class).to receive(:current_application_settings).and_return(prometheus_metrics_enabled: true) + allow(Gitlab::CurrentSettings).to receive(:current_application_settings).and_return(prometheus_metrics_enabled: true) end context 'when metrics folder is present' do diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb index 45fff4c5787..03e0a9e2a03 100644 --- a/spec/lib/gitlab/o_auth/user_spec.rb +++ b/spec/lib/gitlab/o_auth/user_spec.rb @@ -44,6 +44,18 @@ describe Gitlab::OAuth::User do let(:provider) { 'twitter' } + describe 'when account exists on server' do + it 'does not mark the user as external' do + create(:omniauth_user, extern_uid: 'my-uid', provider: provider) + stub_omniauth_config(allow_single_sign_on: [provider], external_providers: [provider]) + + oauth_user.save + + expect(gl_user).to be_valid + expect(gl_user.external).to be_falsey + end + end + describe 'signup' do context 'when signup is disabled' do before do @@ -51,7 +63,7 @@ describe Gitlab::OAuth::User do end it 'creates the user' do - stub_omniauth_config(allow_single_sign_on: ['twitter']) + stub_omniauth_config(allow_single_sign_on: [provider]) oauth_user.save @@ -65,7 +77,7 @@ describe Gitlab::OAuth::User do end it 'creates and confirms the user anyway' do - stub_omniauth_config(allow_single_sign_on: ['twitter']) + stub_omniauth_config(allow_single_sign_on: [provider]) oauth_user.save @@ -75,7 +87,7 @@ describe Gitlab::OAuth::User do end it 'marks user as having password_automatically_set' do - stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['twitter']) + stub_omniauth_config(allow_single_sign_on: [provider], external_providers: [provider]) oauth_user.save @@ -86,7 +98,7 @@ describe Gitlab::OAuth::User do shared_examples 'to verify compliance with allow_single_sign_on' do context 'provider is marked as external' do it 'marks user as external' do - stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['twitter']) + stub_omniauth_config(allow_single_sign_on: [provider], external_providers: [provider]) oauth_user.save expect(gl_user).to be_valid expect(gl_user.external).to be_truthy @@ -95,8 +107,8 @@ describe Gitlab::OAuth::User do context 'provider was external, now has been removed' do it 'does not mark external user as internal' do - create(:omniauth_user, extern_uid: 'my-uid', provider: 'twitter', external: true) - stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['facebook']) + create(:omniauth_user, extern_uid: 'my-uid', provider: provider, external: true) + stub_omniauth_config(allow_single_sign_on: [provider], external_providers: ['facebook']) oauth_user.save expect(gl_user).to be_valid expect(gl_user.external).to be_truthy @@ -118,7 +130,7 @@ describe Gitlab::OAuth::User do context 'with new allow_single_sign_on enabled syntax' do before do - 
stub_omniauth_config(allow_single_sign_on: ['twitter']) + stub_omniauth_config(allow_single_sign_on: [provider]) end it "creates a user from Omniauth" do @@ -127,7 +139,7 @@ describe Gitlab::OAuth::User do expect(gl_user).to be_valid identity = gl_user.identities.first expect(identity.extern_uid).to eql uid - expect(identity.provider).to eql 'twitter' + expect(identity.provider).to eql provider end end @@ -142,7 +154,7 @@ describe Gitlab::OAuth::User do expect(gl_user).to be_valid identity = gl_user.identities.first expect(identity.extern_uid).to eql uid - expect(identity.provider).to eql 'twitter' + expect(identity.provider).to eql provider end end diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb index 0ae90069b7f..85991c38363 100644 --- a/spec/lib/gitlab/path_regex_spec.rb +++ b/spec/lib/gitlab/path_regex_spec.rb @@ -121,7 +121,7 @@ describe Gitlab::PathRegex do STARTING_WITH_NAMESPACE = %r{^/\*namespace_id/:(project_)?id} NON_PARAM_PARTS = %r{[^:*][a-z\-_/]*} ANY_OTHER_PATH_PART = %r{[a-z\-_/:]*} - WILDCARD_SEGMENT = %r{\*} + WILDCARD_SEGMENT = /\*/ let(:namespaced_wildcard_routes) do routes_without_format.select do |p| p =~ %r{#{STARTING_WITH_NAMESPACE}/#{NON_PARAM_PARTS}/#{ANY_OTHER_PATH_PART}#{WILDCARD_SEGMENT}} diff --git a/spec/lib/gitlab/popen/runner_spec.rb b/spec/lib/gitlab/popen/runner_spec.rb new file mode 100644 index 00000000000..2e2cb4ca28f --- /dev/null +++ b/spec/lib/gitlab/popen/runner_spec.rb @@ -0,0 +1,139 @@ +require 'spec_helper' + +describe Gitlab::Popen::Runner do + subject { described_class.new } + + describe '#run' do + it 'runs the command and returns the result' do + run_command + + expect(Gitlab::Popen).to have_received(:popen_with_detail) + end + end + + describe '#all_success_and_clean?' do + it 'returns true when exit status is 0 and stderr is empty' do + run_command + + expect(subject).to be_all_success_and_clean + end + + it 'returns false when exit status is not 0' do + run_command(exitstatus: 1) + + expect(subject).not_to be_all_success_and_clean + end + + it 'returns false when exit stderr has something' do + run_command(stderr: 'stderr') + + expect(subject).not_to be_all_success_and_clean + end + end + + describe '#all_success?' do + it 'returns true when exit status is 0' do + run_command + + expect(subject).to be_all_success + end + + it 'returns false when exit status is not 0' do + run_command(exitstatus: 1) + + expect(subject).not_to be_all_success + end + + it 'returns true' do + run_command(stderr: 'stderr') + + expect(subject).to be_all_success + end + end + + describe '#all_stderr_empty?' 
do + it 'returns true when stderr is empty' do + run_command + + expect(subject).to be_all_stderr_empty + end + + it 'returns true when exit status is not 0' do + run_command(exitstatus: 1) + + expect(subject).to be_all_stderr_empty + end + + it 'returns false when exit stderr has something' do + run_command(stderr: 'stderr') + + expect(subject).not_to be_all_stderr_empty + end + end + + describe '#failed_results' do + it 'returns [] when everything is passed' do + run_command + + expect(subject.failed_results).to be_empty + end + + it 'returns the result when exit status is not 0' do + result = run_command(exitstatus: 1) + + expect(subject.failed_results).to contain_exactly(result) + end + + it 'returns [] when exit stderr has something' do + run_command(stderr: 'stderr') + + expect(subject.failed_results).to be_empty + end + end + + describe '#warned_results' do + it 'returns [] when everything is passed' do + run_command + + expect(subject.warned_results).to be_empty + end + + it 'returns [] when exit status is not 0' do + run_command(exitstatus: 1) + + expect(subject.warned_results).to be_empty + end + + it 'returns the result when exit stderr has something' do + result = run_command(stderr: 'stderr') + + expect(subject.warned_results).to contain_exactly(result) + end + end + + def run_command( + command: 'command', + stdout: 'stdout', + stderr: '', + exitstatus: 0, + status: double(exitstatus: exitstatus, success?: exitstatus.zero?), + duration: 0.1) + + result = + Gitlab::Popen::Result.new(command, stdout, stderr, status, duration) + + allow(Gitlab::Popen) + .to receive(:popen_with_detail) + .and_return(result) + + subject.run([command]) do |cmd, &run| + expect(cmd).to eq(command) + + cmd_result = run.call + + expect(cmd_result).to eq(result) + end + + subject.results.first + end +end diff --git a/spec/lib/gitlab/popen_spec.rb b/spec/lib/gitlab/popen_spec.rb index b145ca36f26..1dbead16d5b 100644 --- a/spec/lib/gitlab/popen_spec.rb +++ b/spec/lib/gitlab/popen_spec.rb @@ -1,11 +1,23 @@ require 'spec_helper' -describe 'Gitlab::Popen' do +describe Gitlab::Popen do let(:path) { Rails.root.join('tmp').to_s } before do @klass = Class.new(Object) - @klass.send(:include, Gitlab::Popen) + @klass.send(:include, described_class) + end + + describe '.popen_with_detail' do + subject { @klass.new.popen_with_detail(cmd) } + + let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts(1);$stderr.puts(2);exit(3)] } + + it { expect(subject.cmd).to eq(cmd) } + it { expect(subject.stdout).to eq("1\n") } + it { expect(subject.stderr).to eq("2\n") } + it { expect(subject.status.exitstatus).to eq(3) } + it { expect(subject.duration).to be_kind_of(Numeric) } end context 'zero status' do diff --git a/spec/lib/gitlab/slash_commands/issue_search_spec.rb b/spec/lib/gitlab/slash_commands/issue_search_spec.rb index e41e5254dde..35d01efc1bd 100644 --- a/spec/lib/gitlab/slash_commands/issue_search_spec.rb +++ b/spec/lib/gitlab/slash_commands/issue_search_spec.rb @@ -5,7 +5,7 @@ describe Gitlab::SlashCommands::IssueSearch do let!(:issue) { create(:issue, project: project, title: 'find me') } let!(:confidential) { create(:issue, :confidential, project: project, title: 'mepmep find') } let(:project) { create(:project) } - let(:user) { issue.author } + let(:user) { create(:user) } let(:regex_match) { described_class.match("issue search find") } subject do diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb index 7a8e798e3c9..59eda025108 100644 --- a/spec/mailers/notify_spec.rb +++ b/spec/mailers/notify_spec.rb @@ 
-1357,7 +1357,7 @@ describe Notify do matcher :have_part_with do |expected| match do |actual| - actual.body.parts.any? { |part| part.content_type.try(:match, %r(#{expected})) } + actual.body.parts.any? { |part| part.content_type.try(:match, /#{expected}/) } end end end diff --git a/spec/migrations/remove_project_labels_group_id_spec.rb b/spec/migrations/remove_project_labels_group_id_spec.rb new file mode 100644 index 00000000000..d80d61af20b --- /dev/null +++ b/spec/migrations/remove_project_labels_group_id_spec.rb @@ -0,0 +1,21 @@ +# encoding: utf-8 + +require 'spec_helper' +require Rails.root.join('db', 'post_migrate', '20180202111106_remove_project_labels_group_id.rb') + +describe RemoveProjectLabelsGroupId, :delete do + let(:migration) { described_class.new } + let(:group) { create(:group) } + let!(:project_label) { create(:label, group_id: group.id) } + let!(:group_label) { create(:group_label) } + + describe '#up' do + it 'updates the project labels group ID' do + expect { migration.up }.to change { project_label.reload.group_id }.to(nil) + end + + it 'keeps the group labels group ID' do + expect { migration.up }.not_to change { group_label.reload.group_id } + end + end +end diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index 45a606c1ea8..f5b3b4a9fc5 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -277,7 +277,7 @@ describe Ci::Build do allow_any_instance_of(Project).to receive(:jobs_cache_index).and_return(1) end - it { is_expected.to be_an(Array).and all(include(key: "key:1")) } + it { is_expected.to be_an(Array).and all(include(key: "key_1")) } end context 'when project does not have jobs_cache_index' do diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb index f8a98b43e46..959383ff0b7 100644 --- a/spec/models/commit_spec.rb +++ b/spec/models/commit_spec.rb @@ -228,7 +228,7 @@ eos it { expect(data).to be_a(Hash) } it { expect(data[:message]).to include('adds bar folder and branch-test text file to check Repository merged_to_root_ref method') } it { expect(data[:timestamp]).to eq('2016-09-27T14:37:46Z') } - it { expect(data[:added]).to eq(["bar/branch-test.txt"]) } + it { expect(data[:added]).to contain_exactly("bar/branch-test.txt") } it { expect(data[:modified]).to eq([]) } it { expect(data[:removed]).to eq([]) } end @@ -532,8 +532,8 @@ eos let(:commit2) { merge_request1.merge_request_diff.commits.first } it 'returns merge_requests that introduced that commit' do - expect(commit1.merge_requests).to eq([merge_request1, merge_request2]) - expect(commit2.merge_requests).to eq([merge_request1]) + expect(commit1.merge_requests).to contain_exactly(merge_request1, merge_request2) + expect(commit2.merge_requests).to contain_exactly(merge_request1) end end end diff --git a/spec/models/concerns/discussion_on_diff_spec.rb b/spec/models/concerns/discussion_on_diff_spec.rb index 2322eb206fb..30572ce9332 100644 --- a/spec/models/concerns/discussion_on_diff_spec.rb +++ b/spec/models/concerns/discussion_on_diff_spec.rb @@ -20,6 +20,16 @@ describe DiscussionOnDiff do expect(truncated_lines).not_to include(be_meta) end end + + context "when the diff line does not exist on a legacy diff note" do + it "returns an empty array" do + legacy_note = LegacyDiffNote.new + + allow(subject).to receive(:first_note).and_return(legacy_note) + + expect(truncated_lines).to eq([]) + end + end end describe '#line_code_in_diffs' do diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index 
429b6615131..243eeddc7a8 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -1064,16 +1064,6 @@ describe MergeRequest do end describe '#can_be_reverted?' do - context 'when there is no merged_at for the MR' do - before do - subject.metrics.update!(merged_at: nil) - end - - it 'returns false' do - expect(subject.can_be_reverted?(nil)).to be_falsey - end - end - context 'when there is no merge_commit for the MR' do before do subject.metrics.update!(merged_at: Time.now.utc) @@ -1097,6 +1087,16 @@ describe MergeRequest do end end + context 'when there is no merged_at for the MR' do + before do + subject.metrics.update!(merged_at: nil) + end + + it 'returns true' do + expect(subject.can_be_reverted?(nil)).to be_truthy + end + end + context 'when there is a revert commit' do let(:current_user) { subject.author } let(:branch) { subject.target_branch } @@ -1127,6 +1127,16 @@ describe MergeRequest do end end + context 'when there is no merged_at for the MR' do + before do + subject.metrics.update!(merged_at: nil) + end + + it 'returns false' do + expect(subject.can_be_reverted?(current_user)).to be_falsey + end + end + context 'when the revert commit is mentioned in a note just before the MR was merged' do before do subject.notes.last.update!(created_at: subject.metrics.merged_at - 30.seconds) @@ -1329,6 +1339,10 @@ describe MergeRequest do it 'returns false' do expect(subject.mergeable_state?).to be_falsey end + + it 'returns true when skipping discussions check' do + expect(subject.mergeable_state?(skip_discussions_check: true)).to be(true) + end end end end @@ -1529,7 +1543,7 @@ describe MergeRequest do expect { subject.reload_diff }.to change { subject.merge_request_diffs.count }.by(1) end - it "executs diff cache service" do + it "executes diff cache service" do expect_any_instance_of(MergeRequests::MergeRequestDiffCacheService).to receive(:execute).with(subject) subject.reload_diff diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index c3673a0e2a3..6b7dbad128c 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -204,7 +204,7 @@ describe Namespace do let(:parent) { create(:group, name: 'parent', path: 'parent') } let(:child) { create(:group, name: 'child', path: 'child', parent: parent) } let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) } - let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) } + let(:uploads_dir) { FileUploader.root } let(:pages_dir) { File.join(TestEnv.pages_path) } before do diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb index c9b3c6cf602..748c366efca 100644 --- a/spec/models/project_services/jira_service_spec.rb +++ b/spec/models/project_services/jira_service_spec.rb @@ -3,6 +3,29 @@ require 'spec_helper' describe JiraService do include Gitlab::Routing + describe '#options' do + let(:service) do + described_class.new( + project: build_stubbed(:project), + active: true, + username: 'username', + password: 'test', + jira_issue_transition_id: 24, + url: 'http://jira.test.com/path/' + ) + end + + it 'sets the URL properly' do + # jira-ruby gem parses the URI and handles trailing slashes + # fine: https://github.com/sumoheavy/jira-ruby/blob/v1.4.1/lib/jira/http_client.rb#L59 + expect(service.options[:site]).to eq('http://jira.test.com/') + end + + it 'leaves out trailing slashes in context' do + expect(service.options[:context_path]).to 
eq('/path') + end + end + describe "Associations" do it { is_expected.to belong_to :project } it { is_expected.to have_one :service_hook } @@ -182,7 +205,7 @@ describe JiraService do @jira_service.close_issue(merge_request, ExternalIssue.new("JIRA-123", project)) expect(WebMock).to have_requested(:post, @comment_url).with( - body: /#{custom_base_url}\/#{project.full_path}\/commit\/#{merge_request.diff_head_sha}/ + body: %r{#{custom_base_url}/#{project.full_path}/commit/#{merge_request.diff_head_sha}} ).once end @@ -197,7 +220,7 @@ describe JiraService do @jira_service.close_issue(merge_request, ExternalIssue.new("JIRA-123", project)) expect(WebMock).to have_requested(:post, @comment_url).with( - body: /#{Gitlab.config.gitlab.url}\/#{project.full_path}\/commit\/#{merge_request.diff_head_sha}/ + body: %r{#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/#{merge_request.diff_head_sha}} ).once end diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index 7c61c6b7299..1102b1c9006 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -222,20 +222,20 @@ describe Repository do it 'sets follow when path is a single path' do expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: true)).and_call_original.twice - repository.commits('master', path: 'README.md') - repository.commits('master', path: ['README.md']) + repository.commits('master', limit: 1, path: 'README.md') + repository.commits('master', limit: 1, path: ['README.md']) end it 'does not set follow when path is multiple paths' do expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original - repository.commits('master', path: ['README.md', 'CHANGELOG']) + repository.commits('master', limit: 1, path: ['README.md', 'CHANGELOG']) end it 'does not set follow when there are no paths' do expect(Gitlab::Git::Commit).to receive(:where).with(a_hash_including(follow: false)).and_call_original - repository.commits('master') + repository.commits('master', limit: 1) end end @@ -455,7 +455,7 @@ describe Repository do expect do repository.create_dir(user, 'newdir', message: 'Create newdir', branch_name: 'master') - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) newdir = repository.tree('master', 'newdir') expect(newdir.path).to eq('newdir') @@ -469,7 +469,7 @@ describe Repository do repository.create_dir(user, 'newdir', message: 'Create newdir', branch_name: 'patch', start_branch_name: 'master', start_project: forked_project) - end.to change { repository.commits('master').count }.by(0) + end.to change { repository.count_commits(ref: 'master') }.by(0) expect(repository.branch_exists?('patch')).to be_truthy expect(forked_project.repository.branch_exists?('patch')).to be_falsy @@ -486,7 +486,7 @@ describe Repository do message: 'Add newdir', branch_name: 'master', author_email: author_email, author_name: author_name) - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) last_commit = repository.commit @@ -502,7 +502,7 @@ describe Repository do repository.create_file(user, 'NEWCHANGELOG', 'Changelog!', message: 'Create changelog', branch_name: 'master') - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) blob = repository.blob_at('master', 'NEWCHANGELOG') @@ -514,7 +514,7 @@ describe Repository do 
repository.create_file(user, 'new_dir/new_file.txt', 'File!', message: 'Create new_file with new_dir', branch_name: 'master') - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) expect(repository.tree('master', 'new_dir').path).to eq('new_dir') expect(repository.blob_at('master', 'new_dir/new_file.txt').data).to eq('File!') @@ -538,7 +538,7 @@ describe Repository do branch_name: 'master', author_email: author_email, author_name: author_name) - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) last_commit = repository.commit @@ -554,7 +554,7 @@ describe Repository do repository.update_file(user, 'CHANGELOG', 'Changelog!', message: 'Update changelog', branch_name: 'master') - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) blob = repository.blob_at('master', 'CHANGELOG') @@ -567,7 +567,7 @@ describe Repository do branch_name: 'master', previous_path: 'LICENSE', message: 'Changes filename') - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) files = repository.ls_files('master') @@ -584,7 +584,7 @@ describe Repository do message: 'Update README', author_email: author_email, author_name: author_name) - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) last_commit = repository.commit @@ -599,7 +599,7 @@ describe Repository do expect do repository.delete_file(user, 'README', message: 'Remove README', branch_name: 'master') - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) expect(repository.blob_at('master', 'README')).to be_nil end @@ -610,7 +610,7 @@ describe Repository do repository.delete_file(user, 'README', message: 'Remove README', branch_name: 'master', author_email: author_email, author_name: author_name) - end.to change { repository.commits('master').count }.by(1) + end.to change { repository.count_commits(ref: 'master') }.by(1) last_commit = repository.commit @@ -772,8 +772,7 @@ describe Repository do user, 'LICENSE', 'Copyright!', message: 'Add LICENSE', branch_name: 'master') - allow(repository).to receive(:file_on_head) - .and_raise(Rugged::ReferenceError) + allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository) expect(repository.license_blob).to be_nil end @@ -885,7 +884,7 @@ describe Repository do end it 'returns nil for empty repository' do - allow(repository).to receive(:file_on_head).and_raise(Rugged::ReferenceError) + allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository) expect(repository.gitlab_ci_yml).to be_nil end end @@ -1937,8 +1936,7 @@ describe Repository do describe '#avatar' do it 'returns nil if repo does not exist' do - expect(repository).to receive(:file_on_head) - .and_raise(Rugged::ReferenceError) + allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository) expect(repository.avatar).to eq(nil) end diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb index 345382ea8c7..42f3d609770 100644 --- a/spec/models/upload_spec.rb +++ b/spec/models/upload_spec.rb @@ -45,51 +45,6 @@ describe Upload do end end - describe '.remove_path' do - it 'removes all records at the given path' do - described_class.create!( - 
size: File.size(__FILE__), - path: __FILE__, - model: build_stubbed(:user), - uploader: 'AvatarUploader' - ) - - expect { described_class.remove_path(__FILE__) } - .to change { described_class.count }.from(1).to(0) - end - end - - describe '.record' do - let(:fake_uploader) do - double( - file: double(size: 12_345), - relative_path: 'foo/bar.jpg', - model: build_stubbed(:user), - class: 'AvatarUploader' - ) - end - - it 'removes existing paths before creation' do - expect(described_class).to receive(:remove_path) - .with(fake_uploader.relative_path) - - described_class.record(fake_uploader) - end - - it 'creates a new record and assigns size, path, model, and uploader' do - upload = described_class.record(fake_uploader) - - aggregate_failures do - expect(upload).to be_persisted - expect(upload.size).to eq fake_uploader.file.size - expect(upload.path).to eq fake_uploader.relative_path - expect(upload.model_id).to eq fake_uploader.model.id - expect(upload.model_type).to eq fake_uploader.model.class.to_s - expect(upload.uploader).to eq fake_uploader.class - end - end - end - describe '#absolute_path' do it 'returns the path directly when already absolute' do path = '/path/to/namespace/project/secret/file.jpg' @@ -111,27 +66,27 @@ describe Upload do end end - describe '#calculate_checksum' do - it 'calculates the SHA256 sum' do - upload = described_class.new( - path: __FILE__, - size: described_class::CHECKSUM_THRESHOLD - 1.megabyte - ) + describe '#calculate_checksum!' do + let(:upload) do + described_class.new(path: __FILE__, + size: described_class::CHECKSUM_THRESHOLD - 1.megabyte) + end + + it 'sets `checksum` to SHA256 sum of the file' do expected = Digest::SHA256.file(__FILE__).hexdigest - expect { upload.calculate_checksum } + expect { upload.calculate_checksum! } .to change { upload.checksum }.from(nil).to(expected) end - it 'returns nil for a non-existant file' do - upload = described_class.new( - path: __FILE__, - size: described_class::CHECKSUM_THRESHOLD - 1.megabyte - ) - + it 'sets `checksum` to nil for a non-existant file' do expect(upload).to receive(:exist?).and_return(false) - expect(upload.calculate_checksum).to be_nil + checksum = Digest::SHA256.file(__FILE__).hexdigest + upload.checksum = checksum + + expect { upload.calculate_checksum! 
} + .to change { upload.checksum }.from(checksum).to(nil) end end diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb index cc9d79da708..9840afe6c4e 100644 --- a/spec/models/wiki_page_spec.rb +++ b/spec/models/wiki_page_spec.rb @@ -387,13 +387,23 @@ describe WikiPage do end describe '#formatted_content' do - it 'returns processed content of the page', :disable_gitaly do - subject.create({ title: "RDoc", content: "*bold*", format: "rdoc" }) - page = wiki.find_page('RDoc') + shared_examples 'fetching page formatted content' do + it 'returns processed content of the page' do + subject.create({ title: "RDoc", content: "*bold*", format: "rdoc" }) + page = wiki.find_page('RDoc') - expect(page.formatted_content).to eq("\n<p><strong>bold</strong></p>\n") + expect(page.formatted_content).to eq("\n<p><strong>bold</strong></p>\n") - destroy_page('RDoc') + destroy_page('RDoc') + end + end + + context 'when Gitaly wiki_page_formatted_data is enabled' do + it_behaves_like 'fetching page formatted content' + end + + context 'when Gitaly wiki_page_formatted_data is disabled', :disable_gitaly do + it_behaves_like 'fetching page formatted content' end end diff --git a/spec/policies/ci/pipeline_schedule_policy_spec.rb b/spec/policies/ci/pipeline_schedule_policy_spec.rb index 1b0e9fac355..c0c3eda4911 100644 --- a/spec/policies/ci/pipeline_schedule_policy_spec.rb +++ b/spec/policies/ci/pipeline_schedule_policy_spec.rb @@ -88,5 +88,19 @@ describe Ci::PipelineSchedulePolicy, :models do expect(policy).to be_allowed :admin_pipeline_schedule end end + + describe 'rules for non-owner of schedule' do + let(:owner) { create(:user) } + + before do + project.add_master(owner) + project.add_master(user) + pipeline_schedule.update(owner: owner) + end + + it 'includes abilities to take ownership' do + expect(policy).to be_allowed :take_ownership_pipeline_schedule + end + end end end diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb index f2593a1a75c..129344f105f 100644 --- a/spec/policies/project_policy_spec.rb +++ b/spec/policies/project_policy_spec.rb @@ -92,7 +92,7 @@ describe ProjectPolicy do it 'does not include the read_issue permission when the issue author is not a member of the private project' do project = create(:project, :private) - issue = create(:issue, project: project) + issue = create(:issue, project: project, author: create(:user)) user = issue.author expect(project.team.member?(issue.author)).to be false diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb index 34db50dc082..ff5f207487b 100644 --- a/spec/requests/api/commits_spec.rb +++ b/spec/requests/api/commits_spec.rb @@ -62,7 +62,7 @@ describe API::Commits do context "since optional parameter" do it "returns project commits since provided parameter" do - commits = project.repository.commits("master") + commits = project.repository.commits("master", limit: 2) after = commits.second.created_at get api("/projects/#{project_id}/repository/commits?since=#{after.utc.iso8601}", user) @@ -73,7 +73,7 @@ describe API::Commits do end it 'include correct pagination headers' do - commits = project.repository.commits("master") + commits = project.repository.commits("master", limit: 2) after = commits.second.created_at commit_count = project.repository.count_commits(ref: 'master', after: after).to_s @@ -87,12 +87,12 @@ describe API::Commits do context "until optional parameter" do it "returns project commits until provided parameter" do - commits = 
project.repository.commits("master") + commits = project.repository.commits("master", limit: 20) before = commits.second.created_at get api("/projects/#{project_id}/repository/commits?until=#{before.utc.iso8601}", user) - if commits.size >= 20 + if commits.size == 20 expect(json_response.size).to eq(20) else expect(json_response.size).to eq(commits.size - 1) @@ -103,7 +103,7 @@ describe API::Commits do end it 'include correct pagination headers' do - commits = project.repository.commits("master") + commits = project.repository.commits("master", limit: 2) before = commits.second.created_at commit_count = project.repository.count_commits(ref: 'master', before: before).to_s @@ -181,7 +181,7 @@ describe API::Commits do let(:page) { 3 } it 'returns the third 5 commits' do - commit = project.repository.commits('HEAD', offset: (page - 1) * per_page).first + commit = project.repository.commits('HEAD', limit: per_page, offset: (page - 1) * per_page).first expect(json_response.size).to eq(per_page) expect(json_response.first['id']).to eq(commit.id) diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb index 43218755f4f..13db40d21a5 100644 --- a/spec/requests/api/issues_spec.rb +++ b/spec/requests/api/issues_spec.rb @@ -1441,7 +1441,7 @@ describe API::Issues, :mailer do context 'when source project does not exist' do it 'returns 404 when trying to move an issue' do - post api("/projects/123/issues/#{issue.iid}/move", user), + post api("/projects/12345/issues/#{issue.iid}/move", user), to_project_id: target_project.id expect(response).to have_gitlab_http_status(404) @@ -1452,7 +1452,7 @@ describe API::Issues, :mailer do context 'when target project does not exist' do it 'returns 404 when trying to move an issue' do post api("/projects/#{project.id}/issues/#{issue.iid}/move", user), - to_project_id: 123 + to_project_id: 12345 expect(response).to have_gitlab_http_status(404) end diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb index 4dd8deb6404..f8d0b63afec 100644 --- a/spec/requests/api/jobs_spec.rb +++ b/spec/requests/api/jobs_spec.rb @@ -300,44 +300,53 @@ describe API::Jobs do end describe 'GET /projects/:id/jobs/:job_id/artifacts' do - before do - get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + shared_examples 'downloads artifact' do + let(:download_headers) do + { 'Content-Transfer-Encoding' => 'binary', + 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } + end + + it 'returns specific job artifacts' do + expect(response).to have_gitlab_http_status(200) + expect(response.headers).to include(download_headers) + expect(response.body).to match_file(job.artifacts_file.file.file) + end end - context 'job with artifacts' do - let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) } + context 'normal authentication' do + context 'job with artifacts' do + context 'when artifacts are stored locally' do + let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) } - context 'authorized user' do - let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } - end + before do + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) + end - it 'returns specific job artifacts' do - expect(response).to have_gitlab_http_status(200) - expect(response.headers).to include(download_headers) - expect(response.body).to match_file(job.artifacts_file.file.file) + context 'authorized user' do + it_behaves_like 
'downloads artifact' + end + + context 'unauthorized user' do + let(:api_user) { nil } + + it 'does not return specific job artifacts' do + expect(response).to have_gitlab_http_status(404) + end + end end - end - context 'when anonymous user is accessing private artifacts' do - let(:api_user) { nil } + it 'does not return job artifacts if not uploaded' do + get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) - it 'hides artifacts and rejects request' do - expect(project).to be_private expect(response).to have_gitlab_http_status(404) end end end - - it 'does not return job artifacts if not uploaded' do - expect(response).to have_gitlab_http_status(404) - end end describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do let(:api_user) { reporter } - let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) } + let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) } before do job.success @@ -396,14 +405,16 @@ describe API::Jobs do context 'find proper job' do shared_examples 'a valid file' do - let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => - "attachment; filename=#{job.artifacts_file.filename}" } - end + context 'when artifacts are stored locally' do + let(:download_headers) do + { 'Content-Transfer-Encoding' => 'binary', + 'Content-Disposition' => + "attachment; filename=#{job.artifacts_file.filename}" } + end - it { expect(response).to have_gitlab_http_status(200) } - it { expect(response.headers).to include(download_headers) } + it { expect(response).to have_gitlab_http_status(200) } + it { expect(response.headers).to include(download_headers) } + end end context 'with regular branch' do diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb index 73bd4785b11..ec500838eb2 100644 --- a/spec/requests/api/members_spec.rb +++ b/spec/requests/api/members_spec.rb @@ -44,6 +44,21 @@ describe API::Members do end end + it 'avoids N+1 queries' do + # Establish baseline + get api("/#{source_type.pluralize}/#{source.id}/members", master) + + control = ActiveRecord::QueryRecorder.new do + get api("/#{source_type.pluralize}/#{source.id}/members", master) + end + + project.add_developer(create(:user)) + + expect do + get api("/#{source_type.pluralize}/#{source.id}/members", master) + end.not_to exceed_query_limit(control) + end + it 'does not return invitees' do create(:"#{source_type}_member", invite_token: '123', invite_email: 'test@abc.com', source: source, user: nil) diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb index 8e2982f1a5d..14dd9da119d 100644 --- a/spec/requests/api/merge_requests_spec.rb +++ b/spec/requests/api/merge_requests_spec.rb @@ -198,6 +198,8 @@ describe API::MergeRequests do create(:merge_request, state: 'closed', milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: "Test", created_at: base_time) + create(:merge_request, milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: "Test", created_at: base_time) + expect do get api("/projects/#{project.id}/merge_requests", user) end.not_to exceed_query_limit(control) diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb index cb66d23b77c..c5c0b0c2867 100644 --- a/spec/requests/api/runner_spec.rb +++ b/spec/requests/api/runner_spec.rb @@ -945,7 +945,7 @@ describe API::Runner do context 'when artifacts are being stored 
inside of tmp path' do before do # by configuring this path we allow to pass temp file from any path - allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/') + allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/') end context 'when job has been erased' do @@ -1122,7 +1122,7 @@ describe API::Runner do # by configuring this path we allow to pass file from @tmpdir only # but all temporary files are stored in system tmp directory @tmpdir = Dir.mktmpdir - allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir) + allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir) end after do diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb index af9e36a3b29..3f92288fef0 100644 --- a/spec/requests/api/v3/builds_spec.rb +++ b/spec/requests/api/v3/builds_spec.rb @@ -4,16 +4,18 @@ describe API::V3::Builds do set(:user) { create(:user) } let(:api_user) { user } set(:project) { create(:project, :repository, creator: user, public_builds: false) } - set(:developer) { create(:project_member, :developer, user: user, project: project) } - set(:reporter) { create(:project_member, :reporter, project: project) } - set(:guest) { create(:project_member, :guest, project: project) } - set(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) } - let!(:build) { create(:ci_build, pipeline: pipeline) } + let!(:developer) { create(:project_member, :developer, user: user, project: project) } + let(:reporter) { create(:project_member, :reporter, project: project) } + let(:guest) { create(:project_member, :guest, project: project) } + let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) } + let(:build) { create(:ci_build, pipeline: pipeline) } describe 'GET /projects/:id/builds ' do let(:query) { '' } before do |example| + build + create(:ci_build, :skipped, pipeline: pipeline) unless example.metadata[:skip_before_request] @@ -110,6 +112,10 @@ describe API::V3::Builds do end describe 'GET /projects/:id/repository/commits/:sha/builds' do + before do + build + end + context 'when commit does not exist in repository' do before do get v3_api("/projects/#{project.id}/repository/commits/1a271fd1/builds", api_user) @@ -214,18 +220,20 @@ describe API::V3::Builds do end context 'job with artifacts' do - let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) } + context 'when artifacts are stored locally' do + let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) } - context 'authorized user' do - let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } - end + context 'authorized user' do + let(:download_headers) do + { 'Content-Transfer-Encoding' => 'binary', + 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' } + end - it 'returns specific job artifacts' do - expect(response).to have_gitlab_http_status(200) - expect(response.headers).to include(download_headers) - expect(response.body).to match_file(build.artifacts_file.file.file) + it 'returns specific job artifacts' do + expect(response).to have_gitlab_http_status(200) + expect(response.headers).to include(download_headers) + expect(response.body).to match_file(build.artifacts_file.file.file) + end end end @@ -303,14 +311,16 @@ describe API::V3::Builds do context 'find proper job' do shared_examples 'a valid 
file' do - let(:download_headers) do - { 'Content-Transfer-Encoding' => 'binary', - 'Content-Disposition' => - "attachment; filename=#{build.artifacts_file.filename}" } - end + context 'when artifacts are stored locally' do + let(:download_headers) do + { 'Content-Transfer-Encoding' => 'binary', + 'Content-Disposition' => + "attachment; filename=#{build.artifacts_file.filename}" } + end - it { expect(response).to have_gitlab_http_status(200) } - it { expect(response.headers).to include(download_headers) } + it { expect(response).to have_gitlab_http_status(200) } + it { expect(response.headers).to include(download_headers) } + end end context 'with regular branch' do diff --git a/spec/requests/api/v3/commits_spec.rb b/spec/requests/api/v3/commits_spec.rb index 34c543bffe8..9ef3b859001 100644 --- a/spec/requests/api/v3/commits_spec.rb +++ b/spec/requests/api/v3/commits_spec.rb @@ -36,7 +36,7 @@ describe API::V3::Commits do context "since optional parameter" do it "returns project commits since provided parameter" do - commits = project.repository.commits("master") + commits = project.repository.commits("master", limit: 2) since = commits.second.created_at get v3_api("/projects/#{project.id}/repository/commits?since=#{since.utc.iso8601}", user) @@ -49,12 +49,12 @@ describe API::V3::Commits do context "until optional parameter" do it "returns project commits until provided parameter" do - commits = project.repository.commits("master") + commits = project.repository.commits("master", limit: 20) before = commits.second.created_at get v3_api("/projects/#{project.id}/repository/commits?until=#{before.utc.iso8601}", user) - if commits.size >= 20 + if commits.size == 20 expect(json_response.size).to eq(20) else expect(json_response.size).to eq(commits.size - 1) diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb index bee918a20aa..930ef49b7f3 100644 --- a/spec/requests/lfs_http_spec.rb +++ b/spec/requests/lfs_http_spec.rb @@ -958,7 +958,7 @@ describe 'Git LFS API and storage' do end it 'responds with status 200, location of lfs store and object details' do - expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload") + expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path) expect(json_response['LfsOid']).to eq(sample_oid) expect(json_response['LfsSize']).to eq(sample_size) end @@ -1075,7 +1075,7 @@ describe 'Git LFS API and storage' do end it 'with location of lfs store and object details' do - expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload") + expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path) expect(json_response['LfsOid']).to eq(sample_oid) expect(json_response['LfsSize']).to eq(sample_size) end diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb index 8897a64a138..47c1ebbeb81 100644 --- a/spec/services/issues/close_service_spec.rb +++ b/spec/services/issues/close_service_spec.rb @@ -4,7 +4,7 @@ describe Issues::CloseService do let(:user) { create(:user) } let(:user2) { create(:user) } let(:guest) { create(:user) } - let(:issue) { create(:issue, assignees: [user2]) } + let(:issue) { create(:issue, assignees: [user2], author: create(:user)) } let(:project) { issue.project } let!(:todo) { create(:todo, :assigned, user: user, project: project, target: issue, author: user2) } diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb 
index 388c9d63c7b..322c91065e7 100644 --- a/spec/services/issues/move_service_spec.rb +++ b/spec/services/issues/move_service_spec.rb @@ -6,7 +6,7 @@ describe Issues::MoveService do let(:title) { 'Some issue' } let(:description) { 'Some issue description' } let(:old_project) { create(:project) } - let(:new_project) { create(:project) } + let(:new_project) { create(:project, group: create(:group)) } let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') } let(:old_issue) do @@ -250,7 +250,7 @@ describe Issues::MoveService do context 'issue description with uploads' do let(:uploader) { build(:file_uploader, project: old_project) } - let(:description) { "Text and #{uploader.to_markdown}" } + let(:description) { "Text and #{uploader.markdown_link}" } include_context 'issue move executed' diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb index 1cb6f2e097f..41237dd7160 100644 --- a/spec/services/issues/update_service_spec.rb +++ b/spec/services/issues/update_service_spec.rb @@ -13,7 +13,8 @@ describe Issues::UpdateService, :mailer do create(:issue, title: 'Old title', description: "for #{user2.to_reference}", assignee_ids: [user3.id], - project: project) + project: project, + author: create(:user)) end before do diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb index cb4c3e72aa0..e56d335a7d6 100644 --- a/spec/services/merge_requests/build_service_spec.rb +++ b/spec/services/merge_requests/build_service_spec.rb @@ -172,11 +172,32 @@ describe MergeRequests::BuildService do end end - context 'branch starts with external issue IID followed by a hyphen' do + context 'branch starts with numeric characters followed by a hyphen with no issue tracker' do let(:source_branch) { '12345-fix-issue' } before do + allow(project).to receive(:external_issue_tracker).and_return(false) + allow(project).to receive(:issues_enabled?).and_return(false) + end + + it 'uses the title of the commit as the title of the merge request' do + expect(merge_request.title).to eq(commit_1.safe_message.split("\n").first) + end + + it 'uses the description of the commit as the description of the merge request' do + commit_description = commit_1.safe_message.split(/\n+/, 2).last + + expect(merge_request.description).to eq("#{commit_description}") + end + end + + context 'branch starts with JIRA-formatted external issue IID followed by a hyphen' do + let(:source_branch) { 'EXMPL-12345-fix-issue' } + + before do allow(project).to receive(:external_issue_tracker).and_return(true) + allow(project).to receive(:issues_enabled?).and_return(false) + allow(project).to receive(:external_issue_reference_pattern).and_return(IssueTrackerService.reference_pattern) end it 'uses the title of the commit as the title of the merge request' do @@ -186,7 +207,7 @@ describe MergeRequests::BuildService do it 'uses the description of the commit as the description of the merge request and appends the closes text' do commit_description = commit_1.safe_message.split(/\n+/, 2).last - expect(merge_request.description).to eq("#{commit_description}\n\nCloses #12345") + expect(merge_request.description).to eq("#{commit_description}\n\nCloses EXMPL-12345") end end end @@ -252,19 +273,46 @@ describe MergeRequests::BuildService do end end - context 'branch starts with external issue IID followed by a hyphen' do + context 'branch starts with numeric characters followed by a hyphen with no issue tracker' do let(:source_branch) 
{ '12345-fix-issue' } before do - allow(project).to receive(:external_issue_tracker).and_return(true) + allow(project).to receive(:external_issue_tracker).and_return(false) + allow(project).to receive(:issues_enabled?).and_return(false) end it 'sets the title to the humanized branch title' do expect(merge_request.title).to eq('12345 fix issue') end + end + + context 'branch starts with JIRA-formatted external issue IID' do + let(:source_branch) { 'EXMPL-12345' } + + before do + allow(project).to receive(:external_issue_tracker).and_return(true) + allow(project).to receive(:issues_enabled?).and_return(false) + allow(project).to receive(:external_issue_reference_pattern).and_return(IssueTrackerService.reference_pattern) + end + + it 'sets the title to the humanized branch title' do + expect(merge_request.title).to eq('Resolve EXMPL-12345') + end it 'appends the closes text' do - expect(merge_request.description).to eq('Closes #12345') + expect(merge_request.description).to eq('Closes EXMPL-12345') + end + + context 'followed by hyphenated text' do + let(:source_branch) { 'EXMPL-12345-fix-issue' } + + it 'sets the title to the humanized branch title' do + expect(merge_request.title).to eq('Resolve EXMPL-12345 "Fix issue"') + end + + it 'appends the closes text' do + expect(merge_request.description).to eq('Closes EXMPL-12345') + end end end end diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb index 4d12de3ecce..216e0cd4266 100644 --- a/spec/services/merge_requests/close_service_spec.rb +++ b/spec/services/merge_requests/close_service_spec.rb @@ -4,7 +4,7 @@ describe MergeRequests::CloseService do let(:user) { create(:user) } let(:user2) { create(:user) } let(:guest) { create(:user) } - let(:merge_request) { create(:merge_request, assignee: user2) } + let(:merge_request) { create(:merge_request, assignee: user2, author: create(:user)) } let(:project) { merge_request.project } let!(:todo) { create(:todo, :assigned, user: user, project: project, target: merge_request, author: user2) } diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb index aa90feeef89..5ef6365fcc9 100644 --- a/spec/services/merge_requests/ff_merge_service_spec.rb +++ b/spec/services/merge_requests/ff_merge_service_spec.rb @@ -7,7 +7,8 @@ describe MergeRequests::FfMergeService do create(:merge_request, source_branch: 'flatten-dir', target_branch: 'improve/awesome', - assignee: user2) + assignee: user2, + author: create(:user)) end let(:project) { merge_request.project } diff --git a/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb index f17db70faf6..240aa638f79 100644 --- a/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb +++ b/spec/services/merge_requests/merge_when_pipeline_succeeds_service_spec.rb @@ -43,7 +43,7 @@ describe MergeRequests::MergeWhenPipelineSucceedsService do it 'creates a system note' do note = merge_request.notes.last - expect(note.note).to match /enabled an automatic merge when the pipeline for (\w+\/\w+@)?\h{8}/ + expect(note.note).to match %r{enabled an automatic merge when the pipeline for (\w+/\w+@)?\h{8}} end end diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb index 7a01d3dd698..903aa0a5078 100644 --- a/spec/services/merge_requests/refresh_service_spec.rb +++ 
b/spec/services/merge_requests/refresh_service_spec.rb @@ -55,11 +55,12 @@ describe MergeRequests::RefreshService do before do allow(refresh_service).to receive(:execute_hooks) - refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') - reload_mrs end it 'executes hooks with update action' do + refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') + reload_mrs + expect(refresh_service).to have_received(:execute_hooks) .with(@merge_request, 'update', old_rev: @oldrev) @@ -72,6 +73,34 @@ describe MergeRequests::RefreshService do expect(@build_failed_todo).to be_done expect(@fork_build_failed_todo).to be_done end + + it 'reloads source branch MRs memoization' do + refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') + + expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') }.to change { + refresh_service.instance_variable_get("@source_merge_requests").first.merge_request_diff + } + end + + context 'when source branch ref does not exists' do + before do + DeleteBranchService.new(@project, @user).execute(@merge_request.source_branch) + end + + it 'closes MRs without source branch ref' do + expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') } + .to change { @merge_request.reload.state } + .from('opened') + .to('closed') + + expect(@fork_merge_request.reload).to be_open + end + + it 'does not change the merge request diff' do + expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') } + .not_to change { @merge_request.reload.merge_request_diff } + end + end end context 'when pipeline exists for the source branch' do @@ -371,37 +400,21 @@ describe MergeRequests::RefreshService do end it 'references the commit that caused the Work in Progress status' do - refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') - allow(refresh_service).to receive(:find_new_commits) - refresh_service.instance_variable_set("@commits", [ - double( - id: 'aaaaaaa', - sha: '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e', - short_id: 'aaaaaaa', - title: 'Fix issue', - work_in_progress?: false - ), - double( - id: 'bbbbbbb', - sha: '498214de67004b1da3d820901307bed2a68a8ef6', - short_id: 'bbbbbbb', - title: 'fixup! Fix issue', - work_in_progress?: true, - to_reference: 'bbbbbbb' - ), - double( - id: 'ccccccc', - sha: '1b12f15a11fc6e62177bef08f47bc7b5ce50b141', - short_id: 'ccccccc', - title: 'fixup! Fix issue', - work_in_progress?: true, - to_reference: 'ccccccc' - ) - ]) - refresh_service.execute(@oldrev, @newrev, 'refs/heads/wip') - reload_mrs - expect(@merge_request.notes.last.note).to eq( - "marked as a **Work In Progress** from bbbbbbb" + wip_merge_request = create(:merge_request, + source_project: @project, + source_branch: 'wip', + target_branch: 'master', + target_project: @project) + + commits = wip_merge_request.commits + oldrev = commits.last.id + newrev = commits.first.id + wip_commit = wip_merge_request.commits.find(&:work_in_progress?) 
+ + refresh_service.execute(oldrev, newrev, 'refs/heads/wip') + + expect(wip_merge_request.reload.notes.last.note).to eq( + "marked as a **Work In Progress** from #{wip_commit.id}" ) end diff --git a/spec/services/merge_requests/reopen_service_spec.rb b/spec/services/merge_requests/reopen_service_spec.rb index a44d63e5f9f..9ee37c51d95 100644 --- a/spec/services/merge_requests/reopen_service_spec.rb +++ b/spec/services/merge_requests/reopen_service_spec.rb @@ -4,7 +4,7 @@ describe MergeRequests::ReopenService do let(:user) { create(:user) } let(:user2) { create(:user) } let(:guest) { create(:user) } - let(:merge_request) { create(:merge_request, :closed, assignee: user2) } + let(:merge_request) { create(:merge_request, :closed, assignee: user2, author: create(:user)) } let(:project) { merge_request.project } before do diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb index 2238da2d14d..c31259239ee 100644 --- a/spec/services/merge_requests/update_service_spec.rb +++ b/spec/services/merge_requests/update_service_spec.rb @@ -12,7 +12,8 @@ describe MergeRequests::UpdateService, :mailer do create(:merge_request, :simple, title: 'Old title', description: "FYI #{user2.to_reference}", assignee_id: user3.id, - source_project: project) + source_project: project, + author: create(:user)) end before do diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index 5c59455e3e1..35eb84e5e88 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -458,7 +458,7 @@ describe NotificationService, :mailer do context "merge request diff note" do let(:project) { create(:project, :repository) } let(:user) { create(:user) } - let(:merge_request) { create(:merge_request, source_project: project, assignee: user) } + let(:merge_request) { create(:merge_request, source_project: project, assignee: user, author: create(:user)) } let(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request) } before do @@ -469,11 +469,13 @@ describe NotificationService, :mailer do describe '#new_note' do it "records sent notifications" do - # Ensure create SentNotification by noteable = merge_request 6 times, not noteable = note + # 3 SentNotification are sent: the MR assignee and author, and the @u_watcher expect(SentNotification).to receive(:record_note).with(note, any_args).exactly(3).times.and_call_original notification.new_note(note) + expect(SentNotification.last(3).map(&:recipient).map(&:id)) + .to contain_exactly(merge_request.assignee.id, merge_request.author.id, @u_watcher.id) expect(SentNotification.last.in_reply_to_discussion_id).to eq(note.discussion_id) end end diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb index 50e59954f73..15699574b3a 100644 --- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb +++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb @@ -6,7 +6,7 @@ describe Projects::HashedStorage::MigrateAttachmentsService do let(:legacy_storage) { Storage::LegacyProject.new(project) } let(:hashed_storage) { Storage::HashedProject.new(project) } - let!(:upload) { Upload.find_by(path: file_uploader.relative_path) } + let!(:upload) { Upload.find_by(path: file_uploader.upload_path) } let(:file_uploader) { build(:file_uploader, project: project) } let(:old_path) { 
File.join(base_path(legacy_storage), upload.path) } let(:new_path) { File.join(base_path(hashed_storage), upload.path) } @@ -58,6 +58,6 @@ describe Projects::HashedStorage::MigrateAttachmentsService do end def base_path(storage) - FileUploader.dynamic_path_builder(storage.disk_path) + File.join(FileUploader.root, storage.disk_path) end end diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb index ab3a257f36f..ab3aa18cf4e 100644 --- a/spec/services/system_note_service_spec.rb +++ b/spec/services/system_note_service_spec.rb @@ -308,7 +308,7 @@ describe SystemNoteService do end it "posts the 'merge when pipeline succeeds' system note" do - expect(subject.note).to match(/enabled an automatic merge when the pipeline for (\w+\/\w+@)?\h{40} succeeds/) + expect(subject.note).to match(%r{enabled an automatic merge when the pipeline for (\w+/\w+@)?\h{40} succeeds}) end end @@ -695,7 +695,7 @@ describe SystemNoteService do commit = double(title: '<pre>This is a test</pre>', short_id: '12345678') escaped = '<pre>This is a test</pre>' - expect(described_class.new_commit_summary([commit])).to all(match(%r[- #{escaped}])) + expect(described_class.new_commit_summary([commit])).to all(match(/- #{escaped}/)) end end diff --git a/spec/support/javascript_fixtures_helpers.rb b/spec/support/javascript_fixtures_helpers.rb index 923c8080e6c..2197bc9d853 100644 --- a/spec/support/javascript_fixtures_helpers.rb +++ b/spec/support/javascript_fixtures_helpers.rb @@ -1,6 +1,5 @@ require 'action_dispatch/testing/test_request' require 'fileutils' -require 'gitlab/popen' module JavaScriptFixturesHelpers include Gitlab::Popen diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb index 935c08221e0..7ce80c82439 100644 --- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb +++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb @@ -2,6 +2,8 @@ shared_examples 'handle uploads' do let(:user) { create(:user) } let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') } let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') } + let(:secret) { FileUploader.generate_secret } + let(:uploader_class) { FileUploader } describe "POST #create" do context 'when a user is not authorized to upload a file' do @@ -65,7 +67,12 @@ shared_examples 'handle uploads' do describe "GET #show" do let(:show_upload) do - get :show, params.merge(secret: "123456", filename: "image.jpg") + get :show, params.merge(secret: secret, filename: "rails_sample.jpg") + end + + before do + expect(FileUploader).to receive(:generate_secret).and_return(secret) + UploadService.new(model, jpg, uploader_class).execute end context "when the model is public" do @@ -75,11 +82,6 @@ shared_examples 'handle uploads' do context "when not signed in" do context "when the file exists" do - before do - allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg) - allow(jpg).to receive(:exists?).and_return(true) - end - it "responds with status 200" do show_upload @@ -88,6 +90,10 @@ shared_examples 'handle uploads' do end context "when the file doesn't exist" do + before do + allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false) + end + it "responds with status 404" do show_upload @@ -102,11 +108,6 @@ shared_examples 'handle uploads' do end context "when the file exists" do 
- before do - allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg) - allow(jpg).to receive(:exists?).and_return(true) - end - it "responds with status 200" do show_upload @@ -115,6 +116,10 @@ shared_examples 'handle uploads' do end context "when the file doesn't exist" do + before do + allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false) + end + it "responds with status 404" do show_upload @@ -131,11 +136,6 @@ shared_examples 'handle uploads' do context "when not signed in" do context "when the file exists" do - before do - allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg) - allow(jpg).to receive(:exists?).and_return(true) - end - context "when the file is an image" do before do allow_any_instance_of(FileUploader).to receive(:image?).and_return(true) @@ -149,6 +149,10 @@ shared_examples 'handle uploads' do end context "when the file is not an image" do + before do + allow_any_instance_of(FileUploader).to receive(:image?).and_return(false) + end + it "redirects to the sign in page" do show_upload @@ -158,6 +162,10 @@ shared_examples 'handle uploads' do end context "when the file doesn't exist" do + before do + allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false) + end + it "redirects to the sign in page" do show_upload @@ -177,11 +185,6 @@ shared_examples 'handle uploads' do end context "when the file exists" do - before do - allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg) - allow(jpg).to receive(:exists?).and_return(true) - end - it "responds with status 200" do show_upload @@ -190,6 +193,10 @@ shared_examples 'handle uploads' do end context "when the file doesn't exist" do + before do + allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false) + end + it "responds with status 404" do show_upload @@ -200,11 +207,6 @@ shared_examples 'handle uploads' do context "when the user doesn't have access to the model" do context "when the file exists" do - before do - allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg) - allow(jpg).to receive(:exists?).and_return(true) - end - context "when the file is an image" do before do allow_any_instance_of(FileUploader).to receive(:image?).and_return(true) @@ -218,6 +220,10 @@ shared_examples 'handle uploads' do end context "when the file is not an image" do + before do + allow_any_instance_of(FileUploader).to receive(:image?).and_return(false) + end + it "responds with status 404" do show_upload @@ -227,6 +233,10 @@ shared_examples 'handle uploads' do end context "when the file doesn't exist" do + before do + allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false) + end + it "responds with status 404" do show_upload diff --git a/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb new file mode 100644 index 00000000000..934d53e7bba --- /dev/null +++ b/spec/support/shared_examples/uploaders/gitlab_uploader_shared_examples.rb @@ -0,0 +1,48 @@ +shared_examples "matches the method pattern" do |method| + let(:target) { subject } + let(:args) { nil } + let(:pattern) { patterns[method] } + + it do + return skip "No pattern provided, skipping." 
unless pattern + + expect(target.method(method).call(*args)).to match(pattern) + end +end + +shared_examples "builds correct paths" do |**patterns| + let(:patterns) { patterns } + + before do + allow(subject).to receive(:filename).and_return('<filename>') + end + + describe "#store_dir" do + it_behaves_like "matches the method pattern", :store_dir + end + + describe "#cache_dir" do + it_behaves_like "matches the method pattern", :cache_dir + end + + describe "#work_dir" do + it_behaves_like "matches the method pattern", :work_dir + end + + describe "#upload_path" do + it_behaves_like "matches the method pattern", :upload_path + end + + describe ".absolute_path" do + it_behaves_like "matches the method pattern", :absolute_path do + let(:target) { subject.class } + let(:args) { [upload] } + end + end + + describe ".base_dir" do + it_behaves_like "matches the method pattern", :base_dir do + let(:target) { subject.class } + end + end +end diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb index 9e5f08fbc51..c275522159c 100644 --- a/spec/support/test_env.rb +++ b/spec/support/test_env.rb @@ -237,7 +237,7 @@ module TestEnv end def artifacts_path - Gitlab.config.artifacts.path + Gitlab.config.artifacts.storage_path end # When no cached assets exist, manually hit the root path to create them diff --git a/spec/support/track_untracked_uploads_helpers.rb b/spec/support/track_untracked_uploads_helpers.rb index d05eda08201..5752078d2a0 100644 --- a/spec/support/track_untracked_uploads_helpers.rb +++ b/spec/support/track_untracked_uploads_helpers.rb @@ -1,6 +1,6 @@ module TrackUntrackedUploadsHelpers def uploaded_file - fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg') + fixture_path = Rails.root.join('spec/fixtures/rails_sample.jpg') fixture_file_upload(fixture_path) end diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb index b41c3b3958a..168facd51a6 100644 --- a/spec/tasks/gitlab/backup_rake_spec.rb +++ b/spec/tasks/gitlab/backup_rake_spec.rb @@ -165,7 +165,7 @@ describe 'gitlab:app namespace rake task' do expect(tar_contents).to match('pages.tar.gz') expect(tar_contents).to match('lfs.tar.gz') expect(tar_contents).to match('registry.tar.gz') - expect(tar_contents).not_to match(/^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|pages.tar.gz|artifacts.tar.gz|registry.tar.gz)\/$/) + expect(tar_contents).not_to match(%r{^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|pages.tar.gz|artifacts.tar.gz|registry.tar.gz)/$}) end it 'deletes temp directories' do diff --git a/spec/tasks/gitlab/git_rake_spec.rb b/spec/tasks/gitlab/git_rake_spec.rb index dacc5dc5ae7..9aebf7b0b4a 100644 --- a/spec/tasks/gitlab/git_rake_spec.rb +++ b/spec/tasks/gitlab/git_rake_spec.rb @@ -19,7 +19,7 @@ describe 'gitlab:git rake tasks' do describe 'fsck' do it 'outputs the integrity check for a repo' do - expect { run_rake_task('gitlab:git:fsck') }.to output(/Performed Checking integrity at .*@hashed\/1\/2\/test.git/).to_stdout + expect { run_rake_task('gitlab:git:fsck') }.to output(%r{Performed Checking integrity at .*@hashed/1/2/test.git}).to_stdout end it 'errors out about config.lock issues' do diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb index 6aba86fdc3c..b37d6ac831f 100644 --- a/spec/tasks/gitlab/gitaly_rake_spec.rb +++ b/spec/tasks/gitlab/gitaly_rake_spec.rb @@ -76,7 +76,11 @@ describe 'gitlab:gitaly namespace rake task' do end context 'when Rails.env is 
test' do - let(:command) { %w[make BUNDLE_FLAGS=--no-deployment] } + let(:command) do + %W[make + BUNDLE_FLAGS=--no-deployment + BUNDLE_PATH=#{Bundler.bundle_path}] + end before do allow(Rails.env).to receive(:test?).and_return(true) diff --git a/spec/tasks/gitlab/task_helpers_spec.rb b/spec/tasks/gitlab/task_helpers_spec.rb index fae5ec35c47..e9322ec4931 100644 --- a/spec/tasks/gitlab/task_helpers_spec.rb +++ b/spec/tasks/gitlab/task_helpers_spec.rb @@ -1,5 +1,4 @@ require 'spec_helper' -require 'tasks/gitlab/task_helpers' class TestHelpersTest include Gitlab::TaskHelpers diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb index 04ee6e9bfad..091ba824fc6 100644 --- a/spec/uploaders/attachment_uploader_spec.rb +++ b/spec/uploaders/attachment_uploader_spec.rb @@ -1,28 +1,14 @@ require 'spec_helper' describe AttachmentUploader do - let(:uploader) { described_class.new(build_stubbed(:user)) } + let(:note) { create(:note, :with_attachment) } + let(:uploader) { note.attachment } + let(:upload) { create(:upload, :attachment_upload, model: uploader.model) } - describe "#store_dir" do - it "stores in the system dir" do - expect(uploader.store_dir).to start_with("uploads/-/system/user") - end + subject { uploader } - it "uses the old path when using object storage" do - expect(described_class).to receive(:file_storage?).and_return(false) - expect(uploader.store_dir).to start_with("uploads/user") - end - end - - describe '#move_to_cache' do - it 'is true' do - expect(uploader.move_to_cache).to eq(true) - end - end - - describe '#move_to_store' do - it 'is true' do - expect(uploader.move_to_store).to eq(true) - end - end + it_behaves_like 'builds correct paths', + store_dir: %r[uploads/-/system/note/attachment/], + upload_path: %r[uploads/-/system/note/attachment/], + absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/] end diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb index 1dc574699d8..bf9028c9260 100644 --- a/spec/uploaders/avatar_uploader_spec.rb +++ b/spec/uploaders/avatar_uploader_spec.rb @@ -1,18 +1,16 @@ require 'spec_helper' describe AvatarUploader do - let(:uploader) { described_class.new(build_stubbed(:user)) } + let(:model) { create(:user, :with_avatar) } + let(:uploader) { described_class.new(model, :avatar) } + let(:upload) { create(:upload, model: model) } - describe "#store_dir" do - it "stores in the system dir" do - expect(uploader.store_dir).to start_with("uploads/-/system/user") - end + subject { uploader } - it "uses the old path when using object storage" do - expect(described_class).to receive(:file_storage?).and_return(false) - expect(uploader.store_dir).to start_with("uploads/user") - end - end + it_behaves_like 'builds correct paths', + store_dir: %r[uploads/-/system/user/avatar/], + upload_path: %r[uploads/-/system/user/avatar/], + absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/] describe '#move_to_cache' do it 'is false' do diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb index 0cf462e9553..bc024cd307c 100644 --- a/spec/uploaders/file_mover_spec.rb +++ b/spec/uploaders/file_mover_spec.rb @@ -3,13 +3,13 @@ require 'spec_helper' describe FileMover do let(:filename) { 'banana_sample.gif' } let(:file) { fixture_file_upload(Rails.root.join('spec', 'fixtures', filename)) } + let(:temp_file_path) { File.join('uploads/-/system/temp', 'secret55', filename) } + let(:temp_description) do - 'test 
![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) same ![banana_sample]'\ - '(/uploads/-/system/temp/secret55/banana_sample.gif)' + "test ![banana_sample](/#{temp_file_path}) "\ + "same ![banana_sample](/#{temp_file_path}) " end - let(:temp_file_path) { File.join('secret55', filename).to_s } - let(:file_path) { File.join('uploads', '-', 'system', 'personal_snippet', snippet.id.to_s, 'secret55', filename).to_s } - + let(:file_path) { File.join('uploads/-/system/personal_snippet', snippet.id.to_s, 'secret55', filename) } let(:snippet) { create(:personal_snippet, description: temp_description) } subject { described_class.new(file_path, snippet).execute } @@ -28,8 +28,8 @@ describe FileMover do expect(snippet.reload.description) .to eq( - "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)"\ - " same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif)" + "test ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) "\ + "same ![banana_sample](/uploads/-/system/personal_snippet/#{snippet.id}/secret55/banana_sample.gif) " ) end @@ -50,8 +50,8 @@ describe FileMover do expect(snippet.reload.description) .to eq( - "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)"\ - " same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif)" + "test ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) "\ + "same ![banana_sample](/uploads/-/system/temp/secret55/banana_sample.gif) " ) end diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb index fd195d6f9b8..a72f853df75 100644 --- a/spec/uploaders/file_uploader_spec.rb +++ b/spec/uploaders/file_uploader_spec.rb @@ -1,118 +1,57 @@ require 'spec_helper' describe FileUploader do - let(:uploader) { described_class.new(build_stubbed(:project)) } + let(:group) { create(:group, name: 'awesome') } + let(:project) { create(:project, namespace: group, name: 'project') } + let(:uploader) { described_class.new(project) } + let(:upload) { double(model: project, path: 'secret/foo.jpg') } - context 'legacy storage' do - let(:project) { build_stubbed(:project) } - - describe '.absolute_path' do - it 'returns the correct absolute path by building it dynamically' do - upload = double(model: project, path: 'secret/foo.jpg') - - dynamic_segment = project.full_path + subject { uploader } - expect(described_class.absolute_path(upload)) - .to end_with("#{dynamic_segment}/secret/foo.jpg") - end - end - - describe "#store_dir" do - it "stores in the namespace path" do - uploader = described_class.new(project) - - expect(uploader.store_dir).to include(project.full_path) - expect(uploader.store_dir).not_to include("system") - end - end + shared_examples 'builds correct legacy storage paths' do + include_examples 'builds correct paths', + store_dir: %r{awesome/project/\h+}, + absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg} end - context 'hashed storage' do + shared_examples 'uses hashed storage' do context 'when rolled out attachments' do - let(:project) { build_stubbed(:project, :hashed) } - - describe '.absolute_path' do - it 'returns the correct absolute path by building it dynamically' do - upload = double(model: project, path: 'secret/foo.jpg') - - dynamic_segment = project.disk_path - - expect(described_class.absolute_path(upload)) - .to end_with("#{dynamic_segment}/secret/foo.jpg") - end + before do + allow(project).to 
receive(:disk_path).and_return('ca/fe/fe/ed') end - describe "#store_dir" do - it "stores in the namespace path" do - uploader = described_class.new(project) + let(:project) { build_stubbed(:project, :hashed, namespace: group, name: 'project') } - expect(uploader.store_dir).to include(project.disk_path) - expect(uploader.store_dir).not_to include("system") - end - end + it_behaves_like 'builds correct paths', + store_dir: %r{ca/fe/fe/ed/\h+}, + absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg} end context 'when only repositories are rolled out' do - let(:project) { build_stubbed(:project, storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) } - - describe '.absolute_path' do - it 'returns the correct absolute path by building it dynamically' do - upload = double(model: project, path: 'secret/foo.jpg') - - dynamic_segment = project.full_path - - expect(described_class.absolute_path(upload)) - .to end_with("#{dynamic_segment}/secret/foo.jpg") - end - end - - describe "#store_dir" do - it "stores in the namespace path" do - uploader = described_class.new(project) + let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) } - expect(uploader.store_dir).to include(project.full_path) - expect(uploader.store_dir).not_to include("system") - end - end + it_behaves_like 'builds correct legacy storage paths' end end - describe 'initialize' do - it 'generates a secret if none is provided' do - expect(SecureRandom).to receive(:hex).and_return('secret') - - uploader = described_class.new(double) - - expect(uploader.secret).to eq 'secret' - end - - it 'accepts a secret parameter' do - expect(SecureRandom).not_to receive(:hex) - - uploader = described_class.new(double, 'secret') - - expect(uploader.secret).to eq 'secret' - end + context 'legacy storage' do + it_behaves_like 'builds correct legacy storage paths' + include_examples 'uses hashed storage' end - describe '#move_to_cache' do - it 'is true' do - expect(uploader.move_to_cache).to eq(true) - end - end + describe 'initialize' do + let(:uploader) { described_class.new(double, 'secret') } - describe '#move_to_store' do - it 'is true' do - expect(uploader.move_to_store).to eq(true) + it 'accepts a secret parameter' do + expect(described_class).not_to receive(:generate_secret) + expect(uploader.secret).to eq('secret') end end - describe '#relative_path' do - it 'removes the leading dynamic path segment' do - fixture = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg') - uploader.store!(fixture_file_upload(fixture)) - - expect(uploader.relative_path).to match(/\A\h{32}\/rails_sample.jpg\z/) + describe '#secret' do + it 'generates a secret if none is provided' do + expect(described_class).to receive(:generate_secret).and_return('secret') + expect(uploader.secret).to eq('secret') end end end diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb index 98a4373e9d0..d606404e95d 100644 --- a/spec/uploaders/job_artifact_uploader_spec.rb +++ b/spec/uploaders/job_artifact_uploader_spec.rb @@ -3,33 +3,13 @@ require 'spec_helper' describe JobArtifactUploader do let(:job_artifact) { create(:ci_job_artifact) } let(:uploader) { described_class.new(job_artifact, :file) } - let(:local_path) { Gitlab.config.artifacts.path } - describe '#store_dir' do - subject { uploader.store_dir } + subject { uploader } - let(:path) { 
"#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/#{job_artifact.job_id}/#{job_artifact.id}" } - - context 'when using local storage' do - it { is_expected.to start_with(local_path) } - it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) } - it { is_expected.to end_with(path) } - end - end - - describe '#cache_dir' do - subject { uploader.cache_dir } - - it { is_expected.to start_with(local_path) } - it { is_expected.to end_with('/tmp/cache') } - end - - describe '#work_dir' do - subject { uploader.work_dir } - - it { is_expected.to start_with(local_path) } - it { is_expected.to end_with('/tmp/work') } - end + it_behaves_like "builds correct paths", + store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z], + cache_dir: %r[artifacts/tmp/cache], + work_dir: %r[artifacts/tmp/work] context 'file is stored in valid local_path' do let(:file) do @@ -43,7 +23,7 @@ describe JobArtifactUploader do subject { uploader.file.path } - it { is_expected.to start_with(local_path) } + it { is_expected.to start_with("#{uploader.root}/#{uploader.class.base_dir}") } it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") } it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") } it { is_expected.to end_with("ci_build_artifacts.zip") } diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb index efeffb78772..54c6a8b869b 100644 --- a/spec/uploaders/legacy_artifact_uploader_spec.rb +++ b/spec/uploaders/legacy_artifact_uploader_spec.rb @@ -3,49 +3,22 @@ require 'rails_helper' describe LegacyArtifactUploader do let(:job) { create(:ci_build) } let(:uploader) { described_class.new(job, :legacy_artifacts_file) } - let(:local_path) { Gitlab.config.artifacts.path } + let(:local_path) { described_class.root } - describe '.local_store_path' do - subject { described_class.local_store_path } + subject { uploader } - it "delegate to artifacts path" do - expect(Gitlab.config.artifacts).to receive(:path) - - subject - end - end - - describe '.artifacts_upload_path' do - subject { described_class.artifacts_upload_path } + # TODO: move to Workhorse::UploadPath + describe '.workhorse_upload_path' do + subject { described_class.workhorse_upload_path } it { is_expected.to start_with(local_path) } - it { is_expected.to end_with('tmp/uploads/') } - end - - describe '#store_dir' do - subject { uploader.store_dir } - - let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" } - - context 'when using local storage' do - it { is_expected.to start_with(local_path) } - it { is_expected.to end_with(path) } - end + it { is_expected.to end_with('tmp/uploads') } end - describe '#cache_dir' do - subject { uploader.cache_dir } - - it { is_expected.to start_with(local_path) } - it { is_expected.to end_with('/tmp/cache') } - end - - describe '#work_dir' do - subject { uploader.work_dir } - - it { is_expected.to start_with(local_path) } - it { is_expected.to end_with('/tmp/work') } - end + it_behaves_like "builds correct paths", + store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z], + cache_dir: %r[artifacts/tmp/cache], + work_dir: %r[artifacts/tmp/work] describe '#filename' do # we need to use uploader, as this makes to use mounter @@ -69,7 +42,7 @@ describe LegacyArtifactUploader do subject { uploader.file.path } - it { is_expected.to start_with(local_path) } + it { is_expected.to start_with("#{uploader.root}") } it { is_expected.to include("/#{job.created_at.utc.strftime('%Y_%m')}/") 
     it { is_expected.to include("/#{job.project_id}/") }
     it { is_expected.to end_with("ci_build_artifacts.zip") }
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 7088bc23334..6ebc885daa8 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -2,39 +2,13 @@ require 'spec_helper'
 
 describe LfsObjectUploader do
   let(:lfs_object) { create(:lfs_object, :with_file) }
-  let(:uploader) { described_class.new(lfs_object) }
+  let(:uploader) { described_class.new(lfs_object, :file) }
   let(:path) { Gitlab.config.lfs.storage_path }
 
-  describe '#move_to_cache' do
-    it 'is true' do
-      expect(uploader.move_to_cache).to eq(true)
-    end
-  end
+  subject { uploader }
 
-  describe '#move_to_store' do
-    it 'is true' do
-      expect(uploader.move_to_store).to eq(true)
-    end
-  end
-
-  describe '#store_dir' do
-    subject { uploader.store_dir }
-
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with("#{lfs_object.oid[0, 2]}/#{lfs_object.oid[2, 2]}") }
-  end
-
-  describe '#cache_dir' do
-    subject { uploader.cache_dir }
-
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with('/tmp/cache') }
-  end
-
-  describe '#work_dir' do
-    subject { uploader.work_dir }
-
-    it { is_expected.to start_with(path) }
-    it { is_expected.to end_with('/tmp/work') }
-  end
+  it_behaves_like "builds correct paths",
+                  store_dir: %r[\h{2}/\h{2}],
+                  cache_dir: %r[/lfs-objects/tmp/cache],
+                  work_dir: %r[/lfs-objects/tmp/work]
 end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index c6c4500c179..24a2fc0f72e 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -1,21 +1,16 @@
 require 'spec_helper'
 
+IDENTIFIER = %r{\h+/\S+}
+
 describe NamespaceFileUploader do
   let(:group) { build_stubbed(:group) }
   let(:uploader) { described_class.new(group) }
+  let(:upload) { create(:upload, :namespace_upload, model: group) }
 
-  describe "#store_dir" do
-    it "stores in the namespace id directory" do
-      expect(uploader.store_dir).to include(group.id.to_s)
-    end
-  end
-
-  describe ".absolute_path" do
-    it "stores in thecorrect directory" do
-      upload_record = create(:upload, :namespace_upload, model: group)
+  subject { uploader }
 
-      expect(described_class.absolute_path(upload_record))
-        .to include("-/system/namespace/#{group.id}")
-    end
-  end
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/namespace/\d+],
+                  upload_path: IDENTIFIER,
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
 end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index cbafa9f478d..ed1fba6edda 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -1,25 +1,27 @@
 require 'spec_helper'
 
+IDENTIFIER = %r{\h+/\S+}
+
 describe PersonalFileUploader do
-  let(:uploader) { described_class.new(build_stubbed(:project)) }
-  let(:snippet) { create(:personal_snippet) }
+  let(:model) { create(:personal_snippet) }
+  let(:uploader) { described_class.new(model) }
+  let(:upload) { create(:upload, :personal_snippet_upload) }
 
-  describe '.absolute_path' do
-    it 'returns the correct absolute path by building it dynamically' do
-      upload = double(model: snippet, path: 'secret/foo.jpg')
+  subject { uploader }
 
-      dynamic_segment = "personal_snippet/#{snippet.id}"
-
-      expect(described_class.absolute_path(upload)).to end_with("/-/system/#{dynamic_segment}/secret/foo.jpg")
-    end
-  end
+  it_behaves_like 'builds correct paths',
+                  store_dir: %r[uploads/-/system/personal_snippet/\d+],
+                  upload_path: IDENTIFIER,
+                  absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
 
   describe '#to_h' do
-    it 'returns the hass' do
-      uploader = described_class.new(snippet, 'secret')
+    before do
+      subject.instance_variable_set(:@secret, 'secret')
+    end
 
+    it 'is correct' do
       allow(uploader).to receive(:file).and_return(double(extension: 'txt', filename: 'file_name'))
-      expected_url = "/uploads/-/system/personal_snippet/#{snippet.id}/secret/file_name"
+      expected_url = "/uploads/-/system/personal_snippet/#{model.id}/secret/file_name"
 
       expect(uploader.to_h).to eq(
         alt: 'file_name',
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 7ef7fb7d758..9a3e5d83e01 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -3,16 +3,16 @@ require 'rails_helper'
 describe RecordsUploads do
   let!(:uploader) do
     class RecordsUploadsExampleUploader < GitlabUploader
-      include RecordsUploads
+      include RecordsUploads::Concern
 
       storage :file
 
-      def model
-        FactoryBot.build_stubbed(:user)
+      def dynamic_segment
+        'co/fe/ee'
       end
     end
 
-    RecordsUploadsExampleUploader.new
+    RecordsUploadsExampleUploader.new(build_stubbed(:user))
   end
 
   def upload_fixture(filename)
@@ -20,48 +20,55 @@ describe RecordsUploads do
   end
 
   describe 'callbacks' do
-    it 'calls `record_upload` after `store`' do
+    let(:upload) { create(:upload) }
+
+    before do
+      uploader.upload = upload
+    end
+
+    it '#record_upload after `store`' do
       expect(uploader).to receive(:record_upload).once
 
       uploader.store!(upload_fixture('doc_sample.txt'))
     end
 
-    it 'calls `destroy_upload` after `remove`' do
-      expect(uploader).to receive(:destroy_upload).once
-
+    it '#destroy_upload after `remove`' do
       uploader.store!(upload_fixture('doc_sample.txt'))
 
+      expect(uploader).to receive(:destroy_upload).once
       uploader.remove!
    end
   end
 
   describe '#record_upload callback' do
-    it 'returns early when not using file storage' do
-      allow(uploader).to receive(:file_storage?).and_return(false)
-      expect(Upload).not_to receive(:record)
-
-      uploader.store!(upload_fixture('rails_sample.jpg'))
+    it 'creates an Upload record after store' do
+      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.to change { Upload.count }.by(1)
     end
 
-    it "returns early when the file doesn't exist" do
-      allow(uploader).to receive(:file).and_return(double(exists?: false))
-      expect(Upload).not_to receive(:record)
-
+    it 'creates a new record and assigns size, path, model, and uploader' do
       uploader.store!(upload_fixture('rails_sample.jpg'))
+
+      upload = uploader.upload
+      aggregate_failures do
+        expect(upload).to be_persisted
+        expect(upload.size).to eq uploader.file.size
+        expect(upload.path).to eq uploader.upload_path
+        expect(upload.model_id).to eq uploader.model.id
+        expect(upload.model_type).to eq uploader.model.class.to_s
+        expect(upload.uploader).to eq uploader.class.to_s
+      end
     end
 
-    it 'creates an Upload record after store' do
-      expect(Upload).to receive(:record)
-        .with(uploader)
+    it "does not create an Upload record when the file doesn't exist" do
+      allow(uploader).to receive(:file).and_return(double(exists?: false))
 
-      uploader.store!(upload_fixture('rails_sample.jpg'))
+      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
     end
 
     it 'does not create an Upload record if model is missing' do
-      expect_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
-      expect(Upload).not_to receive(:record).with(uploader)
+      allow_any_instance_of(RecordsUploadsExampleUploader).to receive(:model).and_return(nil)
 
-      uploader.store!(upload_fixture('rails_sample.jpg'))
+      expect { uploader.store!(upload_fixture('rails_sample.jpg')) }.not_to change { Upload.count }
     end
 
     it 'it destroys Upload records at the same path before recording' do
@@ -72,29 +79,15 @@ describe RecordsUploads do
         uploader: uploader.class.to_s
       )
 
+      uploader.upload = existing
       uploader.store!(upload_fixture('rails_sample.jpg'))
 
       expect { existing.reload }.to raise_error(ActiveRecord::RecordNotFound)
-      expect(Upload.count).to eq 1
+      expect(Upload.count).to eq(1)
     end
   end
 
   describe '#destroy_upload callback' do
-    it 'returns early when not using file storage' do
-      uploader.store!(upload_fixture('rails_sample.jpg'))
-
-      allow(uploader).to receive(:file_storage?).and_return(false)
-      expect(Upload).not_to receive(:remove_path)
-
-      uploader.remove!
-    end
-
-    it 'returns early when file is nil' do
-      expect(Upload).not_to receive(:remove_path)
-
-      uploader.remove!
-    end
-
     it 'it destroys Upload records at the same path after removal' do
       uploader.store!(upload_fixture('rails_sample.jpg'))
diff --git a/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb b/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb
new file mode 100644
index 00000000000..6e7d8db99c4
--- /dev/null
+++ b/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb
@@ -0,0 +1,47 @@
+require 'spec_helper'
+
+describe 'projects/pipeline_schedules/_pipeline_schedule' do
+  let(:owner) { create(:user) }
+  let(:master) { create(:user) }
+  let(:project) { create(:project) }
+  let(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) }
+
+  before do
+    assign(:project, project)
+
+    allow(view).to receive(:current_user).and_return(user)
+    allow(view).to receive(:pipeline_schedule).and_return(pipeline_schedule)
+
+    allow(view).to receive(:can?).and_return(true)
+  end
+
+  context 'taking ownership of schedule' do
+    context 'when non-owner is signed in' do
+      let(:user) { master }
+
+      before do
+        allow(view).to receive(:can?).with(master, :take_ownership_pipeline_schedule, pipeline_schedule).and_return(true)
+      end
+
+      it 'non-owner can take ownership of pipeline' do
+        render
+
+        expect(rendered).to have_link('Take ownership')
+      end
+    end
+
+    context 'when owner is signed in' do
+      let(:user) { owner }
+
+      before do
+        allow(view).to receive(:can?).with(owner, :take_ownership_pipeline_schedule, pipeline_schedule).and_return(false)
+      end
+
+      it 'owner cannot take ownership of pipeline' do
+        render
+
+        expect(rendered).not_to have_link('Take ownership')
+      end
+    end
+  end
+end
diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb
index 7274a9f00f9..2b1a617ee62 100644
--- a/spec/workers/repository_import_worker_spec.rb
+++ b/spec/workers/repository_import_worker_spec.rb
@@ -49,9 +49,22 @@ describe RepositoryImportWorker do
 
       expect do
         subject.perform(project.id)
-      end.to raise_error(StandardError, error)
+      end.to raise_error(RuntimeError, error)
       expect(project.reload.import_jid).not_to be_nil
     end
+
+    it 'updates the error on Import/Export' do
+      error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
+
+      project.update_attributes(import_jid: '123', import_type: 'gitlab_project')
+      expect_any_instance_of(Projects::ImportService).to receive(:execute).and_return({ status: :error, message: error })
+
+      expect do
+        subject.perform(project.id)
+      end.to raise_error(RuntimeError, error)
+
+      expect(project.reload.import_error).not_to be_nil
+    end
   end
 
   context 'when using an asynchronous importer' do
diff --git a/spec/workers/upload_checksum_worker_spec.rb b/spec/workers/upload_checksum_worker_spec.rb
index 911360da66c..9e50ce15871 100644
--- a/spec/workers/upload_checksum_worker_spec.rb
+++ b/spec/workers/upload_checksum_worker_spec.rb
@@ -2,18 +2,31 @@ require 'rails_helper'
 
 describe UploadChecksumWorker do
   describe '#perform' do
-    it 'rescues ActiveRecord::RecordNotFound' do
-      expect { described_class.new.perform(999_999) }.not_to raise_error
+    subject { described_class.new }
+
+    context 'without a valid record' do
+      it 'rescues ActiveRecord::RecordNotFound' do
+        expect { subject.perform(999_999) }.not_to raise_error
+      end
     end
 
-    it 'calls calculate_checksum_without_delay and save!' do
-      upload = spy
-      expect(Upload).to receive(:find).with(999_999).and_return(upload)
+    context 'with a valid record' do
+      let(:upload) { create(:user, :with_avatar).avatar.upload }
+
+      before do
+        expect(Upload).to receive(:find).and_return(upload)
+        allow(upload).to receive(:foreground_checksumable?).and_return(false)
+      end
 
-      described_class.new.perform(999_999)
+      it 'calls calculate_checksum!' do
+        expect(upload).to receive(:calculate_checksum!)
+        subject.perform(upload.id)
+      end
 
-      expect(upload).to have_received(:calculate_checksum)
-      expect(upload).to have_received(:save!)
+      it 'calls save!' do
+        expect(upload).to receive(:save!)
+        subject.perform(upload.id)
+      end
     end
   end
 end