-rw-r--r--  .codeclimate.yml | 2
-rw-r--r--  Gemfile | 2
-rw-r--r--  Gemfile.lock | 4
-rw-r--r--  app/assets/javascripts/api.js | 132
-rw-r--r--  app/assets/javascripts/blob/viewer/index.js | 22
-rw-r--r--  app/assets/javascripts/commits.js | 34
-rw-r--r--  app/assets/javascripts/create_item_dropdown.js | 44
-rw-r--r--  app/assets/javascripts/gl_dropdown.js | 31
-rw-r--r--  app/assets/javascripts/graphs/graphs_show.js | 16
-rw-r--r--  app/assets/javascripts/group_label_subscription.js | 27
-rw-r--r--  app/assets/javascripts/ide/stores/actions.js | 8
-rw-r--r--  app/assets/javascripts/ide/stores/actions/branch.js | 2
-rw-r--r--  app/assets/javascripts/integrations/integration_settings_form.js | 48
-rw-r--r--  app/assets/javascripts/issuable_bulk_update_actions.js | 13
-rw-r--r--  app/assets/javascripts/issuable_index.js | 34
-rw-r--r--  app/assets/javascripts/lib/utils/users_cache.js | 8
-rw-r--r--  app/assets/javascripts/users/activity_calendar.js | 2
-rw-r--r--  app/controllers/concerns/issuable_collections.rb | 20
-rw-r--r--  app/controllers/groups_controller.rb | 1
-rw-r--r--  app/models/concerns/discussion_on_diff.rb | 2
-rw-r--r--  app/models/merge_request.rb | 4
-rw-r--r--  app/models/project_wiki.rb | 6
-rw-r--r--  app/models/repository.rb | 71
-rw-r--r--  app/models/wiki_page.rb | 5
-rw-r--r--  app/serializers/merge_request_widget_entity.rb | 13
-rw-r--r--  app/views/layouts/header/_new_dropdown.haml | 2
-rw-r--r--  changelogs/unreleased/21554-mark-new-user-as-external.yml | 5
-rw-r--r--  changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml | 5
-rw-r--r--  changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml | 5
-rw-r--r--  changelogs/unreleased/42591-update-nokogiri.yml | 5
-rw-r--r--  changelogs/unreleased/contribution_calendar_label_cut_off.yml | 5
-rw-r--r--  changelogs/unreleased/osw-short-circuit-mergeable-disccusions-state.yml | 5
-rw-r--r--  changelogs/unreleased/sh-fix-events-collection.yml | 5
-rw-r--r--  config/initializers/ar5_pg_10_support.rb | 88
-rw-r--r--  config/initializers/peek.rb | 4
-rw-r--r--  doc/api/repositories.md | 1
-rw-r--r--  lib/gitlab/ee_compat_check.rb | 2
-rw-r--r--  lib/gitlab/git/diff.rb | 2
-rw-r--r--  lib/gitlab/git/operation_service.rb | 5
-rw-r--r--  lib/gitlab/git/repository.rb | 161
-rw-r--r--  lib/gitlab/git/wiki.rb | 14
-rw-r--r--  lib/gitlab/gitaly_client.rb | 33
-rw-r--r--  lib/gitlab/gitaly_client/operation_service.rb | 8
-rw-r--r--  lib/gitlab/gitaly_client/ref_service.rb | 9
-rw-r--r--  lib/gitlab/gitaly_client/repository_service.rb | 16
-rw-r--r--  lib/gitlab/gitaly_client/wiki_service.rb | 12
-rw-r--r--  lib/gitlab/metrics.rb | 4
-rw-r--r--  lib/gitlab/metrics/influx_db.rb | 290
-rw-r--r--  lib/gitlab/metrics/method_call.rb | 38
-rw-r--r--  lib/gitlab/metrics/methods.rb | 129
-rw-r--r--  lib/gitlab/metrics/methods/metric_options.rb | 61
-rw-r--r--  lib/gitlab/metrics/null_metric.rb | 2
-rw-r--r--  lib/gitlab/metrics/prometheus.rb | 94
-rw-r--r--  lib/gitlab/metrics/subscribers/action_view.rb | 22
-rw-r--r--  lib/gitlab/metrics/subscribers/active_record.rb | 18
-rw-r--r--  lib/gitlab/metrics/transaction.rb | 83
-rw-r--r--  lib/gitlab/o_auth/user.rb | 2
-rw-r--r--  qa/README.md | 3
-rw-r--r--  qa/qa.rb | 14
-rw-r--r--  qa/qa/factory/resource/issue.rb | 34
-rw-r--r--  qa/qa/page/admin/settings.rb | 13
-rw-r--r--  qa/qa/page/base.rb | 17
-rw-r--r--  qa/qa/page/component/dropzone.rb | 29
-rw-r--r--  qa/qa/page/group/show.rb | 53
-rw-r--r--  qa/qa/page/menu/admin.rb | 11
-rw-r--r--  qa/qa/page/menu/side.rb | 8
-rw-r--r--  qa/qa/page/project/issue/index.rb | 17
-rw-r--r--  qa/qa/page/project/issue/new.rb | 33
-rw-r--r--  qa/qa/page/project/issue/show.rb | 40
-rw-r--r--  qa/qa/page/project/show.rb | 11
-rw-r--r--  qa/spec/fixtures/banana_sample.gif | bin 0 -> 71759 bytes
-rwxr-xr-x  scripts/lint-rugged | 1
-rwxr-xr-x  scripts/static-analysis | 1
-rw-r--r--  spec/controllers/groups_controller_spec.rb | 24
-rw-r--r--  spec/controllers/projects/issues_controller_spec.rb | 12
-rw-r--r--  spec/features/projects/members/share_with_group_spec.rb | 2
-rw-r--r--  spec/javascripts/api_spec.js | 173
-rw-r--r--  spec/javascripts/blob/viewer/index_spec.js | 36
-rw-r--r--  spec/javascripts/commits_spec.js | 43
-rw-r--r--  spec/javascripts/create_item_dropdown_spec.js | 139
-rw-r--r--  spec/javascripts/integrations/integration_settings_form_spec.js | 128
-rw-r--r--  spec/javascripts/issuable_spec.js | 26
-rw-r--r--  spec/javascripts/lib/utils/users_cache_spec.js | 4
-rw-r--r--  spec/javascripts/repo/components/new_dropdown/modal_spec.js | 6
-rw-r--r--  spec/javascripts/repo/components/new_dropdown/upload_spec.js | 6
-rw-r--r--  spec/javascripts/repo/components/repo_commit_section_spec.js | 6
-rw-r--r--  spec/javascripts/repo/stores/actions_spec.js | 32
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 69
-rw-r--r--  spec/lib/gitlab/gitaly_client/ref_service_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/method_call_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/metrics/methods_spec.rb | 137
-rw-r--r--  spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/action_view_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/active_record_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/metrics_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/o_auth/user_spec.rb | 30
-rw-r--r--  spec/models/concerns/discussion_on_diff_spec.rb | 10
-rw-r--r--  spec/models/merge_request_spec.rb | 4
-rw-r--r--  spec/models/wiki_page_spec.rb | 20
-rw-r--r--  spec/requests/api/jobs_spec.rb | 71
-rw-r--r--  spec/requests/api/v3/builds_spec.rb | 54
102 files changed, 1953 insertions, 1131 deletions
diff --git a/.codeclimate.yml b/.codeclimate.yml
index dc8ac60fb44..ecac24b68d7 100644
--- a/.codeclimate.yml
+++ b/.codeclimate.yml
@@ -1,7 +1,5 @@
---
engines:
- brakeman:
- enabled: true
bundler-audit:
enabled: true
duplication:
diff --git a/Gemfile b/Gemfile
index 1c9bd409084..7bffae06b51 100644
--- a/Gemfile
+++ b/Gemfile
@@ -132,7 +132,7 @@ gem 'asciidoctor-plantuml', '0.0.7'
gem 'rouge', '~> 2.0'
gem 'truncato', '~> 0.7.9'
gem 'bootstrap_form', '~> 2.7.0'
-gem 'nokogiri', '~> 1.8.1'
+gem 'nokogiri', '~> 1.8.2'
# Diffs
gem 'diffy', '~> 3.1.0'
diff --git a/Gemfile.lock b/Gemfile.lock
index 042809f918f..f770a7019e7 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -513,7 +513,7 @@ GEM
net-ldap (0.16.0)
net-ssh (4.1.0)
netrc (0.11.0)
- nokogiri (1.8.1)
+ nokogiri (1.8.2)
mini_portile2 (~> 2.3.0)
numerizer (0.1.1)
oauth (0.5.1)
@@ -1100,7 +1100,7 @@ DEPENDENCIES
mysql2 (~> 0.4.10)
net-ldap
net-ssh (~> 4.1.0)
- nokogiri (~> 1.8.1)
+ nokogiri (~> 1.8.2)
oauth2 (~> 1.4)
octokit (~> 4.6.2)
oj (~> 2.17.4)
diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js
index 7cb81bf4d5b..1f34c6b50c2 100644
--- a/app/assets/javascripts/api.js
+++ b/app/assets/javascripts/api.js
@@ -1,9 +1,9 @@
-import $ from 'jquery';
+import _ from 'underscore';
import axios from './lib/utils/axios_utils';
const Api = {
groupsPath: '/api/:version/groups.json',
- groupPath: '/api/:version/groups/:id.json',
+ groupPath: '/api/:version/groups/:id',
namespacesPath: '/api/:version/namespaces.json',
groupProjectsPath: '/api/:version/groups/:id/projects.json',
projectsPath: '/api/:version/projects.json',
@@ -23,42 +23,44 @@ const Api = {
group(groupId, callback) {
const url = Api.buildUrl(Api.groupPath)
.replace(':id', groupId);
- return $.ajax({
- url,
- dataType: 'json',
- })
- .done(group => callback(group));
+ return axios.get(url)
+ .then(({ data }) => {
+ callback(data);
+
+ return data;
+ });
},
// Return groups list. Filtered by query
groups(query, options, callback) {
const url = Api.buildUrl(Api.groupsPath);
- return $.ajax({
- url,
- data: Object.assign({
+ return axios.get(url, {
+ params: Object.assign({
search: query,
per_page: 20,
}, options),
- dataType: 'json',
})
- .done(groups => callback(groups));
+ .then(({ data }) => {
+ callback(data);
+
+ return data;
+ });
},
// Return namespaces list. Filtered by query
namespaces(query, callback) {
const url = Api.buildUrl(Api.namespacesPath);
- return $.ajax({
- url,
- data: {
+ return axios.get(url, {
+ params: {
search: query,
per_page: 20,
},
- dataType: 'json',
- }).done(namespaces => callback(namespaces));
+ })
+ .then(({ data }) => callback(data));
},
// Return projects list. Filtered by query
- projects(query, options, callback) {
+ projects(query, options, callback = _.noop) {
const url = Api.buildUrl(Api.projectsPath);
const defaults = {
search: query,
@@ -70,12 +72,14 @@ const Api = {
defaults.membership = true;
}
- return $.ajax({
- url,
- data: Object.assign(defaults, options),
- dataType: 'json',
+ return axios.get(url, {
+ params: Object.assign(defaults, options),
})
- .done(projects => callback(projects));
+ .then(({ data }) => {
+ callback(data);
+
+ return data;
+ });
},
// Return single project
@@ -97,41 +101,34 @@ const Api = {
url = Api.buildUrl(Api.groupLabelsPath).replace(':namespace_path', namespacePath);
}
- return $.ajax({
- url,
- type: 'POST',
- data: { label: data },
- dataType: 'json',
+ return axios.post(url, {
+ label: data,
})
- .done(label => callback(label))
- .fail(message => callback(message.responseJSON));
+ .then(res => callback(res.data))
+ .catch(e => callback(e.response.data));
},
// Return group projects list. Filtered by query
groupProjects(groupId, query, callback) {
const url = Api.buildUrl(Api.groupProjectsPath)
.replace(':id', groupId);
- return $.ajax({
- url,
- data: {
+ return axios.get(url, {
+ params: {
search: query,
per_page: 20,
},
- dataType: 'json',
})
- .done(projects => callback(projects));
+ .then(({ data }) => callback(data));
},
commitMultiple(id, data) {
// see https://docs.gitlab.com/ce/api/commits.html#create-a-commit-with-multiple-files-and-actions
const url = Api.buildUrl(Api.commitPath)
.replace(':id', encodeURIComponent(id));
- return this.wrapAjaxCall({
- url,
- type: 'POST',
- contentType: 'application/json; charset=utf-8',
- data: JSON.stringify(data),
- dataType: 'json',
+ return axios.post(url, JSON.stringify(data), {
+ headers: {
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
});
},
@@ -140,40 +137,37 @@ const Api = {
.replace(':id', encodeURIComponent(id))
.replace(':branch', branch);
- return this.wrapAjaxCall({
- url,
- type: 'GET',
- contentType: 'application/json; charset=utf-8',
- dataType: 'json',
- });
+ return axios.get(url);
},
// Return text for a specific license
licenseText(key, data, callback) {
const url = Api.buildUrl(Api.licensePath)
.replace(':key', key);
- return $.ajax({
- url,
- data,
+ return axios.get(url, {
+ params: data,
})
- .done(license => callback(license));
+ .then(res => callback(res.data));
},
gitignoreText(key, callback) {
const url = Api.buildUrl(Api.gitignorePath)
.replace(':key', key);
- return $.get(url, gitignore => callback(gitignore));
+ return axios.get(url)
+ .then(({ data }) => callback(data));
},
gitlabCiYml(key, callback) {
const url = Api.buildUrl(Api.gitlabCiYmlPath)
.replace(':key', key);
- return $.get(url, file => callback(file));
+ return axios.get(url)
+ .then(({ data }) => callback(data));
},
dockerfileYml(key, callback) {
const url = Api.buildUrl(Api.dockerfilePath).replace(':key', key);
- $.get(url, callback);
+ return axios.get(url)
+ .then(({ data }) => callback(data));
},
issueTemplate(namespacePath, projectPath, key, type, callback) {
@@ -182,23 +176,18 @@ const Api = {
.replace(':type', type)
.replace(':project_path', projectPath)
.replace(':namespace_path', namespacePath);
- $.ajax({
- url,
- dataType: 'json',
- })
- .done(file => callback(null, file))
- .fail(callback);
+ return axios.get(url)
+ .then(({ data }) => callback(null, data))
+ .catch(callback);
},
users(query, options) {
const url = Api.buildUrl(this.usersPath);
- return Api.wrapAjaxCall({
- url,
- data: Object.assign({
+ return axios.get(url, {
+ params: Object.assign({
search: query,
per_page: 20,
}, options),
- dataType: 'json',
});
},
@@ -209,21 +198,6 @@ const Api = {
}
return urlRoot + url.replace(':version', gon.api_version);
},
-
- wrapAjaxCall(options) {
- return new Promise((resolve, reject) => {
- // jQuery 2 is not Promises/A+ compatible (missing catch)
- $.ajax(options) // eslint-disable-line promise/catch-or-return
- .then(data => resolve(data),
- (jqXHR, textStatus, errorThrown) => {
- const error = new Error(`${options.url}: ${errorThrown}`);
- error.textStatus = textStatus;
- if (jqXHR && jqXHR.responseJSON) error.responseJSON = jqXHR.responseJSON;
- reject(error);
- },
- );
- });
- },
};
export default Api;
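
The api.js hunks above all apply the same migration shape: jQuery's $.ajax / .done / .fail is replaced by an axios promise that is returned to the caller, the legacy callback is still invoked with the response data, and error payloads move from jqXHR.responseJSON to error.response.data. A minimal sketch of that shape (the fetchGroup name and the URL are illustrative assumptions, not taken from the patch):

import axios from './lib/utils/axios_utils';

// Illustrative helper, not part of the patch: fetch one group and keep the old
// callback contract while also returning a promise for new call sites.
function fetchGroup(groupId, callback) {
  const url = `/api/v4/groups/${groupId}`; // assumed URL for the sketch

  return axios.get(url)
    .then(({ data }) => {
      callback(data); // preserve the existing callback-style consumers
      return data;    // expose the data to promise chains, as the hunks above do
    })
    .catch((error) => {
      // axios exposes the parsed body on error.response.data (was jqXHR.responseJSON)
      callback(error.response && error.response.data);
      throw error;    // re-throw so promise consumers still observe the failure
    });
}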
diff --git a/app/assets/javascripts/blob/viewer/index.js b/app/assets/javascripts/blob/viewer/index.js
index 54132e8537b..612f604e725 100644
--- a/app/assets/javascripts/blob/viewer/index.js
+++ b/app/assets/javascripts/blob/viewer/index.js
@@ -1,5 +1,6 @@
import Flash from '../../flash';
import { handleLocationHash } from '../../lib/utils/common_utils';
+import axios from '../../lib/utils/axios_utils';
export default class BlobViewer {
constructor() {
@@ -127,25 +128,18 @@ export default class BlobViewer {
const viewer = viewerParam;
const url = viewer.getAttribute('data-url');
- return new Promise((resolve, reject) => {
- if (!url || viewer.getAttribute('data-loaded') || viewer.getAttribute('data-loading')) {
- resolve(viewer);
- return;
- }
+ if (!url || viewer.getAttribute('data-loaded') || viewer.getAttribute('data-loading')) {
+ return Promise.resolve(viewer);
+ }
- viewer.setAttribute('data-loading', 'true');
+ viewer.setAttribute('data-loading', 'true');
- $.ajax({
- url,
- dataType: 'JSON',
- })
- .fail(reject)
- .done((data) => {
+ return axios.get(url)
+ .then(({ data }) => {
viewer.innerHTML = data.html;
viewer.setAttribute('data-loaded', 'true');
- resolve(viewer);
+ return viewer;
});
- });
}
}
diff --git a/app/assets/javascripts/commits.js b/app/assets/javascripts/commits.js
index 3a03cbf6b90..4b2f75fffde 100644
--- a/app/assets/javascripts/commits.js
+++ b/app/assets/javascripts/commits.js
@@ -5,6 +5,7 @@
import { pluralize } from './lib/utils/text_utility';
import { localTimeAgo } from './lib/utils/datetime_utility';
import Pager from './pager';
+import axios from './lib/utils/axios_utils';
export default (function () {
const CommitsList = {};
@@ -43,29 +44,30 @@ export default (function () {
CommitsList.filterResults = function () {
const form = $('.commits-search-form');
const search = CommitsList.searchField.val();
- if (search === CommitsList.lastSearch) return;
+ if (search === CommitsList.lastSearch) return Promise.resolve();
const commitsUrl = form.attr('action') + '?' + form.serialize();
CommitsList.content.fadeTo('fast', 0.5);
- return $.ajax({
- type: 'GET',
- url: form.attr('action'),
- data: form.serialize(),
- complete: function () {
- return CommitsList.content.fadeTo('fast', 1.0);
- },
- success: function (data) {
+ const params = form.serializeArray().reduce((acc, obj) => Object.assign(acc, {
+ [obj.name]: obj.value,
+ }), {});
+
+ return axios.get(form.attr('action'), {
+ params,
+ })
+ .then(({ data }) => {
CommitsList.lastSearch = search;
CommitsList.content.html(data.html);
- return history.replaceState({
- page: commitsUrl,
+ CommitsList.content.fadeTo('fast', 1.0);
+
// Change url so if user reload a page - search results are saved
+ history.replaceState({
+ page: commitsUrl,
}, document.title, commitsUrl);
- },
- error: function () {
+ })
+ .catch(() => {
+ CommitsList.content.fadeTo('fast', 1.0);
CommitsList.lastSearch = null;
- },
- dataType: 'json',
- });
+ });
};
// Prepare loaded data.
diff --git a/app/assets/javascripts/create_item_dropdown.js b/app/assets/javascripts/create_item_dropdown.js
index 488db023ee7..42e9e568170 100644
--- a/app/assets/javascripts/create_item_dropdown.js
+++ b/app/assets/javascripts/create_item_dropdown.js
@@ -12,6 +12,7 @@ export default class CreateItemDropdown {
this.fieldName = options.fieldName;
this.onSelect = options.onSelect || (() => {});
this.getDataOption = options.getData;
+ this.createNewItemFromValueOption = options.createNewItemFromValue;
this.$dropdown = options.$dropdown;
this.$dropdownContainer = this.$dropdown.parent();
this.$dropdownFooter = this.$dropdownContainer.find('.dropdown-footer');
@@ -30,15 +31,15 @@ export default class CreateItemDropdown {
filterable: true,
remote: false,
search: {
- fields: ['title'],
+ fields: ['text'],
},
selectable: true,
toggleLabel(selected) {
- return (selected && 'id' in selected) ? selected.title : this.defaultToggleLabel;
+ return (selected && 'id' in selected) ? _.escape(selected.title) : this.defaultToggleLabel;
},
fieldName: this.fieldName,
text(item) {
- return _.escape(item.title);
+ return _.escape(item.text);
},
id(item) {
return _.escape(item.id);
@@ -51,6 +52,11 @@ export default class CreateItemDropdown {
});
}
+ clearDropdown() {
+ this.$dropdownContainer.find('.dropdown-content').html('');
+ this.$dropdownContainer.find('.dropdown-input-field').val('');
+ }
+
bindEvents() {
this.$createButton.on('click', this.onClickCreateWildcard.bind(this));
}
@@ -58,9 +64,13 @@ export default class CreateItemDropdown {
onClickCreateWildcard(e) {
e.preventDefault();
+ this.refreshData();
+ this.$dropdown.data('glDropdown').selectRowAtIndex();
+ }
+
+ refreshData() {
// Refresh the dropdown's data, which ends up calling `getData`
this.$dropdown.data('glDropdown').remote.execute();
- this.$dropdown.data('glDropdown').selectRowAtIndex();
}
getData(term, callback) {
@@ -79,20 +89,28 @@ export default class CreateItemDropdown {
});
}
- toggleCreateNewButton(item) {
- if (item) {
- this.selectedItem = {
- title: item,
- id: item,
- text: item,
- };
+ createNewItemFromValue(newValue) {
+ if (this.createNewItemFromValueOption) {
+ return this.createNewItemFromValueOption(newValue);
+ }
+
+ return {
+ title: newValue,
+ id: newValue,
+ text: newValue,
+ };
+ }
+
+ toggleCreateNewButton(newValue) {
+ if (newValue) {
+ this.selectedItem = this.createNewItemFromValue(newValue);
this.$dropdownContainer
.find('.js-dropdown-create-new-item code')
- .text(item);
+ .text(newValue);
}
- this.toggleFooter(!item);
+ this.toggleFooter(!newValue);
}
toggleFooter(toggleState) {
diff --git a/app/assets/javascripts/gl_dropdown.js b/app/assets/javascripts/gl_dropdown.js
index 64f258aed64..15df7a7f989 100644
--- a/app/assets/javascripts/gl_dropdown.js
+++ b/app/assets/javascripts/gl_dropdown.js
@@ -2,6 +2,7 @@
/* global fuzzaldrinPlus */
import _ from 'underscore';
import fuzzaldrinPlus from 'fuzzaldrin-plus';
+import axios from './lib/utils/axios_utils';
import { visitUrl } from './lib/utils/url_utility';
import { isObject } from './lib/utils/type_utility';
@@ -212,25 +213,17 @@ GitLabDropdownRemote = (function() {
};
GitLabDropdownRemote.prototype.fetchData = function() {
- return $.ajax({
- url: this.dataEndpoint,
- dataType: this.options.dataType,
- beforeSend: (function(_this) {
- return function() {
- if (_this.options.beforeSend) {
- return _this.options.beforeSend();
- }
- };
- })(this),
- success: (function(_this) {
- return function(data) {
- if (_this.options.success) {
- return _this.options.success(data);
- }
- };
- })(this)
- });
- // Fetch the data through ajax if the data is a string
+ if (this.options.beforeSend) {
+ this.options.beforeSend();
+ }
+
+ // Fetch the data through ajax if the data is a string
+ return axios.get(this.dataEndpoint)
+ .then(({ data }) => {
+ if (this.options.success) {
+ return this.options.success(data);
+ }
+ });
};
return GitLabDropdownRemote;
diff --git a/app/assets/javascripts/graphs/graphs_show.js b/app/assets/javascripts/graphs/graphs_show.js
index 36bad6db3e1..b670e907a5c 100644
--- a/app/assets/javascripts/graphs/graphs_show.js
+++ b/app/assets/javascripts/graphs/graphs_show.js
@@ -1,11 +1,13 @@
+import flash from '../flash';
+import { __ } from '../locale';
+import axios from '../lib/utils/axios_utils';
import ContributorsStatGraph from './stat_graph_contributors';
document.addEventListener('DOMContentLoaded', () => {
- $.ajax({
- type: 'GET',
- url: document.querySelector('.js-graphs-show').dataset.projectGraphPath,
- dataType: 'json',
- success(data) {
+ const url = document.querySelector('.js-graphs-show').dataset.projectGraphPath;
+
+ axios.get(url)
+ .then(({ data }) => {
const graph = new ContributorsStatGraph();
graph.init(data);
@@ -16,6 +18,6 @@ document.addEventListener('DOMContentLoaded', () => {
$('.stat-graph').fadeIn();
$('.loading-graph').hide();
- },
- });
+ })
+ .catch(() => flash(__('Error fetching contributors data.')));
});
diff --git a/app/assets/javascripts/group_label_subscription.js b/app/assets/javascripts/group_label_subscription.js
index befaebb635e..df9429b1e02 100644
--- a/app/assets/javascripts/group_label_subscription.js
+++ b/app/assets/javascripts/group_label_subscription.js
@@ -1,3 +1,7 @@
+import axios from './lib/utils/axios_utils';
+import flash from './flash';
+import { __ } from './locale';
+
export default class GroupLabelSubscription {
constructor(container) {
const $container = $(container);
@@ -13,14 +17,12 @@ export default class GroupLabelSubscription {
event.preventDefault();
const url = this.$unsubscribeButtons.attr('data-url');
-
- $.ajax({
- type: 'POST',
- url,
- }).done(() => {
- this.toggleSubscriptionButtons();
- this.$unsubscribeButtons.removeAttr('data-url');
- });
+ axios.post(url)
+ .then(() => {
+ this.toggleSubscriptionButtons();
+ this.$unsubscribeButtons.removeAttr('data-url');
+ })
+ .catch(() => flash(__('There was an error when unsubscribing from this label.')));
}
subscribe(event) {
@@ -31,12 +33,9 @@ export default class GroupLabelSubscription {
this.$unsubscribeButtons.attr('data-url', url);
- $.ajax({
- type: 'POST',
- url,
- }).done(() => {
- this.toggleSubscriptionButtons();
- });
+ axios.post(url)
+ .then(() => this.toggleSubscriptionButtons())
+ .catch(() => flash(__('There was an error when subscribing to this label.')));
}
toggleSubscriptionButtons() {
diff --git a/app/assets/javascripts/ide/stores/actions.js b/app/assets/javascripts/ide/stores/actions.js
index 96a87744df5..d007d0ae78f 100644
--- a/app/assets/javascripts/ide/stores/actions.js
+++ b/app/assets/javascripts/ide/stores/actions.js
@@ -71,7 +71,7 @@ export const setResizingStatus = ({ commit }, resizing) => {
export const checkCommitStatus = ({ state }) =>
service
.getBranchData(state.currentProjectId, state.currentBranchId)
- .then((data) => {
+ .then(({ data }) => {
const { id } = data.commit;
const selectedBranch =
state.projects[state.currentProjectId].branches[state.currentBranchId];
@@ -90,7 +90,7 @@ export const commitChanges = (
) =>
service
.commit(state.currentProjectId, payload)
- .then((data) => {
+ .then(({ data }) => {
const { branch } = payload;
if (!data.short_id) {
flash(data.message, 'alert', document, null, false, true);
@@ -147,8 +147,8 @@ export const commitChanges = (
})
.catch((err) => {
let errMsg = 'Error committing changes. Please try again.';
- if (err.responseJSON && err.responseJSON.message) {
- errMsg += ` (${stripHtml(err.responseJSON.message)})`;
+ if (err.response.data && err.response.data.message) {
+ errMsg += ` (${stripHtml(err.response.data.message)})`;
}
flash(errMsg, 'alert', document, null, false, true);
window.dispatchEvent(new Event('resize'));
diff --git a/app/assets/javascripts/ide/stores/actions/branch.js b/app/assets/javascripts/ide/stores/actions/branch.js
index 589ec28c6a4..bc6fd2d4163 100644
--- a/app/assets/javascripts/ide/stores/actions/branch.js
+++ b/app/assets/javascripts/ide/stores/actions/branch.js
@@ -10,7 +10,7 @@ export const getBranchData = (
!state.projects[`${projectId}`].branches[branchId])
|| force) {
service.getBranchData(`${projectId}`, branchId)
- .then((data) => {
+ .then(({ data }) => {
const { id } = data.commit;
commit(types.SET_BRANCH, { projectPath: `${projectId}`, branchName: branchId, branch: data });
commit(types.SET_BRANCH_WORKING_REFERENCE, { projectId, branchId, reference: id });
diff --git a/app/assets/javascripts/integrations/integration_settings_form.js b/app/assets/javascripts/integrations/integration_settings_form.js
index 32415a8791f..3f27cfc2f6d 100644
--- a/app/assets/javascripts/integrations/integration_settings_form.js
+++ b/app/assets/javascripts/integrations/integration_settings_form.js
@@ -1,4 +1,5 @@
-import Flash from '../flash';
+import axios from '../lib/utils/axios_utils';
+import flash from '../flash';
export default class IntegrationSettingsForm {
constructor(formSelector) {
@@ -95,29 +96,26 @@ export default class IntegrationSettingsForm {
*/
testSettings(formData) {
this.toggleSubmitBtnState(true);
- $.ajax({
- type: 'PUT',
- url: this.testEndPoint,
- data: formData,
- })
- .done((res) => {
- if (res.error) {
- new Flash(`${res.message} ${res.service_response}`, 'alert', document, {
- title: 'Save anyway',
- clickHandler: (e) => {
- e.preventDefault();
- this.$form.submit();
- },
- });
- } else {
- this.$form.submit();
- }
- })
- .fail(() => {
- new Flash('Something went wrong on our end.');
- })
- .always(() => {
- this.toggleSubmitBtnState(false);
- });
+
+ return axios.put(this.testEndPoint, formData)
+ .then(({ data }) => {
+ if (data.error) {
+ flash(`${data.message} ${data.service_response}`, 'alert', document, {
+ title: 'Save anyway',
+ clickHandler: (e) => {
+ e.preventDefault();
+ this.$form.submit();
+ },
+ });
+ } else {
+ this.$form.submit();
+ }
+
+ this.toggleSubmitBtnState(false);
+ })
+ .catch(() => {
+ flash('Something went wrong on our end.');
+ this.toggleSubmitBtnState(false);
+ });
}
}
diff --git a/app/assets/javascripts/issuable_bulk_update_actions.js b/app/assets/javascripts/issuable_bulk_update_actions.js
index b124fafec70..8c1b2e78ca4 100644
--- a/app/assets/javascripts/issuable_bulk_update_actions.js
+++ b/app/assets/javascripts/issuable_bulk_update_actions.js
@@ -1,5 +1,6 @@
/* eslint-disable comma-dangle, quotes, consistent-return, func-names, array-callback-return, space-before-function-paren, prefer-arrow-callback, max-len, no-unused-expressions, no-sequences, no-underscore-dangle, no-unused-vars, no-param-reassign */
import _ from 'underscore';
+import axios from './lib/utils/axios_utils';
import Flash from './flash';
export default {
@@ -22,15 +23,9 @@ export default {
},
submit() {
- const _this = this;
- const xhr = $.ajax({
- url: this.form.attr('action'),
- method: this.form.attr('method'),
- dataType: 'JSON',
- data: this.getFormDataAsObject()
- });
- xhr.done(() => window.location.reload());
- xhr.fail(() => this.onFormSubmitFailure());
+ axios[this.form.attr('method')](this.form.attr('action'), this.getFormDataAsObject())
+ .then(() => window.location.reload())
+ .catch(() => this.onFormSubmitFailure());
},
onFormSubmitFailure() {
diff --git a/app/assets/javascripts/issuable_index.js b/app/assets/javascripts/issuable_index.js
index c3e0acdff66..0683ca82a38 100644
--- a/app/assets/javascripts/issuable_index.js
+++ b/app/assets/javascripts/issuable_index.js
@@ -1,3 +1,6 @@
+import axios from './lib/utils/axios_utils';
+import flash from './flash';
+import { __ } from './locale';
import IssuableBulkUpdateSidebar from './issuable_bulk_update_sidebar';
import IssuableBulkUpdateActions from './issuable_bulk_update_actions';
@@ -20,23 +23,24 @@ export default class IssuableIndex {
}
static resetIncomingEmailToken() {
- $('.incoming-email-token-reset').on('click', (e) => {
+ const $resetToken = $('.incoming-email-token-reset');
+
+ $resetToken.on('click', (e) => {
e.preventDefault();
- $.ajax({
- type: 'PUT',
- url: $('.incoming-email-token-reset').attr('href'),
- dataType: 'json',
- success(response) {
- $('#issuable_email').val(response.new_address).focus();
- },
- beforeSend() {
- $('.incoming-email-token-reset').text('resetting...');
- },
- complete() {
- $('.incoming-email-token-reset').text('reset it');
- },
- });
+ $resetToken.text('resetting...');
+
+ axios.put($resetToken.attr('href'))
+ .then(({ data }) => {
+ $('#issuable_email').val(data.new_address).focus();
+
+ $resetToken.text('reset it');
+ })
+ .catch(() => {
+ flash(__('There was an error when reseting email token.'));
+
+ $resetToken.text('reset it');
+ });
});
}
}
diff --git a/app/assets/javascripts/lib/utils/users_cache.js b/app/assets/javascripts/lib/utils/users_cache.js
index 88f8a622c00..b01ec6b81a3 100644
--- a/app/assets/javascripts/lib/utils/users_cache.js
+++ b/app/assets/javascripts/lib/utils/users_cache.js
@@ -8,16 +8,16 @@ class UsersCache extends Cache {
}
return Api.users('', { username })
- .then((users) => {
- if (!users.length) {
+ .then(({ data }) => {
+ if (!data.length) {
throw new Error(`User "${username}" could not be found!`);
}
- if (users.length > 1) {
+ if (data.length > 1) {
throw new Error(`Expected username "${username}" to be unique!`);
}
- const user = users[0];
+ const user = data[0];
this.internalStorage[username] = user;
return user;
});
diff --git a/app/assets/javascripts/users/activity_calendar.js b/app/assets/javascripts/users/activity_calendar.js
index 0581239d5a5..0ca54faa71c 100644
--- a/app/assets/javascripts/users/activity_calendar.js
+++ b/app/assets/javascripts/users/activity_calendar.js
@@ -98,7 +98,7 @@ export default class ActivityCalendar {
const secondLastColMonth = this.timestampsTmp[group - 2][0].date.getMonth();
if (lastColMonth !== secondLastColMonth) {
- extraWidthPadding = 3;
+ extraWidthPadding = 6;
}
return extraWidthPadding;
diff --git a/app/controllers/concerns/issuable_collections.rb b/app/controllers/concerns/issuable_collections.rb
index b25e753a5ad..2fa0f98e344 100644
--- a/app/controllers/concerns/issuable_collections.rb
+++ b/app/controllers/concerns/issuable_collections.rb
@@ -12,11 +12,9 @@ module IssuableCollections
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def set_issuables_index
- @issuables = issuables_collection
- @issuables = @issuables.page(params[:page])
- @issuable_meta_data = issuable_meta_data(@issuables, collection_type)
- @total_pages = issuable_page_count
+ @issuables = issuables_collection
+ set_pagination
return if redirect_out_of_range(@total_pages)
if params[:label_name].present?
@@ -35,14 +33,26 @@ module IssuableCollections
@users.push(author) if author
end
end
+
+ def set_pagination
+ return if pagination_disabled?
+
+ @issuables = @issuables.page(params[:page])
+ @issuable_meta_data = issuable_meta_data(@issuables, collection_type)
+ @total_pages = issuable_page_count
+ end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
+ def pagination_disabled?
+ false
+ end
+
def issuables_collection
finder.execute.preload(preload_for_collection)
end
def redirect_out_of_range(total_pages)
- return false if total_pages.zero?
+ return false if total_pages.nil? || total_pages.zero?
out_of_range = @issuables.current_page > total_pages # rubocop:disable Gitlab/ModuleWithInstanceVariables
diff --git a/app/controllers/groups_controller.rb b/app/controllers/groups_controller.rb
index eb53a522f90..75270a0889b 100644
--- a/app/controllers/groups_controller.rb
+++ b/app/controllers/groups_controller.rb
@@ -150,7 +150,6 @@ class GroupsController < Groups::ApplicationController
@projects = GroupProjectsFinder.new(params: params, group: group, options: options, current_user: current_user)
.execute
.includes(:namespace)
- .page(params[:page])
@events = EventCollection
.new(@projects, offset: params[:offset].to_i, filter: event_filter)
diff --git a/app/models/concerns/discussion_on_diff.rb b/app/models/concerns/discussion_on_diff.rb
index db9770fabf4..8b3c55387b3 100644
--- a/app/models/concerns/discussion_on_diff.rb
+++ b/app/models/concerns/discussion_on_diff.rb
@@ -37,6 +37,8 @@ module DiscussionOnDiff
# Returns an array of at most 16 highlighted lines above a diff note
def truncated_diff_lines(highlight: true)
+ return [] if diff_line.nil? && first_note.is_a?(LegacyDiffNote)
+
lines = highlight ? highlighted_diff_lines : diff_lines
initial_line_index = [diff_line.index - NUMBER_OF_TRUNCATED_DIFF_LINES + 1, 0].max
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index f6d4843abc3..d025062f562 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -618,12 +618,12 @@ class MergeRequest < ActiveRecord::Base
can_be_merged? && !should_be_rebased?
end
- def mergeable_state?(skip_ci_check: false)
+ def mergeable_state?(skip_ci_check: false, skip_discussions_check: false)
return false unless open?
return false if work_in_progress?
return false if broken?
return false unless skip_ci_check || mergeable_ci_state?
- return false unless mergeable_discussions_state?
+ return false unless skip_discussions_check || mergeable_discussions_state?
true
end
diff --git a/app/models/project_wiki.rb b/app/models/project_wiki.rb
index a0af749a93f..459d1673125 100644
--- a/app/models/project_wiki.rb
+++ b/app/models/project_wiki.rb
@@ -124,6 +124,12 @@ class ProjectWiki
update_project_activity
end
+ def page_formatted_data(page)
+ page_title, page_dir = page_title_and_dir(page.title)
+
+ wiki.page_formatted_data(title: page_title, dir: page_dir, version: page.version)
+ end
+
def page_title_and_dir(title)
title_array = title.split("/")
title = title_array.pop
diff --git a/app/models/repository.rb b/app/models/repository.rb
index 5b06dc5a39b..406e3a7f73b 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -255,6 +255,8 @@ class Repository
# This will still fail if the file is corrupted (e.g. 0 bytes)
raw_repository.write_ref(keep_around_ref_name(sha), sha, shell: false)
+ rescue Gitlab::Git::CommandError => ex
+ Rails.logger.error "Unable to create keep-around reference for repository #{path}: #{ex}"
end
def kept_around?(sha)
@@ -491,7 +493,7 @@ class Repository
raw_repository.root_ref
else
# When the repo does not exist we raise this error so no data is cached.
- raise Rugged::ReferenceError
+ raise Gitlab::Git::Repository::NoRepository
end
end
cache_method :root_ref
@@ -525,11 +527,7 @@ class Repository
def commit_count_for_ref(ref)
return 0 unless exists?
- begin
- cache.fetch(:"commit_count_#{ref}") { raw_repository.commit_count(ref) }
- rescue Rugged::ReferenceError
- 0
- end
+ cache.fetch(:"commit_count_#{ref}") { raw_repository.commit_count(ref) }
end
delegate :branch_names, to: :raw_repository
@@ -653,26 +651,14 @@ class Repository
end
def last_commit_for_path(sha, path)
- raw_repository.gitaly_migrate(:last_commit_for_path) do |is_enabled|
- if is_enabled
- last_commit_for_path_by_gitaly(sha, path)
- else
- last_commit_for_path_by_rugged(sha, path)
- end
- end
+ commit_by(oid: last_commit_id_for_path(sha, path))
end
def last_commit_id_for_path(sha, path)
key = path.blank? ? "last_commit_id_for_path:#{sha}" : "last_commit_id_for_path:#{sha}:#{Digest::SHA1.hexdigest(path)}"
cache.fetch(key) do
- raw_repository.gitaly_migrate(:last_commit_for_path) do |is_enabled|
- if is_enabled
- last_commit_for_path_by_gitaly(sha, path).id
- else
- last_commit_id_for_path_by_shelling_out(sha, path)
- end
- end
+ raw_repository.last_commit_id_for_path(sha, path)
end
end
@@ -800,16 +786,6 @@ class Repository
with_cache_hooks { raw.multi_action(user, **options) }
end
- def can_be_merged?(source_sha, target_branch)
- raw_repository.gitaly_migrate(:can_be_merged) do |is_enabled|
- if is_enabled
- gitaly_can_be_merged?(source_sha, find_branch(target_branch).target)
- else
- rugged_can_be_merged?(source_sha, target_branch)
- end
- end
- end
-
def merge(user, source_sha, merge_request, message)
with_cache_hooks do
raw_repository.merge(user, source_sha, merge_request.target_branch, message) do |commit_id|
@@ -876,26 +852,18 @@ class Repository
@root_ref_sha ||= commit(root_ref).sha
end
- delegate :merged_branch_names, to: :raw_repository
+ delegate :merged_branch_names, :can_be_merged?, to: :raw_repository
def merge_base(first_commit_id, second_commit_id)
first_commit_id = commit(first_commit_id).try(:id) || first_commit_id
second_commit_id = commit(second_commit_id).try(:id) || second_commit_id
raw_repository.merge_base(first_commit_id, second_commit_id)
- rescue Rugged::ReferenceError
- nil
end
def ancestor?(ancestor_id, descendant_id)
return false if ancestor_id.nil? || descendant_id.nil?
- Gitlab::GitalyClient.migrate(:is_ancestor) do |is_enabled|
- if is_enabled
- raw_repository.ancestor?(ancestor_id, descendant_id)
- else
- rugged_is_ancestor?(ancestor_id, descendant_id)
- end
- end
+ raw_repository.ancestor?(ancestor_id, descendant_id)
end
def fetch_as_mirror(url, forced: false, refmap: :all_refs, remote_name: nil)
@@ -1077,30 +1045,7 @@ class Repository
Gitlab::Metrics.add_event(event, { path: full_path }.merge(tags))
end
- def last_commit_for_path_by_gitaly(sha, path)
- c = raw_repository.gitaly_commit_client.last_commit_for_path(sha, path)
- commit_by(oid: c)
- end
-
- def last_commit_for_path_by_rugged(sha, path)
- sha = last_commit_id_for_path_by_shelling_out(sha, path)
- commit_by(oid: sha)
- end
-
- def last_commit_id_for_path_by_shelling_out(sha, path)
- args = %W(rev-list --max-count=1 #{sha} -- #{path})
- raw_repository.run_git_with_timeout(args, Gitlab::Git::Popen::FAST_GIT_PROCESS_TIMEOUT).first.strip
- end
-
def initialize_raw_repository
Gitlab::Git::Repository.new(project.repository_storage, disk_path + '.git', Gitlab::GlRepository.gl_repository(project, is_wiki))
end
-
- def gitaly_can_be_merged?(their_commit, our_commit)
- !raw_repository.gitaly_conflicts_client(our_commit, their_commit).conflicts?
- end
-
- def rugged_can_be_merged?(their_commit, our_commit)
- !rugged.merge_commits(our_commit, their_commit).conflicts?
- end
end
diff --git a/app/models/wiki_page.rb b/app/models/wiki_page.rb
index bdfef677ef3..e6254183baf 100644
--- a/app/models/wiki_page.rb
+++ b/app/models/wiki_page.rb
@@ -107,7 +107,10 @@ class WikiPage
# The processed/formatted content of this page.
def formatted_content
- @attributes[:formatted_content] ||= @page&.formatted_data
+ # Assuming @page exists, nil formatted_data means we didn't load it
+ # before hand (i.e. page was fetched by Gitaly), so we fetch it separately.
+ # If the page was fetched by Gollum, formatted_data would've been a String.
+ @attributes[:formatted_content] ||= @page&.formatted_data || @wiki.page_formatted_data(@page)
end
# The markup format for the page.
diff --git a/app/serializers/merge_request_widget_entity.rb b/app/serializers/merge_request_widget_entity.rb
index 48cd2317f46..fbfe480503b 100644
--- a/app/serializers/merge_request_widget_entity.rb
+++ b/app/serializers/merge_request_widget_entity.rb
@@ -48,7 +48,18 @@ class MergeRequestWidgetEntity < IssuableEntity
expose :merge_ongoing?, as: :merge_ongoing
expose :work_in_progress?, as: :work_in_progress
expose :source_branch_exists?, as: :source_branch_exists
- expose :mergeable_discussions_state?, as: :mergeable_discussions_state
+
+ expose :mergeable_discussions_state?, as: :mergeable_discussions_state do |merge_request|
+ # This avoids calling MergeRequest#mergeable_discussions_state without
+ # considering the state of the MR first. If a MR isn't mergeable, we can
+ # safely short-circuit it.
+ if merge_request.mergeable_state?(skip_ci_check: true, skip_discussions_check: true)
+ merge_request.mergeable_discussions_state?
+ else
+ false
+ end
+ end
+
expose :branch_missing?, as: :branch_missing
expose :commits_count
expose :cannot_be_merged?, as: :has_conflicts
diff --git a/app/views/layouts/header/_new_dropdown.haml b/app/views/layouts/header/_new_dropdown.haml
index 088f2785092..eb32f393310 100644
--- a/app/views/layouts/header/_new_dropdown.haml
+++ b/app/views/layouts/header/_new_dropdown.haml
@@ -1,5 +1,5 @@
%li.header-new.dropdown
- = link_to new_project_path, class: "header-new-dropdown-toggle has-tooltip", title: "New...", ref: 'tooltip', aria: { label: "New..." }, data: { toggle: 'dropdown', placement: 'bottom', container: 'body' } do
+ = link_to new_project_path, class: "header-new-dropdown-toggle has-tooltip qa-new-menu-toggle", title: "New...", ref: 'tooltip', aria: { label: "New..." }, data: { toggle: 'dropdown', placement: 'bottom', container: 'body' } do
= sprite_icon('plus-square', size: 16)
= sprite_icon('angle-down', css_class: 'caret-down')
.dropdown-menu-nav.dropdown-menu-align-right
diff --git a/changelogs/unreleased/21554-mark-new-user-as-external.yml b/changelogs/unreleased/21554-mark-new-user-as-external.yml
new file mode 100644
index 00000000000..fb0826fc176
--- /dev/null
+++ b/changelogs/unreleased/21554-mark-new-user-as-external.yml
@@ -0,0 +1,5 @@
+---
+title: Login via OAuth now only marks new users as external
+merge_request: 16672
+author:
+type: fixed
diff --git a/changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml b/changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml
new file mode 100644
index 00000000000..f64fd66ef79
--- /dev/null
+++ b/changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml
@@ -0,0 +1,5 @@
+---
+title: Reduce the number of Prometheus metrics
+merge_request: 16443
+author:
+type: performance
diff --git a/changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml b/changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml
new file mode 100644
index 00000000000..64340ab08cd
--- /dev/null
+++ b/changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml
@@ -0,0 +1,5 @@
+---
+title: Fix 500 error when loading a merge request with an invalid comment
+merge_request: 16795
+author:
+type: fixed
diff --git a/changelogs/unreleased/42591-update-nokogiri.yml b/changelogs/unreleased/42591-update-nokogiri.yml
new file mode 100644
index 00000000000..5f9587d2d92
--- /dev/null
+++ b/changelogs/unreleased/42591-update-nokogiri.yml
@@ -0,0 +1,5 @@
+---
+title: Update nokogiri to 1.8.2
+merge_request: 16807
+author:
+type: security
diff --git a/changelogs/unreleased/contribution_calendar_label_cut_off.yml b/changelogs/unreleased/contribution_calendar_label_cut_off.yml
new file mode 100644
index 00000000000..0b4a746bab8
--- /dev/null
+++ b/changelogs/unreleased/contribution_calendar_label_cut_off.yml
@@ -0,0 +1,5 @@
+---
+title: Contribution calendar label was cut off
+merge_request:
+author: Branka Martinovic
+type: fixed
diff --git a/changelogs/unreleased/osw-short-circuit-mergeable-disccusions-state.yml b/changelogs/unreleased/osw-short-circuit-mergeable-disccusions-state.yml
new file mode 100644
index 00000000000..62931218861
--- /dev/null
+++ b/changelogs/unreleased/osw-short-circuit-mergeable-disccusions-state.yml
@@ -0,0 +1,5 @@
+---
+title: Stop checking if discussions are in a mergeable state if the MR isn't
+merge_request:
+author:
+type: performance
diff --git a/changelogs/unreleased/sh-fix-events-collection.yml b/changelogs/unreleased/sh-fix-events-collection.yml
new file mode 100644
index 00000000000..50af39d9caf
--- /dev/null
+++ b/changelogs/unreleased/sh-fix-events-collection.yml
@@ -0,0 +1,5 @@
+---
+title: Fix not all events being shown in group dashboard
+merge_request:
+author:
+type: fixed
diff --git a/config/initializers/ar5_pg_10_support.rb b/config/initializers/ar5_pg_10_support.rb
index 6fae770015c..a529c74a8ce 100644
--- a/config/initializers/ar5_pg_10_support.rb
+++ b/config/initializers/ar5_pg_10_support.rb
@@ -1,57 +1,59 @@
raise "Vendored ActiveRecord 5 code! Delete #{__FILE__}!" if ActiveRecord::VERSION::MAJOR >= 5
-require 'active_record/connection_adapters/postgresql_adapter'
-require 'active_record/connection_adapters/postgresql/schema_statements'
-
-#
-# Monkey-patch the refused Rails 4.2 patch at https://github.com/rails/rails/pull/31330
-#
-# Updates sequence logic to support PostgreSQL 10.
-#
-# rubocop:disable all
-module ActiveRecord
- module ConnectionAdapters
-
- # We need #postgresql_version to be public as in ActiveRecord 5 for seed_fu
- # to work. In ActiveRecord 4, it is protected.
- # https://github.com/mbleigh/seed-fu/issues/123
- class PostgreSQLAdapter
- public :postgresql_version
- end
+if Gitlab::Database.postgresql?
+ require 'active_record/connection_adapters/postgresql_adapter'
+ require 'active_record/connection_adapters/postgresql/schema_statements'
+
+ #
+ # Monkey-patch the refused Rails 4.2 patch at https://github.com/rails/rails/pull/31330
+ #
+ # Updates sequence logic to support PostgreSQL 10.
+ #
+ # rubocop:disable all
+ module ActiveRecord
+ module ConnectionAdapters
+
+ # We need #postgresql_version to be public as in ActiveRecord 5 for seed_fu
+ # to work. In ActiveRecord 4, it is protected.
+ # https://github.com/mbleigh/seed-fu/issues/123
+ class PostgreSQLAdapter
+ public :postgresql_version
+ end
- module PostgreSQL
- module SchemaStatements
- # Resets the sequence of a table's primary key to the maximum value.
- def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:
- unless pk and sequence
- default_pk, default_sequence = pk_and_sequence_for(table)
+ module PostgreSQL
+ module SchemaStatements
+ # Resets the sequence of a table's primary key to the maximum value.
+ def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:
+ unless pk and sequence
+ default_pk, default_sequence = pk_and_sequence_for(table)
- pk ||= default_pk
- sequence ||= default_sequence
- end
+ pk ||= default_pk
+ sequence ||= default_sequence
+ end
- if @logger && pk && !sequence
- @logger.warn "#{table} has primary key #{pk} with no default sequence"
- end
+ if @logger && pk && !sequence
+ @logger.warn "#{table} has primary key #{pk} with no default sequence"
+ end
- if pk && sequence
- quoted_sequence = quote_table_name(sequence)
- max_pk = select_value("SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}")
- if max_pk.nil?
- if postgresql_version >= 100000
- minvalue = select_value("SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass")
- else
- minvalue = select_value("SELECT min_value FROM #{quoted_sequence}")
+ if pk && sequence
+ quoted_sequence = quote_table_name(sequence)
+ max_pk = select_value("SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}")
+ if max_pk.nil?
+ if postgresql_version >= 100000
+ minvalue = select_value("SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass")
+ else
+ minvalue = select_value("SELECT min_value FROM #{quoted_sequence}")
+ end
end
- end
- select_value <<-end_sql, 'SCHEMA'
- SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue}, #{max_pk ? true : false})
- end_sql
+ select_value <<-end_sql, 'SCHEMA'
+ SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue}, #{max_pk ? true : false})
+ end_sql
+ end
end
end
end
end
end
+ # rubocop:enable all
end
-# rubocop:enable all
diff --git a/config/initializers/peek.rb b/config/initializers/peek.rb
index e74b95f1646..11759801112 100644
--- a/config/initializers/peek.rb
+++ b/config/initializers/peek.rb
@@ -7,10 +7,12 @@ if Gitlab::Database.mysql?
require 'peek-mysql2'
PEEK_DB_CLIENT = ::Mysql2::Client
PEEK_DB_VIEW = Peek::Views::Mysql2
-else
+elsif Gitlab::Database.postgresql?
require 'peek-pg'
PEEK_DB_CLIENT = ::PG::Connection
PEEK_DB_VIEW = Peek::Views::PG
+else
+ raise "Unsupported database adapter for peek!"
end
Peek.into PEEK_DB_VIEW
diff --git a/doc/api/repositories.md b/doc/api/repositories.md
index 5fb25e40ed7..96609cd530f 100644
--- a/doc/api/repositories.md
+++ b/doc/api/repositories.md
@@ -114,6 +114,7 @@ Parameters:
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
- `sha` (optional) - The commit SHA to download. A tag, branch reference or sha can be used. This defaults to the tip of the default branch if not specified
+- `format` (optional) - The archive format. Default is `tar.gz`. Options are `tar.gz`, `tar.bz2`, `tbz`, `tbz2`, `tb2`, `bz2`, `tar`, `zip`
## Compare branches, tags or commits
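
The repositories.md hunk above documents a new optional format parameter for the file-archive endpoint, next to the existing id and sha parameters. A minimal sketch of a request exercising it (the project ID 123, the master ref, passing format as a query parameter, and the token placeholder are assumptions for illustration, not part of the patch):

import axios from 'axios';

// Request a zip archive of project 123 at the tip of master (illustrative values).
axios.get('/api/v4/projects/123/repository/archive', {
  params: { sha: 'master', format: 'zip' }, // format defaults to tar.gz per the doc above
  responseType: 'arraybuffer',              // the response is archive bytes, not JSON
  headers: { 'PRIVATE-TOKEN': '<your_access_token>' },
})
  .then(({ data }) => {
    // data holds the raw archive; hand it to a file writer or download helper
  });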
diff --git a/lib/gitlab/ee_compat_check.rb b/lib/gitlab/ee_compat_check.rb
index d3b49b1ec75..0fb71976883 100644
--- a/lib/gitlab/ee_compat_check.rb
+++ b/lib/gitlab/ee_compat_check.rb
@@ -5,7 +5,7 @@ module Gitlab
DEFAULT_CE_PROJECT_URL = 'https://gitlab.com/gitlab-org/gitlab-ce'.freeze
EE_REPO_URL = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze
CHECK_DIR = Rails.root.join('ee_compat_check')
- IGNORED_FILES_REGEX = /(VERSION|CHANGELOG\.md:\d+)/.freeze
+ IGNORED_FILES_REGEX = %r{VERSION|CHANGELOG\.md|db/schema\.rb}i.freeze
PLEASE_READ_THIS_BANNER = %Q{
============================================================
===================== PLEASE READ THIS =====================
diff --git a/lib/gitlab/git/diff.rb b/lib/gitlab/git/diff.rb
index ca94b4baa59..a203587aec1 100644
--- a/lib/gitlab/git/diff.rb
+++ b/lib/gitlab/git/diff.rb
@@ -44,7 +44,7 @@ module Gitlab
# branch1...branch2) From the git documentation:
# "git diff A...B" is equivalent to "git diff
# $(git-merge-base A B) B"
- repo.merge_base_commit(head, base)
+ repo.merge_base(head, base)
end
options ||= {}
diff --git a/lib/gitlab/git/operation_service.rb b/lib/gitlab/git/operation_service.rb
index 3fb0e2eed93..280def182d5 100644
--- a/lib/gitlab/git/operation_service.rb
+++ b/lib/gitlab/git/operation_service.rb
@@ -131,7 +131,10 @@ module Gitlab
oldrev = branch.target
- if oldrev == repository.merge_base(newrev, branch.target)
+ merge_base = repository.merge_base(newrev, branch.target)
+ raise Gitlab::Git::Repository::InvalidRef unless merge_base
+
+ if oldrev == merge_base
oldrev
else
raise Gitlab::Git::CommitError.new('Branch diverged')
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 3a7930154e5..8137c582c0f 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -551,29 +551,34 @@ module Gitlab
end
# Returns the SHA of the most recent common ancestor of +from+ and +to+
- def merge_base_commit(from, to)
+ def merge_base(from, to)
gitaly_migrate(:merge_base) do |is_enabled|
if is_enabled
gitaly_repository_client.find_merge_base(from, to)
else
- rugged.merge_base(from, to)
+ rugged_merge_base(from, to)
end
end
end
- alias_method :merge_base, :merge_base_commit
# Gitaly note: JV: check gitlab-ee before removing this method.
def rugged_is_ancestor?(ancestor_id, descendant_id)
return false if ancestor_id.nil? || descendant_id.nil?
- merge_base_commit(ancestor_id, descendant_id) == ancestor_id
+ rugged_merge_base(ancestor_id, descendant_id) == ancestor_id
rescue Rugged::OdbError
false
end
# Returns true is +from+ is direct ancestor to +to+, otherwise false
def ancestor?(from, to)
- gitaly_commit_client.ancestor?(from, to)
+ Gitlab::GitalyClient.migrate(:is_ancestor) do |is_enabled|
+ if is_enabled
+ gitaly_commit_client.ancestor?(from, to)
+ else
+ rugged_is_ancestor?(from, to)
+ end
+ end
end
def merged_branch_names(branch_names = [])
@@ -680,11 +685,7 @@ module Gitlab
if is_enabled
gitaly_commit_client.commit_count(ref)
else
- walker = Rugged::Walker.new(rugged)
- walker.sorting(Rugged::SORT_TOPO | Rugged::SORT_REVERSE)
- oid = rugged.rev_parse_oid(ref)
- walker.push(oid)
- walker.count
+ rugged_commit_count(ref)
end
end
end
@@ -887,16 +888,12 @@ module Gitlab
end
def delete_refs(*ref_names)
- instructions = ref_names.map do |ref|
- "delete #{ref}\x00\x00"
- end
-
- message, status = run_git(%w[update-ref --stdin -z]) do |stdin|
- stdin.write(instructions.join)
- end
-
- unless status.zero?
- raise GitError.new("Could not delete refs #{ref_names}: #{message}")
+ gitaly_migrate(:delete_refs) do |is_enabled|
+ if is_enabled
+ gitaly_delete_refs(*ref_names)
+ else
+ git_delete_refs(*ref_names)
+ end
end
end
@@ -1105,10 +1102,14 @@ module Gitlab
end
def write_ref(ref_path, ref, old_ref: nil, shell: true)
- if shell
- shell_write_ref(ref_path, ref, old_ref)
- else
- rugged_write_ref(ref_path, ref)
+ ref_path = "#{Gitlab::Git::BRANCH_REF_PREFIX}#{ref_path}" unless ref_path.start_with?("refs/") || ref_path == "HEAD"
+
+ gitaly_migrate(:write_ref) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.write_ref(ref_path, ref, old_ref, shell)
+ else
+ local_write_ref(ref_path, ref, old_ref: old_ref, shell: shell)
+ end
end
end
@@ -1131,13 +1132,6 @@ module Gitlab
end
# Refactoring aid; allows us to copy code from app/models/repository.rb
- def run_git_with_timeout(args, timeout, env: {})
- circuit_breaker.perform do
- popen_with_timeout([Gitlab.config.git.bin_path, *args], timeout, path, env)
- end
- end
-
- # Refactoring aid; allows us to copy code from app/models/repository.rb
def commit(ref = 'HEAD')
Gitlab::Git::Commit.find(self, ref)
end
@@ -1392,6 +1386,16 @@ module Gitlab
run_git(args).first.scrub.split(/^--$/)
end
+ def can_be_merged?(source_sha, target_branch)
+ gitaly_migrate(:can_be_merged) do |is_enabled|
+ if is_enabled
+ gitaly_can_be_merged?(source_sha, find_branch(target_branch, true).target)
+ else
+ rugged_can_be_merged?(source_sha, target_branch)
+ end
+ end
+ end
+
def search_files_by_name(query, ref)
safe_query = Regexp.escape(query.sub(/^\/*/, ""))
@@ -1417,8 +1421,36 @@ module Gitlab
output
end
+ def can_be_merged?(source_sha, target_branch)
+ gitaly_migrate(:can_be_merged) do |is_enabled|
+ if is_enabled
+ gitaly_can_be_merged?(source_sha, find_branch(target_branch).target)
+ else
+ rugged_can_be_merged?(source_sha, target_branch)
+ end
+ end
+ end
+
+ def last_commit_id_for_path(sha, path)
+ gitaly_migrate(:last_commit_for_path) do |is_enabled|
+ if is_enabled
+ last_commit_for_path_by_gitaly(sha, path).id
+ else
+ last_commit_id_for_path_by_shelling_out(sha, path)
+ end
+ end
+ end
+
private
+ def local_write_ref(ref_path, ref, old_ref: nil, shell: true)
+ if shell
+ shell_write_ref(ref_path, ref, old_ref)
+ else
+ rugged_write_ref(ref_path, ref)
+ end
+ end
+
def shell_write_ref(ref_path, ref, old_ref)
raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ')
raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00")
@@ -1460,6 +1492,12 @@ module Gitlab
output
end
+ def run_git_with_timeout(args, timeout, env: {})
+ circuit_breaker.perform do
+ popen_with_timeout([Gitlab.config.git.bin_path, *args], timeout, path, env)
+ end
+ end
+
def fresh_worktree?(path)
File.exist?(path) && !clean_stuck_worktree(path)
end
@@ -2160,7 +2198,7 @@ module Gitlab
source_sha
end
- rescue Rugged::ReferenceError
+ rescue Rugged::ReferenceError, InvalidRef
raise ArgumentError, 'Invalid merge source'
end
@@ -2172,6 +2210,24 @@ module Gitlab
remote_update(remote_name, url: url)
end
+ def git_delete_refs(*ref_names)
+ instructions = ref_names.map do |ref|
+ "delete #{ref}\x00\x00"
+ end
+
+ message, status = run_git(%w[update-ref --stdin -z]) do |stdin|
+ stdin.write(instructions.join)
+ end
+
+ unless status.zero?
+ raise GitError.new("Could not delete refs #{ref_names}: #{message}")
+ end
+ end
+
+ def gitaly_delete_refs(*ref_names)
+ gitaly_ref_client.delete_refs(refs: ref_names)
+ end
+
def rugged_remove_remote(remote_name)
# When a remote is deleted all its remote refs are deleted too, but in
# the case of mirrors we map its refs (that would usualy go under
@@ -2234,6 +2290,14 @@ module Gitlab
run_git(['fetch', remote_name], env: env).last.zero?
end
+ def gitaly_can_be_merged?(their_commit, our_commit)
+ !gitaly_conflicts_client(our_commit, their_commit).conflicts?
+ end
+
+ def rugged_can_be_merged?(their_commit, our_commit)
+ !rugged.merge_commits(our_commit, their_commit).conflicts?
+ end
+
def gitlab_projects_error
raise CommandError, @gitlab_projects.output
end
@@ -2257,6 +2321,39 @@ module Gitlab
.commits_by_message(query, revision: ref, path: path, limit: limit, offset: offset)
.map { |c| commit(c) }
end
+
+ def gitaly_can_be_merged?(their_commit, our_commit)
+ !gitaly_conflicts_client(our_commit, their_commit).conflicts?
+ end
+
+ def rugged_can_be_merged?(their_commit, our_commit)
+ !rugged.merge_commits(our_commit, their_commit).conflicts?
+ end
+
+ def last_commit_for_path_by_gitaly(sha, path)
+ gitaly_commit_client.last_commit_for_path(sha, path)
+ end
+
+ def last_commit_id_for_path_by_shelling_out(sha, path)
+ args = %W(rev-list --max-count=1 #{sha} -- #{path})
+ run_git_with_timeout(args, Gitlab::Git::Popen::FAST_GIT_PROCESS_TIMEOUT).first.strip
+ end
+
+ def rugged_merge_base(from, to)
+ rugged.merge_base(from, to)
+ rescue Rugged::ReferenceError
+ nil
+ end
+
+ def rugged_commit_count(ref)
+ walker = Rugged::Walker.new(rugged)
+ walker.sorting(Rugged::SORT_TOPO | Rugged::SORT_REVERSE)
+ oid = rugged.rev_parse_oid(ref)
+ walker.push(oid)
+ walker.count
+ rescue Rugged::ReferenceError
+ 0
+ end
end
end
end
diff --git a/lib/gitlab/git/wiki.rb b/lib/gitlab/git/wiki.rb
index d4a53d32c28..ccdb8975342 100644
--- a/lib/gitlab/git/wiki.rb
+++ b/lib/gitlab/git/wiki.rb
@@ -117,6 +117,20 @@ module Gitlab
page.url_path
end
+ def page_formatted_data(title:, dir: nil, version: nil)
+ version = version&.id
+
+ @repository.gitaly_migrate(:wiki_page_formatted_data) do |is_enabled|
+ if is_enabled
+ gitaly_wiki_client.get_formatted_data(title: title, dir: dir, version: version)
+ else
+ # We don't use #page because if wiki_find_page feature is enabled, we would
+ # get a page without formatted_data.
+ gollum_find_page(title: title, dir: dir, version: version)&.formatted_data
+ end
+ end
+ end
+
private
# options:
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index 6bd256f57c7..c5d3e944f7d 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -6,6 +6,7 @@ require 'grpc/health/v1/health_services_pb'
module Gitlab
module GitalyClient
+ include Gitlab::Metrics::Methods
module MigrationStatus
DISABLED = 1
OPT_IN = 2
@@ -33,8 +34,6 @@ module Gitlab
CLIENT_NAME = (Sidekiq.server? ? 'gitlab-sidekiq' : 'gitlab-web').freeze
MUTEX = Mutex.new
- METRICS_MUTEX = Mutex.new
- private_constant :MUTEX, :METRICS_MUTEX
class << self
attr_accessor :query_time
@@ -42,28 +41,14 @@ module Gitlab
self.query_time = 0
- def self.migrate_histogram
- @migrate_histogram ||=
- METRICS_MUTEX.synchronize do
- # If a thread was blocked on the mutex, the value was set already
- return @migrate_histogram if @migrate_histogram
-
- Gitlab::Metrics.histogram(:gitaly_migrate_call_duration_seconds,
- "Gitaly migration call execution timings",
- gitaly_enabled: nil, feature: nil)
- end
+ define_histogram :gitaly_migrate_call_duration_seconds do
+ docstring "Gitaly migration call execution timings"
+ base_labels gitaly_enabled: nil, feature: nil
end
- def self.gitaly_call_histogram
- @gitaly_call_histogram ||=
- METRICS_MUTEX.synchronize do
- # If a thread was blocked on the mutex, the value was set already
- return @gitaly_call_histogram if @gitaly_call_histogram
-
- Gitlab::Metrics.histogram(:gitaly_controller_action_duration_seconds,
- "Gitaly endpoint histogram by controller and action combination",
- Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil))
- end
+ define_histogram :gitaly_controller_action_duration_seconds do
+ docstring "Gitaly endpoint histogram by controller and action combination"
+ base_labels Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil)
end
def self.stub(name, storage)
@@ -145,7 +130,7 @@ module Gitlab
# Keep track, separately, for the performance bar
self.query_time += duration
- gitaly_call_histogram.observe(
+ gitaly_controller_action_duration_seconds.observe(
current_transaction_labels.merge(gitaly_service: service.to_s, rpc: rpc.to_s),
duration)
end
@@ -247,7 +232,7 @@ module Gitlab
yield is_enabled
ensure
total_time = Gitlab::Metrics::System.monotonic_time - start
- migrate_histogram.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time)
+ gitaly_migrate_call_duration_seconds.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time)
feature_stack.shift
Thread.current[:gitaly_feature_stack] = nil if feature_stack.empty?
end
diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb
index c2b4155e6a5..cd2734b5a07 100644
--- a/lib/gitlab/gitaly_client/operation_service.rb
+++ b/lib/gitlab/gitaly_client/operation_service.rb
@@ -103,7 +103,13 @@ module Gitlab
request_enum.push(Gitaly::UserMergeBranchRequest.new(apply: true))
- branch_update = response_enum.next.branch_update
+ second_response = response_enum.next
+
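+          # A pre-receive hook may have rejected the merge; surface its message.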
+ if second_response.pre_receive_error.present?
+ raise Gitlab::Git::HooksService::PreReceiveError, second_response.pre_receive_error
+ end
+
+ branch_update = second_response.branch_update
return if branch_update.nil?
raise Gitlab::Git::CommitError.new('failed to apply merge to branch') unless branch_update.commit_id.present?
diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb
index f8e2a27f3fe..8b9a224b700 100644
--- a/lib/gitlab/gitaly_client/ref_service.rb
+++ b/lib/gitlab/gitaly_client/ref_service.rb
@@ -133,13 +133,16 @@ module Gitlab
GitalyClient.call(@repository.storage, :ref_service, :delete_branch, request)
end
- def delete_refs(except_with_prefixes:)
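+      # Deletes refs either by explicit name (refs:) or by deleting everything
+      # except the given prefixes, e.g. except_with_prefixes: ['refs/keep-around/'].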
+ def delete_refs(refs: [], except_with_prefixes: [])
request = Gitaly::DeleteRefsRequest.new(
repository: @gitaly_repo,
- except_with_prefix: except_with_prefixes
+ refs: refs.map { |r| encode_binary(r) },
+ except_with_prefix: except_with_prefixes.map { |r| encode_binary(r) }
)
- GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request)
+ response = GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request)
+
+ raise Gitlab::Git::Repository::GitError, response.git_error if response.git_error.present?
end
private
diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb
index b0dbaf11598..7adf32af209 100644
--- a/lib/gitlab/gitaly_client/repository_service.rb
+++ b/lib/gitlab/gitaly_client/repository_service.rb
@@ -203,6 +203,22 @@ module Gitlab
timeout: GitalyClient.default_timeout
)
end
+
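+      # Illustrative call (hypothetical values):
+      #   write_ref('HEAD', 'refs/heads/master', nil, false)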
+ def write_ref(ref_path, ref, old_ref, shell)
+ request = Gitaly::WriteRefRequest.new(
+ repository: @gitaly_repo,
+ ref: ref_path.b,
+ revision: ref.b,
+ shell: shell
+ )
+ request.old_revision = old_ref.b unless old_ref.nil?
+
+ response = GitalyClient.call(@storage, :repository_service, :write_ref, request)
+
+ raise Gitlab::Git::CommandError, encode!(response.error) if response.error.present?
+
+ true
+ end
end
end
end
diff --git a/lib/gitlab/gitaly_client/wiki_service.rb b/lib/gitlab/gitaly_client/wiki_service.rb
index 5c5b170a3e0..8e87a8cc36f 100644
--- a/lib/gitlab/gitaly_client/wiki_service.rb
+++ b/lib/gitlab/gitaly_client/wiki_service.rb
@@ -127,6 +127,18 @@ module Gitlab
wiki_file
end
+ def get_formatted_data(title:, dir: nil, version: nil)
+ request = Gitaly::WikiGetFormattedDataRequest.new(
+ repository: @gitaly_repo,
+ title: encode_binary(title),
+ revision: encode_binary(version),
+ directory: encode_binary(dir)
+ )
+
+ response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_formatted_data, request)
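+        # The RPC streams the formatted page in chunks; concatenate them into one string.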
+ response.reduce("") { |memo, msg| memo << msg.data }
+ end
+
private
# If a block is given and the yielded value is true, iteration will be
diff --git a/lib/gitlab/metrics.rb b/lib/gitlab/metrics.rb
index 4779755bb22..7d63ca5627d 100644
--- a/lib/gitlab/metrics.rb
+++ b/lib/gitlab/metrics.rb
@@ -1,7 +1,7 @@
module Gitlab
module Metrics
- extend Gitlab::Metrics::InfluxDb
- extend Gitlab::Metrics::Prometheus
+ include Gitlab::Metrics::InfluxDb
+ include Gitlab::Metrics::Prometheus
def self.enabled?
influx_metrics_enabled? || prometheus_metrics_enabled?
diff --git a/lib/gitlab/metrics/influx_db.rb b/lib/gitlab/metrics/influx_db.rb
index ef44a13df51..66f30e3b397 100644
--- a/lib/gitlab/metrics/influx_db.rb
+++ b/lib/gitlab/metrics/influx_db.rb
@@ -1,179 +1,187 @@
module Gitlab
module Metrics
module InfluxDb
- include Gitlab::CurrentSettings
- extend self
+ extend ActiveSupport::Concern
+ include Gitlab::Metrics::Methods
+
+ EXECUTION_MEASUREMENT_BUCKETS = [0.001, 0.01, 0.1, 1].freeze
MUTEX = Mutex.new
private_constant :MUTEX
- def influx_metrics_enabled?
- settings[:enabled] || false
- end
+ class_methods do
+ def influx_metrics_enabled?
+ settings[:enabled] || false
+ end
- # Prometheus histogram buckets used for arbitrary code measurements
- EXECUTION_MEASUREMENT_BUCKETS = [0.001, 0.002, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1].freeze
- RAILS_ROOT = Rails.root.to_s
- METRICS_ROOT = Rails.root.join('lib', 'gitlab', 'metrics').to_s
- PATH_REGEX = /^#{RAILS_ROOT}\/?/
-
- def settings
- @settings ||= {
- enabled: current_application_settings[:metrics_enabled],
- pool_size: current_application_settings[:metrics_pool_size],
- timeout: current_application_settings[:metrics_timeout],
- method_call_threshold: current_application_settings[:metrics_method_call_threshold],
- host: current_application_settings[:metrics_host],
- port: current_application_settings[:metrics_port],
- sample_interval: current_application_settings[:metrics_sample_interval] || 15,
- packet_size: current_application_settings[:metrics_packet_size] || 1
- }
- end
+ # Prometheus histogram buckets used for arbitrary code measurements
+
+ def settings
+ @settings ||= begin
+ current_settings = Gitlab::CurrentSettings.current_application_settings
+
+ {
+ enabled: current_settings[:metrics_enabled],
+ pool_size: current_settings[:metrics_pool_size],
+ timeout: current_settings[:metrics_timeout],
+ method_call_threshold: current_settings[:metrics_method_call_threshold],
+ host: current_settings[:metrics_host],
+ port: current_settings[:metrics_port],
+ sample_interval: current_settings[:metrics_sample_interval] || 15,
+ packet_size: current_settings[:metrics_packet_size] || 1
+ }
+ end
+ end
- def mri?
- RUBY_ENGINE == 'ruby'
- end
+ def mri?
+ RUBY_ENGINE == 'ruby'
+ end
- def method_call_threshold
- # This is memoized since this method is called for every instrumented
- # method. Loading data from an external cache on every method call slows
- # things down too much.
- # in milliseconds
- @method_call_threshold ||= settings[:method_call_threshold]
- end
+ def method_call_threshold
+ # This is memoized since this method is called for every instrumented
+ # method. Loading data from an external cache on every method call slows
+ # things down too much.
+ # in milliseconds
+ @method_call_threshold ||= settings[:method_call_threshold]
+ end
- def submit_metrics(metrics)
- prepared = prepare_metrics(metrics)
+ def submit_metrics(metrics)
+ prepared = prepare_metrics(metrics)
- pool&.with do |connection|
- prepared.each_slice(settings[:packet_size]) do |slice|
- begin
- connection.write_points(slice)
- rescue StandardError
+ pool&.with do |connection|
+ prepared.each_slice(settings[:packet_size]) do |slice|
+ begin
+ connection.write_points(slice)
+ rescue StandardError
+ end
end
end
+ rescue Errno::EADDRNOTAVAIL, SocketError => ex
+ Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.')
+ Gitlab::EnvironmentLogger.error(ex)
end
- rescue Errno::EADDRNOTAVAIL, SocketError => ex
- Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.')
- Gitlab::EnvironmentLogger.error(ex)
- end
- def prepare_metrics(metrics)
- metrics.map do |hash|
- new_hash = hash.symbolize_keys
+ def prepare_metrics(metrics)
+ metrics.map do |hash|
+ new_hash = hash.symbolize_keys
- new_hash[:tags].each do |key, value|
- if value.blank?
- new_hash[:tags].delete(key)
- else
- new_hash[:tags][key] = escape_value(value)
+ new_hash[:tags].each do |key, value|
+ if value.blank?
+ new_hash[:tags].delete(key)
+ else
+ new_hash[:tags][key] = escape_value(value)
+ end
end
+
+ new_hash
end
+ end
- new_hash
+ def escape_value(value)
+ value.to_s.gsub('=', '\\=')
end
- end
- def escape_value(value)
- value.to_s.gsub('=', '\\=')
- end
+ # Measures the execution time of a block.
+ #
+ # Example:
+ #
+ # Gitlab::Metrics.measure(:find_by_username_duration) do
+ # User.find_by_username(some_username)
+ # end
+ #
+ # name - The name of the field to store the execution time in.
+ #
+ # Returns the value yielded by the supplied block.
+ def measure(name)
+ trans = current_transaction
+
+ return yield unless trans
+
+ real_start = Time.now.to_f
+ cpu_start = System.cpu_time
+
+ retval = yield
+
+ cpu_stop = System.cpu_time
+ real_stop = Time.now.to_f
+
+ real_time = (real_stop - real_start)
+ cpu_time = cpu_stop - cpu_start
+
+ real_duration_seconds = fetch_histogram("gitlab_#{name}_real_duration_seconds".to_sym) do
+ docstring "Measure #{name}"
+ base_labels Transaction::BASE_LABELS
+ buckets EXECUTION_MEASUREMENT_BUCKETS
+ end
- # Measures the execution time of a block.
- #
- # Example:
- #
- # Gitlab::Metrics.measure(:find_by_username_duration) do
- # User.find_by_username(some_username)
- # end
- #
- # name - The name of the field to store the execution time in.
- #
- # Returns the value yielded by the supplied block.
- def measure(name)
- trans = current_transaction
-
- return yield unless trans
-
- real_start = Time.now.to_f
- cpu_start = System.cpu_time
-
- retval = yield
-
- cpu_stop = System.cpu_time
- real_stop = Time.now.to_f
-
- real_time = (real_stop - real_start)
- cpu_time = cpu_stop - cpu_start
-
- Gitlab::Metrics.histogram("gitlab_#{name}_real_duration_seconds".to_sym,
- "Measure #{name}",
- Transaction::BASE_LABELS,
- EXECUTION_MEASUREMENT_BUCKETS)
- .observe(trans.labels, real_time)
-
- Gitlab::Metrics.histogram("gitlab_#{name}_cpu_duration_seconds".to_sym,
- "Measure #{name}",
- Transaction::BASE_LABELS,
- EXECUTION_MEASUREMENT_BUCKETS)
- .observe(trans.labels, cpu_time / 1000.0)
-
- # InfluxDB stores the _real_time time values as milliseconds
- trans.increment("#{name}_real_time", real_time * 1000, false)
- trans.increment("#{name}_cpu_time", cpu_time, false)
- trans.increment("#{name}_call_count", 1, false)
-
- retval
- end
+ real_duration_seconds.observe(trans.labels, real_time)
- # Sets the action of the current transaction (if any)
- #
- # action - The name of the action.
- def action=(action)
- trans = current_transaction
+ cpu_duration_seconds = fetch_histogram("gitlab_#{name}_cpu_duration_seconds".to_sym) do
+ docstring "Measure #{name}"
+ base_labels Transaction::BASE_LABELS
+ buckets EXECUTION_MEASUREMENT_BUCKETS
+ with_feature "prometheus_metrics_measure_#{name}_cpu_duration"
+ end
+ cpu_duration_seconds.observe(trans.labels, cpu_time)
- trans&.action = action
- end
+          # InfluxDB stores the _real_time and _cpu_time values in milliseconds
+ trans.increment("#{name}_real_time", real_time.in_milliseconds, false)
+ trans.increment("#{name}_cpu_time", cpu_time.in_milliseconds, false)
+ trans.increment("#{name}_call_count", 1, false)
- # Tracks an event.
- #
- # See `Gitlab::Metrics::Transaction#add_event` for more details.
- def add_event(*args)
- trans = current_transaction
+ retval
+ end
- trans&.add_event(*args)
- end
+ # Sets the action of the current transaction (if any)
+ #
+ # action - The name of the action.
+ def action=(action)
+ trans = current_transaction
- # Returns the prefix to use for the name of a series.
- def series_prefix
- @series_prefix ||= Sidekiq.server? ? 'sidekiq_' : 'rails_'
- end
+ trans&.action = action
+ end
- # Allow access from other metrics related middlewares
- def current_transaction
- Transaction.current
- end
+ # Tracks an event.
+ #
+ # See `Gitlab::Metrics::Transaction#add_event` for more details.
+ def add_event(*args)
+ trans = current_transaction
- # When enabled this should be set before being used as the usual pattern
- # "@foo ||= bar" is _not_ thread-safe.
- # rubocop:disable Gitlab/ModuleWithInstanceVariables
- def pool
- if influx_metrics_enabled?
- if @pool.nil?
- MUTEX.synchronize do
- @pool ||= ConnectionPool.new(size: settings[:pool_size], timeout: settings[:timeout]) do
- host = settings[:host]
- port = settings[:port]
-
- InfluxDB::Client
- .new(udp: { host: host, port: port })
+ trans&.add_event(*args)
+ end
+
+ # Returns the prefix to use for the name of a series.
+ def series_prefix
+ @series_prefix ||= Sidekiq.server? ? 'sidekiq_' : 'rails_'
+ end
+
+ # Allow access from other metrics related middlewares
+ def current_transaction
+ Transaction.current
+ end
+
+ # When enabled this should be set before being used as the usual pattern
+ # "@foo ||= bar" is _not_ thread-safe.
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def pool
+ if influx_metrics_enabled?
+ if @pool.nil?
+ MUTEX.synchronize do
+ @pool ||= ConnectionPool.new(size: settings[:pool_size], timeout: settings[:timeout]) do
+ host = settings[:host]
+ port = settings[:port]
+
+ InfluxDB::Client
+ .new(udp: { host: host, port: port })
+ end
end
end
- end
- @pool
+ @pool
+ end
end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
end
- # rubocop:enable Gitlab/ModuleWithInstanceVariables
end
end
end
diff --git a/lib/gitlab/metrics/method_call.rb b/lib/gitlab/metrics/method_call.rb
index c2f9db56824..b11520a79bb 100644
--- a/lib/gitlab/metrics/method_call.rb
+++ b/lib/gitlab/metrics/method_call.rb
@@ -4,26 +4,15 @@ module Gitlab
module Metrics
# Class for tracking timing information about method calls
class MethodCall
- @@measurement_enabled_cache = Concurrent::AtomicBoolean.new(false)
- @@measurement_enabled_cache_expires_at = Concurrent::AtomicReference.new(Time.now.to_i)
- MUTEX = Mutex.new
+ include Gitlab::Metrics::Methods
BASE_LABELS = { module: nil, method: nil }.freeze
attr_reader :real_time, :cpu_time, :call_count, :labels
- def self.call_duration_histogram
- return @call_duration_histogram if @call_duration_histogram
-
- MUTEX.synchronize do
- @call_duration_histogram ||= Gitlab::Metrics.histogram(
- :gitlab_method_call_duration_seconds,
- 'Method calls real duration',
- Transaction::BASE_LABELS.merge(BASE_LABELS),
- [0.01, 0.05, 0.1, 0.5, 1])
- end
- end
-
- def self.measurement_enabled_cache_expires_at
- @@measurement_enabled_cache_expires_at
+ define_histogram :gitlab_method_call_duration_seconds do
+ docstring 'Method calls real duration'
+ base_labels Transaction::BASE_LABELS.merge(BASE_LABELS)
+ buckets [0.01, 0.05, 0.1, 0.5, 1]
+ with_feature :prometheus_metrics_method_instrumentation
end
# name - The full name of the method (including namespace) such as
@@ -53,8 +42,8 @@ module Gitlab
@cpu_time += cpu_time
@call_count += 1
- if call_measurement_enabled? && above_threshold?
- self.class.call_duration_histogram.observe(@transaction.labels.merge(labels), real_time)
+ if above_threshold?
+ self.class.gitlab_method_call_duration_seconds.observe(@transaction.labels.merge(labels), real_time)
end
retval
@@ -78,17 +67,6 @@ module Gitlab
def above_threshold?
real_time.in_milliseconds >= Metrics.method_call_threshold
end
-
- def call_measurement_enabled?
- expires_at = @@measurement_enabled_cache_expires_at.value
- if expires_at < Time.now.to_i
- if @@measurement_enabled_cache_expires_at.compare_and_set(expires_at, 1.minute.from_now.to_i)
- @@measurement_enabled_cache.value = Feature.get(:prometheus_metrics_method_instrumentation).enabled?
- end
- end
-
- @@measurement_enabled_cache.value
- end
end
end
end
diff --git a/lib/gitlab/metrics/methods.rb b/lib/gitlab/metrics/methods.rb
new file mode 100644
index 00000000000..cd7c1e507f7
--- /dev/null
+++ b/lib/gitlab/metrics/methods.rb
@@ -0,0 +1,129 @@
+# rubocop:disable Style/ClassVars
+
+module Gitlab
+ module Metrics
+ module Methods
+ extend ActiveSupport::Concern
+
+ included do
+ @@_metric_provider_mutex ||= Mutex.new
+ @@_metrics_provider_cache = {}
+ end
+
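+      # Illustrative usage sketch (hypothetical class and metric names): including
+      # this concern lets a class declare lazily-initialized, memoized metrics.
+      #
+      #   class SomeService
+      #     include Gitlab::Metrics::Methods
+      #
+      #     define_histogram :some_service_duration_seconds do
+      #       docstring 'Duration of SomeService calls'
+      #       base_labels action: nil
+      #       buckets [0.1, 1, 10]
+      #     end
+      #   end
+      #
+      #   SomeService.some_service_duration_seconds.observe({ action: 'run' }, 0.42)
+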
+ class_methods do
+ def reload_metric!(name)
+ @@_metrics_provider_cache.delete(name)
+ end
+
+ private
+
+ def define_metric(type, name, opts = {}, &block)
+ if respond_to?(name)
+ raise ArgumentError, "method #{name} already exists"
+ end
+
+ define_singleton_method(name) do
+ # inlining fetch_metric method to avoid method call overhead when instrumenting hot spots
+ @@_metrics_provider_cache[name] || init_metric(type, name, opts, &block)
+ end
+ end
+
+ def fetch_metric(type, name, opts = {}, &block)
+ @@_metrics_provider_cache[name] || init_metric(type, name, opts, &block)
+ end
+
+ def init_metric(type, name, opts = {}, &block)
+ options = MetricOptions.new(opts)
+ options.evaluate(&block)
+
+ if disabled_by_feature(options)
+ synchronized_cache_fill(name) { NullMetric.instance }
+ else
+ synchronized_cache_fill(name) { build_metric!(type, name, options) }
+ end
+ end
+
+ def synchronized_cache_fill(key)
+ @@_metric_provider_mutex.synchronize do
+ @@_metrics_provider_cache[key] ||= yield
+ end
+ end
+
+ def disabled_by_feature(options)
+ options.with_feature && !Feature.get(options.with_feature).enabled?
+ end
+
+ def build_metric!(type, name, options)
+ case type
+ when :gauge
+ Gitlab::Metrics.gauge(name, options.docstring, options.base_labels, options.multiprocess_mode)
+ when :counter
+ Gitlab::Metrics.counter(name, options.docstring, options.base_labels)
+ when :histogram
+ Gitlab::Metrics.histogram(name, options.docstring, options.base_labels, options.buckets)
+ when :summary
+ raise NotImplementedError, "summary metrics are not currently supported"
+ else
+ raise ArgumentError, "uknown metric type #{type}"
+ end
+ end
+
+ # Fetch and/or initialize counter metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_counter(name, opts = {}, &block)
+ fetch_metric(:counter, name, opts, &block)
+ end
+
+ # Fetch and/or initialize gauge metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_gauge(name, opts = {}, &block)
+ fetch_metric(:gauge, name, opts, &block)
+ end
+
+ # Fetch and/or initialize histogram metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_histogram(name, opts = {}, &block)
+ fetch_metric(:histogram, name, opts, &block)
+ end
+
+ # Fetch and/or initialize summary metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_summary(name, opts = {}, &block)
+ fetch_metric(:summary, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Counter
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_counter(name, opts = {}, &block)
+ define_metric(:counter, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Gauge
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_gauge(name, opts = {}, &block)
+ define_metric(:gauge, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Histogram
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_histogram(name, opts = {}, &block)
+ define_metric(:histogram, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Summary
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_summary(name, opts = {}, &block)
+ define_metric(:summary, name, opts, &block)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/methods/metric_options.rb b/lib/gitlab/metrics/methods/metric_options.rb
new file mode 100644
index 00000000000..70e122d4e15
--- /dev/null
+++ b/lib/gitlab/metrics/methods/metric_options.rb
@@ -0,0 +1,61 @@
+module Gitlab
+ module Metrics
+ module Methods
+ class MetricOptions
+ SMALL_NETWORK_BUCKETS = [0.005, 0.01, 0.1, 1, 10].freeze
+
+ def initialize(options = {})
+ @multiprocess_mode = options[:multiprocess_mode] || :all
+ @buckets = options[:buckets] || SMALL_NETWORK_BUCKETS
+ @base_labels = options[:base_labels] || {}
+ @docstring = options[:docstring]
+ @with_feature = options[:with_feature]
+ end
+
+      # Docstring describing the metric; exposed on the '/-/metrics' endpoint
+ def docstring(docstring = nil)
+ @docstring = docstring unless docstring.nil?
+
+ @docstring
+ end
+
+ # Gauge aggregation mode for multiprocess metrics
+ # - :all (default) returns each gauge for every process
+      # - :livesum sum of the per-process gauge values
+      # - :max maximum of the per-process gauge values
+      # - :min minimum of the per-process gauge values
+ def multiprocess_mode(mode = nil)
+ @multiprocess_mode = mode unless mode.nil?
+
+ @multiprocess_mode
+ end
+
+ # Measurement buckets for histograms
+ def buckets(buckets = nil)
+ @buckets = buckets unless buckets.nil?
+
+ @buckets
+ end
+
+      # Base labels are merged with per-metric labels
+ def base_labels(base_labels = nil)
+ @base_labels = base_labels unless base_labels.nil?
+
+ @base_labels
+ end
+
+      # Use a feature toggle to control whether the metric is enabled or disabled
+ def with_feature(name = nil)
+ @with_feature = name unless name.nil?
+
+ @with_feature
+ end
+
+ def evaluate(&block)
+ instance_eval(&block) if block_given?
+ self
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/null_metric.rb b/lib/gitlab/metrics/null_metric.rb
index 3b5a2907195..aabada5c21a 100644
--- a/lib/gitlab/metrics/null_metric.rb
+++ b/lib/gitlab/metrics/null_metric.rb
@@ -2,6 +2,8 @@ module Gitlab
module Metrics
# Mocks ::Prometheus::Client::Metric and all derived metrics
class NullMetric
+ include Singleton
+
def method_missing(name, *args, &block)
nil
end
diff --git a/lib/gitlab/metrics/prometheus.rb b/lib/gitlab/metrics/prometheus.rb
index b0b8e8436db..f07ea3560ff 100644
--- a/lib/gitlab/metrics/prometheus.rb
+++ b/lib/gitlab/metrics/prometheus.rb
@@ -3,73 +3,77 @@ require 'prometheus/client'
module Gitlab
module Metrics
module Prometheus
- include Gitlab::CurrentSettings
- include Gitlab::Utils::StrongMemoize
+ extend ActiveSupport::Concern
REGISTRY_MUTEX = Mutex.new
PROVIDER_MUTEX = Mutex.new
- def metrics_folder_present?
- multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir
+ class_methods do
+ include Gitlab::Utils::StrongMemoize
- multiprocess_files_dir &&
- ::Dir.exist?(multiprocess_files_dir) &&
- ::File.writable?(multiprocess_files_dir)
- end
+ def metrics_folder_present?
+ multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir
- def prometheus_metrics_enabled?
- strong_memoize(:prometheus_metrics_enabled) do
- prometheus_metrics_enabled_unmemoized
+ multiprocess_files_dir &&
+ ::Dir.exist?(multiprocess_files_dir) &&
+ ::File.writable?(multiprocess_files_dir)
+ end
+
+ def prometheus_metrics_enabled?
+ strong_memoize(:prometheus_metrics_enabled) do
+ prometheus_metrics_enabled_unmemoized
+ end
end
- end
- def registry
- strong_memoize(:registry) do
- REGISTRY_MUTEX.synchronize do
- strong_memoize(:registry) do
- ::Prometheus::Client.registry
+ def registry
+ strong_memoize(:registry) do
+ REGISTRY_MUTEX.synchronize do
+ strong_memoize(:registry) do
+ ::Prometheus::Client.registry
+ end
end
end
end
- end
- def counter(name, docstring, base_labels = {})
- safe_provide_metric(:counter, name, docstring, base_labels)
- end
+ def counter(name, docstring, base_labels = {})
+ safe_provide_metric(:counter, name, docstring, base_labels)
+ end
- def summary(name, docstring, base_labels = {})
- safe_provide_metric(:summary, name, docstring, base_labels)
- end
+ def summary(name, docstring, base_labels = {})
+ safe_provide_metric(:summary, name, docstring, base_labels)
+ end
- def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all)
- safe_provide_metric(:gauge, name, docstring, base_labels, multiprocess_mode)
- end
+ def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all)
+ safe_provide_metric(:gauge, name, docstring, base_labels, multiprocess_mode)
+ end
- def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS)
- safe_provide_metric(:histogram, name, docstring, base_labels, buckets)
- end
+ def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS)
+ safe_provide_metric(:histogram, name, docstring, base_labels, buckets)
+ end
- private
+ private
- def safe_provide_metric(method, name, *args)
- metric = provide_metric(name)
- return metric if metric
+ def safe_provide_metric(method, name, *args)
+ metric = provide_metric(name)
+ return metric if metric
- PROVIDER_MUTEX.synchronize do
- provide_metric(name) || registry.method(method).call(name, *args)
+ PROVIDER_MUTEX.synchronize do
+ provide_metric(name) || registry.method(method).call(name, *args)
+ end
end
- end
- def provide_metric(name)
- if prometheus_metrics_enabled?
- registry.get(name)
- else
- NullMetric.new
+ def provide_metric(name)
+ if prometheus_metrics_enabled?
+ registry.get(name)
+ else
+ NullMetric.instance
+ end
end
- end
- def prometheus_metrics_enabled_unmemoized
- metrics_folder_present? && current_application_settings[:prometheus_metrics_enabled] || false
+ def prometheus_metrics_enabled_unmemoized
+ metrics_folder_present? &&
+ Gitlab::CurrentSettings.current_application_settings[:prometheus_metrics_enabled] || false
+ end
end
end
end
diff --git a/lib/gitlab/metrics/subscribers/action_view.rb b/lib/gitlab/metrics/subscribers/action_view.rb
index 3da474fc1ec..274436ca2b4 100644
--- a/lib/gitlab/metrics/subscribers/action_view.rb
+++ b/lib/gitlab/metrics/subscribers/action_view.rb
@@ -3,6 +3,14 @@ module Gitlab
module Subscribers
# Class for tracking the rendering timings of views.
class ActionView < ActiveSupport::Subscriber
+ include Gitlab::Metrics::Methods
+ define_histogram :gitlab_view_rendering_duration_seconds do
+ docstring 'View rendering time'
+ base_labels Transaction::BASE_LABELS.merge({ path: nil })
+ buckets [0.001, 0.01, 0.1, 1, 10.0]
+ with_feature :prometheus_metrics_view_instrumentation
+ end
+
attach_to :action_view
SERIES = 'views'.freeze
@@ -15,23 +23,11 @@ module Gitlab
private
- def metric_view_rendering_duration_seconds
- @metric_view_rendering_duration_seconds ||= Gitlab::Metrics.histogram(
- :gitlab_view_rendering_duration_seconds,
- 'View rendering time',
- Transaction::BASE_LABELS.merge({ path: nil }),
- [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0]
- )
- end
-
def track(event)
values = values_for(event)
tags = tags_for(event)
- metric_view_rendering_duration_seconds.observe(
- current_transaction.labels.merge(tags),
- event.duration
- )
+ self.class.gitlab_view_rendering_duration_seconds.observe(current_transaction.labels.merge(tags), event.duration)
current_transaction.increment(:view_duration, event.duration)
current_transaction.add_metric(SERIES, values, tags)
diff --git a/lib/gitlab/metrics/subscribers/active_record.rb b/lib/gitlab/metrics/subscribers/active_record.rb
index ead1acb8d44..4b3e8d0a6a0 100644
--- a/lib/gitlab/metrics/subscribers/active_record.rb
+++ b/lib/gitlab/metrics/subscribers/active_record.rb
@@ -3,12 +3,13 @@ module Gitlab
module Subscribers
# Class for tracking the total query duration of a transaction.
class ActiveRecord < ActiveSupport::Subscriber
+ include Gitlab::Metrics::Methods
attach_to :active_record
def sql(event)
return unless current_transaction
- metric_sql_duration_seconds.observe(current_transaction.labels, event.duration / 1000.0)
+ self.class.gitlab_sql_duration_seconds.observe(current_transaction.labels, event.duration / 1000.0)
current_transaction.increment(:sql_duration, event.duration, false)
current_transaction.increment(:sql_count, 1, false)
@@ -16,17 +17,14 @@ module Gitlab
private
- def current_transaction
- Transaction.current
+ define_histogram :gitlab_sql_duration_seconds do
+ docstring 'SQL time'
+ base_labels Transaction::BASE_LABELS
+ buckets [0.001, 0.01, 0.1, 1.0, 10.0]
end
- def metric_sql_duration_seconds
- @metric_sql_duration_seconds ||= Gitlab::Metrics.histogram(
- :gitlab_sql_duration_seconds,
- 'SQL time',
- Transaction::BASE_LABELS,
- [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0]
- )
+ def current_transaction
+ Transaction.current
end
end
end
diff --git a/lib/gitlab/metrics/transaction.rb b/lib/gitlab/metrics/transaction.rb
index e7975c023a9..45b9e14ba55 100644
--- a/lib/gitlab/metrics/transaction.rb
+++ b/lib/gitlab/metrics/transaction.rb
@@ -2,11 +2,12 @@ module Gitlab
module Metrics
# Class for storing metrics information of a single transaction.
class Transaction
+ include Gitlab::Metrics::Methods
+
# base labels shared among all transactions
BASE_LABELS = { controller: nil, action: nil }.freeze
THREAD_KEY = :_gitlab_metrics_transaction
- METRICS_MUTEX = Mutex.new
# The series to store events (e.g. Git pushes) in.
EVENT_SERIES = 'events'.freeze
@@ -54,8 +55,8 @@ module Gitlab
@memory_after = System.memory_usage
@finished_at = System.monotonic_time
- self.class.metric_transaction_duration_seconds.observe(labels, duration)
- self.class.metric_transaction_allocated_memory_bytes.observe(labels, allocated_memory * 1024.0)
+ self.class.gitlab_transaction_duration_seconds.observe(labels, duration)
+ self.class.gitlab_transaction_allocated_memory_bytes.observe(labels, allocated_memory * 1024.0)
Thread.current[THREAD_KEY] = nil
end
@@ -72,7 +73,7 @@ module Gitlab
# event_name - The name of the event (e.g. "git_push").
# tags - A set of tags to attach to the event.
def add_event(event_name, tags = {})
- self.class.metric_event_counter(event_name, tags).increment(tags.merge(labels))
+ self.class.transaction_metric(event_name, :counter, prefix: 'event_', tags: tags).increment(tags.merge(labels))
@metrics << Metric.new(EVENT_SERIES, { count: 1 }, tags.merge(event: event_name), :event)
end
@@ -86,12 +87,12 @@ module Gitlab
end
def increment(name, value, use_prometheus = true)
- self.class.metric_transaction_counter(name).increment(labels, value) if use_prometheus
+ self.class.transaction_metric(name, :counter).increment(labels, value) if use_prometheus
@values[name] += value
end
def set(name, value, use_prometheus = true)
- self.class.metric_transaction_gauge(name).set(labels, value) if use_prometheus
+ self.class.transaction_metric(name, :gauge).set(labels, value) if use_prometheus
@values[name] = value
end
@@ -136,64 +137,28 @@ module Gitlab
"#{labels[:controller]}##{labels[:action]}" if labels && !labels.empty?
end
- def self.metric_transaction_duration_seconds
- return @metric_transaction_duration_seconds if @metric_transaction_duration_seconds
-
- METRICS_MUTEX.synchronize do
- @metric_transaction_duration_seconds ||= Gitlab::Metrics.histogram(
- :gitlab_transaction_duration_seconds,
- 'Transaction duration',
- BASE_LABELS,
- [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0]
- )
- end
- end
-
- def self.metric_transaction_allocated_memory_bytes
- return @metric_transaction_allocated_memory_bytes if @metric_transaction_allocated_memory_bytes
-
- METRICS_MUTEX.synchronize do
- @metric_transaction_allocated_memory_bytes ||= Gitlab::Metrics.histogram(
- :gitlab_transaction_allocated_memory_bytes,
- 'Transaction allocated memory bytes',
- BASE_LABELS,
- [1000, 10000, 20000, 500000, 1000000, 2000000, 5000000, 10000000, 20000000, 100000000]
- )
- end
+ define_histogram :gitlab_transaction_duration_seconds do
+ docstring 'Transaction duration'
+ base_labels BASE_LABELS
+ buckets [0.001, 0.01, 0.1, 1.0, 10.0]
end
- def self.metric_event_counter(event_name, tags)
- return @metric_event_counters[event_name] if @metric_event_counters&.has_key?(event_name)
-
- METRICS_MUTEX.synchronize do
- @metric_event_counters ||= {}
- @metric_event_counters[event_name] ||= Gitlab::Metrics.counter(
- "gitlab_transaction_event_#{event_name}_total".to_sym,
- "Transaction event #{event_name} counter",
- tags.merge(BASE_LABELS)
- )
- end
- end
-
- def self.metric_transaction_counter(name)
- return @metric_transaction_counters[name] if @metric_transaction_counters&.has_key?(name)
-
- METRICS_MUTEX.synchronize do
- @metric_transaction_counters ||= {}
- @metric_transaction_counters[name] ||= Gitlab::Metrics.counter(
- "gitlab_transaction_#{name}_total".to_sym, "Transaction #{name} counter", BASE_LABELS
- )
- end
+ define_histogram :gitlab_transaction_allocated_memory_bytes do
+ docstring 'Transaction allocated memory bytes'
+ base_labels BASE_LABELS
+ buckets [100, 1000, 10000, 100000, 1000000, 10000000]
+ with_feature :prometheus_metrics_transaction_allocated_memory
end
- def self.metric_transaction_gauge(name)
- return @metric_transaction_gauges[name] if @metric_transaction_gauges&.has_key?(name)
+ def self.transaction_metric(name, type, prefix: nil, tags: {})
+ metric_name = "gitlab_transaction_#{prefix}#{name}_total".to_sym
+ fetch_metric(type, metric_name) do
+ docstring "Transaction #{prefix}#{name} #{type}"
+ base_labels tags.merge(BASE_LABELS)
- METRICS_MUTEX.synchronize do
- @metric_transaction_gauges ||= {}
- @metric_transaction_gauges[name] ||= Gitlab::Metrics.gauge(
- "gitlab_transaction_#{name}".to_sym, "Transaction gauge #{name}", BASE_LABELS, :livesum
- )
+ if type == :gauge
+ multiprocess_mode :livesum
+ end
end
end
end
diff --git a/lib/gitlab/o_auth/user.rb b/lib/gitlab/o_auth/user.rb
index fff9360ea27..e40a001d20c 100644
--- a/lib/gitlab/o_auth/user.rb
+++ b/lib/gitlab/o_auth/user.rb
@@ -55,7 +55,7 @@ module Gitlab
user ||= find_or_build_ldap_user if auto_link_ldap_user?
user ||= build_new_user if signup_enabled?
- user.external = true if external_provider? && user
+ user.external = true if external_provider? && user&.new_record?
user
end
diff --git a/qa/README.md b/qa/README.md
index 3c1b61900d9..b937dc4c7a0 100644
--- a/qa/README.md
+++ b/qa/README.md
@@ -34,6 +34,9 @@ You can use GitLab QA to exercise tests on any live instance! For example, the
following call would log in to a local [GDK] instance and run all specs in
`qa/specs/features`:
+First, `cd` into the `$gdk/gitlab/qa` directory.
+The `bin/qa` script expects you to be in the `qa` folder of the app.
+
```
bin/qa Test::Instance http://localhost:3000
```
diff --git a/qa/qa.rb b/qa/qa.rb
index 180ee778fd4..8630e2a522c 100644
--- a/qa/qa.rb
+++ b/qa/qa.rb
@@ -27,6 +27,7 @@ module QA
module Resource
autoload :Sandbox, 'qa/factory/resource/sandbox'
autoload :Group, 'qa/factory/resource/group'
+ autoload :Issue, 'qa/factory/resource/issue'
autoload :Project, 'qa/factory/resource/project'
autoload :MergeRequest, 'qa/factory/resource/merge_request'
autoload :DeployKey, 'qa/factory/resource/deploy_key'
@@ -125,6 +126,12 @@ module QA
autoload :SecretVariables, 'qa/page/project/settings/secret_variables'
autoload :Runners, 'qa/page/project/settings/runners'
end
+
+ module Issue
+ autoload :New, 'qa/page/project/issue/new'
+ autoload :Show, 'qa/page/project/issue/show'
+ autoload :Index, 'qa/page/project/issue/index'
+ end
end
module Profile
@@ -143,6 +150,13 @@ module QA
autoload :Main, 'qa/page/mattermost/main'
autoload :Login, 'qa/page/mattermost/login'
end
+
+ ##
+ # Classes describing components that are used by several pages.
+ #
+ module Component
+ autoload :Dropzone, 'qa/page/component/dropzone'
+ end
end
##
diff --git a/qa/qa/factory/resource/issue.rb b/qa/qa/factory/resource/issue.rb
new file mode 100644
index 00000000000..06e7e8df56c
--- /dev/null
+++ b/qa/qa/factory/resource/issue.rb
@@ -0,0 +1,34 @@
+require 'securerandom'
+
+module QA
+ module Factory
+ module Resource
+ class Issue < Factory::Base
+ attr_writer :title, :description, :project
+
+ dependency Factory::Resource::Project, as: :project do |project|
+ project.name = 'project-for-issues'
+ project.description = 'project for adding issues'
+ end
+
+ product :title do
+ Page::Project::Issue::Show.act { issue_title }
+ end
+
+ def fabricate!
+ project.visit!
+
+ Page::Project::Show.act do
+ go_to_new_issue
+ end
+
+ Page::Project::Issue::New.perform do |page|
+ page.add_title(@title)
+ page.add_description(@description)
+ page.create_new_issue
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/qa/qa/page/admin/settings.rb b/qa/qa/page/admin/settings.rb
index 1904732aee6..1f646103e7f 100644
--- a/qa/qa/page/admin/settings.rb
+++ b/qa/qa/page/admin/settings.rb
@@ -2,12 +2,13 @@ module QA
module Page
module Admin
class Settings < Page::Base
- ##
- # TODO, define all selectors required by this page object
- #
- # See gitlab-org/gitlab-qa#154
- #
- view 'app/views/admin/application_settings/show.html.haml'
+ view 'app/views/admin/application_settings/_form.html.haml' do
+ element :form_actions, '.form-actions'
+ element :submit, "submit 'Save'"
+ element :repository_storage, '%legend Repository Storage'
+ element :hashed_storage,
+ 'Create new projects using hashed storage paths'
+ end
def enable_hashed_storage
scroll_to 'legend', text: 'Repository Storage'
diff --git a/qa/qa/page/base.rb b/qa/qa/page/base.rb
index f472e8ccc7e..5c3af4b9115 100644
--- a/qa/qa/page/base.rb
+++ b/qa/qa/page/base.rb
@@ -42,6 +42,23 @@ module QA
page.within(selector) { yield } if block_given?
end
+      # Returns true if a GET of the given URL succeeds (HTTP 200)
+ # Useful because `page.status_code` is unsupported by our driver, and
+ # we don't have access to the `response` to use `have_http_status`.
+ def asset_exists?(url)
+ page.execute_script <<~JS
+ xhr = new XMLHttpRequest();
+ xhr.open('GET', '#{url}', true);
+ xhr.send();
+ JS
+
+ return false unless wait(time: 0.5, max: 60, reload: false) do
+ page.evaluate_script('xhr.readyState == XMLHttpRequest.DONE')
+ end
+
+ page.evaluate_script('xhr.status') == 200
+ end
+
def find_element(name)
find(element_selector_css(name))
end
diff --git a/qa/qa/page/component/dropzone.rb b/qa/qa/page/component/dropzone.rb
new file mode 100644
index 00000000000..5e6fdff20eb
--- /dev/null
+++ b/qa/qa/page/component/dropzone.rb
@@ -0,0 +1,29 @@
+module QA
+ module Page
+ module Component
+ class Dropzone
+ attr_reader :page, :container
+
+ def initialize(page, container)
+ @page = page
+ @container = container
+ end
+
+ # Not tested and not expected to work with multiple dropzones
+ # instantiated on one page because there is no distinguishing
+ # attribute per dropzone file field.
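+      #
+      # Illustrative usage (hypothetical selector and path):
+      #   Dropzone.new(page, '.new-note').attach_file('/tmp/banana_sample.gif')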
+ def attach_file(attachment)
+ filename = File.basename(attachment)
+
+ field_style = { visibility: 'visible', height: '', width: '' }
+ page.attach_file(attachment, class: 'dz-hidden-input', make_visible: field_style)
+
+ # Wait for link to be appended to dropzone text
+ page.wait(reload: false) do
+ page.find("#{container} textarea").value.match(filename)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/qa/qa/page/group/show.rb b/qa/qa/page/group/show.rb
index f23294145dd..d215518d316 100644
--- a/qa/qa/page/group/show.rb
+++ b/qa/qa/page/group/show.rb
@@ -2,12 +2,20 @@ module QA
module Page
module Group
class Show < Page::Base
- ##
- # TODO, define all selectors required by this page object
- #
- # See gitlab-org/gitlab-qa#154
- #
- view 'app/views/groups/show.html.haml'
+ view 'app/views/groups/show.html.haml' do
+ element :new_project_or_subgroup_dropdown, '.new-project-subgroup'
+ element :new_project_or_subgroup_dropdown_toggle, '.dropdown-toggle'
+ element :new_project_option, /%li.*data:.*value: "new-project"/
+ element :new_project_button, /%input.*data:.*action: "new-project"/
+ element :new_subgroup_option, /%li.*data:.*value: "new-subgroup"/
+
+ # data-value and data-action get modified by JS for subgroup
+ element :new_subgroup_button, /%input.*\.js-new-group-child/
+ end
+
+ view 'app/assets/javascripts/groups/constants.js' do
+ element :no_result_text, 'Sorry, no groups or projects matched your search'
+ end
def go_to_subgroup(name)
click_link name
@@ -20,35 +28,40 @@ module QA
def has_subgroup?(name)
filter_by_name(name)
- page.has_link?(name)
+ wait(reload: false) do
+ return false if page.has_content?('Sorry, no groups or projects matched your search')
+
+ page.has_link?(name)
+ end
end
def go_to_new_subgroup
- within '.new-project-subgroup' do
- # May need to click again because it is possible to click the button quicker than the JS is bound
- wait(reload: false) do
- find('.dropdown-toggle').click
-
- page.has_css?("li[data-value='new-subgroup']")
- end
- find("li[data-value='new-subgroup']").click
- end
+ click_new('subgroup')
find("input[data-action='new-subgroup']").click
end
def go_to_new_project
+ click_new('project')
+
+ find("input[data-action='new-project']").click
+ end
+
+ private
+
+ def click_new(kind)
within '.new-project-subgroup' do
+ css = "li[data-value='new-#{kind}']"
+
# May need to click again because it is possible to click the button quicker than the JS is bound
wait(reload: false) do
find('.dropdown-toggle').click
- page.has_css?("li[data-value='new-project']")
+ page.has_css?(css)
end
- find("li[data-value='new-project']").click
- end
- find("input[data-action='new-project']").click
+ find(css).click
+ end
end
end
end
diff --git a/qa/qa/page/menu/admin.rb b/qa/qa/page/menu/admin.rb
index 40da4a53e8a..573b98f7386 100644
--- a/qa/qa/page/menu/admin.rb
+++ b/qa/qa/page/menu/admin.rb
@@ -2,15 +2,8 @@ module QA
module Page
module Menu
class Admin < Page::Base
- ##
- # TODO, define all selectors required by this page object
- #
- # See gitlab-org/gitlab-qa#154
- #
- view 'app/views/admin/dashboard/index.html.haml'
-
- def go_to_license
- click_link 'License'
+ view 'app/views/layouts/nav/sidebar/_admin.html.haml' do
+ element :settings, "_('Settings')"
end
def go_to_settings
diff --git a/qa/qa/page/menu/side.rb b/qa/qa/page/menu/side.rb
index b2738152907..5fdcea20029 100644
--- a/qa/qa/page/menu/side.rb
+++ b/qa/qa/page/menu/side.rb
@@ -7,6 +7,8 @@ module QA
element :settings_link, 'link_to edit_project_path'
element :repository_link, "title: 'Repository'"
element :pipelines_settings_link, "title: 'CI / CD'"
+ element :issues_link, %r{link_to.*shortcuts-issues}
+ element :issues_link_text, "Issues"
element :top_level_items, '.sidebar-top-level-items'
element :activity_link, "title: 'Activity'"
end
@@ -43,6 +45,12 @@ module QA
end
end
+ def click_issues
+ within_sidebar do
+ click_link('Issues')
+ end
+ end
+
private
def hover_settings
diff --git a/qa/qa/page/project/issue/index.rb b/qa/qa/page/project/issue/index.rb
new file mode 100644
index 00000000000..b5903f536a4
--- /dev/null
+++ b/qa/qa/page/project/issue/index.rb
@@ -0,0 +1,17 @@
+module QA
+ module Page
+ module Project
+ module Issue
+ class Index < Page::Base
+ view 'app/views/projects/issues/_issue.html.haml' do
+ element :issue_link, 'link_to issue.title'
+ end
+
+ def go_to_issue(title)
+ click_link(title)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/qa/qa/page/project/issue/new.rb b/qa/qa/page/project/issue/new.rb
new file mode 100644
index 00000000000..7fc581da1ed
--- /dev/null
+++ b/qa/qa/page/project/issue/new.rb
@@ -0,0 +1,33 @@
+module QA
+ module Page
+ module Project
+ module Issue
+ class New < Page::Base
+ view 'app/views/shared/issuable/_form.html.haml' do
+ element :submit_issue_button, 'form.submit "Submit'
+ end
+
+ view 'app/views/shared/issuable/form/_title.html.haml' do
+ element :issue_title_textbox, 'form.text_field :title'
+ end
+
+ view 'app/views/shared/form_elements/_description.html.haml' do
+ element :issue_description_textarea, "render 'projects/zen', f: form, attr: :description"
+ end
+
+ def add_title(title)
+ fill_in 'issue_title', with: title
+ end
+
+ def add_description(description)
+ fill_in 'issue_description', with: description
+ end
+
+ def create_new_issue
+ click_on 'Submit issue'
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/qa/qa/page/project/issue/show.rb b/qa/qa/page/project/issue/show.rb
new file mode 100644
index 00000000000..364a2c61665
--- /dev/null
+++ b/qa/qa/page/project/issue/show.rb
@@ -0,0 +1,40 @@
+module QA
+ module Page
+ module Project
+ module Issue
+ class Show < Page::Base
+ view 'app/views/projects/issues/show.html.haml' do
+ element :issue_details, '.issue-details'
+ element :title, '.title'
+ end
+
+ view 'app/views/shared/notes/_form.html.haml' do
+ element :new_note_form, 'new-note'
+ element :new_note_form, 'attr: :note'
+ end
+
+ view 'app/views/shared/notes/_comment_button.html.haml' do
+ element :comment_button, '%strong Comment'
+ end
+
+ def issue_title
+ find('.issue-details .title').text
+ end
+
+ # Adds a comment to an issue
+          # The attachment option, when given, should be an absolute path
+ def comment(text, attachment: nil)
+ fill_in(with: text, name: 'note[note]')
+
+ unless attachment.nil?
+ QA::Page::Component::Dropzone.new(page, '.new-note')
+ .attach_file(attachment)
+ end
+
+ click_on 'Comment'
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/qa/qa/page/project/show.rb b/qa/qa/page/project/show.rb
index 75308ae8a3c..553d35f9579 100644
--- a/qa/qa/page/project/show.rb
+++ b/qa/qa/page/project/show.rb
@@ -17,6 +17,11 @@ module QA
element :project_name
end
+ view 'app/views/layouts/header/_new_dropdown.haml' do
+ element :new_menu_toggle
+ element :new_issue_link, "link_to 'New issue', new_project_issue_path(@project)"
+ end
+
def choose_repository_clone_http
wait(reload: false) do
click_element :clone_dropdown
@@ -46,6 +51,12 @@ module QA
sleep 5
refresh
end
+
+ def go_to_new_issue
+ click_element :new_menu_toggle
+
+ click_link 'New issue'
+ end
end
end
end
diff --git a/qa/spec/fixtures/banana_sample.gif b/qa/spec/fixtures/banana_sample.gif
new file mode 100644
index 00000000000..1322ac92d14
--- /dev/null
+++ b/qa/spec/fixtures/banana_sample.gif
Binary files differ
diff --git a/scripts/lint-rugged b/scripts/lint-rugged
index 3f8fcb558e3..03f780f880b 100755
--- a/scripts/lint-rugged
+++ b/scripts/lint-rugged
@@ -21,6 +21,7 @@ ALLOWED = [
].freeze
rugged_lines = IO.popen(%w[git grep -i -n rugged -- app config lib], &:read).lines
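+# git grep -n prints "<file>:<line>:<match>"; keep only matches in Ruby source files.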
+rugged_lines = rugged_lines.select { |l| /^[^:]*\.rb:/ =~ l }
rugged_lines = rugged_lines.reject { |l| l.start_with?(*ALLOWED) }
rugged_lines = rugged_lines.reject do |line|
code, _comment = line.split('# ', 2)
diff --git a/scripts/static-analysis b/scripts/static-analysis
index b3895292ab4..bdb88f3cb57 100755
--- a/scripts/static-analysis
+++ b/scripts/static-analysis
@@ -35,7 +35,6 @@ tasks = [
%w[bundle exec rubocop --parallel],
%w[bundle exec rake gettext:lint],
%w[bundle exec rake lint:static_verification],
- %w[scripts/lint-changelog-yaml],
%w[scripts/lint-conflicts.sh],
%w[scripts/lint-rugged]
]
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index a9cfd964dd5..492fed42d31 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -85,6 +85,30 @@ describe GroupsController do
end
end
+ describe 'GET #activity' do
+ render_views
+
+ before do
+ sign_in(user)
+ project
+ end
+
+ context 'as json' do
+ it 'includes all projects in event feed' do
+ 3.times do
+ project = create(:project, group: group)
+ create(:event, project: project)
+ end
+
+ get :activity, id: group.to_param, format: :json
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['count']).to eq(3)
+ expect(assigns(:projects).limit_value).to be_nil
+ end
+ end
+ end
+
describe 'POST #create' do
context 'when creating subgroups', :nested_groups do
[true, false].each do |can_create_group_status|
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 4a2998b4ccd..9656e7f7e74 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -102,6 +102,18 @@ describe Projects::IssuesController do
expect(response).to redirect_to(namespace_project_issues_path(page: last_page, state: controller.params[:state], scope: controller.params[:scope]))
end
+
+ it 'does not use pagination if disabled' do
+ allow(controller).to receive(:pagination_disabled?).and_return(true)
+
+ get :index,
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ page: (last_page + 1).to_param
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(assigns(:issues).size).to eq(2)
+ end
end
end
diff --git a/spec/features/projects/members/share_with_group_spec.rb b/spec/features/projects/members/share_with_group_spec.rb
index 3198798306c..4cf48098401 100644
--- a/spec/features/projects/members/share_with_group_spec.rb
+++ b/spec/features/projects/members/share_with_group_spec.rb
@@ -122,7 +122,7 @@ feature 'Project > Members > Share with Group', :js do
select2 group.id, from: '#link_group_id'
fill_in 'expires_at_groups', with: (Time.now + 4.5.days).strftime('%Y-%m-%d')
- page.find('body').click
+ click_on 'share-with-group-tab'
find('.btn-create').click
end
diff --git a/spec/javascripts/api_spec.js b/spec/javascripts/api_spec.js
index cc5fa42aafe..cf3a76d0d2e 100644
--- a/spec/javascripts/api_spec.js
+++ b/spec/javascripts/api_spec.js
@@ -1,3 +1,5 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import Api from '~/api';
describe('Api', () => {
@@ -7,20 +9,17 @@ describe('Api', () => {
api_version: dummyApiVersion,
relative_url_root: dummyUrlRoot,
};
- const dummyResponse = 'hello from outer space!';
- const sendDummyResponse = () => {
- const deferred = $.Deferred();
- deferred.resolve(dummyResponse);
- return deferred.promise();
- };
let originalGon;
+ let mock;
beforeEach(() => {
+ mock = new MockAdapter(axios);
originalGon = window.gon;
window.gon = Object.assign({}, dummyGon);
});
afterEach(() => {
+ mock.restore();
window.gon = originalGon;
});
@@ -38,15 +37,13 @@ describe('Api', () => {
describe('group', () => {
it('fetches a group', (done) => {
const groupId = '123456';
- const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}.json`;
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- return sendDummyResponse();
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}`;
+ mock.onGet(expectedUrl).reply(200, {
+ name: 'test',
});
Api.group(groupId, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.name).toBe('test');
done();
});
});
@@ -57,19 +54,13 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups.json`;
- const expectedData = Object.assign({
- search: query,
- per_page: 20,
- }, options);
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.groups(query, options, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -79,19 +70,13 @@ describe('Api', () => {
it('fetches namespaces', (done) => {
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/namespaces.json`;
- const expectedData = {
- search: query,
- per_page: 20,
- };
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.namespaces(query, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -103,21 +88,13 @@ describe('Api', () => {
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
window.gon.current_user_id = 1;
- const expectedData = Object.assign({
- search: query,
- per_page: 20,
- membership: true,
- simple: true,
- }, options);
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.projects(query, options, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -126,20 +103,13 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
- const expectedData = Object.assign({
- search: query,
- per_page: 20,
- simple: true,
- }, options);
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.projects(query, options, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -154,16 +124,16 @@ describe('Api', () => {
const expectedData = {
label: labelData,
};
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.type).toEqual('POST');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
+ mock.onPost(expectedUrl).reply((config) => {
+ expect(config.data).toBe(JSON.stringify(expectedData));
+
+ return [200, {
+ name: 'test',
+ }];
});
Api.newLabel(namespace, project, labelData, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.name).toBe('test');
done();
});
});
@@ -174,19 +144,13 @@ describe('Api', () => {
const groupId = '123456';
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`;
- const expectedData = {
- search: query,
- per_page: 20,
- };
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.groupProjects(groupId, query, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -197,14 +161,10 @@ describe('Api', () => {
const licenseKey = "driver's license";
const data = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/licenses/${licenseKey}`;
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.data).toEqual(data);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.licenseText(licenseKey, data, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -214,13 +174,10 @@ describe('Api', () => {
it('fetches a gitignore text', (done) => {
const gitignoreKey = 'ignore git';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitignores/${gitignoreKey}`;
- spyOn(jQuery, 'get').and.callFake((url, callback) => {
- expect(url).toEqual(expectedUrl);
- callback(dummyResponse);
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.gitignoreText(gitignoreKey, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -230,13 +187,10 @@ describe('Api', () => {
it('fetches a .gitlab-ci.yml', (done) => {
const gitlabCiYmlKey = 'Y CI ML';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitlab_ci_ymls/${gitlabCiYmlKey}`;
- spyOn(jQuery, 'get').and.callFake((url, callback) => {
- expect(url).toEqual(expectedUrl);
- callback(dummyResponse);
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.gitlabCiYml(gitlabCiYmlKey, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -246,13 +200,10 @@ describe('Api', () => {
it('fetches a Dockerfile', (done) => {
const dockerfileYmlKey = 'a giant whale';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/dockerfiles/${dockerfileYmlKey}`;
- spyOn(jQuery, 'get').and.callFake((url, callback) => {
- expect(url).toEqual(expectedUrl);
- callback(dummyResponse);
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.dockerfileYml(dockerfileYmlKey, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -265,14 +216,10 @@ describe('Api', () => {
const templateKey = ' template #%?.key ';
const templateType = 'template type';
const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/templates/${templateType}/${encodeURIComponent(templateKey)}`;
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.issueTemplate(namespace, project, templateKey, templateType, (error, response) => {
- expect(error).toBe(null);
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -283,20 +230,14 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users.json`;
- const expectedData = Object.assign({
- search: query,
- per_page: 20,
- }, options);
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.users(query, options)
- .then((response) => {
- expect(response).toBe(dummyResponse);
+ .then(({ data }) => {
+ expect(data.length).toBe(1);
+ expect(data[0].name).toBe('test');
})
.then(done)
.catch(done.fail);
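
Throughout these Api specs the jQuery.ajax spies are replaced by axios-mock-adapter stubs. A minimal, self-contained sketch of that pattern follows; the '~/lib/utils/axios_utils' import path and the '/api/v4/projects.json' URL are illustrative assumptions, not taken from the diff above.

// Sketch only: stubbing a GET endpoint with axios-mock-adapter in a Jasmine spec.
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';

describe('a stubbed endpoint', () => {
  let mock;

  beforeEach(() => {
    mock = new MockAdapter(axios);
    // Any GET to this URL now resolves with status 200 and the given JSON body.
    mock.onGet('/api/v4/projects.json').reply(200, [{ name: 'test' }]);
  });

  afterEach(() => {
    // Restore the original adapter so later specs hit the real axios again.
    mock.restore();
  });

  it('resolves with the stubbed payload', (done) => {
    axios.get('/api/v4/projects.json')
      .then(({ data }) => {
        // axios wraps the response body in `data`, which is why the specs above
        // read response.data or destructure ({ data }) rather than the raw body.
        expect(data.length).toBe(1);
        expect(data[0].name).toBe('test');
        done();
      })
      .catch(done.fail);
  });
});
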
diff --git a/spec/javascripts/blob/viewer/index_spec.js b/spec/javascripts/blob/viewer/index_spec.js
index cfa6650d85f..892411a6a40 100644
--- a/spec/javascripts/blob/viewer/index_spec.js
+++ b/spec/javascripts/blob/viewer/index_spec.js
@@ -1,28 +1,35 @@
/* eslint-disable no-new */
+import MockAdapter from 'axios-mock-adapter';
import BlobViewer from '~/blob/viewer/index';
+import axios from '~/lib/utils/axios_utils';
describe('Blob viewer', () => {
let blob;
+ let mock;
+
preloadFixtures('snippets/show.html.raw');
beforeEach(() => {
+ mock = new MockAdapter(axios);
+
loadFixtures('snippets/show.html.raw');
$('#modal-upload-blob').remove();
blob = new BlobViewer();
- spyOn($, 'ajax').and.callFake(() => {
- const d = $.Deferred();
-
- d.resolve({
- html: '<div>testing</div>',
- });
+ mock.onGet('http://test.host/snippets/1.json?viewer=rich').reply(200, {
+ html: '<div>testing</div>',
+ });
- return d.promise();
+ mock.onGet('http://test.host/snippets/1.json?viewer=simple').reply(200, {
+ html: '<div>testing</div>',
});
+
+ spyOn(axios, 'get').and.callThrough();
});
afterEach(() => {
+ mock.restore();
location.hash = '';
});
@@ -30,7 +37,6 @@ describe('Blob viewer', () => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
- expect($.ajax).toHaveBeenCalled();
expect(
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
.classList.contains('hidden'),
@@ -46,7 +52,6 @@ describe('Blob viewer', () => {
new BlobViewer();
setTimeout(() => {
- expect($.ajax).toHaveBeenCalled();
expect(
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
.classList.contains('hidden'),
@@ -64,12 +69,8 @@ describe('Blob viewer', () => {
});
asyncClick()
+ .then(() => asyncClick())
.then(() => {
- expect($.ajax).toHaveBeenCalled();
- return asyncClick();
- })
- .then(() => {
- expect($.ajax.calls.count()).toBe(1);
expect(
document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'),
).toBe('true');
@@ -122,7 +123,6 @@ describe('Blob viewer', () => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
- expect($.ajax).toHaveBeenCalled();
expect(
copyButton.classList.contains('disabled'),
).toBeFalsy();
@@ -135,8 +135,6 @@ describe('Blob viewer', () => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
- expect($.ajax).toHaveBeenCalled();
-
expect(
copyButton.getAttribute('data-original-title'),
).toBe('Copy source to clipboard');
@@ -171,14 +169,14 @@ describe('Blob viewer', () => {
it('sends AJAX request when switching to simple view', () => {
blob.switchToViewer('simple');
- expect($.ajax).toHaveBeenCalled();
+ expect(axios.get).toHaveBeenCalled();
});
it('does not send AJAX request when switching to rich view', () => {
blob.switchToViewer('simple');
blob.switchToViewer('rich');
- expect($.ajax.calls.count()).toBe(1);
+ expect(axios.get.calls.count()).toBe(1);
});
});
});
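
The blob viewer spec keeps a Jasmine spy on axios.get with callThrough so requests still reach the MockAdapter while call counts remain assertable. A hedged sketch of that combination; the URL and payload are placeholders:

// Sketch: spy and mock together; the spy observes the call, the mock answers it.
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';

describe('counting mocked requests', () => {
  let mock;

  beforeEach(() => {
    mock = new MockAdapter(axios);
    mock.onGet('/snippets/1.json?viewer=simple').reply(200, { html: '<div>testing</div>' });
    // callThrough lets the request proceed to the mocked adapter instead of returning undefined.
    spyOn(axios, 'get').and.callThrough();
  });

  afterEach(() => {
    mock.restore();
  });

  it('records exactly one GET', (done) => {
    axios.get('/snippets/1.json?viewer=simple')
      .then(() => {
        expect(axios.get).toHaveBeenCalled();
        expect(axios.get.calls.count()).toBe(1);
        done();
      })
      .catch(done.fail);
  });
});
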
diff --git a/spec/javascripts/commits_spec.js b/spec/javascripts/commits_spec.js
index d0176520440..44ec9e4eabf 100644
--- a/spec/javascripts/commits_spec.js
+++ b/spec/javascripts/commits_spec.js
@@ -1,4 +1,6 @@
import 'vendor/jquery.endless-scroll';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import CommitsList from '~/commits';
describe('Commits List', () => {
@@ -43,30 +45,47 @@ describe('Commits List', () => {
describe('on entering input', () => {
let ajaxSpy;
+ let mock;
beforeEach(() => {
CommitsList.init(25);
CommitsList.searchField.val('');
spyOn(history, 'replaceState').and.stub();
- ajaxSpy = spyOn(jQuery, 'ajax').and.callFake((req) => {
- req.success({
- data: '<li>Result</li>',
- });
+ mock = new MockAdapter(axios);
+
+ mock.onGet('/h5bp/html5-boilerplate/commits/master').reply(200, {
+ html: '<li>Result</li>',
});
+
+ ajaxSpy = spyOn(axios, 'get').and.callThrough();
+ });
+
+ afterEach(() => {
+ mock.restore();
});
- it('should save the last search string', () => {
+ it('should save the last search string', (done) => {
CommitsList.searchField.val('GitLab');
- CommitsList.filterResults();
- expect(ajaxSpy).toHaveBeenCalled();
- expect(CommitsList.lastSearch).toEqual('GitLab');
+ CommitsList.filterResults()
+ .then(() => {
+ expect(ajaxSpy).toHaveBeenCalled();
+ expect(CommitsList.lastSearch).toEqual('GitLab');
+
+ done();
+ })
+ .catch(done.fail);
});
- it('should not make ajax call if the input does not change', () => {
- CommitsList.filterResults();
- expect(ajaxSpy).not.toHaveBeenCalled();
- expect(CommitsList.lastSearch).toEqual('');
+ it('should not make ajax call if the input does not change', (done) => {
+ CommitsList.filterResults()
+ .then(() => {
+ expect(ajaxSpy).not.toHaveBeenCalled();
+ expect(CommitsList.lastSearch).toEqual('');
+
+ done();
+ })
+ .catch(done.fail);
});
});
});
diff --git a/spec/javascripts/create_item_dropdown_spec.js b/spec/javascripts/create_item_dropdown_spec.js
index c8b00a4f553..143137c23ec 100644
--- a/spec/javascripts/create_item_dropdown_spec.js
+++ b/spec/javascripts/create_item_dropdown_spec.js
@@ -18,54 +18,67 @@ describe('CreateItemDropdown', () => {
preloadFixtures('static/create_item_dropdown.html.raw');
let $wrapperEl;
+ let createItemDropdown;
+
+ function createItemAndClearInput(text) {
+ // Filter for the new item
+ $wrapperEl.find('.dropdown-input-field')
+ .val(text)
+ .trigger('input');
+
+ // Create the new item
+ const $createButton = $wrapperEl.find('.js-dropdown-create-new-item');
+ $createButton.click();
+
+ // Clear out the filter
+ $wrapperEl.find('.dropdown-input-field')
+ .val('')
+ .trigger('input');
+ }
beforeEach(() => {
loadFixtures('static/create_item_dropdown.html.raw');
$wrapperEl = $('.js-create-item-dropdown-fixture-root');
-
- // eslint-disable-next-line no-new
- new CreateItemDropdown({
- $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
- defaultToggleLabel: 'All variables',
- fieldName: 'variable[environment]',
- getData: (term, callback) => {
- callback(DROPDOWN_ITEM_DATA);
- },
- });
});
afterEach(() => {
$wrapperEl.remove();
});
- it('should have a dropdown item for each piece of data', () => {
- // Get the data in the dropdown
- $('.js-dropdown-menu-toggle').click();
+ describe('items', () => {
+ beforeEach(() => {
+ createItemDropdown = new CreateItemDropdown({
+ $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
+ defaultToggleLabel: 'All variables',
+ fieldName: 'variable[environment]',
+ getData: (term, callback) => {
+ callback(DROPDOWN_ITEM_DATA);
+ },
+ });
+ });
+
+ it('should have a dropdown item for each piece of data', () => {
+ // Get the data in the dropdown
+ $('.js-dropdown-menu-toggle').click();
- const $itemEls = $wrapperEl.find('.js-dropdown-content a');
- expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length);
+ const $itemEls = $wrapperEl.find('.js-dropdown-content a');
+ expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length);
+ });
});
describe('created items', () => {
const NEW_ITEM_TEXT = 'foobarbaz';
- function createItemAndClearInput(text) {
- // Filter for the new item
- $wrapperEl.find('.dropdown-input-field')
- .val(text)
- .trigger('input');
-
- // Create the new item
- const $createButton = $wrapperEl.find('.js-dropdown-create-new-item');
- $createButton.click();
-
- // Clear out the filter
- $wrapperEl.find('.dropdown-input-field')
- .val('')
- .trigger('input');
- }
-
beforeEach(() => {
+ createItemDropdown = new CreateItemDropdown({
+ $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
+ defaultToggleLabel: 'All variables',
+ fieldName: 'variable[environment]',
+ getData: (term, callback) => {
+ callback(DROPDOWN_ITEM_DATA);
+ },
+ });
+
// Open the dropdown
$('.js-dropdown-menu-toggle').click();
@@ -103,4 +116,68 @@ describe('CreateItemDropdown', () => {
expect($itemEls.length).toEqual(DROPDOWN_ITEM_DATA.length);
});
});
+
+ describe('clearDropdown()', () => {
+ beforeEach(() => {
+ createItemDropdown = new CreateItemDropdown({
+ $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
+ defaultToggleLabel: 'All variables',
+ fieldName: 'variable[environment]',
+ getData: (term, callback) => {
+ callback(DROPDOWN_ITEM_DATA);
+ },
+ });
+ });
+
+ it('should clear all data and filter input', () => {
+ const filterInput = $wrapperEl.find('.dropdown-input-field');
+
+ // Get the data in the dropdown
+ $('.js-dropdown-menu-toggle').click();
+
+ // Filter for an item
+ filterInput
+ .val('one')
+ .trigger('input');
+
+ const $itemElsAfterFilter = $wrapperEl.find('.js-dropdown-content a');
+ expect($itemElsAfterFilter.length).toEqual(1);
+
+ createItemDropdown.clearDropdown();
+
+ const $itemElsAfterClear = $wrapperEl.find('.js-dropdown-content a');
+ expect($itemElsAfterClear.length).toEqual(0);
+ expect(filterInput.val()).toEqual('');
+ });
+ });
+
+ describe('createNewItemFromValue option', () => {
+ beforeEach(() => {
+ createItemDropdown = new CreateItemDropdown({
+ $dropdown: $wrapperEl.find('.js-dropdown-menu-toggle'),
+ defaultToggleLabel: 'All variables',
+ fieldName: 'variable[environment]',
+ getData: (term, callback) => {
+ callback(DROPDOWN_ITEM_DATA);
+ },
+ createNewItemFromValue: newValue => ({
+ title: `${newValue}-title`,
+ id: `${newValue}-id`,
+ text: `${newValue}-text`,
+ }),
+ });
+ });
+
+ it('all items go through createNewItemFromValue', () => {
+ // Get the data in the dropdown
+ $('.js-dropdown-menu-toggle').click();
+
+ createItemAndClearInput('new-item');
+
+ const $itemEls = $wrapperEl.find('.js-dropdown-content a');
+ expect($itemEls.length).toEqual(1 + DROPDOWN_ITEM_DATA.length);
+ expect($($itemEls[3]).text()).toEqual('new-item-text');
+ expect($wrapperEl.find('.dropdown-toggle-text').text()).toEqual('new-item-title');
+ });
+ });
});
diff --git a/spec/javascripts/integrations/integration_settings_form_spec.js b/spec/javascripts/integrations/integration_settings_form_spec.js
index 9033eb9ce02..d0fba908e34 100644
--- a/spec/javascripts/integrations/integration_settings_form_spec.js
+++ b/spec/javascripts/integrations/integration_settings_form_spec.js
@@ -1,3 +1,5 @@
+import MockAdaptor from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import IntegrationSettingsForm from '~/integrations/integration_settings_form';
describe('IntegrationSettingsForm', () => {
@@ -109,91 +111,117 @@ describe('IntegrationSettingsForm', () => {
describe('testSettings', () => {
let integrationSettingsForm;
let formData;
+ let mock;
beforeEach(() => {
+ mock = new MockAdaptor(axios);
+
+ spyOn(axios, 'put').and.callThrough();
+
integrationSettingsForm = new IntegrationSettingsForm('.js-integration-settings-form');
formData = integrationSettingsForm.$form.serialize();
});
- it('should make an ajax request with provided `formData`', () => {
- const deferred = $.Deferred();
- spyOn($, 'ajax').and.returnValue(deferred.promise());
+ afterEach(() => {
+ mock.restore();
+ });
- integrationSettingsForm.testSettings(formData);
+ it('should make an ajax request with provided `formData`', (done) => {
+ integrationSettingsForm.testSettings(formData)
+ .then(() => {
+ expect(axios.put).toHaveBeenCalledWith(integrationSettingsForm.testEndPoint, formData);
- expect($.ajax).toHaveBeenCalledWith({
- type: 'PUT',
- url: integrationSettingsForm.testEndPoint,
- data: formData,
- });
+ done();
+ })
+ .catch(done.fail);
});
- it('should show error Flash with `Save anyway` action if ajax request responds with error in test', () => {
+ it('should show error Flash with `Save anyway` action if ajax request responds with error in test', (done) => {
const errorMessage = 'Test failed.';
- const deferred = $.Deferred();
- spyOn($, 'ajax').and.returnValue(deferred.promise());
-
- integrationSettingsForm.testSettings(formData);
+ mock.onPut(integrationSettingsForm.testEndPoint).reply(200, {
+ error: true,
+ message: errorMessage,
+ service_response: 'some error',
+ });
- deferred.resolve({ error: true, message: errorMessage, service_response: 'some error' });
+ integrationSettingsForm.testSettings(formData)
+ .then(() => {
+ const $flashContainer = $('.flash-container');
+ expect($flashContainer.find('.flash-text').text().trim()).toEqual('Test failed. some error');
+ expect($flashContainer.find('.flash-action')).toBeDefined();
+ expect($flashContainer.find('.flash-action').text().trim()).toEqual('Save anyway');
- const $flashContainer = $('.flash-container');
- expect($flashContainer.find('.flash-text').text().trim()).toEqual('Test failed. some error');
- expect($flashContainer.find('.flash-action')).toBeDefined();
- expect($flashContainer.find('.flash-action').text().trim()).toEqual('Save anyway');
+ done();
+ })
+ .catch(done.fail);
});
- it('should submit form if ajax request responds without any error in test', () => {
- const deferred = $.Deferred();
- spyOn($, 'ajax').and.returnValue(deferred.promise());
+ it('should submit form if ajax request responds without any error in test', (done) => {
+ spyOn(integrationSettingsForm.$form, 'submit');
- integrationSettingsForm.testSettings(formData);
+ mock.onPut(integrationSettingsForm.testEndPoint).reply(200, {
+ error: false,
+ });
- spyOn(integrationSettingsForm.$form, 'submit');
- deferred.resolve({ error: false });
+ integrationSettingsForm.testSettings(formData)
+ .then(() => {
+ expect(integrationSettingsForm.$form.submit).toHaveBeenCalled();
- expect(integrationSettingsForm.$form.submit).toHaveBeenCalled();
+ done();
+ })
+ .catch(done.fail);
});
- it('should submit form when clicked on `Save anyway` action of error Flash', () => {
- const errorMessage = 'Test failed.';
- const deferred = $.Deferred();
- spyOn($, 'ajax').and.returnValue(deferred.promise());
+ it('should submit form when clicked on `Save anyway` action of error Flash', (done) => {
+ spyOn(integrationSettingsForm.$form, 'submit');
- integrationSettingsForm.testSettings(formData);
+ const errorMessage = 'Test failed.';
+ mock.onPut(integrationSettingsForm.testEndPoint).reply(200, {
+ error: true,
+ message: errorMessage,
+ });
- deferred.resolve({ error: true, message: errorMessage });
+ integrationSettingsForm.testSettings(formData)
+ .then(() => {
+ const $flashAction = $('.flash-container .flash-action');
+ expect($flashAction).toBeDefined();
- const $flashAction = $('.flash-container .flash-action');
- expect($flashAction).toBeDefined();
+ $flashAction.get(0).click();
+ })
+ .then(() => {
+ expect(integrationSettingsForm.$form.submit).toHaveBeenCalled();
- spyOn(integrationSettingsForm.$form, 'submit');
- $flashAction.get(0).click();
- expect(integrationSettingsForm.$form.submit).toHaveBeenCalled();
+ done();
+ })
+ .catch(done.fail);
});
- it('should show error Flash if ajax request failed', () => {
+ it('should show error Flash if ajax request failed', (done) => {
const errorMessage = 'Something went wrong on our end.';
- const deferred = $.Deferred();
- spyOn($, 'ajax').and.returnValue(deferred.promise());
- integrationSettingsForm.testSettings(formData);
+ mock.onPut(integrationSettingsForm.testEndPoint).networkError();
- deferred.reject();
+ integrationSettingsForm.testSettings(formData)
+ .then(() => {
+ expect($('.flash-container .flash-text').text().trim()).toEqual(errorMessage);
- expect($('.flash-container .flash-text').text().trim()).toEqual(errorMessage);
+ done();
+ })
+ .catch(done.fail);
});
- it('should always call `toggleSubmitBtnState` with `false` once request is completed', () => {
- const deferred = $.Deferred();
- spyOn($, 'ajax').and.returnValue(deferred.promise());
-
- integrationSettingsForm.testSettings(formData);
+ it('should always call `toggleSubmitBtnState` with `false` once request is completed', (done) => {
+ mock.onPut(integrationSettingsForm.testEndPoint).networkError();
spyOn(integrationSettingsForm, 'toggleSubmitBtnState');
- deferred.reject();
- expect(integrationSettingsForm.toggleSubmitBtnState).toHaveBeenCalledWith(false);
+ integrationSettingsForm.testSettings(formData)
+ .then(() => {
+ expect(integrationSettingsForm.toggleSubmitBtnState).toHaveBeenCalledWith(false);
+
+ done();
+ })
+ .catch(done.fail);
});
});
});
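
The integration settings spec distinguishes two failure modes: an HTTP 200 whose body flags an error, and a transport failure. A short sketch of how both are simulated with axios-mock-adapter; the endpoint paths here are assumptions for illustration:

// Sketch: application-level vs. transport-level failures.
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';

const mock = new MockAdapter(axios);

// 200 with an error payload: the promise resolves and the caller inspects the body.
mock.onPut('/service/test').reply(200, { error: true, message: 'Test failed.' });

// Network error: the promise rejects, exercising the catch branch.
mock.onPut('/service/broken').networkError();

axios.put('/service/test')
  .then(({ data }) => {
    if (data.error) {
      // e.g. show an error Flash with a "Save anyway" action, as the spec above asserts
    }
  });

axios.put('/service/broken')
  .catch(() => {
    // e.g. show the generic "Something went wrong on our end." Flash
  });
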
diff --git a/spec/javascripts/issuable_spec.js b/spec/javascripts/issuable_spec.js
index 5a9112716f4..d53ffecbd35 100644
--- a/spec/javascripts/issuable_spec.js
+++ b/spec/javascripts/issuable_spec.js
@@ -1,3 +1,5 @@
+import MockAdaptor from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import IssuableIndex from '~/issuable_index';
describe('Issuable', () => {
@@ -19,6 +21,8 @@ describe('Issuable', () => {
});
describe('resetIncomingEmailToken', () => {
+ let mock;
+
beforeEach(() => {
const element = document.createElement('a');
element.classList.add('incoming-email-token-reset');
@@ -30,14 +34,28 @@ describe('Issuable', () => {
document.body.appendChild(input);
Issuable = new IssuableIndex('issue_');
+
+ mock = new MockAdaptor(axios);
+
+ mock.onPut('foo').reply(200, {
+ new_address: 'testing123',
+ });
});
- it('should send request to reset email token', () => {
- spyOn(jQuery, 'ajax').and.callThrough();
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('should send request to reset email token', (done) => {
+ spyOn(axios, 'put').and.callThrough();
document.querySelector('.incoming-email-token-reset').click();
- expect(jQuery.ajax).toHaveBeenCalled();
- expect(jQuery.ajax.calls.argsFor(0)[0].url).toEqual('foo');
+ setTimeout(() => {
+ expect(axios.put).toHaveBeenCalledWith('foo');
+ expect($('#issuable_email').val()).toBe('testing123');
+
+ done();
+ });
});
});
});
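
In the issuable spec the request is fired by a DOM click rather than awaited directly, so the assertions are deferred by one macrotask; the mocked reply settles on the microtask queue first, which is why a bare setTimeout is sufficient. A minimal sketch, assuming the fixture element and the mocked 'foo' endpoint set up above:

// Sketch: defer assertions one macrotask so the mocked axios.put promise has resolved.
it('sends the reset request and updates the field', (done) => {
  spyOn(axios, 'put').and.callThrough();

  document.querySelector('.incoming-email-token-reset').click();

  setTimeout(() => {
    expect(axios.put).toHaveBeenCalledWith('foo');
    expect($('#issuable_email').val()).toBe('testing123');
    done();
  });
});
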
diff --git a/spec/javascripts/lib/utils/users_cache_spec.js b/spec/javascripts/lib/utils/users_cache_spec.js
index ec6ea35952b..50371c8c5f6 100644
--- a/spec/javascripts/lib/utils/users_cache_spec.js
+++ b/spec/javascripts/lib/utils/users_cache_spec.js
@@ -92,7 +92,9 @@ describe('UsersCache', () => {
apiSpy = (query, options) => {
expect(query).toBe('');
expect(options).toEqual({ username: dummyUsername });
- return Promise.resolve([dummyUser]);
+ return Promise.resolve({
+ data: [dummyUser],
+ });
};
UsersCache.retrieve(dummyUsername)
diff --git a/spec/javascripts/repo/components/new_dropdown/modal_spec.js b/spec/javascripts/repo/components/new_dropdown/modal_spec.js
index 233cca06ed0..8bbc3100357 100644
--- a/spec/javascripts/repo/components/new_dropdown/modal_spec.js
+++ b/spec/javascripts/repo/components/new_dropdown/modal_spec.js
@@ -18,8 +18,10 @@ describe('new file modal component', () => {
}));
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: {
- id: '123branch',
+ data: {
+ commit: {
+ id: '123branch',
+ },
},
}));
diff --git a/spec/javascripts/repo/components/new_dropdown/upload_spec.js b/spec/javascripts/repo/components/new_dropdown/upload_spec.js
index 788c08e5279..667112ab21a 100644
--- a/spec/javascripts/repo/components/new_dropdown/upload_spec.js
+++ b/spec/javascripts/repo/components/new_dropdown/upload_spec.js
@@ -17,8 +17,10 @@ describe('new dropdown upload', () => {
}));
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: {
- id: '123branch',
+ data: {
+ commit: {
+ id: '123branch',
+ },
},
}));
diff --git a/spec/javascripts/repo/components/repo_commit_section_spec.js b/spec/javascripts/repo/components/repo_commit_section_spec.js
index 676ac09f2c9..93e94b4f24c 100644
--- a/spec/javascripts/repo/components/repo_commit_section_spec.js
+++ b/spec/javascripts/repo/components/repo_commit_section_spec.js
@@ -87,8 +87,10 @@ describe('RepoCommitSection', () => {
changedFiles = JSON.parse(JSON.stringify(vm.$store.getters.changedFiles));
spyOn(service, 'commit').and.returnValue(Promise.resolve({
- short_id: '1',
- stats: {},
+ data: {
+ short_id: '1',
+ stats: {},
+ },
}));
});
diff --git a/spec/javascripts/repo/stores/actions_spec.js b/spec/javascripts/repo/stores/actions_spec.js
index 8d830c67290..f678967b092 100644
--- a/spec/javascripts/repo/stores/actions_spec.js
+++ b/spec/javascripts/repo/stores/actions_spec.js
@@ -178,7 +178,9 @@ describe('Multi-file store actions', () => {
it('calls service', (done) => {
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: { id: '123' },
+ data: {
+ commit: { id: '123' },
+ },
}));
store.dispatch('checkCommitStatus')
@@ -192,7 +194,9 @@ describe('Multi-file store actions', () => {
it('returns true if current ref does not equal returned ID', (done) => {
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: { id: '123' },
+ data: {
+ commit: { id: '123' },
+ },
}));
store.dispatch('checkCommitStatus')
@@ -206,7 +210,9 @@ describe('Multi-file store actions', () => {
it('returns false if current ref equals returned ID', (done) => {
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: { id: '1' },
+ data: {
+ commit: { id: '1' },
+ },
}));
store.dispatch('checkCommitStatus')
@@ -250,13 +256,15 @@ describe('Multi-file store actions', () => {
describe('success', () => {
beforeEach(() => {
spyOn(service, 'commit').and.returnValue(Promise.resolve({
- id: '123456',
- short_id: '123',
- message: 'test message',
- committed_date: 'date',
- stats: {
- additions: '1',
- deletions: '2',
+ data: {
+ id: '123456',
+ short_id: '123',
+ message: 'test message',
+ committed_date: 'date',
+ stats: {
+ additions: '1',
+ deletions: '2',
+ },
},
}));
});
@@ -324,7 +332,9 @@ describe('Multi-file store actions', () => {
describe('failed', () => {
beforeEach(() => {
spyOn(service, 'commit').and.returnValue(Promise.resolve({
- message: 'failed message',
+ data: {
+ message: 'failed message',
+ },
}));
});
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 935d1df6dad..bf01e6ef8e8 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -2,6 +2,7 @@ require "spec_helper"
describe Gitlab::Git::Repository, seed_helper: true do
include Gitlab::EncodingHelper
+ using RSpec::Parameterized::TableSyntax
shared_examples 'wrapping gRPC errors' do |gitaly_client_class, gitaly_client_method|
it 'wraps gRPC not found error' do
@@ -442,6 +443,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
shared_examples 'simple commit counting' do
it { expect(repository.commit_count("master")).to eq(25) }
it { expect(repository.commit_count("feature")).to eq(9) }
+ it { expect(repository.commit_count("does-not-exist")).to eq(0) }
end
context 'when Gitaly commit_count feature is enabled' do
@@ -560,35 +562,39 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe '#delete_refs' do
- before(:all) do
- @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
- end
+ shared_examples 'deleting refs' do
+ let(:repo) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
- it 'deletes the ref' do
- @repo.delete_refs('refs/heads/feature')
+ after do
+ ensure_seeds
+ end
- expect(@repo.rugged.references['refs/heads/feature']).to be_nil
- end
+ it 'deletes the ref' do
+ repo.delete_refs('refs/heads/feature')
- it 'deletes all refs' do
- refs = %w[refs/heads/wip refs/tags/v1.1.0]
- @repo.delete_refs(*refs)
+ expect(repo.rugged.references['refs/heads/feature']).to be_nil
+ end
+
+ it 'deletes all refs' do
+ refs = %w[refs/heads/wip refs/tags/v1.1.0]
+ repo.delete_refs(*refs)
- refs.each do |ref|
- expect(@repo.rugged.references[ref]).to be_nil
+ refs.each do |ref|
+ expect(repo.rugged.references[ref]).to be_nil
+ end
end
- end
- it 'raises an error if it failed' do
- expect(@repo).to receive(:popen).and_return(['Error', 1])
+ it 'raises an error if it failed' do
+ expect { repo.delete_refs('refs\heads\fix') }.to raise_error(Gitlab::Git::Repository::GitError)
+ end
+ end
- expect do
- @repo.delete_refs('refs/heads/fix')
- end.to raise_error(Gitlab::Git::Repository::GitError)
+ context 'when Gitaly delete_refs feature is enabled' do
+ it_behaves_like 'deleting refs'
end
- after(:all) do
- ensure_seeds
+ context 'when Gitaly delete_refs feature is disabled', :disable_gitaly do
+ it_behaves_like 'deleting refs'
end
end
@@ -1032,6 +1038,29 @@ describe Gitlab::Git::Repository, seed_helper: true do
it { is_expected.to eq(17) }
end
+ describe '#merge_base' do
+ shared_examples '#merge_base' do
+ where(:from, :to, :result) do
+ '570e7b2abdd848b95f2f578043fc23bd6f6fd24d' | '40f4a7a617393735a95a0bb67b08385bc1e7c66d' | '570e7b2abdd848b95f2f578043fc23bd6f6fd24d'
+ '40f4a7a617393735a95a0bb67b08385bc1e7c66d' | '570e7b2abdd848b95f2f578043fc23bd6f6fd24d' | '570e7b2abdd848b95f2f578043fc23bd6f6fd24d'
+ '40f4a7a617393735a95a0bb67b08385bc1e7c66d' | 'foobar' | nil
+ 'foobar' | '40f4a7a617393735a95a0bb67b08385bc1e7c66d' | nil
+ end
+
+ with_them do
+ it { expect(repository.merge_base(from, to)).to eq(result) }
+ end
+ end
+
+ context 'with gitaly' do
+ it_behaves_like '#merge_base'
+ end
+
+ context 'without gitaly', :skip_gitaly_mock do
+ it_behaves_like '#merge_base'
+ end
+ end
+
describe '#count_commits' do
shared_examples 'extended commit counting' do
context 'with after timestamp' do
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 951e146a30a..257e4c50f2d 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -112,7 +112,7 @@ describe Gitlab::GitalyClient::RefService do
expect_any_instance_of(Gitaly::RefService::Stub)
.to receive(:delete_refs)
.with(gitaly_request_with_params(except_with_prefix: prefixes), kind_of(Hash))
- .and_return(double('delete_refs_response'))
+ .and_return(double('delete_refs_response', git_error: ""))
client.delete_refs(except_with_prefixes: prefixes)
end
diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb
index 41a9d1d9c90..d9379cfe674 100644
--- a/spec/lib/gitlab/metrics/method_call_spec.rb
+++ b/spec/lib/gitlab/metrics/method_call_spec.rb
@@ -5,6 +5,10 @@ describe Gitlab::Metrics::MethodCall do
let(:method_call) { described_class.new('Foo#bar', :Foo, '#bar', transaction) }
describe '#measure' do
+ after do
+ described_class.reload_metric!(:gitlab_method_call_duration_seconds)
+ end
+
it 'measures the performance of the supplied block' do
method_call.measure { 'foo' }
@@ -20,8 +24,6 @@ describe Gitlab::Metrics::MethodCall do
context 'prometheus instrumentation is enabled' do
before do
- allow(Feature.get(:prometheus_metrics_method_instrumentation)).to receive(:enabled?).and_call_original
- described_class.measurement_enabled_cache_expires_at.value = Time.now.to_i - 1
Feature.get(:prometheus_metrics_method_instrumentation).enable
end
@@ -31,30 +33,12 @@ describe Gitlab::Metrics::MethodCall do
end
end
- it 'caches subsequent invocations of feature check' do
- 10.times do
- method_call.measure { 'foo' }
- end
-
- expect(Feature.get(:prometheus_metrics_method_instrumentation)).to have_received(:enabled?).once
- end
-
- it 'expires feature check cache after 1 minute' do
- method_call.measure { 'foo' }
-
- Timecop.travel(1.minute.from_now) do
- method_call.measure { 'foo' }
- end
-
- Timecop.travel(1.minute.from_now + 1.second) do
- method_call.measure { 'foo' }
- end
-
- expect(Feature.get(:prometheus_metrics_method_instrumentation)).to have_received(:enabled?).twice
+ it 'metric is not a NullMetric' do
+ expect(described_class.gitlab_method_call_duration_seconds).not_to be_instance_of(Gitlab::Metrics::NullMetric)
end
it 'observes the performance of the supplied block' do
- expect(described_class.call_duration_histogram)
+ expect(described_class.gitlab_method_call_duration_seconds)
.to receive(:observe)
.with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
@@ -64,14 +48,12 @@ describe Gitlab::Metrics::MethodCall do
context 'prometheus instrumentation is disabled' do
before do
- described_class.measurement_enabled_cache_expires_at.value = Time.now.to_i - 1
-
Feature.get(:prometheus_metrics_method_instrumentation).disable
end
- it 'does not observe the performance' do
- expect(described_class.call_duration_histogram)
- .not_to receive(:observe)
+ it 'observes using NullMetric' do
+ expect(described_class.gitlab_method_call_duration_seconds).to be_instance_of(Gitlab::Metrics::NullMetric)
+ expect(described_class.gitlab_method_call_duration_seconds).to receive(:observe)
method_call.measure { 'foo' }
end
@@ -81,12 +63,10 @@ describe Gitlab::Metrics::MethodCall do
context 'when measurement is below threshold' do
before do
allow(method_call).to receive(:above_threshold?).and_return(false)
-
- Feature.get(:prometheus_metrics_method_instrumentation).enable
end
it 'does not observe the performance' do
- expect(described_class.call_duration_histogram)
+ expect(described_class.gitlab_method_call_duration_seconds)
.not_to receive(:observe)
method_call.measure { 'foo' }
@@ -96,7 +76,7 @@ describe Gitlab::Metrics::MethodCall do
describe '#to_metric' do
it 'returns a Metric instance' do
- expect(method_call).to receive(:real_time).and_return(4.0001)
+ expect(method_call).to receive(:real_time).and_return(4.0001).twice
expect(method_call).to receive(:cpu_time).and_return(3.0001)
method_call.measure { 'foo' }
diff --git a/spec/lib/gitlab/metrics/methods_spec.rb b/spec/lib/gitlab/metrics/methods_spec.rb
new file mode 100644
index 00000000000..9d41ed2442b
--- /dev/null
+++ b/spec/lib/gitlab/metrics/methods_spec.rb
@@ -0,0 +1,137 @@
+require 'spec_helper'
+
+describe Gitlab::Metrics::Methods do
+ subject { Class.new { include Gitlab::Metrics::Methods } }
+
+ shared_context 'metric' do |metric_type, *args|
+ let(:docstring) { 'description' }
+ let(:metric_name) { :sample_metric }
+
+ describe "#define_#{metric_type}" do
+ define_method(:call_define_metric_method) do |**args|
+ subject.__send__("define_#{metric_type}", metric_name, **args)
+ end
+
+ context 'metrics access method not defined' do
+ it "defines metrics accessing method" do
+ expect(subject).not_to respond_to(metric_name)
+
+ call_define_metric_method(docstring: docstring)
+
+ expect(subject).to respond_to(metric_name)
+ end
+ end
+
+ context 'metrics access method defined' do
+ before do
+ call_define_metric_method(docstring: docstring)
+ end
+
+ it 'raises error when trying to redefine method' do
+ expect { call_define_metric_method(docstring: docstring) }.to raise_error(ArgumentError)
+ end
+
+ context 'metric is not cached' do
+ it 'calls fetch_metric' do
+ expect(subject).to receive(:init_metric).with(metric_type, metric_name, docstring: docstring)
+
+ subject.public_send(metric_name)
+ end
+ end
+
+ context 'metric is cached' do
+ before do
+ subject.public_send(metric_name)
+ end
+
+ it 'returns cached metric' do
+ expect(subject).not_to receive(:init_metric)
+
+ subject.public_send(metric_name)
+ end
+ end
+ end
+ end
+
+ describe "#fetch_#{metric_type}" do
+ let(:null_metric) { Gitlab::Metrics::NullMetric.instance }
+
+ define_method(:call_fetch_metric_method) do |**args|
+ subject.__send__("fetch_#{metric_type}", metric_name, **args)
+ end
+
+ context "when #{metric_type} is not cached" do
+ it 'initializes counter metric' do
+ allow(Gitlab::Metrics).to receive(metric_type).and_return(null_metric)
+
+ call_fetch_metric_method(docstring: docstring)
+
+ expect(Gitlab::Metrics).to have_received(metric_type).with(metric_name, docstring, *args)
+ end
+ end
+
+ context "when #{metric_type} is cached" do
+ before do
+ call_fetch_metric_method(docstring: docstring)
+ end
+
+ it 'uses class metric cache' do
+ expect(Gitlab::Metrics).not_to receive(metric_type)
+
+ call_fetch_metric_method(docstring: docstring)
+ end
+
+ context 'when metric is reloaded' do
+ before do
+ subject.reload_metric!(metric_name)
+ end
+
+ it "initializes #{metric_type} metric" do
+ allow(Gitlab::Metrics).to receive(metric_type).and_return(null_metric)
+
+ call_fetch_metric_method(docstring: docstring)
+
+ expect(Gitlab::Metrics).to have_received(metric_type).with(metric_name, docstring, *args)
+ end
+ end
+ end
+
+ context 'when metric is configured with feature' do
+ let(:feature_name) { :some_metric_feature }
+ let(:metric) { call_fetch_metric_method(docstring: docstring, with_feature: feature_name) }
+
+ context 'when feature is enabled' do
+ before do
+ Feature.get(feature_name).enable
+ end
+
+ it "initializes #{metric_type} metric" do
+ allow(Gitlab::Metrics).to receive(metric_type).and_return(null_metric)
+
+ metric
+
+ expect(Gitlab::Metrics).to have_received(metric_type).with(metric_name, docstring, *args)
+ end
+ end
+
+ context 'when feature is disabled' do
+ before do
+ Feature.get(feature_name).disable
+ end
+
+ it "returns NullMetric" do
+ allow(Gitlab::Metrics).to receive(metric_type)
+
+ expect(metric).to be_instance_of(Gitlab::Metrics::NullMetric)
+
+ expect(Gitlab::Metrics).not_to have_received(metric_type)
+ end
+ end
+ end
+ end
+ end
+
+ include_examples 'metric', :counter, {}
+ include_examples 'metric', :gauge, {}, :all
+ include_examples 'metric', :histogram, {}, [0.005, 0.01, 0.1, 1, 10]
+end
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index 375cbf8a9ca..54781dd52fc 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -2,6 +2,11 @@ require 'spec_helper'
describe Gitlab::Metrics::Samplers::RubySampler do
let(:sampler) { described_class.new(5) }
+ let(:null_metric) { double('null_metric', set: nil, observe: nil) }
+
+ before do
+ allow(Gitlab::Metrics::NullMetric).to receive(:instance).and_return(null_metric)
+ end
after do
Allocations.stop if Gitlab::Metrics.mri?
@@ -17,12 +22,9 @@ describe Gitlab::Metrics::Samplers::RubySampler do
end
it 'adds a metric containing the memory usage' do
- expect(Gitlab::Metrics::System).to receive(:memory_usage)
- .and_return(9000)
+ expect(Gitlab::Metrics::System).to receive(:memory_usage).and_return(9000)
- expect(sampler.metrics[:memory_usage]).to receive(:set)
- .with({}, 9000)
- .and_call_original
+ expect(sampler.metrics[:memory_usage]).to receive(:set).with({}, 9000)
sampler.sample
end
@@ -31,9 +33,7 @@ describe Gitlab::Metrics::Samplers::RubySampler do
expect(Gitlab::Metrics::System).to receive(:file_descriptor_count)
.and_return(4)
- expect(sampler.metrics[:file_descriptors]).to receive(:set)
- .with({}, 4)
- .and_call_original
+ expect(sampler.metrics[:file_descriptors]).to receive(:set).with({}, 4)
sampler.sample
end
@@ -49,16 +49,14 @@ describe Gitlab::Metrics::Samplers::RubySampler do
it 'adds a metric containing garbage collection time statistics' do
expect(GC::Profiler).to receive(:total_time).and_return(0.24)
- expect(sampler.metrics[:total_time]).to receive(:set)
- .with({}, 240)
- .and_call_original
+ expect(sampler.metrics[:total_time]).to receive(:set).with({}, 240)
sampler.sample
end
it 'adds a metric containing garbage collection statistics' do
GC.stat.keys.each do |key|
- expect(sampler.metrics[key]).to receive(:set).with({}, anything).and_call_original
+ expect(sampler.metrics[key]).to receive(:set).with({}, anything)
end
sampler.sample
diff --git a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
index eca75a4fac1..9f3af1acef7 100644
--- a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
@@ -32,7 +32,7 @@ describe Gitlab::Metrics::Subscribers::ActionView do
end
it 'observes view rendering time' do
- expect(subscriber.send(:metric_view_rendering_duration_seconds))
+ expect(described_class.gitlab_view_rendering_duration_seconds)
.to receive(:observe)
.with({ view: 'app/views/x.html.haml' }, 2.1)
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index 9b3698fb4a8..4e7bd433a9c 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -25,7 +25,7 @@ describe Gitlab::Metrics::Subscribers::ActiveRecord do
expect(subscriber).to receive(:current_transaction)
.at_least(:once)
.and_return(transaction)
- expect(subscriber.send(:metric_sql_duration_seconds)).to receive(:observe).with({}, 0.002)
+ expect(described_class.send(:gitlab_sql_duration_seconds)).to receive(:observe).with({}, 0.002)
subscriber.sql(event)
end
diff --git a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
index 58e28592cf9..6795c1ab56b 100644
--- a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
@@ -144,7 +144,10 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
end
context 'with a transaction' do
+ let(:metric_cache_misses_total) { double('metric_cache_misses_total', increment: nil) }
+
before do
+ allow(subscriber).to receive(:metric_cache_misses_total).and_return(metric_cache_misses_total)
allow(subscriber).to receive(:current_transaction)
.and_return(transaction)
end
@@ -157,9 +160,9 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
end
it 'increments the cache_read_miss total' do
- expect(subscriber.send(:metric_cache_misses_total)).to receive(:increment).with({})
-
subscriber.cache_generate(event)
+
+ expect(metric_cache_misses_total).to have_received(:increment).with({})
end
end
end
diff --git a/spec/lib/gitlab/metrics_spec.rb b/spec/lib/gitlab/metrics_spec.rb
index 1619fbd88b1..9e405e9f736 100644
--- a/spec/lib/gitlab/metrics_spec.rb
+++ b/spec/lib/gitlab/metrics_spec.rb
@@ -20,7 +20,7 @@ describe Gitlab::Metrics do
context 'prometheus metrics enabled in config' do
before do
- allow(described_class).to receive(:current_application_settings).and_return(prometheus_metrics_enabled: true)
+ allow(Gitlab::CurrentSettings).to receive(:current_application_settings).and_return(prometheus_metrics_enabled: true)
end
context 'when metrics folder is present' do
diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb
index 45fff4c5787..03e0a9e2a03 100644
--- a/spec/lib/gitlab/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/o_auth/user_spec.rb
@@ -44,6 +44,18 @@ describe Gitlab::OAuth::User do
let(:provider) { 'twitter' }
+ describe 'when account exists on server' do
+ it 'does not mark the user as external' do
+ create(:omniauth_user, extern_uid: 'my-uid', provider: provider)
+ stub_omniauth_config(allow_single_sign_on: [provider], external_providers: [provider])
+
+ oauth_user.save
+
+ expect(gl_user).to be_valid
+ expect(gl_user.external).to be_falsey
+ end
+ end
+
describe 'signup' do
context 'when signup is disabled' do
before do
@@ -51,7 +63,7 @@ describe Gitlab::OAuth::User do
end
it 'creates the user' do
- stub_omniauth_config(allow_single_sign_on: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider])
oauth_user.save
@@ -65,7 +77,7 @@ describe Gitlab::OAuth::User do
end
it 'creates and confirms the user anyway' do
- stub_omniauth_config(allow_single_sign_on: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider])
oauth_user.save
@@ -75,7 +87,7 @@ describe Gitlab::OAuth::User do
end
it 'marks user as having password_automatically_set' do
- stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider], external_providers: [provider])
oauth_user.save
@@ -86,7 +98,7 @@ describe Gitlab::OAuth::User do
shared_examples 'to verify compliance with allow_single_sign_on' do
context 'provider is marked as external' do
it 'marks user as external' do
- stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider], external_providers: [provider])
oauth_user.save
expect(gl_user).to be_valid
expect(gl_user.external).to be_truthy
@@ -95,8 +107,8 @@ describe Gitlab::OAuth::User do
context 'provider was external, now has been removed' do
it 'does not mark external user as internal' do
- create(:omniauth_user, extern_uid: 'my-uid', provider: 'twitter', external: true)
- stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['facebook'])
+ create(:omniauth_user, extern_uid: 'my-uid', provider: provider, external: true)
+ stub_omniauth_config(allow_single_sign_on: [provider], external_providers: ['facebook'])
oauth_user.save
expect(gl_user).to be_valid
expect(gl_user.external).to be_truthy
@@ -118,7 +130,7 @@ describe Gitlab::OAuth::User do
context 'with new allow_single_sign_on enabled syntax' do
before do
- stub_omniauth_config(allow_single_sign_on: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider])
end
it "creates a user from Omniauth" do
@@ -127,7 +139,7 @@ describe Gitlab::OAuth::User do
expect(gl_user).to be_valid
identity = gl_user.identities.first
expect(identity.extern_uid).to eql uid
- expect(identity.provider).to eql 'twitter'
+ expect(identity.provider).to eql provider
end
end
@@ -142,7 +154,7 @@ describe Gitlab::OAuth::User do
expect(gl_user).to be_valid
identity = gl_user.identities.first
expect(identity.extern_uid).to eql uid
- expect(identity.provider).to eql 'twitter'
+ expect(identity.provider).to eql provider
end
end
diff --git a/spec/models/concerns/discussion_on_diff_spec.rb b/spec/models/concerns/discussion_on_diff_spec.rb
index 2322eb206fb..30572ce9332 100644
--- a/spec/models/concerns/discussion_on_diff_spec.rb
+++ b/spec/models/concerns/discussion_on_diff_spec.rb
@@ -20,6 +20,16 @@ describe DiscussionOnDiff do
expect(truncated_lines).not_to include(be_meta)
end
end
+
+ context "when the diff line does not exist on a legacy diff note" do
+ it "returns an empty array" do
+ legacy_note = LegacyDiffNote.new
+
+ allow(subject).to receive(:first_note).and_return(legacy_note)
+
+ expect(truncated_lines).to eq([])
+ end
+ end
end
describe '#line_code_in_diffs' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index eb9690df313..243eeddc7a8 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1339,6 +1339,10 @@ describe MergeRequest do
it 'returns false' do
expect(subject.mergeable_state?).to be_falsey
end
+
+ it 'returns true when skipping discussions check' do
+ expect(subject.mergeable_state?(skip_discussions_check: true)).to be(true)
+ end
end
end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index cc9d79da708..9840afe6c4e 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -387,13 +387,23 @@ describe WikiPage do
end
describe '#formatted_content' do
- it 'returns processed content of the page', :disable_gitaly do
- subject.create({ title: "RDoc", content: "*bold*", format: "rdoc" })
- page = wiki.find_page('RDoc')
+ shared_examples 'fetching page formatted content' do
+ it 'returns processed content of the page' do
+ subject.create({ title: "RDoc", content: "*bold*", format: "rdoc" })
+ page = wiki.find_page('RDoc')
- expect(page.formatted_content).to eq("\n<p><strong>bold</strong></p>\n")
+ expect(page.formatted_content).to eq("\n<p><strong>bold</strong></p>\n")
- destroy_page('RDoc')
+ destroy_page('RDoc')
+ end
+ end
+
+ context 'when Gitaly wiki_page_formatted_data is enabled' do
+ it_behaves_like 'fetching page formatted content'
+ end
+
+ context 'when Gitaly wiki_page_formatted_data is disabled', :disable_gitaly do
+ it_behaves_like 'fetching page formatted content'
end
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 4dd8deb6404..f8d0b63afec 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -300,44 +300,53 @@ describe API::Jobs do
end
describe 'GET /projects/:id/jobs/:job_id/artifacts' do
- before do
- get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ shared_examples 'downloads artifact' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
+
+ it 'returns specific job artifacts' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.headers).to include(download_headers)
+ expect(response.body).to match_file(job.artifacts_file.file.file)
+ end
end
- context 'job with artifacts' do
- let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ context 'normal authentication' do
+ context 'job with artifacts' do
+ context 'when artifacts are stored locally' do
+ let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
- context 'authorized user' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
- end
+ before do
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
- it 'returns specific job artifacts' do
- expect(response).to have_gitlab_http_status(200)
- expect(response.headers).to include(download_headers)
- expect(response.body).to match_file(job.artifacts_file.file.file)
+ context 'authorized user' do
+ it_behaves_like 'downloads artifact'
+ end
+
+ context 'unauthorized user' do
+ let(:api_user) { nil }
+
+ it 'does not return specific job artifacts' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
end
- end
- context 'when anonymous user is accessing private artifacts' do
- let(:api_user) { nil }
+ it 'does not return job artifacts if not uploaded' do
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
- it 'hides artifacts and rejects request' do
- expect(project).to be_private
expect(response).to have_gitlab_http_status(404)
end
end
end
-
- it 'does not return job artifacts if not uploaded' do
- expect(response).to have_gitlab_http_status(404)
- end
end
describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do
let(:api_user) { reporter }
- let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
job.success
@@ -396,14 +405,16 @@ describe API::Jobs do
context 'find proper job' do
shared_examples 'a valid file' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' =>
- "attachment; filename=#{job.artifacts_file.filename}" }
- end
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' =>
+ "attachment; filename=#{job.artifacts_file.filename}" }
+ end
- it { expect(response).to have_gitlab_http_status(200) }
- it { expect(response.headers).to include(download_headers) }
+ it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response.headers).to include(download_headers) }
+ end
end
context 'with regular branch' do
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index af9e36a3b29..3f92288fef0 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -4,16 +4,18 @@ describe API::V3::Builds do
set(:user) { create(:user) }
let(:api_user) { user }
set(:project) { create(:project, :repository, creator: user, public_builds: false) }
- set(:developer) { create(:project_member, :developer, user: user, project: project) }
- set(:reporter) { create(:project_member, :reporter, project: project) }
- set(:guest) { create(:project_member, :guest, project: project) }
- set(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
- let!(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:developer) { create(:project_member, :developer, user: user, project: project) }
+ let(:reporter) { create(:project_member, :reporter, project: project) }
+ let(:guest) { create(:project_member, :guest, project: project) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
describe 'GET /projects/:id/builds ' do
let(:query) { '' }
before do |example|
+ build
+
create(:ci_build, :skipped, pipeline: pipeline)
unless example.metadata[:skip_before_request]
@@ -110,6 +112,10 @@ describe API::V3::Builds do
end
describe 'GET /projects/:id/repository/commits/:sha/builds' do
+ before do
+ build
+ end
+
context 'when commit does not exist in repository' do
before do
get v3_api("/projects/#{project.id}/repository/commits/1a271fd1/builds", api_user)
@@ -214,18 +220,20 @@ describe API::V3::Builds do
end
context 'job with artifacts' do
- let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ context 'when artifacts are stored locally' do
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
- context 'authorized user' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
- end
+ context 'authorized user' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
- it 'returns specific job artifacts' do
- expect(response).to have_gitlab_http_status(200)
- expect(response.headers).to include(download_headers)
- expect(response.body).to match_file(build.artifacts_file.file.file)
+ it 'returns specific job artifacts' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.headers).to include(download_headers)
+ expect(response.body).to match_file(build.artifacts_file.file.file)
+ end
end
end
@@ -303,14 +311,16 @@ describe API::V3::Builds do
context 'find proper job' do
shared_examples 'a valid file' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' =>
- "attachment; filename=#{build.artifacts_file.filename}" }
- end
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' =>
+ "attachment; filename=#{build.artifacts_file.filename}" }
+ end
- it { expect(response).to have_gitlab_http_status(200) }
- it { expect(response.headers).to include(download_headers) }
+ it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response.headers).to include(download_headers) }
+ end
end
context 'with regular branch' do