-rw-r--r--  GITALY_SERVER_VERSION | 2
-rw-r--r--  Gemfile | 4
-rw-r--r--  Gemfile.lock | 8
-rw-r--r--  app/assets/javascripts/api.js | 132
-rw-r--r--  app/assets/javascripts/blob/viewer/index.js | 22
-rw-r--r--  app/assets/javascripts/commits.js | 34
-rw-r--r--  app/assets/javascripts/ide/stores/actions.js | 8
-rw-r--r--  app/assets/javascripts/ide/stores/actions/branch.js | 2
-rw-r--r--  app/assets/javascripts/lib/utils/users_cache.js | 8
-rw-r--r--  app/assets/javascripts/users/activity_calendar.js | 2
-rw-r--r--  app/models/concerns/discussion_on_diff.rb | 2
-rw-r--r--  app/models/project_wiki.rb | 6
-rw-r--r--  app/models/repository.rb | 2
-rw-r--r--  app/models/wiki_page.rb | 5
-rw-r--r--  changelogs/unreleased/21554-mark-new-user-as-external.yml | 5
-rw-r--r--  changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml | 5
-rw-r--r--  changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml | 5
-rw-r--r--  changelogs/unreleased/42591-update-nokogiri.yml | 5
-rw-r--r--  changelogs/unreleased/contribution_calendar_label_cut_off.yml | 5
-rw-r--r--  doc/api/repositories.md | 1
-rw-r--r--  lib/gitlab/ee_compat_check.rb | 2
-rw-r--r--  lib/gitlab/git/repository.rb | 54
-rw-r--r--  lib/gitlab/git/wiki.rb | 14
-rw-r--r--  lib/gitlab/gitaly_client.rb | 33
-rw-r--r--  lib/gitlab/gitaly_client/operation_service.rb | 8
-rw-r--r--  lib/gitlab/gitaly_client/ref_service.rb | 9
-rw-r--r--  lib/gitlab/gitaly_client/repository_service.rb | 16
-rw-r--r--  lib/gitlab/gitaly_client/wiki_service.rb | 12
-rw-r--r--  lib/gitlab/metrics.rb | 4
-rw-r--r--  lib/gitlab/metrics/influx_db.rb | 290
-rw-r--r--  lib/gitlab/metrics/method_call.rb | 38
-rw-r--r--  lib/gitlab/metrics/methods.rb | 129
-rw-r--r--  lib/gitlab/metrics/methods/metric_options.rb | 61
-rw-r--r--  lib/gitlab/metrics/null_metric.rb | 2
-rw-r--r--  lib/gitlab/metrics/prometheus.rb | 94
-rw-r--r--  lib/gitlab/metrics/subscribers/action_view.rb | 22
-rw-r--r--  lib/gitlab/metrics/subscribers/active_record.rb | 18
-rw-r--r--  lib/gitlab/metrics/transaction.rb | 83
-rw-r--r--  lib/gitlab/o_auth/user.rb | 2
-rw-r--r--  qa/README.md | 3
-rw-r--r--  spec/features/projects/members/share_with_group_spec.rb | 2
-rw-r--r--  spec/javascripts/api_spec.js | 173
-rw-r--r--  spec/javascripts/blob/viewer/index_spec.js | 36
-rw-r--r--  spec/javascripts/commits_spec.js | 43
-rw-r--r--  spec/javascripts/lib/utils/users_cache_spec.js | 4
-rw-r--r--  spec/javascripts/repo/components/new_dropdown/modal_spec.js | 6
-rw-r--r--  spec/javascripts/repo/components/new_dropdown/upload_spec.js | 6
-rw-r--r--  spec/javascripts/repo/components/repo_commit_section_spec.js | 6
-rw-r--r--  spec/javascripts/repo/stores/actions_spec.js | 32
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/gitaly_client/ref_service_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/method_call_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/metrics/methods_spec.rb | 137
-rw-r--r--  spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/action_view_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/active_record_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/metrics_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/o_auth/user_spec.rb | 30
-rw-r--r--  spec/models/concerns/discussion_on_diff_spec.rb | 10
-rw-r--r--  spec/models/wiki_page_spec.rb | 20
-rw-r--r--  spec/requests/api/jobs_spec.rb | 71
-rw-r--r--  spec/requests/api/v3/builds_spec.rb | 54
63 files changed, 1139 insertions, 773 deletions
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index b7c0622b4f4..62df9f538d8 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-0.74.0
+0.76.0
diff --git a/Gemfile b/Gemfile
index 05f72b6482f..7bffae06b51 100644
--- a/Gemfile
+++ b/Gemfile
@@ -132,7 +132,7 @@ gem 'asciidoctor-plantuml', '0.0.7'
gem 'rouge', '~> 2.0'
gem 'truncato', '~> 0.7.9'
gem 'bootstrap_form', '~> 2.7.0'
-gem 'nokogiri', '~> 1.8.1'
+gem 'nokogiri', '~> 1.8.2'
# Diffs
gem 'diffy', '~> 3.1.0'
@@ -406,7 +406,7 @@ group :ed25519 do
end
# Gitaly GRPC client
-gem 'gitaly-proto', '~> 0.78.0', require: 'gitaly'
+gem 'gitaly-proto', '~> 0.83.0', require: 'gitaly'
gem 'toml-rb', '~> 0.3.15', require: false
diff --git a/Gemfile.lock b/Gemfile.lock
index 1a3c8f42469..4558b43af48 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -285,7 +285,7 @@ GEM
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
gherkin-ruby (0.3.2)
- gitaly-proto (0.78.0)
+ gitaly-proto (0.83.0)
google-protobuf (~> 3.1)
grpc (~> 1.0)
github-linguist (4.7.6)
@@ -513,7 +513,7 @@ GEM
net-ldap (0.16.0)
net-ssh (4.1.0)
netrc (0.11.0)
- nokogiri (1.8.1)
+ nokogiri (1.8.2)
mini_portile2 (~> 2.3.0)
numerizer (0.1.1)
oauth (0.5.1)
@@ -1056,7 +1056,7 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.2.0)
- gitaly-proto (~> 0.78.0)
+ gitaly-proto (~> 0.83.0)
github-linguist (~> 4.7.0)
gitlab-flowdock-git-hook (~> 1.0.1)
gitlab-markup (~> 1.6.2)
@@ -1100,7 +1100,7 @@ DEPENDENCIES
mysql2 (~> 0.4.10)
net-ldap
net-ssh (~> 4.1.0)
- nokogiri (~> 1.8.1)
+ nokogiri (~> 1.8.2)
oauth2 (~> 1.4)
octokit (~> 4.6.2)
oj (~> 2.17.4)
diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js
index 7cb81bf4d5b..1f34c6b50c2 100644
--- a/app/assets/javascripts/api.js
+++ b/app/assets/javascripts/api.js
@@ -1,9 +1,9 @@
-import $ from 'jquery';
+import _ from 'underscore';
import axios from './lib/utils/axios_utils';
const Api = {
groupsPath: '/api/:version/groups.json',
- groupPath: '/api/:version/groups/:id.json',
+ groupPath: '/api/:version/groups/:id',
namespacesPath: '/api/:version/namespaces.json',
groupProjectsPath: '/api/:version/groups/:id/projects.json',
projectsPath: '/api/:version/projects.json',
@@ -23,42 +23,44 @@ const Api = {
group(groupId, callback) {
const url = Api.buildUrl(Api.groupPath)
.replace(':id', groupId);
- return $.ajax({
- url,
- dataType: 'json',
- })
- .done(group => callback(group));
+ return axios.get(url)
+ .then(({ data }) => {
+ callback(data);
+
+ return data;
+ });
},
// Return groups list. Filtered by query
groups(query, options, callback) {
const url = Api.buildUrl(Api.groupsPath);
- return $.ajax({
- url,
- data: Object.assign({
+ return axios.get(url, {
+ params: Object.assign({
search: query,
per_page: 20,
}, options),
- dataType: 'json',
})
- .done(groups => callback(groups));
+ .then(({ data }) => {
+ callback(data);
+
+ return data;
+ });
},
// Return namespaces list. Filtered by query
namespaces(query, callback) {
const url = Api.buildUrl(Api.namespacesPath);
- return $.ajax({
- url,
- data: {
+ return axios.get(url, {
+ params: {
search: query,
per_page: 20,
},
- dataType: 'json',
- }).done(namespaces => callback(namespaces));
+ })
+ .then(({ data }) => callback(data));
},
// Return projects list. Filtered by query
- projects(query, options, callback) {
+ projects(query, options, callback = _.noop) {
const url = Api.buildUrl(Api.projectsPath);
const defaults = {
search: query,
@@ -70,12 +72,14 @@ const Api = {
defaults.membership = true;
}
- return $.ajax({
- url,
- data: Object.assign(defaults, options),
- dataType: 'json',
+ return axios.get(url, {
+ params: Object.assign(defaults, options),
})
- .done(projects => callback(projects));
+ .then(({ data }) => {
+ callback(data);
+
+ return data;
+ });
},
// Return single project
@@ -97,41 +101,34 @@ const Api = {
url = Api.buildUrl(Api.groupLabelsPath).replace(':namespace_path', namespacePath);
}
- return $.ajax({
- url,
- type: 'POST',
- data: { label: data },
- dataType: 'json',
+ return axios.post(url, {
+ label: data,
})
- .done(label => callback(label))
- .fail(message => callback(message.responseJSON));
+ .then(res => callback(res.data))
+ .catch(e => callback(e.response.data));
},
// Return group projects list. Filtered by query
groupProjects(groupId, query, callback) {
const url = Api.buildUrl(Api.groupProjectsPath)
.replace(':id', groupId);
- return $.ajax({
- url,
- data: {
+ return axios.get(url, {
+ params: {
search: query,
per_page: 20,
},
- dataType: 'json',
})
- .done(projects => callback(projects));
+ .then(({ data }) => callback(data));
},
commitMultiple(id, data) {
// see https://docs.gitlab.com/ce/api/commits.html#create-a-commit-with-multiple-files-and-actions
const url = Api.buildUrl(Api.commitPath)
.replace(':id', encodeURIComponent(id));
- return this.wrapAjaxCall({
- url,
- type: 'POST',
- contentType: 'application/json; charset=utf-8',
- data: JSON.stringify(data),
- dataType: 'json',
+ return axios.post(url, JSON.stringify(data), {
+ headers: {
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
});
},
@@ -140,40 +137,37 @@ const Api = {
.replace(':id', encodeURIComponent(id))
.replace(':branch', branch);
- return this.wrapAjaxCall({
- url,
- type: 'GET',
- contentType: 'application/json; charset=utf-8',
- dataType: 'json',
- });
+ return axios.get(url);
},
// Return text for a specific license
licenseText(key, data, callback) {
const url = Api.buildUrl(Api.licensePath)
.replace(':key', key);
- return $.ajax({
- url,
- data,
+ return axios.get(url, {
+ params: data,
})
- .done(license => callback(license));
+ .then(res => callback(res.data));
},
gitignoreText(key, callback) {
const url = Api.buildUrl(Api.gitignorePath)
.replace(':key', key);
- return $.get(url, gitignore => callback(gitignore));
+ return axios.get(url)
+ .then(({ data }) => callback(data));
},
gitlabCiYml(key, callback) {
const url = Api.buildUrl(Api.gitlabCiYmlPath)
.replace(':key', key);
- return $.get(url, file => callback(file));
+ return axios.get(url)
+ .then(({ data }) => callback(data));
},
dockerfileYml(key, callback) {
const url = Api.buildUrl(Api.dockerfilePath).replace(':key', key);
- $.get(url, callback);
+ return axios.get(url)
+ .then(({ data }) => callback(data));
},
issueTemplate(namespacePath, projectPath, key, type, callback) {
@@ -182,23 +176,18 @@ const Api = {
.replace(':type', type)
.replace(':project_path', projectPath)
.replace(':namespace_path', namespacePath);
- $.ajax({
- url,
- dataType: 'json',
- })
- .done(file => callback(null, file))
- .fail(callback);
+ return axios.get(url)
+ .then(({ data }) => callback(null, data))
+ .catch(callback);
},
users(query, options) {
const url = Api.buildUrl(this.usersPath);
- return Api.wrapAjaxCall({
- url,
- data: Object.assign({
+ return axios.get(url, {
+ params: Object.assign({
search: query,
per_page: 20,
}, options),
- dataType: 'json',
});
},
@@ -209,21 +198,6 @@ const Api = {
}
return urlRoot + url.replace(':version', gon.api_version);
},
-
- wrapAjaxCall(options) {
- return new Promise((resolve, reject) => {
- // jQuery 2 is not Promises/A+ compatible (missing catch)
- $.ajax(options) // eslint-disable-line promise/catch-or-return
- .then(data => resolve(data),
- (jqXHR, textStatus, errorThrown) => {
- const error = new Error(`${options.url}: ${errorThrown}`);
- error.textStatus = textStatus;
- if (jqXHR && jqXHR.responseJSON) error.responseJSON = jqXHR.responseJSON;
- reject(error);
- },
- );
- });
- },
};
export default Api;
diff --git a/app/assets/javascripts/blob/viewer/index.js b/app/assets/javascripts/blob/viewer/index.js
index 54132e8537b..612f604e725 100644
--- a/app/assets/javascripts/blob/viewer/index.js
+++ b/app/assets/javascripts/blob/viewer/index.js
@@ -1,5 +1,6 @@
import Flash from '../../flash';
import { handleLocationHash } from '../../lib/utils/common_utils';
+import axios from '../../lib/utils/axios_utils';
export default class BlobViewer {
constructor() {
@@ -127,25 +128,18 @@ export default class BlobViewer {
const viewer = viewerParam;
const url = viewer.getAttribute('data-url');
- return new Promise((resolve, reject) => {
- if (!url || viewer.getAttribute('data-loaded') || viewer.getAttribute('data-loading')) {
- resolve(viewer);
- return;
- }
+ if (!url || viewer.getAttribute('data-loaded') || viewer.getAttribute('data-loading')) {
+ return Promise.resolve(viewer);
+ }
- viewer.setAttribute('data-loading', 'true');
+ viewer.setAttribute('data-loading', 'true');
- $.ajax({
- url,
- dataType: 'JSON',
- })
- .fail(reject)
- .done((data) => {
+ return axios.get(url)
+ .then(({ data }) => {
viewer.innerHTML = data.html;
viewer.setAttribute('data-loaded', 'true');
- resolve(viewer);
+ return viewer;
});
- });
}
}
diff --git a/app/assets/javascripts/commits.js b/app/assets/javascripts/commits.js
index 3a03cbf6b90..4b2f75fffde 100644
--- a/app/assets/javascripts/commits.js
+++ b/app/assets/javascripts/commits.js
@@ -5,6 +5,7 @@
import { pluralize } from './lib/utils/text_utility';
import { localTimeAgo } from './lib/utils/datetime_utility';
import Pager from './pager';
+import axios from './lib/utils/axios_utils';
export default (function () {
const CommitsList = {};
@@ -43,29 +44,30 @@ export default (function () {
CommitsList.filterResults = function () {
const form = $('.commits-search-form');
const search = CommitsList.searchField.val();
- if (search === CommitsList.lastSearch) return;
+ if (search === CommitsList.lastSearch) return Promise.resolve();
const commitsUrl = form.attr('action') + '?' + form.serialize();
CommitsList.content.fadeTo('fast', 0.5);
- return $.ajax({
- type: 'GET',
- url: form.attr('action'),
- data: form.serialize(),
- complete: function () {
- return CommitsList.content.fadeTo('fast', 1.0);
- },
- success: function (data) {
+ const params = form.serializeArray().reduce((acc, obj) => Object.assign(acc, {
+ [obj.name]: obj.value,
+ }), {});
+
+ return axios.get(form.attr('action'), {
+ params,
+ })
+ .then(({ data }) => {
CommitsList.lastSearch = search;
CommitsList.content.html(data.html);
- return history.replaceState({
- page: commitsUrl,
+ CommitsList.content.fadeTo('fast', 1.0);
+
// Change the URL so that search results are preserved if the user reloads the page
+ history.replaceState({
+ page: commitsUrl,
}, document.title, commitsUrl);
- },
- error: function () {
+ })
+ .catch(() => {
+ CommitsList.content.fadeTo('fast', 1.0);
CommitsList.lastSearch = null;
- },
- dataType: 'json',
- });
+ });
};
// Prepare loaded data.
diff --git a/app/assets/javascripts/ide/stores/actions.js b/app/assets/javascripts/ide/stores/actions.js
index 96a87744df5..d007d0ae78f 100644
--- a/app/assets/javascripts/ide/stores/actions.js
+++ b/app/assets/javascripts/ide/stores/actions.js
@@ -71,7 +71,7 @@ export const setResizingStatus = ({ commit }, resizing) => {
export const checkCommitStatus = ({ state }) =>
service
.getBranchData(state.currentProjectId, state.currentBranchId)
- .then((data) => {
+ .then(({ data }) => {
const { id } = data.commit;
const selectedBranch =
state.projects[state.currentProjectId].branches[state.currentBranchId];
@@ -90,7 +90,7 @@ export const commitChanges = (
) =>
service
.commit(state.currentProjectId, payload)
- .then((data) => {
+ .then(({ data }) => {
const { branch } = payload;
if (!data.short_id) {
flash(data.message, 'alert', document, null, false, true);
@@ -147,8 +147,8 @@ export const commitChanges = (
})
.catch((err) => {
let errMsg = 'Error committing changes. Please try again.';
- if (err.responseJSON && err.responseJSON.message) {
- errMsg += ` (${stripHtml(err.responseJSON.message)})`;
+ if (err.response.data && err.response.data.message) {
+ errMsg += ` (${stripHtml(err.response.data.message)})`;
}
flash(errMsg, 'alert', document, null, false, true);
window.dispatchEvent(new Event('resize'));
diff --git a/app/assets/javascripts/ide/stores/actions/branch.js b/app/assets/javascripts/ide/stores/actions/branch.js
index 589ec28c6a4..bc6fd2d4163 100644
--- a/app/assets/javascripts/ide/stores/actions/branch.js
+++ b/app/assets/javascripts/ide/stores/actions/branch.js
@@ -10,7 +10,7 @@ export const getBranchData = (
!state.projects[`${projectId}`].branches[branchId])
|| force) {
service.getBranchData(`${projectId}`, branchId)
- .then((data) => {
+ .then(({ data }) => {
const { id } = data.commit;
commit(types.SET_BRANCH, { projectPath: `${projectId}`, branchName: branchId, branch: data });
commit(types.SET_BRANCH_WORKING_REFERENCE, { projectId, branchId, reference: id });
diff --git a/app/assets/javascripts/lib/utils/users_cache.js b/app/assets/javascripts/lib/utils/users_cache.js
index 88f8a622c00..b01ec6b81a3 100644
--- a/app/assets/javascripts/lib/utils/users_cache.js
+++ b/app/assets/javascripts/lib/utils/users_cache.js
@@ -8,16 +8,16 @@ class UsersCache extends Cache {
}
return Api.users('', { username })
- .then((users) => {
- if (!users.length) {
+ .then(({ data }) => {
+ if (!data.length) {
throw new Error(`User "${username}" could not be found!`);
}
- if (users.length > 1) {
+ if (data.length > 1) {
throw new Error(`Expected username "${username}" to be unique!`);
}
- const user = users[0];
+ const user = data[0];
this.internalStorage[username] = user;
return user;
});
diff --git a/app/assets/javascripts/users/activity_calendar.js b/app/assets/javascripts/users/activity_calendar.js
index 0581239d5a5..0ca54faa71c 100644
--- a/app/assets/javascripts/users/activity_calendar.js
+++ b/app/assets/javascripts/users/activity_calendar.js
@@ -98,7 +98,7 @@ export default class ActivityCalendar {
const secondLastColMonth = this.timestampsTmp[group - 2][0].date.getMonth();
if (lastColMonth !== secondLastColMonth) {
- extraWidthPadding = 3;
+ extraWidthPadding = 6;
}
return extraWidthPadding;
diff --git a/app/models/concerns/discussion_on_diff.rb b/app/models/concerns/discussion_on_diff.rb
index db9770fabf4..8b3c55387b3 100644
--- a/app/models/concerns/discussion_on_diff.rb
+++ b/app/models/concerns/discussion_on_diff.rb
@@ -37,6 +37,8 @@ module DiscussionOnDiff
# Returns an array of at most 16 highlighted lines above a diff note
def truncated_diff_lines(highlight: true)
+ return [] if diff_line.nil? && first_note.is_a?(LegacyDiffNote)
+
lines = highlight ? highlighted_diff_lines : diff_lines
initial_line_index = [diff_line.index - NUMBER_OF_TRUNCATED_DIFF_LINES + 1, 0].max
diff --git a/app/models/project_wiki.rb b/app/models/project_wiki.rb
index a0af749a93f..459d1673125 100644
--- a/app/models/project_wiki.rb
+++ b/app/models/project_wiki.rb
@@ -124,6 +124,12 @@ class ProjectWiki
update_project_activity
end
+ def page_formatted_data(page)
+ page_title, page_dir = page_title_and_dir(page.title)
+
+ wiki.page_formatted_data(title: page_title, dir: page_dir, version: page.version)
+ end
+
def page_title_and_dir(title)
title_array = title.split("/")
title = title_array.pop
diff --git a/app/models/repository.rb b/app/models/repository.rb
index 872d4468ac8..6c776301ac2 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -255,6 +255,8 @@ class Repository
# This will still fail if the file is corrupted (e.g. 0 bytes)
raw_repository.write_ref(keep_around_ref_name(sha), sha, shell: false)
+ rescue Gitlab::Git::CommandError => ex
+ Rails.logger.error "Unable to create keep-around reference for repository #{path}: #{ex}"
end
def kept_around?(sha)
diff --git a/app/models/wiki_page.rb b/app/models/wiki_page.rb
index bdfef677ef3..e6254183baf 100644
--- a/app/models/wiki_page.rb
+++ b/app/models/wiki_page.rb
@@ -107,7 +107,10 @@ class WikiPage
# The processed/formatted content of this page.
def formatted_content
- @attributes[:formatted_content] ||= @page&.formatted_data
+ # Assuming @page exists, nil formatted_data means we didn't load it
+ # beforehand (i.e. the page was fetched by Gitaly), so we fetch it separately.
+ # If the page was fetched by Gollum, formatted_data would've been a String.
+ @attributes[:formatted_content] ||= @page&.formatted_data || @wiki.page_formatted_data(@page)
end
# The markup format for the page.
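
Note: together with the ProjectWiki#page_formatted_data method added above and the Gitlab::Git::Wiki#page_formatted_data and GitalyClient::WikiService#get_formatted_data methods added later in this diff, this change forms a fall-through chain for rendering a wiki page fetched through Gitaly. A rough sketch of the resolution order; the find_page lookup is the usual ProjectWiki API and is shown only for context:

    # WikiPage#formatted_content
    #   -> ProjectWiki#page_formatted_data(page)          # splits the title into title + dir
    #   -> Gitlab::Git::Wiki#page_formatted_data(...)     # gitaly_migrate(:wiki_page_formatted_data)
    #   -> GitalyClient::WikiService#get_formatted_data   # WikiGetFormattedData RPC, chunks concatenated
    page = project_wiki.find_page('home')
    page.formatted_content  # falls back to the Gitaly RPC when @page.formatted_data is nil
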
diff --git a/changelogs/unreleased/21554-mark-new-user-as-external.yml b/changelogs/unreleased/21554-mark-new-user-as-external.yml
new file mode 100644
index 00000000000..fb0826fc176
--- /dev/null
+++ b/changelogs/unreleased/21554-mark-new-user-as-external.yml
@@ -0,0 +1,5 @@
+---
+title: Login via OAuth now only marks new users as external
+merge_request: 16672
+author:
+type: fixed
diff --git a/changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml b/changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml
new file mode 100644
index 00000000000..f64fd66ef79
--- /dev/null
+++ b/changelogs/unreleased/41771-reduce-cardinality-of-metrics.yml
@@ -0,0 +1,5 @@
+---
+title: Reduce the number of Prometheus metrics
+merge_request: 16443
+author:
+type: performance
diff --git a/changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml b/changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml
new file mode 100644
index 00000000000..64340ab08cd
--- /dev/null
+++ b/changelogs/unreleased/42160-error-500-loading-merge-request-undefined-method-index-for-nil-nilclass.yml
@@ -0,0 +1,5 @@
+---
+title: Fix 500 error when loading a merge request with an invalid comment
+merge_request: 16795
+author:
+type: fixed
diff --git a/changelogs/unreleased/42591-update-nokogiri.yml b/changelogs/unreleased/42591-update-nokogiri.yml
new file mode 100644
index 00000000000..5f9587d2d92
--- /dev/null
+++ b/changelogs/unreleased/42591-update-nokogiri.yml
@@ -0,0 +1,5 @@
+---
+title: Update nokogiri to 1.8.2
+merge_request: 16807
+author:
+type: security
diff --git a/changelogs/unreleased/contribution_calendar_label_cut_off.yml b/changelogs/unreleased/contribution_calendar_label_cut_off.yml
new file mode 100644
index 00000000000..0b4a746bab8
--- /dev/null
+++ b/changelogs/unreleased/contribution_calendar_label_cut_off.yml
@@ -0,0 +1,5 @@
+---
+title: Contribution calendar label was cut off
+merge_request:
+author: Branka Martinovic
+type: fixed
diff --git a/doc/api/repositories.md b/doc/api/repositories.md
index 5fb25e40ed7..96609cd530f 100644
--- a/doc/api/repositories.md
+++ b/doc/api/repositories.md
@@ -114,6 +114,7 @@ Parameters:
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
- `sha` (optional) - The commit SHA to download. A tag, branch reference or sha can be used. This defaults to the tip of the default branch if not specified
+- `format` (optional) - The archive format. Default is `tar.gz`. Options are `tar.gz`, `tar.bz2`, `tbz`, `tbz2`, `tb2`, `bz2`, `tar`, `zip`
## Compare branches, tags or commits
diff --git a/lib/gitlab/ee_compat_check.rb b/lib/gitlab/ee_compat_check.rb
index d3b49b1ec75..0fb71976883 100644
--- a/lib/gitlab/ee_compat_check.rb
+++ b/lib/gitlab/ee_compat_check.rb
@@ -5,7 +5,7 @@ module Gitlab
DEFAULT_CE_PROJECT_URL = 'https://gitlab.com/gitlab-org/gitlab-ce'.freeze
EE_REPO_URL = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze
CHECK_DIR = Rails.root.join('ee_compat_check')
- IGNORED_FILES_REGEX = /(VERSION|CHANGELOG\.md:\d+)/.freeze
+ IGNORED_FILES_REGEX = %r{VERSION|CHANGELOG\.md|db/schema\.rb}i.freeze
PLEASE_READ_THIS_BANNER = %Q{
============================================================
===================== PLEASE READ THIS =====================
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index e710ad2940f..7127f7858ee 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -888,16 +888,12 @@ module Gitlab
end
def delete_refs(*ref_names)
- instructions = ref_names.map do |ref|
- "delete #{ref}\x00\x00"
- end
-
- message, status = run_git(%w[update-ref --stdin -z]) do |stdin|
- stdin.write(instructions.join)
- end
-
- unless status.zero?
- raise GitError.new("Could not delete refs #{ref_names}: #{message}")
+ gitaly_migrate(:delete_refs) do |is_enabled|
+ if is_enabled
+ gitaly_delete_refs(*ref_names)
+ else
+ git_delete_refs(*ref_names)
+ end
end
end
@@ -1106,10 +1102,14 @@ module Gitlab
end
def write_ref(ref_path, ref, old_ref: nil, shell: true)
- if shell
- shell_write_ref(ref_path, ref, old_ref)
- else
- rugged_write_ref(ref_path, ref)
+ ref_path = "#{Gitlab::Git::BRANCH_REF_PREFIX}#{ref_path}" unless ref_path.start_with?("refs/") || ref_path == "HEAD"
+
+ gitaly_migrate(:write_ref) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.write_ref(ref_path, ref, old_ref, shell)
+ else
+ local_write_ref(ref_path, ref, old_ref: old_ref, shell: shell)
+ end
end
end
@@ -1433,6 +1433,14 @@ module Gitlab
private
+ def local_write_ref(ref_path, ref, old_ref: nil, shell: true)
+ if shell
+ shell_write_ref(ref_path, ref, old_ref)
+ else
+ rugged_write_ref(ref_path, ref)
+ end
+ end
+
def shell_write_ref(ref_path, ref, old_ref)
raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ')
raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00")
@@ -2192,6 +2200,24 @@ module Gitlab
remote_update(remote_name, url: url)
end
+ def git_delete_refs(*ref_names)
+ instructions = ref_names.map do |ref|
+ "delete #{ref}\x00\x00"
+ end
+
+ message, status = run_git(%w[update-ref --stdin -z]) do |stdin|
+ stdin.write(instructions.join)
+ end
+
+ unless status.zero?
+ raise GitError.new("Could not delete refs #{ref_names}: #{message}")
+ end
+ end
+
+ def gitaly_delete_refs(*ref_names)
+ gitaly_ref_client.delete_refs(refs: ref_names)
+ end
+
def rugged_remove_remote(remote_name)
# When a remote is deleted all its remote refs are deleted too, but in
# the case of mirrors we map its refs (that would usually go under
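
Note: the two refactored methods above (delete_refs and write_ref) follow the same Gitaly migration convention: the public method wraps a gitaly_migrate(:feature) block and the block picks the Gitaly RPC path or the legacy local-git path depending on the feature toggle. A minimal sketch of that convention, with hypothetical method names standing in for the real operations:

    def some_ref_operation(*args)
      gitaly_migrate(:some_ref_operation) do |is_enabled|
        if is_enabled
          gitaly_some_ref_operation(*args)  # delegate to the Gitaly client RPC
        else
          local_some_ref_operation(*args)   # fall back to shelling out to git / using Rugged
        end
      end
    end
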
diff --git a/lib/gitlab/git/wiki.rb b/lib/gitlab/git/wiki.rb
index d4a53d32c28..ccdb8975342 100644
--- a/lib/gitlab/git/wiki.rb
+++ b/lib/gitlab/git/wiki.rb
@@ -117,6 +117,20 @@ module Gitlab
page.url_path
end
+ def page_formatted_data(title:, dir: nil, version: nil)
+ version = version&.id
+
+ @repository.gitaly_migrate(:wiki_page_formatted_data) do |is_enabled|
+ if is_enabled
+ gitaly_wiki_client.get_formatted_data(title: title, dir: dir, version: version)
+ else
+ # We don't use #page because if wiki_find_page feature is enabled, we would
+ # get a page without formatted_data.
+ gollum_find_page(title: title, dir: dir, version: version)&.formatted_data
+ end
+ end
+ end
+
private
# options:
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index 6bd256f57c7..c5d3e944f7d 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -6,6 +6,7 @@ require 'grpc/health/v1/health_services_pb'
module Gitlab
module GitalyClient
+ include Gitlab::Metrics::Methods
module MigrationStatus
DISABLED = 1
OPT_IN = 2
@@ -33,8 +34,6 @@ module Gitlab
CLIENT_NAME = (Sidekiq.server? ? 'gitlab-sidekiq' : 'gitlab-web').freeze
MUTEX = Mutex.new
- METRICS_MUTEX = Mutex.new
- private_constant :MUTEX, :METRICS_MUTEX
class << self
attr_accessor :query_time
@@ -42,28 +41,14 @@ module Gitlab
self.query_time = 0
- def self.migrate_histogram
- @migrate_histogram ||=
- METRICS_MUTEX.synchronize do
- # If a thread was blocked on the mutex, the value was set already
- return @migrate_histogram if @migrate_histogram
-
- Gitlab::Metrics.histogram(:gitaly_migrate_call_duration_seconds,
- "Gitaly migration call execution timings",
- gitaly_enabled: nil, feature: nil)
- end
+ define_histogram :gitaly_migrate_call_duration_seconds do
+ docstring "Gitaly migration call execution timings"
+ base_labels gitaly_enabled: nil, feature: nil
end
- def self.gitaly_call_histogram
- @gitaly_call_histogram ||=
- METRICS_MUTEX.synchronize do
- # If a thread was blocked on the mutex, the value was set already
- return @gitaly_call_histogram if @gitaly_call_histogram
-
- Gitlab::Metrics.histogram(:gitaly_controller_action_duration_seconds,
- "Gitaly endpoint histogram by controller and action combination",
- Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil))
- end
+ define_histogram :gitaly_controller_action_duration_seconds do
+ docstring "Gitaly endpoint histogram by controller and action combination"
+ base_labels Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil)
end
def self.stub(name, storage)
@@ -145,7 +130,7 @@ module Gitlab
# Keep track, separately, for the performance bar
self.query_time += duration
- gitaly_call_histogram.observe(
+ gitaly_controller_action_duration_seconds.observe(
current_transaction_labels.merge(gitaly_service: service.to_s, rpc: rpc.to_s),
duration)
end
@@ -247,7 +232,7 @@ module Gitlab
yield is_enabled
ensure
total_time = Gitlab::Metrics::System.monotonic_time - start
- migrate_histogram.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time)
+ gitaly_migrate_call_duration_seconds.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time)
feature_stack.shift
Thread.current[:gitaly_feature_stack] = nil if feature_stack.empty?
end
diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb
index c2b4155e6a5..cd2734b5a07 100644
--- a/lib/gitlab/gitaly_client/operation_service.rb
+++ b/lib/gitlab/gitaly_client/operation_service.rb
@@ -103,7 +103,13 @@ module Gitlab
request_enum.push(Gitaly::UserMergeBranchRequest.new(apply: true))
- branch_update = response_enum.next.branch_update
+ second_response = response_enum.next
+
+ if second_response.pre_receive_error.present?
+ raise Gitlab::Git::HooksService::PreReceiveError, second_response.pre_receive_error
+ end
+
+ branch_update = second_response.branch_update
return if branch_update.nil?
raise Gitlab::Git::CommitError.new('failed to apply merge to branch') unless branch_update.commit_id.present?
diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb
index f8e2a27f3fe..8b9a224b700 100644
--- a/lib/gitlab/gitaly_client/ref_service.rb
+++ b/lib/gitlab/gitaly_client/ref_service.rb
@@ -133,13 +133,16 @@ module Gitlab
GitalyClient.call(@repository.storage, :ref_service, :delete_branch, request)
end
- def delete_refs(except_with_prefixes:)
+ def delete_refs(refs: [], except_with_prefixes: [])
request = Gitaly::DeleteRefsRequest.new(
repository: @gitaly_repo,
- except_with_prefix: except_with_prefixes
+ refs: refs.map { |r| encode_binary(r) },
+ except_with_prefix: except_with_prefixes.map { |r| encode_binary(r) }
)
- GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request)
+ response = GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request)
+
+ raise Gitlab::Git::Repository::GitError, response.git_error if response.git_error.present?
end
private
diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb
index b0dbaf11598..7adf32af209 100644
--- a/lib/gitlab/gitaly_client/repository_service.rb
+++ b/lib/gitlab/gitaly_client/repository_service.rb
@@ -203,6 +203,22 @@ module Gitlab
timeout: GitalyClient.default_timeout
)
end
+
+ def write_ref(ref_path, ref, old_ref, shell)
+ request = Gitaly::WriteRefRequest.new(
+ repository: @gitaly_repo,
+ ref: ref_path.b,
+ revision: ref.b,
+ shell: shell
+ )
+ request.old_revision = old_ref.b unless old_ref.nil?
+
+ response = GitalyClient.call(@storage, :repository_service, :write_ref, request)
+
+ raise Gitlab::Git::CommandError, encode!(response.error) if response.error.present?
+
+ true
+ end
end
end
end
diff --git a/lib/gitlab/gitaly_client/wiki_service.rb b/lib/gitlab/gitaly_client/wiki_service.rb
index 5c5b170a3e0..8e87a8cc36f 100644
--- a/lib/gitlab/gitaly_client/wiki_service.rb
+++ b/lib/gitlab/gitaly_client/wiki_service.rb
@@ -127,6 +127,18 @@ module Gitlab
wiki_file
end
+ def get_formatted_data(title:, dir: nil, version: nil)
+ request = Gitaly::WikiGetFormattedDataRequest.new(
+ repository: @gitaly_repo,
+ title: encode_binary(title),
+ revision: encode_binary(version),
+ directory: encode_binary(dir)
+ )
+
+ response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_formatted_data, request)
+ response.reduce("") { |memo, msg| memo << msg.data }
+ end
+
private
# If a block is given and the yielded value is true, iteration will be
diff --git a/lib/gitlab/metrics.rb b/lib/gitlab/metrics.rb
index 4779755bb22..7d63ca5627d 100644
--- a/lib/gitlab/metrics.rb
+++ b/lib/gitlab/metrics.rb
@@ -1,7 +1,7 @@
module Gitlab
module Metrics
- extend Gitlab::Metrics::InfluxDb
- extend Gitlab::Metrics::Prometheus
+ include Gitlab::Metrics::InfluxDb
+ include Gitlab::Metrics::Prometheus
def self.enabled?
influx_metrics_enabled? || prometheus_metrics_enabled?
diff --git a/lib/gitlab/metrics/influx_db.rb b/lib/gitlab/metrics/influx_db.rb
index ef44a13df51..66f30e3b397 100644
--- a/lib/gitlab/metrics/influx_db.rb
+++ b/lib/gitlab/metrics/influx_db.rb
@@ -1,179 +1,187 @@
module Gitlab
module Metrics
module InfluxDb
- include Gitlab::CurrentSettings
- extend self
+ extend ActiveSupport::Concern
+ include Gitlab::Metrics::Methods
+
+ EXECUTION_MEASUREMENT_BUCKETS = [0.001, 0.01, 0.1, 1].freeze
MUTEX = Mutex.new
private_constant :MUTEX
- def influx_metrics_enabled?
- settings[:enabled] || false
- end
+ class_methods do
+ def influx_metrics_enabled?
+ settings[:enabled] || false
+ end
- # Prometheus histogram buckets used for arbitrary code measurements
- EXECUTION_MEASUREMENT_BUCKETS = [0.001, 0.002, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1].freeze
- RAILS_ROOT = Rails.root.to_s
- METRICS_ROOT = Rails.root.join('lib', 'gitlab', 'metrics').to_s
- PATH_REGEX = /^#{RAILS_ROOT}\/?/
-
- def settings
- @settings ||= {
- enabled: current_application_settings[:metrics_enabled],
- pool_size: current_application_settings[:metrics_pool_size],
- timeout: current_application_settings[:metrics_timeout],
- method_call_threshold: current_application_settings[:metrics_method_call_threshold],
- host: current_application_settings[:metrics_host],
- port: current_application_settings[:metrics_port],
- sample_interval: current_application_settings[:metrics_sample_interval] || 15,
- packet_size: current_application_settings[:metrics_packet_size] || 1
- }
- end
+ # Prometheus histogram buckets used for arbitrary code measurements
+
+ def settings
+ @settings ||= begin
+ current_settings = Gitlab::CurrentSettings.current_application_settings
+
+ {
+ enabled: current_settings[:metrics_enabled],
+ pool_size: current_settings[:metrics_pool_size],
+ timeout: current_settings[:metrics_timeout],
+ method_call_threshold: current_settings[:metrics_method_call_threshold],
+ host: current_settings[:metrics_host],
+ port: current_settings[:metrics_port],
+ sample_interval: current_settings[:metrics_sample_interval] || 15,
+ packet_size: current_settings[:metrics_packet_size] || 1
+ }
+ end
+ end
- def mri?
- RUBY_ENGINE == 'ruby'
- end
+ def mri?
+ RUBY_ENGINE == 'ruby'
+ end
- def method_call_threshold
- # This is memoized since this method is called for every instrumented
- # method. Loading data from an external cache on every method call slows
- # things down too much.
- # in milliseconds
- @method_call_threshold ||= settings[:method_call_threshold]
- end
+ def method_call_threshold
+ # This is memoized since this method is called for every instrumented
+ # method. Loading data from an external cache on every method call slows
+ # things down too much.
+ # in milliseconds
+ @method_call_threshold ||= settings[:method_call_threshold]
+ end
- def submit_metrics(metrics)
- prepared = prepare_metrics(metrics)
+ def submit_metrics(metrics)
+ prepared = prepare_metrics(metrics)
- pool&.with do |connection|
- prepared.each_slice(settings[:packet_size]) do |slice|
- begin
- connection.write_points(slice)
- rescue StandardError
+ pool&.with do |connection|
+ prepared.each_slice(settings[:packet_size]) do |slice|
+ begin
+ connection.write_points(slice)
+ rescue StandardError
+ end
end
end
+ rescue Errno::EADDRNOTAVAIL, SocketError => ex
+ Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.')
+ Gitlab::EnvironmentLogger.error(ex)
end
- rescue Errno::EADDRNOTAVAIL, SocketError => ex
- Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.')
- Gitlab::EnvironmentLogger.error(ex)
- end
- def prepare_metrics(metrics)
- metrics.map do |hash|
- new_hash = hash.symbolize_keys
+ def prepare_metrics(metrics)
+ metrics.map do |hash|
+ new_hash = hash.symbolize_keys
- new_hash[:tags].each do |key, value|
- if value.blank?
- new_hash[:tags].delete(key)
- else
- new_hash[:tags][key] = escape_value(value)
+ new_hash[:tags].each do |key, value|
+ if value.blank?
+ new_hash[:tags].delete(key)
+ else
+ new_hash[:tags][key] = escape_value(value)
+ end
end
+
+ new_hash
end
+ end
- new_hash
+ def escape_value(value)
+ value.to_s.gsub('=', '\\=')
end
- end
- def escape_value(value)
- value.to_s.gsub('=', '\\=')
- end
+ # Measures the execution time of a block.
+ #
+ # Example:
+ #
+ # Gitlab::Metrics.measure(:find_by_username_duration) do
+ # User.find_by_username(some_username)
+ # end
+ #
+ # name - The name of the field to store the execution time in.
+ #
+ # Returns the value yielded by the supplied block.
+ def measure(name)
+ trans = current_transaction
+
+ return yield unless trans
+
+ real_start = Time.now.to_f
+ cpu_start = System.cpu_time
+
+ retval = yield
+
+ cpu_stop = System.cpu_time
+ real_stop = Time.now.to_f
+
+ real_time = (real_stop - real_start)
+ cpu_time = cpu_stop - cpu_start
+
+ real_duration_seconds = fetch_histogram("gitlab_#{name}_real_duration_seconds".to_sym) do
+ docstring "Measure #{name}"
+ base_labels Transaction::BASE_LABELS
+ buckets EXECUTION_MEASUREMENT_BUCKETS
+ end
- # Measures the execution time of a block.
- #
- # Example:
- #
- # Gitlab::Metrics.measure(:find_by_username_duration) do
- # User.find_by_username(some_username)
- # end
- #
- # name - The name of the field to store the execution time in.
- #
- # Returns the value yielded by the supplied block.
- def measure(name)
- trans = current_transaction
-
- return yield unless trans
-
- real_start = Time.now.to_f
- cpu_start = System.cpu_time
-
- retval = yield
-
- cpu_stop = System.cpu_time
- real_stop = Time.now.to_f
-
- real_time = (real_stop - real_start)
- cpu_time = cpu_stop - cpu_start
-
- Gitlab::Metrics.histogram("gitlab_#{name}_real_duration_seconds".to_sym,
- "Measure #{name}",
- Transaction::BASE_LABELS,
- EXECUTION_MEASUREMENT_BUCKETS)
- .observe(trans.labels, real_time)
-
- Gitlab::Metrics.histogram("gitlab_#{name}_cpu_duration_seconds".to_sym,
- "Measure #{name}",
- Transaction::BASE_LABELS,
- EXECUTION_MEASUREMENT_BUCKETS)
- .observe(trans.labels, cpu_time / 1000.0)
-
- # InfluxDB stores the _real_time time values as milliseconds
- trans.increment("#{name}_real_time", real_time * 1000, false)
- trans.increment("#{name}_cpu_time", cpu_time, false)
- trans.increment("#{name}_call_count", 1, false)
-
- retval
- end
+ real_duration_seconds.observe(trans.labels, real_time)
- # Sets the action of the current transaction (if any)
- #
- # action - The name of the action.
- def action=(action)
- trans = current_transaction
+ cpu_duration_seconds = fetch_histogram("gitlab_#{name}_cpu_duration_seconds".to_sym) do
+ docstring "Measure #{name}"
+ base_labels Transaction::BASE_LABELS
+ buckets EXECUTION_MEASUREMENT_BUCKETS
+ with_feature "prometheus_metrics_measure_#{name}_cpu_duration"
+ end
+ cpu_duration_seconds.observe(trans.labels, cpu_time)
- trans&.action = action
- end
+ # InfluxDB stores the _real_time and _cpu_time time values as milliseconds
+ trans.increment("#{name}_real_time", real_time.in_milliseconds, false)
+ trans.increment("#{name}_cpu_time", cpu_time.in_milliseconds, false)
+ trans.increment("#{name}_call_count", 1, false)
- # Tracks an event.
- #
- # See `Gitlab::Metrics::Transaction#add_event` for more details.
- def add_event(*args)
- trans = current_transaction
+ retval
+ end
- trans&.add_event(*args)
- end
+ # Sets the action of the current transaction (if any)
+ #
+ # action - The name of the action.
+ def action=(action)
+ trans = current_transaction
- # Returns the prefix to use for the name of a series.
- def series_prefix
- @series_prefix ||= Sidekiq.server? ? 'sidekiq_' : 'rails_'
- end
+ trans&.action = action
+ end
- # Allow access from other metrics related middlewares
- def current_transaction
- Transaction.current
- end
+ # Tracks an event.
+ #
+ # See `Gitlab::Metrics::Transaction#add_event` for more details.
+ def add_event(*args)
+ trans = current_transaction
- # When enabled this should be set before being used as the usual pattern
- # "@foo ||= bar" is _not_ thread-safe.
- # rubocop:disable Gitlab/ModuleWithInstanceVariables
- def pool
- if influx_metrics_enabled?
- if @pool.nil?
- MUTEX.synchronize do
- @pool ||= ConnectionPool.new(size: settings[:pool_size], timeout: settings[:timeout]) do
- host = settings[:host]
- port = settings[:port]
-
- InfluxDB::Client
- .new(udp: { host: host, port: port })
+ trans&.add_event(*args)
+ end
+
+ # Returns the prefix to use for the name of a series.
+ def series_prefix
+ @series_prefix ||= Sidekiq.server? ? 'sidekiq_' : 'rails_'
+ end
+
+ # Allow access from other metrics related middlewares
+ def current_transaction
+ Transaction.current
+ end
+
+ # When enabled this should be set before being used as the usual pattern
+ # "@foo ||= bar" is _not_ thread-safe.
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def pool
+ if influx_metrics_enabled?
+ if @pool.nil?
+ MUTEX.synchronize do
+ @pool ||= ConnectionPool.new(size: settings[:pool_size], timeout: settings[:timeout]) do
+ host = settings[:host]
+ port = settings[:port]
+
+ InfluxDB::Client
+ .new(udp: { host: host, port: port })
+ end
end
end
- end
- @pool
+ @pool
+ end
end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
end
- # rubocop:enable Gitlab/ModuleWithInstanceVariables
end
end
end
diff --git a/lib/gitlab/metrics/method_call.rb b/lib/gitlab/metrics/method_call.rb
index c2f9db56824..b11520a79bb 100644
--- a/lib/gitlab/metrics/method_call.rb
+++ b/lib/gitlab/metrics/method_call.rb
@@ -4,26 +4,15 @@ module Gitlab
module Metrics
# Class for tracking timing information about method calls
class MethodCall
- @@measurement_enabled_cache = Concurrent::AtomicBoolean.new(false)
- @@measurement_enabled_cache_expires_at = Concurrent::AtomicReference.new(Time.now.to_i)
- MUTEX = Mutex.new
+ include Gitlab::Metrics::Methods
BASE_LABELS = { module: nil, method: nil }.freeze
attr_reader :real_time, :cpu_time, :call_count, :labels
- def self.call_duration_histogram
- return @call_duration_histogram if @call_duration_histogram
-
- MUTEX.synchronize do
- @call_duration_histogram ||= Gitlab::Metrics.histogram(
- :gitlab_method_call_duration_seconds,
- 'Method calls real duration',
- Transaction::BASE_LABELS.merge(BASE_LABELS),
- [0.01, 0.05, 0.1, 0.5, 1])
- end
- end
-
- def self.measurement_enabled_cache_expires_at
- @@measurement_enabled_cache_expires_at
+ define_histogram :gitlab_method_call_duration_seconds do
+ docstring 'Method calls real duration'
+ base_labels Transaction::BASE_LABELS.merge(BASE_LABELS)
+ buckets [0.01, 0.05, 0.1, 0.5, 1]
+ with_feature :prometheus_metrics_method_instrumentation
end
# name - The full name of the method (including namespace) such as
@@ -53,8 +42,8 @@ module Gitlab
@cpu_time += cpu_time
@call_count += 1
- if call_measurement_enabled? && above_threshold?
- self.class.call_duration_histogram.observe(@transaction.labels.merge(labels), real_time)
+ if above_threshold?
+ self.class.gitlab_method_call_duration_seconds.observe(@transaction.labels.merge(labels), real_time)
end
retval
@@ -78,17 +67,6 @@ module Gitlab
def above_threshold?
real_time.in_milliseconds >= Metrics.method_call_threshold
end
-
- def call_measurement_enabled?
- expires_at = @@measurement_enabled_cache_expires_at.value
- if expires_at < Time.now.to_i
- if @@measurement_enabled_cache_expires_at.compare_and_set(expires_at, 1.minute.from_now.to_i)
- @@measurement_enabled_cache.value = Feature.get(:prometheus_metrics_method_instrumentation).enabled?
- end
- end
-
- @@measurement_enabled_cache.value
- end
end
end
end
diff --git a/lib/gitlab/metrics/methods.rb b/lib/gitlab/metrics/methods.rb
new file mode 100644
index 00000000000..cd7c1e507f7
--- /dev/null
+++ b/lib/gitlab/metrics/methods.rb
@@ -0,0 +1,129 @@
+# rubocop:disable Style/ClassVars
+
+module Gitlab
+ module Metrics
+ module Methods
+ extend ActiveSupport::Concern
+
+ included do
+ @@_metric_provider_mutex ||= Mutex.new
+ @@_metrics_provider_cache = {}
+ end
+
+ class_methods do
+ def reload_metric!(name)
+ @@_metrics_provider_cache.delete(name)
+ end
+
+ private
+
+ def define_metric(type, name, opts = {}, &block)
+ if respond_to?(name)
+ raise ArgumentError, "method #{name} already exists"
+ end
+
+ define_singleton_method(name) do
+ # inlining fetch_metric method to avoid method call overhead when instrumenting hot spots
+ @@_metrics_provider_cache[name] || init_metric(type, name, opts, &block)
+ end
+ end
+
+ def fetch_metric(type, name, opts = {}, &block)
+ @@_metrics_provider_cache[name] || init_metric(type, name, opts, &block)
+ end
+
+ def init_metric(type, name, opts = {}, &block)
+ options = MetricOptions.new(opts)
+ options.evaluate(&block)
+
+ if disabled_by_feature(options)
+ synchronized_cache_fill(name) { NullMetric.instance }
+ else
+ synchronized_cache_fill(name) { build_metric!(type, name, options) }
+ end
+ end
+
+ def synchronized_cache_fill(key)
+ @@_metric_provider_mutex.synchronize do
+ @@_metrics_provider_cache[key] ||= yield
+ end
+ end
+
+ def disabled_by_feature(options)
+ options.with_feature && !Feature.get(options.with_feature).enabled?
+ end
+
+ def build_metric!(type, name, options)
+ case type
+ when :gauge
+ Gitlab::Metrics.gauge(name, options.docstring, options.base_labels, options.multiprocess_mode)
+ when :counter
+ Gitlab::Metrics.counter(name, options.docstring, options.base_labels)
+ when :histogram
+ Gitlab::Metrics.histogram(name, options.docstring, options.base_labels, options.buckets)
+ when :summary
+ raise NotImplementedError, "summary metrics are not currently supported"
+ else
+ raise ArgumentError, "unknown metric type #{type}"
+ end
+ end
+
+ # Fetch and/or initialize counter metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_counter(name, opts = {}, &block)
+ fetch_metric(:counter, name, opts, &block)
+ end
+
+ # Fetch and/or initialize gauge metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_gauge(name, opts = {}, &block)
+ fetch_metric(:gauge, name, opts, &block)
+ end
+
+ # Fetch and/or initialize histogram metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_histogram(name, opts = {}, &block)
+ fetch_metric(:histogram, name, opts, &block)
+ end
+
+ # Fetch and/or initialize summary metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_summary(name, opts = {}, &block)
+ fetch_metric(:summary, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Counter
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_counter(name, opts = {}, &block)
+ define_metric(:counter, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Gauge
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_gauge(name, opts = {}, &block)
+ define_metric(:gauge, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Histogram
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_histogram(name, opts = {}, &block)
+ define_metric(:histogram, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Summary
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_summary(name, opts = {}, &block)
+ define_metric(:summary, name, opts, &block)
+ end
+ end
+ end
+ end
+end
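
Note: the other files in this diff (GitalyClient, MethodCall, Transaction, the ActionView and ActiveRecord subscribers) consume this concern in the same way: include the module, declare the metric once with the block DSL, then call the generated class-level accessor on the hot path. A minimal, self-contained sketch of that usage; the class name, metric name and feature flag below are illustrative and not part of this commit:

    class ExampleService
      include Gitlab::Metrics::Methods

      define_histogram :example_operation_duration_seconds do
        docstring 'Example operation timings'
        base_labels feature: nil
        buckets [0.01, 0.1, 1, 10]
        with_feature :prometheus_metrics_example_operation  # resolves to NullMetric.instance when disabled
      end

      def call(feature)
        started = Time.now.to_f
        yield if block_given?
        # the accessor is defined on the class, so instances go through self.class
        self.class.example_operation_duration_seconds
          .observe({ feature: feature }, Time.now.to_f - started)
      end
    end
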
diff --git a/lib/gitlab/metrics/methods/metric_options.rb b/lib/gitlab/metrics/methods/metric_options.rb
new file mode 100644
index 00000000000..70e122d4e15
--- /dev/null
+++ b/lib/gitlab/metrics/methods/metric_options.rb
@@ -0,0 +1,61 @@
+module Gitlab
+ module Metrics
+ module Methods
+ class MetricOptions
+ SMALL_NETWORK_BUCKETS = [0.005, 0.01, 0.1, 1, 10].freeze
+
+ def initialize(options = {})
+ @multiprocess_mode = options[:multiprocess_mode] || :all
+ @buckets = options[:buckets] || SMALL_NETWORK_BUCKETS
+ @base_labels = options[:base_labels] || {}
+ @docstring = options[:docstring]
+ @with_feature = options[:with_feature]
+ end
+
+ # Documentation describing metric in metrics endpoint '/-/metrics'
+ def docstring(docstring = nil)
+ @docstring = docstring unless docstring.nil?
+
+ @docstring
+ end
+
+ # Gauge aggregation mode for multiprocess metrics
+ # - :all (default) returns each gauge for every process
+ # - :livesum all processes' gauges summed up
+ # - :max maximum value of per-process gauges
+ # - :min minimum value of per-process gauges
+ def multiprocess_mode(mode = nil)
+ @multiprocess_mode = mode unless mode.nil?
+
+ @multiprocess_mode
+ end
+
+ # Measurement buckets for histograms
+ def buckets(buckets = nil)
+ @buckets = buckets unless buckets.nil?
+
+ @buckets
+ end
+
+ # Base labels are merged with per metric labels
+ def base_labels(base_labels = nil)
+ @base_labels = base_labels unless base_labels.nil?
+
+ @base_labels
+ end
+
+ # Use feature toggle to control whether certain metric is enabled/disabled
+ def with_feature(name = nil)
+ @with_feature = name unless name.nil?
+
+ @with_feature
+ end
+
+ def evaluate(&block)
+ instance_eval(&block) if block_given?
+ self
+ end
+ end
+ end
+ end
+end
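
Note: each option method above doubles as getter and setter, and evaluate instance_evals the declaration block against the options object; this is what turns the define_histogram blocks elsewhere in this diff into a MetricOptions instance. A small sketch with illustrative values:

    options = Gitlab::Metrics::Methods::MetricOptions.new.evaluate do
      docstring 'Example metric'
      base_labels controller: nil, action: nil
      buckets [0.1, 1, 10]
    end

    options.docstring          # => "Example metric"
    options.buckets            # => [0.1, 1, 10]
    options.multiprocess_mode  # => :all (the default)
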
diff --git a/lib/gitlab/metrics/null_metric.rb b/lib/gitlab/metrics/null_metric.rb
index 3b5a2907195..aabada5c21a 100644
--- a/lib/gitlab/metrics/null_metric.rb
+++ b/lib/gitlab/metrics/null_metric.rb
@@ -2,6 +2,8 @@ module Gitlab
module Metrics
# Mocks ::Prometheus::Client::Metric and all derived metrics
class NullMetric
+ include Singleton
+
def method_missing(name, *args, &block)
nil
end
diff --git a/lib/gitlab/metrics/prometheus.rb b/lib/gitlab/metrics/prometheus.rb
index b0b8e8436db..f07ea3560ff 100644
--- a/lib/gitlab/metrics/prometheus.rb
+++ b/lib/gitlab/metrics/prometheus.rb
@@ -3,73 +3,77 @@ require 'prometheus/client'
module Gitlab
module Metrics
module Prometheus
- include Gitlab::CurrentSettings
- include Gitlab::Utils::StrongMemoize
+ extend ActiveSupport::Concern
REGISTRY_MUTEX = Mutex.new
PROVIDER_MUTEX = Mutex.new
- def metrics_folder_present?
- multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir
+ class_methods do
+ include Gitlab::Utils::StrongMemoize
- multiprocess_files_dir &&
- ::Dir.exist?(multiprocess_files_dir) &&
- ::File.writable?(multiprocess_files_dir)
- end
+ def metrics_folder_present?
+ multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir
- def prometheus_metrics_enabled?
- strong_memoize(:prometheus_metrics_enabled) do
- prometheus_metrics_enabled_unmemoized
+ multiprocess_files_dir &&
+ ::Dir.exist?(multiprocess_files_dir) &&
+ ::File.writable?(multiprocess_files_dir)
+ end
+
+ def prometheus_metrics_enabled?
+ strong_memoize(:prometheus_metrics_enabled) do
+ prometheus_metrics_enabled_unmemoized
+ end
end
- end
- def registry
- strong_memoize(:registry) do
- REGISTRY_MUTEX.synchronize do
- strong_memoize(:registry) do
- ::Prometheus::Client.registry
+ def registry
+ strong_memoize(:registry) do
+ REGISTRY_MUTEX.synchronize do
+ strong_memoize(:registry) do
+ ::Prometheus::Client.registry
+ end
end
end
end
- end
- def counter(name, docstring, base_labels = {})
- safe_provide_metric(:counter, name, docstring, base_labels)
- end
+ def counter(name, docstring, base_labels = {})
+ safe_provide_metric(:counter, name, docstring, base_labels)
+ end
- def summary(name, docstring, base_labels = {})
- safe_provide_metric(:summary, name, docstring, base_labels)
- end
+ def summary(name, docstring, base_labels = {})
+ safe_provide_metric(:summary, name, docstring, base_labels)
+ end
- def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all)
- safe_provide_metric(:gauge, name, docstring, base_labels, multiprocess_mode)
- end
+ def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all)
+ safe_provide_metric(:gauge, name, docstring, base_labels, multiprocess_mode)
+ end
- def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS)
- safe_provide_metric(:histogram, name, docstring, base_labels, buckets)
- end
+ def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS)
+ safe_provide_metric(:histogram, name, docstring, base_labels, buckets)
+ end
- private
+ private
- def safe_provide_metric(method, name, *args)
- metric = provide_metric(name)
- return metric if metric
+ def safe_provide_metric(method, name, *args)
+ metric = provide_metric(name)
+ return metric if metric
- PROVIDER_MUTEX.synchronize do
- provide_metric(name) || registry.method(method).call(name, *args)
+ PROVIDER_MUTEX.synchronize do
+ provide_metric(name) || registry.method(method).call(name, *args)
+ end
end
- end
- def provide_metric(name)
- if prometheus_metrics_enabled?
- registry.get(name)
- else
- NullMetric.new
+ def provide_metric(name)
+ if prometheus_metrics_enabled?
+ registry.get(name)
+ else
+ NullMetric.instance
+ end
end
- end
- def prometheus_metrics_enabled_unmemoized
- metrics_folder_present? && current_application_settings[:prometheus_metrics_enabled] || false
+ def prometheus_metrics_enabled_unmemoized
+ metrics_folder_present? &&
+ Gitlab::CurrentSettings.current_application_settings[:prometheus_metrics_enabled] || false
+ end
end
end
end
diff --git a/lib/gitlab/metrics/subscribers/action_view.rb b/lib/gitlab/metrics/subscribers/action_view.rb
index 3da474fc1ec..274436ca2b4 100644
--- a/lib/gitlab/metrics/subscribers/action_view.rb
+++ b/lib/gitlab/metrics/subscribers/action_view.rb
@@ -3,6 +3,14 @@ module Gitlab
module Subscribers
# Class for tracking the rendering timings of views.
class ActionView < ActiveSupport::Subscriber
+ include Gitlab::Metrics::Methods
+ define_histogram :gitlab_view_rendering_duration_seconds do
+ docstring 'View rendering time'
+ base_labels Transaction::BASE_LABELS.merge({ path: nil })
+ buckets [0.001, 0.01, 0.1, 1, 10.0]
+ with_feature :prometheus_metrics_view_instrumentation
+ end
+
attach_to :action_view
SERIES = 'views'.freeze
@@ -15,23 +23,11 @@ module Gitlab
private
- def metric_view_rendering_duration_seconds
- @metric_view_rendering_duration_seconds ||= Gitlab::Metrics.histogram(
- :gitlab_view_rendering_duration_seconds,
- 'View rendering time',
- Transaction::BASE_LABELS.merge({ path: nil }),
- [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0]
- )
- end
-
def track(event)
values = values_for(event)
tags = tags_for(event)
- metric_view_rendering_duration_seconds.observe(
- current_transaction.labels.merge(tags),
- event.duration
- )
+ self.class.gitlab_view_rendering_duration_seconds.observe(current_transaction.labels.merge(tags), event.duration)
current_transaction.increment(:view_duration, event.duration)
current_transaction.add_metric(SERIES, values, tags)
diff --git a/lib/gitlab/metrics/subscribers/active_record.rb b/lib/gitlab/metrics/subscribers/active_record.rb
index ead1acb8d44..4b3e8d0a6a0 100644
--- a/lib/gitlab/metrics/subscribers/active_record.rb
+++ b/lib/gitlab/metrics/subscribers/active_record.rb
@@ -3,12 +3,13 @@ module Gitlab
module Subscribers
# Class for tracking the total query duration of a transaction.
class ActiveRecord < ActiveSupport::Subscriber
+ include Gitlab::Metrics::Methods
attach_to :active_record
def sql(event)
return unless current_transaction
- metric_sql_duration_seconds.observe(current_transaction.labels, event.duration / 1000.0)
+ self.class.gitlab_sql_duration_seconds.observe(current_transaction.labels, event.duration / 1000.0)
current_transaction.increment(:sql_duration, event.duration, false)
current_transaction.increment(:sql_count, 1, false)
@@ -16,17 +17,14 @@ module Gitlab
private
- def current_transaction
- Transaction.current
+ define_histogram :gitlab_sql_duration_seconds do
+ docstring 'SQL time'
+ base_labels Transaction::BASE_LABELS
+ buckets [0.001, 0.01, 0.1, 1.0, 10.0]
end
- def metric_sql_duration_seconds
- @metric_sql_duration_seconds ||= Gitlab::Metrics.histogram(
- :gitlab_sql_duration_seconds,
- 'SQL time',
- Transaction::BASE_LABELS,
- [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0]
- )
+ def current_transaction
+ Transaction.current
end
end
end
diff --git a/lib/gitlab/metrics/transaction.rb b/lib/gitlab/metrics/transaction.rb
index e7975c023a9..45b9e14ba55 100644
--- a/lib/gitlab/metrics/transaction.rb
+++ b/lib/gitlab/metrics/transaction.rb
@@ -2,11 +2,12 @@ module Gitlab
module Metrics
# Class for storing metrics information of a single transaction.
class Transaction
+ include Gitlab::Metrics::Methods
+
# base labels shared among all transactions
BASE_LABELS = { controller: nil, action: nil }.freeze
THREAD_KEY = :_gitlab_metrics_transaction
- METRICS_MUTEX = Mutex.new
# The series to store events (e.g. Git pushes) in.
EVENT_SERIES = 'events'.freeze
@@ -54,8 +55,8 @@ module Gitlab
@memory_after = System.memory_usage
@finished_at = System.monotonic_time
- self.class.metric_transaction_duration_seconds.observe(labels, duration)
- self.class.metric_transaction_allocated_memory_bytes.observe(labels, allocated_memory * 1024.0)
+ self.class.gitlab_transaction_duration_seconds.observe(labels, duration)
+ self.class.gitlab_transaction_allocated_memory_bytes.observe(labels, allocated_memory * 1024.0)
Thread.current[THREAD_KEY] = nil
end
@@ -72,7 +73,7 @@ module Gitlab
# event_name - The name of the event (e.g. "git_push").
# tags - A set of tags to attach to the event.
def add_event(event_name, tags = {})
- self.class.metric_event_counter(event_name, tags).increment(tags.merge(labels))
+ self.class.transaction_metric(event_name, :counter, prefix: 'event_', tags: tags).increment(tags.merge(labels))
@metrics << Metric.new(EVENT_SERIES, { count: 1 }, tags.merge(event: event_name), :event)
end
@@ -86,12 +87,12 @@ module Gitlab
end
def increment(name, value, use_prometheus = true)
- self.class.metric_transaction_counter(name).increment(labels, value) if use_prometheus
+ self.class.transaction_metric(name, :counter).increment(labels, value) if use_prometheus
@values[name] += value
end
def set(name, value, use_prometheus = true)
- self.class.metric_transaction_gauge(name).set(labels, value) if use_prometheus
+ self.class.transaction_metric(name, :gauge).set(labels, value) if use_prometheus
@values[name] = value
end
@@ -136,64 +137,28 @@ module Gitlab
"#{labels[:controller]}##{labels[:action]}" if labels && !labels.empty?
end
- def self.metric_transaction_duration_seconds
- return @metric_transaction_duration_seconds if @metric_transaction_duration_seconds
-
- METRICS_MUTEX.synchronize do
- @metric_transaction_duration_seconds ||= Gitlab::Metrics.histogram(
- :gitlab_transaction_duration_seconds,
- 'Transaction duration',
- BASE_LABELS,
- [0.001, 0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.500, 2.0, 10.0]
- )
- end
- end
-
- def self.metric_transaction_allocated_memory_bytes
- return @metric_transaction_allocated_memory_bytes if @metric_transaction_allocated_memory_bytes
-
- METRICS_MUTEX.synchronize do
- @metric_transaction_allocated_memory_bytes ||= Gitlab::Metrics.histogram(
- :gitlab_transaction_allocated_memory_bytes,
- 'Transaction allocated memory bytes',
- BASE_LABELS,
- [1000, 10000, 20000, 500000, 1000000, 2000000, 5000000, 10000000, 20000000, 100000000]
- )
- end
+ define_histogram :gitlab_transaction_duration_seconds do
+ docstring 'Transaction duration'
+ base_labels BASE_LABELS
+ buckets [0.001, 0.01, 0.1, 1.0, 10.0]
end
- def self.metric_event_counter(event_name, tags)
- return @metric_event_counters[event_name] if @metric_event_counters&.has_key?(event_name)
-
- METRICS_MUTEX.synchronize do
- @metric_event_counters ||= {}
- @metric_event_counters[event_name] ||= Gitlab::Metrics.counter(
- "gitlab_transaction_event_#{event_name}_total".to_sym,
- "Transaction event #{event_name} counter",
- tags.merge(BASE_LABELS)
- )
- end
- end
-
- def self.metric_transaction_counter(name)
- return @metric_transaction_counters[name] if @metric_transaction_counters&.has_key?(name)
-
- METRICS_MUTEX.synchronize do
- @metric_transaction_counters ||= {}
- @metric_transaction_counters[name] ||= Gitlab::Metrics.counter(
- "gitlab_transaction_#{name}_total".to_sym, "Transaction #{name} counter", BASE_LABELS
- )
- end
+ define_histogram :gitlab_transaction_allocated_memory_bytes do
+ docstring 'Transaction allocated memory bytes'
+ base_labels BASE_LABELS
+ buckets [100, 1000, 10000, 100000, 1000000, 10000000]
+ with_feature :prometheus_metrics_transaction_allocated_memory
end
- def self.metric_transaction_gauge(name)
- return @metric_transaction_gauges[name] if @metric_transaction_gauges&.has_key?(name)
+ def self.transaction_metric(name, type, prefix: nil, tags: {})
+ metric_name = "gitlab_transaction_#{prefix}#{name}_total".to_sym
+ fetch_metric(type, metric_name) do
+ docstring "Transaction #{prefix}#{name} #{type}"
+ base_labels tags.merge(BASE_LABELS)
- METRICS_MUTEX.synchronize do
- @metric_transaction_gauges ||= {}
- @metric_transaction_gauges[name] ||= Gitlab::Metrics.gauge(
- "gitlab_transaction_#{name}".to_sym, "Transaction gauge #{name}", BASE_LABELS, :livesum
- )
+ if type == :gauge
+ multiprocess_mode :livesum
+ end
end
end
end
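
The transaction hunks above swap the mutex-guarded, hand-rolled metric helpers for the declarative DSL provided by `Gitlab::Metrics::Methods`. As a rough sketch of that pattern (the class name, metric name, and feature flag below are illustrative only, not part of this change, and assume the GitLab codebase is loaded):

```
# Illustrative sketch of the Gitlab::Metrics::Methods DSL used in the hunks above;
# the names here are made up for the example.
class ExampleTracker
  include Gitlab::Metrics::Methods

  # define_histogram generates a cached class-level accessor named after the metric.
  define_histogram :example_operation_duration_seconds do
    docstring 'Example operation duration'
    base_labels controller: nil, action: nil
    buckets [0.001, 0.01, 0.1, 1.0, 10.0]
    # Optional gating: when the flag is disabled, the accessor returns a NullMetric.
    with_feature :prometheus_metrics_example_tracking
  end

  def track(labels, duration)
    # Observes into the Prometheus histogram (or a no-op NullMetric when gated off).
    self.class.example_operation_duration_seconds.observe(labels, duration)
  end
end
```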
diff --git a/lib/gitlab/o_auth/user.rb b/lib/gitlab/o_auth/user.rb
index fff9360ea27..e40a001d20c 100644
--- a/lib/gitlab/o_auth/user.rb
+++ b/lib/gitlab/o_auth/user.rb
@@ -55,7 +55,7 @@ module Gitlab
user ||= find_or_build_ldap_user if auto_link_ldap_user?
user ||= build_new_user if signup_enabled?
- user.external = true if external_provider? && user
+ user.external = true if external_provider? && user&.new_record?
user
end
diff --git a/qa/README.md b/qa/README.md
index 3c1b61900d9..b937dc4c7a0 100644
--- a/qa/README.md
+++ b/qa/README.md
@@ -34,6 +34,9 @@ You can use GitLab QA to exercise tests on any live instance! For example, the
following call would log in to a local [GDK] instance and run all specs in
`qa/specs/features`:
+First, `cd` into the `$gdk/gitlab/qa` directory.
+The `bin/qa` script expects you to be in the `qa` folder of the app.
+
```
bin/qa Test::Instance http://localhost:3000
```
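
Put together, the workflow this hunk documents amounts to the following (paths as given in the README text above):

```
cd $gdk/gitlab/qa
bin/qa Test::Instance http://localhost:3000
```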
diff --git a/spec/features/projects/members/share_with_group_spec.rb b/spec/features/projects/members/share_with_group_spec.rb
index 3198798306c..4cf48098401 100644
--- a/spec/features/projects/members/share_with_group_spec.rb
+++ b/spec/features/projects/members/share_with_group_spec.rb
@@ -122,7 +122,7 @@ feature 'Project > Members > Share with Group', :js do
select2 group.id, from: '#link_group_id'
fill_in 'expires_at_groups', with: (Time.now + 4.5.days).strftime('%Y-%m-%d')
- page.find('body').click
+ click_on 'share-with-group-tab'
find('.btn-create').click
end
diff --git a/spec/javascripts/api_spec.js b/spec/javascripts/api_spec.js
index cc5fa42aafe..cf3a76d0d2e 100644
--- a/spec/javascripts/api_spec.js
+++ b/spec/javascripts/api_spec.js
@@ -1,3 +1,5 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import Api from '~/api';
describe('Api', () => {
@@ -7,20 +9,17 @@ describe('Api', () => {
api_version: dummyApiVersion,
relative_url_root: dummyUrlRoot,
};
- const dummyResponse = 'hello from outer space!';
- const sendDummyResponse = () => {
- const deferred = $.Deferred();
- deferred.resolve(dummyResponse);
- return deferred.promise();
- };
let originalGon;
+ let mock;
beforeEach(() => {
+ mock = new MockAdapter(axios);
originalGon = window.gon;
window.gon = Object.assign({}, dummyGon);
});
afterEach(() => {
+ mock.restore();
window.gon = originalGon;
});
@@ -38,15 +37,13 @@ describe('Api', () => {
describe('group', () => {
it('fetches a group', (done) => {
const groupId = '123456';
- const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}.json`;
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- return sendDummyResponse();
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}`;
+ mock.onGet(expectedUrl).reply(200, {
+ name: 'test',
});
Api.group(groupId, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.name).toBe('test');
done();
});
});
@@ -57,19 +54,13 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups.json`;
- const expectedData = Object.assign({
- search: query,
- per_page: 20,
- }, options);
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.groups(query, options, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -79,19 +70,13 @@ describe('Api', () => {
it('fetches namespaces', (done) => {
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/namespaces.json`;
- const expectedData = {
- search: query,
- per_page: 20,
- };
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.namespaces(query, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -103,21 +88,13 @@ describe('Api', () => {
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
window.gon.current_user_id = 1;
- const expectedData = Object.assign({
- search: query,
- per_page: 20,
- membership: true,
- simple: true,
- }, options);
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.projects(query, options, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -126,20 +103,13 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
- const expectedData = Object.assign({
- search: query,
- per_page: 20,
- simple: true,
- }, options);
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.projects(query, options, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -154,16 +124,16 @@ describe('Api', () => {
const expectedData = {
label: labelData,
};
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.type).toEqual('POST');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
+ mock.onPost(expectedUrl).reply((config) => {
+ expect(config.data).toBe(JSON.stringify(expectedData));
+
+ return [200, {
+ name: 'test',
+ }];
});
Api.newLabel(namespace, project, labelData, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.name).toBe('test');
done();
});
});
@@ -174,19 +144,13 @@ describe('Api', () => {
const groupId = '123456';
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`;
- const expectedData = {
- search: query,
- per_page: 20,
- };
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.groupProjects(groupId, query, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response.length).toBe(1);
+ expect(response[0].name).toBe('test');
done();
});
});
@@ -197,14 +161,10 @@ describe('Api', () => {
const licenseKey = "driver's license";
const data = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/licenses/${licenseKey}`;
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.data).toEqual(data);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.licenseText(licenseKey, data, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -214,13 +174,10 @@ describe('Api', () => {
it('fetches a gitignore text', (done) => {
const gitignoreKey = 'ignore git';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitignores/${gitignoreKey}`;
- spyOn(jQuery, 'get').and.callFake((url, callback) => {
- expect(url).toEqual(expectedUrl);
- callback(dummyResponse);
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.gitignoreText(gitignoreKey, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -230,13 +187,10 @@ describe('Api', () => {
it('fetches a .gitlab-ci.yml', (done) => {
const gitlabCiYmlKey = 'Y CI ML';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/gitlab_ci_ymls/${gitlabCiYmlKey}`;
- spyOn(jQuery, 'get').and.callFake((url, callback) => {
- expect(url).toEqual(expectedUrl);
- callback(dummyResponse);
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.gitlabCiYml(gitlabCiYmlKey, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -246,13 +200,10 @@ describe('Api', () => {
it('fetches a Dockerfile', (done) => {
const dockerfileYmlKey = 'a giant whale';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/templates/dockerfiles/${dockerfileYmlKey}`;
- spyOn(jQuery, 'get').and.callFake((url, callback) => {
- expect(url).toEqual(expectedUrl);
- callback(dummyResponse);
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.dockerfileYml(dockerfileYmlKey, (response) => {
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -265,14 +216,10 @@ describe('Api', () => {
const templateKey = ' template #%?.key ';
const templateType = 'template type';
const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/templates/${templateType}/${encodeURIComponent(templateKey)}`;
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, 'test');
Api.issueTemplate(namespace, project, templateKey, templateType, (error, response) => {
- expect(error).toBe(null);
- expect(response).toBe(dummyResponse);
+ expect(response).toBe('test');
done();
});
});
@@ -283,20 +230,14 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users.json`;
- const expectedData = Object.assign({
- search: query,
- per_page: 20,
- }, options);
- spyOn(jQuery, 'ajax').and.callFake((request) => {
- expect(request.url).toEqual(expectedUrl);
- expect(request.dataType).toEqual('json');
- expect(request.data).toEqual(expectedData);
- return sendDummyResponse();
- });
+ mock.onGet(expectedUrl).reply(200, [{
+ name: 'test',
+ }]);
Api.users(query, options)
- .then((response) => {
- expect(response).toBe(dummyResponse);
+ .then(({ data }) => {
+ expect(data.length).toBe(1);
+ expect(data[0].name).toBe('test');
})
.then(done)
.catch(done.fail);
diff --git a/spec/javascripts/blob/viewer/index_spec.js b/spec/javascripts/blob/viewer/index_spec.js
index cfa6650d85f..892411a6a40 100644
--- a/spec/javascripts/blob/viewer/index_spec.js
+++ b/spec/javascripts/blob/viewer/index_spec.js
@@ -1,28 +1,35 @@
/* eslint-disable no-new */
+import MockAdapter from 'axios-mock-adapter';
import BlobViewer from '~/blob/viewer/index';
+import axios from '~/lib/utils/axios_utils';
describe('Blob viewer', () => {
let blob;
+ let mock;
+
preloadFixtures('snippets/show.html.raw');
beforeEach(() => {
+ mock = new MockAdapter(axios);
+
loadFixtures('snippets/show.html.raw');
$('#modal-upload-blob').remove();
blob = new BlobViewer();
- spyOn($, 'ajax').and.callFake(() => {
- const d = $.Deferred();
-
- d.resolve({
- html: '<div>testing</div>',
- });
+ mock.onGet('http://test.host/snippets/1.json?viewer=rich').reply(200, {
+ html: '<div>testing</div>',
+ });
- return d.promise();
+ mock.onGet('http://test.host/snippets/1.json?viewer=simple').reply(200, {
+ html: '<div>testing</div>',
});
+
+ spyOn(axios, 'get').and.callThrough();
});
afterEach(() => {
+ mock.restore();
location.hash = '';
});
@@ -30,7 +37,6 @@ describe('Blob viewer', () => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
- expect($.ajax).toHaveBeenCalled();
expect(
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
.classList.contains('hidden'),
@@ -46,7 +52,6 @@ describe('Blob viewer', () => {
new BlobViewer();
setTimeout(() => {
- expect($.ajax).toHaveBeenCalled();
expect(
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
.classList.contains('hidden'),
@@ -64,12 +69,8 @@ describe('Blob viewer', () => {
});
asyncClick()
+ .then(() => asyncClick())
.then(() => {
- expect($.ajax).toHaveBeenCalled();
- return asyncClick();
- })
- .then(() => {
- expect($.ajax.calls.count()).toBe(1);
expect(
document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'),
).toBe('true');
@@ -122,7 +123,6 @@ describe('Blob viewer', () => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
- expect($.ajax).toHaveBeenCalled();
expect(
copyButton.classList.contains('disabled'),
).toBeFalsy();
@@ -135,8 +135,6 @@ describe('Blob viewer', () => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
- expect($.ajax).toHaveBeenCalled();
-
expect(
copyButton.getAttribute('data-original-title'),
).toBe('Copy source to clipboard');
@@ -171,14 +169,14 @@ describe('Blob viewer', () => {
it('sends AJAX request when switching to simple view', () => {
blob.switchToViewer('simple');
- expect($.ajax).toHaveBeenCalled();
+ expect(axios.get).toHaveBeenCalled();
});
it('does not send AJAX request when switching to rich view', () => {
blob.switchToViewer('simple');
blob.switchToViewer('rich');
- expect($.ajax.calls.count()).toBe(1);
+ expect(axios.get.calls.count()).toBe(1);
});
});
});
diff --git a/spec/javascripts/commits_spec.js b/spec/javascripts/commits_spec.js
index d0176520440..44ec9e4eabf 100644
--- a/spec/javascripts/commits_spec.js
+++ b/spec/javascripts/commits_spec.js
@@ -1,4 +1,6 @@
import 'vendor/jquery.endless-scroll';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import CommitsList from '~/commits';
describe('Commits List', () => {
@@ -43,30 +45,47 @@ describe('Commits List', () => {
describe('on entering input', () => {
let ajaxSpy;
+ let mock;
beforeEach(() => {
CommitsList.init(25);
CommitsList.searchField.val('');
spyOn(history, 'replaceState').and.stub();
- ajaxSpy = spyOn(jQuery, 'ajax').and.callFake((req) => {
- req.success({
- data: '<li>Result</li>',
- });
+ mock = new MockAdapter(axios);
+
+ mock.onGet('/h5bp/html5-boilerplate/commits/master').reply(200, {
+ html: '<li>Result</li>',
});
+
+ ajaxSpy = spyOn(axios, 'get').and.callThrough();
+ });
+
+ afterEach(() => {
+ mock.restore();
});
- it('should save the last search string', () => {
+ it('should save the last search string', (done) => {
CommitsList.searchField.val('GitLab');
- CommitsList.filterResults();
- expect(ajaxSpy).toHaveBeenCalled();
- expect(CommitsList.lastSearch).toEqual('GitLab');
+ CommitsList.filterResults()
+ .then(() => {
+ expect(ajaxSpy).toHaveBeenCalled();
+ expect(CommitsList.lastSearch).toEqual('GitLab');
+
+ done();
+ })
+ .catch(done.fail);
});
- it('should not make ajax call if the input does not change', () => {
- CommitsList.filterResults();
- expect(ajaxSpy).not.toHaveBeenCalled();
- expect(CommitsList.lastSearch).toEqual('');
+ it('should not make ajax call if the input does not change', (done) => {
+ CommitsList.filterResults()
+ .then(() => {
+ expect(ajaxSpy).not.toHaveBeenCalled();
+ expect(CommitsList.lastSearch).toEqual('');
+
+ done();
+ })
+ .catch(done.fail);
});
});
});
diff --git a/spec/javascripts/lib/utils/users_cache_spec.js b/spec/javascripts/lib/utils/users_cache_spec.js
index ec6ea35952b..50371c8c5f6 100644
--- a/spec/javascripts/lib/utils/users_cache_spec.js
+++ b/spec/javascripts/lib/utils/users_cache_spec.js
@@ -92,7 +92,9 @@ describe('UsersCache', () => {
apiSpy = (query, options) => {
expect(query).toBe('');
expect(options).toEqual({ username: dummyUsername });
- return Promise.resolve([dummyUser]);
+ return Promise.resolve({
+ data: [dummyUser],
+ });
};
UsersCache.retrieve(dummyUsername)
diff --git a/spec/javascripts/repo/components/new_dropdown/modal_spec.js b/spec/javascripts/repo/components/new_dropdown/modal_spec.js
index 233cca06ed0..8bbc3100357 100644
--- a/spec/javascripts/repo/components/new_dropdown/modal_spec.js
+++ b/spec/javascripts/repo/components/new_dropdown/modal_spec.js
@@ -18,8 +18,10 @@ describe('new file modal component', () => {
}));
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: {
- id: '123branch',
+ data: {
+ commit: {
+ id: '123branch',
+ },
},
}));
diff --git a/spec/javascripts/repo/components/new_dropdown/upload_spec.js b/spec/javascripts/repo/components/new_dropdown/upload_spec.js
index 788c08e5279..667112ab21a 100644
--- a/spec/javascripts/repo/components/new_dropdown/upload_spec.js
+++ b/spec/javascripts/repo/components/new_dropdown/upload_spec.js
@@ -17,8 +17,10 @@ describe('new dropdown upload', () => {
}));
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: {
- id: '123branch',
+ data: {
+ commit: {
+ id: '123branch',
+ },
},
}));
diff --git a/spec/javascripts/repo/components/repo_commit_section_spec.js b/spec/javascripts/repo/components/repo_commit_section_spec.js
index 676ac09f2c9..93e94b4f24c 100644
--- a/spec/javascripts/repo/components/repo_commit_section_spec.js
+++ b/spec/javascripts/repo/components/repo_commit_section_spec.js
@@ -87,8 +87,10 @@ describe('RepoCommitSection', () => {
changedFiles = JSON.parse(JSON.stringify(vm.$store.getters.changedFiles));
spyOn(service, 'commit').and.returnValue(Promise.resolve({
- short_id: '1',
- stats: {},
+ data: {
+ short_id: '1',
+ stats: {},
+ },
}));
});
diff --git a/spec/javascripts/repo/stores/actions_spec.js b/spec/javascripts/repo/stores/actions_spec.js
index 8d830c67290..f678967b092 100644
--- a/spec/javascripts/repo/stores/actions_spec.js
+++ b/spec/javascripts/repo/stores/actions_spec.js
@@ -178,7 +178,9 @@ describe('Multi-file store actions', () => {
it('calls service', (done) => {
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: { id: '123' },
+ data: {
+ commit: { id: '123' },
+ },
}));
store.dispatch('checkCommitStatus')
@@ -192,7 +194,9 @@ describe('Multi-file store actions', () => {
it('returns true if current ref does not equal returned ID', (done) => {
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: { id: '123' },
+ data: {
+ commit: { id: '123' },
+ },
}));
store.dispatch('checkCommitStatus')
@@ -206,7 +210,9 @@ describe('Multi-file store actions', () => {
it('returns false if current ref equals returned ID', (done) => {
spyOn(service, 'getBranchData').and.returnValue(Promise.resolve({
- commit: { id: '1' },
+ data: {
+ commit: { id: '1' },
+ },
}));
store.dispatch('checkCommitStatus')
@@ -250,13 +256,15 @@ describe('Multi-file store actions', () => {
describe('success', () => {
beforeEach(() => {
spyOn(service, 'commit').and.returnValue(Promise.resolve({
- id: '123456',
- short_id: '123',
- message: 'test message',
- committed_date: 'date',
- stats: {
- additions: '1',
- deletions: '2',
+ data: {
+ id: '123456',
+ short_id: '123',
+ message: 'test message',
+ committed_date: 'date',
+ stats: {
+ additions: '1',
+ deletions: '2',
+ },
},
}));
});
@@ -324,7 +332,9 @@ describe('Multi-file store actions', () => {
describe('failed', () => {
beforeEach(() => {
spyOn(service, 'commit').and.returnValue(Promise.resolve({
- message: 'failed message',
+ data: {
+ message: 'failed message',
+ },
}));
});
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index ac7c0270916..bf01e6ef8e8 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -562,35 +562,39 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
describe '#delete_refs' do
- before(:all) do
- @repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
- end
+ shared_examples 'deleting refs' do
+ let(:repo) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
- it 'deletes the ref' do
- @repo.delete_refs('refs/heads/feature')
+ after do
+ ensure_seeds
+ end
- expect(@repo.rugged.references['refs/heads/feature']).to be_nil
- end
+ it 'deletes the ref' do
+ repo.delete_refs('refs/heads/feature')
- it 'deletes all refs' do
- refs = %w[refs/heads/wip refs/tags/v1.1.0]
- @repo.delete_refs(*refs)
+ expect(repo.rugged.references['refs/heads/feature']).to be_nil
+ end
- refs.each do |ref|
- expect(@repo.rugged.references[ref]).to be_nil
+ it 'deletes all refs' do
+ refs = %w[refs/heads/wip refs/tags/v1.1.0]
+ repo.delete_refs(*refs)
+
+ refs.each do |ref|
+ expect(repo.rugged.references[ref]).to be_nil
+ end
end
- end
- it 'raises an error if it failed' do
- expect(@repo).to receive(:popen).and_return(['Error', 1])
+ it 'raises an error if it failed' do
+ expect { repo.delete_refs('refs\heads\fix') }.to raise_error(Gitlab::Git::Repository::GitError)
+ end
+ end
- expect do
- @repo.delete_refs('refs/heads/fix')
- end.to raise_error(Gitlab::Git::Repository::GitError)
+ context 'when Gitaly delete_refs feature is enabled' do
+ it_behaves_like 'deleting refs'
end
- after(:all) do
- ensure_seeds
+ context 'when Gitaly delete_refs feature is disabled', :disable_gitaly do
+ it_behaves_like 'deleting refs'
end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 951e146a30a..257e4c50f2d 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -112,7 +112,7 @@ describe Gitlab::GitalyClient::RefService do
expect_any_instance_of(Gitaly::RefService::Stub)
.to receive(:delete_refs)
.with(gitaly_request_with_params(except_with_prefix: prefixes), kind_of(Hash))
- .and_return(double('delete_refs_response'))
+ .and_return(double('delete_refs_response', git_error: ""))
client.delete_refs(except_with_prefixes: prefixes)
end
diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb
index 41a9d1d9c90..d9379cfe674 100644
--- a/spec/lib/gitlab/metrics/method_call_spec.rb
+++ b/spec/lib/gitlab/metrics/method_call_spec.rb
@@ -5,6 +5,10 @@ describe Gitlab::Metrics::MethodCall do
let(:method_call) { described_class.new('Foo#bar', :Foo, '#bar', transaction) }
describe '#measure' do
+ after do
+ described_class.reload_metric!(:gitlab_method_call_duration_seconds)
+ end
+
it 'measures the performance of the supplied block' do
method_call.measure { 'foo' }
@@ -20,8 +24,6 @@ describe Gitlab::Metrics::MethodCall do
context 'prometheus instrumentation is enabled' do
before do
- allow(Feature.get(:prometheus_metrics_method_instrumentation)).to receive(:enabled?).and_call_original
- described_class.measurement_enabled_cache_expires_at.value = Time.now.to_i - 1
Feature.get(:prometheus_metrics_method_instrumentation).enable
end
@@ -31,30 +33,12 @@ describe Gitlab::Metrics::MethodCall do
end
end
- it 'caches subsequent invocations of feature check' do
- 10.times do
- method_call.measure { 'foo' }
- end
-
- expect(Feature.get(:prometheus_metrics_method_instrumentation)).to have_received(:enabled?).once
- end
-
- it 'expires feature check cache after 1 minute' do
- method_call.measure { 'foo' }
-
- Timecop.travel(1.minute.from_now) do
- method_call.measure { 'foo' }
- end
-
- Timecop.travel(1.minute.from_now + 1.second) do
- method_call.measure { 'foo' }
- end
-
- expect(Feature.get(:prometheus_metrics_method_instrumentation)).to have_received(:enabled?).twice
+ it 'metric is not a NullMetric' do
+ expect(described_class.gitlab_method_call_duration_seconds).not_to be_instance_of(Gitlab::Metrics::NullMetric)
end
it 'observes the performance of the supplied block' do
- expect(described_class.call_duration_histogram)
+ expect(described_class.gitlab_method_call_duration_seconds)
.to receive(:observe)
.with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
@@ -64,14 +48,12 @@ describe Gitlab::Metrics::MethodCall do
context 'prometheus instrumentation is disabled' do
before do
- described_class.measurement_enabled_cache_expires_at.value = Time.now.to_i - 1
-
Feature.get(:prometheus_metrics_method_instrumentation).disable
end
- it 'does not observe the performance' do
- expect(described_class.call_duration_histogram)
- .not_to receive(:observe)
+ it 'observes using NullMetric' do
+ expect(described_class.gitlab_method_call_duration_seconds).to be_instance_of(Gitlab::Metrics::NullMetric)
+ expect(described_class.gitlab_method_call_duration_seconds).to receive(:observe)
method_call.measure { 'foo' }
end
@@ -81,12 +63,10 @@ describe Gitlab::Metrics::MethodCall do
context 'when measurement is below threshold' do
before do
allow(method_call).to receive(:above_threshold?).and_return(false)
-
- Feature.get(:prometheus_metrics_method_instrumentation).enable
end
it 'does not observe the performance' do
- expect(described_class.call_duration_histogram)
+ expect(described_class.gitlab_method_call_duration_seconds)
.not_to receive(:observe)
method_call.measure { 'foo' }
@@ -96,7 +76,7 @@ describe Gitlab::Metrics::MethodCall do
describe '#to_metric' do
it 'returns a Metric instance' do
- expect(method_call).to receive(:real_time).and_return(4.0001)
+ expect(method_call).to receive(:real_time).and_return(4.0001).twice
expect(method_call).to receive(:cpu_time).and_return(3.0001)
method_call.measure { 'foo' }
diff --git a/spec/lib/gitlab/metrics/methods_spec.rb b/spec/lib/gitlab/metrics/methods_spec.rb
new file mode 100644
index 00000000000..9d41ed2442b
--- /dev/null
+++ b/spec/lib/gitlab/metrics/methods_spec.rb
@@ -0,0 +1,137 @@
+require 'spec_helper'
+
+describe Gitlab::Metrics::Methods do
+ subject { Class.new { include Gitlab::Metrics::Methods } }
+
+ shared_context 'metric' do |metric_type, *args|
+ let(:docstring) { 'description' }
+ let(:metric_name) { :sample_metric }
+
+ describe "#define_#{metric_type}" do
+ define_method(:call_define_metric_method) do |**args|
+ subject.__send__("define_#{metric_type}", metric_name, **args)
+ end
+
+ context 'metrics access method not defined' do
+ it "defines metrics accessing method" do
+ expect(subject).not_to respond_to(metric_name)
+
+ call_define_metric_method(docstring: docstring)
+
+ expect(subject).to respond_to(metric_name)
+ end
+ end
+
+ context 'metrics access method defined' do
+ before do
+ call_define_metric_method(docstring: docstring)
+ end
+
+ it 'raises error when trying to redefine method' do
+ expect { call_define_metric_method(docstring: docstring) }.to raise_error(ArgumentError)
+ end
+
+ context 'metric is not cached' do
+ it 'calls fetch_metric' do
+ expect(subject).to receive(:init_metric).with(metric_type, metric_name, docstring: docstring)
+
+ subject.public_send(metric_name)
+ end
+ end
+
+ context 'metric is cached' do
+ before do
+ subject.public_send(metric_name)
+ end
+
+ it 'returns cached metric' do
+ expect(subject).not_to receive(:init_metric)
+
+ subject.public_send(metric_name)
+ end
+ end
+ end
+ end
+
+ describe "#fetch_#{metric_type}" do
+ let(:null_metric) { Gitlab::Metrics::NullMetric.instance }
+
+ define_method(:call_fetch_metric_method) do |**args|
+ subject.__send__("fetch_#{metric_type}", metric_name, **args)
+ end
+
+ context "when #{metric_type} is not cached" do
+ it 'initializes counter metric' do
+ allow(Gitlab::Metrics).to receive(metric_type).and_return(null_metric)
+
+ call_fetch_metric_method(docstring: docstring)
+
+ expect(Gitlab::Metrics).to have_received(metric_type).with(metric_name, docstring, *args)
+ end
+ end
+
+ context "when #{metric_type} is cached" do
+ before do
+ call_fetch_metric_method(docstring: docstring)
+ end
+
+ it 'uses class metric cache' do
+ expect(Gitlab::Metrics).not_to receive(metric_type)
+
+ call_fetch_metric_method(docstring: docstring)
+ end
+
+ context 'when metric is reloaded' do
+ before do
+ subject.reload_metric!(metric_name)
+ end
+
+ it "initializes #{metric_type} metric" do
+ allow(Gitlab::Metrics).to receive(metric_type).and_return(null_metric)
+
+ call_fetch_metric_method(docstring: docstring)
+
+ expect(Gitlab::Metrics).to have_received(metric_type).with(metric_name, docstring, *args)
+ end
+ end
+ end
+
+ context 'when metric is configured with feature' do
+ let(:feature_name) { :some_metric_feature }
+ let(:metric) { call_fetch_metric_method(docstring: docstring, with_feature: feature_name) }
+
+ context 'when feature is enabled' do
+ before do
+ Feature.get(feature_name).enable
+ end
+
+ it "initializes #{metric_type} metric" do
+ allow(Gitlab::Metrics).to receive(metric_type).and_return(null_metric)
+
+ metric
+
+ expect(Gitlab::Metrics).to have_received(metric_type).with(metric_name, docstring, *args)
+ end
+ end
+
+ context 'when feature is disabled' do
+ before do
+ Feature.get(feature_name).disable
+ end
+
+ it "returns NullMetric" do
+ allow(Gitlab::Metrics).to receive(metric_type)
+
+ expect(metric).to be_instance_of(Gitlab::Metrics::NullMetric)
+
+ expect(Gitlab::Metrics).not_to have_received(metric_type)
+ end
+ end
+ end
+ end
+ end
+
+ include_examples 'metric', :counter, {}
+ include_examples 'metric', :gauge, {}, :all
+ include_examples 'metric', :histogram, {}, [0.005, 0.01, 0.1, 1, 10]
+end
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index 375cbf8a9ca..54781dd52fc 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -2,6 +2,11 @@ require 'spec_helper'
describe Gitlab::Metrics::Samplers::RubySampler do
let(:sampler) { described_class.new(5) }
+ let(:null_metric) { double('null_metric', set: nil, observe: nil) }
+
+ before do
+ allow(Gitlab::Metrics::NullMetric).to receive(:instance).and_return(null_metric)
+ end
after do
Allocations.stop if Gitlab::Metrics.mri?
@@ -17,12 +22,9 @@ describe Gitlab::Metrics::Samplers::RubySampler do
end
it 'adds a metric containing the memory usage' do
- expect(Gitlab::Metrics::System).to receive(:memory_usage)
- .and_return(9000)
+ expect(Gitlab::Metrics::System).to receive(:memory_usage).and_return(9000)
- expect(sampler.metrics[:memory_usage]).to receive(:set)
- .with({}, 9000)
- .and_call_original
+ expect(sampler.metrics[:memory_usage]).to receive(:set).with({}, 9000)
sampler.sample
end
@@ -31,9 +33,7 @@ describe Gitlab::Metrics::Samplers::RubySampler do
expect(Gitlab::Metrics::System).to receive(:file_descriptor_count)
.and_return(4)
- expect(sampler.metrics[:file_descriptors]).to receive(:set)
- .with({}, 4)
- .and_call_original
+ expect(sampler.metrics[:file_descriptors]).to receive(:set).with({}, 4)
sampler.sample
end
@@ -49,16 +49,14 @@ describe Gitlab::Metrics::Samplers::RubySampler do
it 'adds a metric containing garbage collection time statistics' do
expect(GC::Profiler).to receive(:total_time).and_return(0.24)
- expect(sampler.metrics[:total_time]).to receive(:set)
- .with({}, 240)
- .and_call_original
+ expect(sampler.metrics[:total_time]).to receive(:set).with({}, 240)
sampler.sample
end
it 'adds a metric containing garbage collection statistics' do
GC.stat.keys.each do |key|
- expect(sampler.metrics[key]).to receive(:set).with({}, anything).and_call_original
+ expect(sampler.metrics[key]).to receive(:set).with({}, anything)
end
sampler.sample
diff --git a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
index eca75a4fac1..9f3af1acef7 100644
--- a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
@@ -32,7 +32,7 @@ describe Gitlab::Metrics::Subscribers::ActionView do
end
it 'observes view rendering time' do
- expect(subscriber.send(:metric_view_rendering_duration_seconds))
+ expect(described_class.gitlab_view_rendering_duration_seconds)
.to receive(:observe)
.with({ view: 'app/views/x.html.haml' }, 2.1)
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index 9b3698fb4a8..4e7bd433a9c 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -25,7 +25,7 @@ describe Gitlab::Metrics::Subscribers::ActiveRecord do
expect(subscriber).to receive(:current_transaction)
.at_least(:once)
.and_return(transaction)
- expect(subscriber.send(:metric_sql_duration_seconds)).to receive(:observe).with({}, 0.002)
+ expect(described_class.send(:gitlab_sql_duration_seconds)).to receive(:observe).with({}, 0.002)
subscriber.sql(event)
end
diff --git a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
index 58e28592cf9..6795c1ab56b 100644
--- a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
@@ -144,7 +144,10 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
end
context 'with a transaction' do
+ let(:metric_cache_misses_total) { double('metric_cache_misses_total', increment: nil) }
+
before do
+ allow(subscriber).to receive(:metric_cache_misses_total).and_return(metric_cache_misses_total)
allow(subscriber).to receive(:current_transaction)
.and_return(transaction)
end
@@ -157,9 +160,9 @@ describe Gitlab::Metrics::Subscribers::RailsCache do
end
it 'increments the cache_read_miss total' do
- expect(subscriber.send(:metric_cache_misses_total)).to receive(:increment).with({})
-
subscriber.cache_generate(event)
+
+ expect(metric_cache_misses_total).to have_received(:increment).with({})
end
end
end
diff --git a/spec/lib/gitlab/metrics_spec.rb b/spec/lib/gitlab/metrics_spec.rb
index 1619fbd88b1..9e405e9f736 100644
--- a/spec/lib/gitlab/metrics_spec.rb
+++ b/spec/lib/gitlab/metrics_spec.rb
@@ -20,7 +20,7 @@ describe Gitlab::Metrics do
context 'prometheus metrics enabled in config' do
before do
- allow(described_class).to receive(:current_application_settings).and_return(prometheus_metrics_enabled: true)
+ allow(Gitlab::CurrentSettings).to receive(:current_application_settings).and_return(prometheus_metrics_enabled: true)
end
context 'when metrics folder is present' do
diff --git a/spec/lib/gitlab/o_auth/user_spec.rb b/spec/lib/gitlab/o_auth/user_spec.rb
index 45fff4c5787..03e0a9e2a03 100644
--- a/spec/lib/gitlab/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/o_auth/user_spec.rb
@@ -44,6 +44,18 @@ describe Gitlab::OAuth::User do
let(:provider) { 'twitter' }
+ describe 'when account exists on server' do
+ it 'does not mark the user as external' do
+ create(:omniauth_user, extern_uid: 'my-uid', provider: provider)
+ stub_omniauth_config(allow_single_sign_on: [provider], external_providers: [provider])
+
+ oauth_user.save
+
+ expect(gl_user).to be_valid
+ expect(gl_user.external).to be_falsey
+ end
+ end
+
describe 'signup' do
context 'when signup is disabled' do
before do
@@ -51,7 +63,7 @@ describe Gitlab::OAuth::User do
end
it 'creates the user' do
- stub_omniauth_config(allow_single_sign_on: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider])
oauth_user.save
@@ -65,7 +77,7 @@ describe Gitlab::OAuth::User do
end
it 'creates and confirms the user anyway' do
- stub_omniauth_config(allow_single_sign_on: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider])
oauth_user.save
@@ -75,7 +87,7 @@ describe Gitlab::OAuth::User do
end
it 'marks user as having password_automatically_set' do
- stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider], external_providers: [provider])
oauth_user.save
@@ -86,7 +98,7 @@ describe Gitlab::OAuth::User do
shared_examples 'to verify compliance with allow_single_sign_on' do
context 'provider is marked as external' do
it 'marks user as external' do
- stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider], external_providers: [provider])
oauth_user.save
expect(gl_user).to be_valid
expect(gl_user.external).to be_truthy
@@ -95,8 +107,8 @@ describe Gitlab::OAuth::User do
context 'provider was external, now has been removed' do
it 'does not mark external user as internal' do
- create(:omniauth_user, extern_uid: 'my-uid', provider: 'twitter', external: true)
- stub_omniauth_config(allow_single_sign_on: ['twitter'], external_providers: ['facebook'])
+ create(:omniauth_user, extern_uid: 'my-uid', provider: provider, external: true)
+ stub_omniauth_config(allow_single_sign_on: [provider], external_providers: ['facebook'])
oauth_user.save
expect(gl_user).to be_valid
expect(gl_user.external).to be_truthy
@@ -118,7 +130,7 @@ describe Gitlab::OAuth::User do
context 'with new allow_single_sign_on enabled syntax' do
before do
- stub_omniauth_config(allow_single_sign_on: ['twitter'])
+ stub_omniauth_config(allow_single_sign_on: [provider])
end
it "creates a user from Omniauth" do
@@ -127,7 +139,7 @@ describe Gitlab::OAuth::User do
expect(gl_user).to be_valid
identity = gl_user.identities.first
expect(identity.extern_uid).to eql uid
- expect(identity.provider).to eql 'twitter'
+ expect(identity.provider).to eql provider
end
end
@@ -142,7 +154,7 @@ describe Gitlab::OAuth::User do
expect(gl_user).to be_valid
identity = gl_user.identities.first
expect(identity.extern_uid).to eql uid
- expect(identity.provider).to eql 'twitter'
+ expect(identity.provider).to eql provider
end
end
diff --git a/spec/models/concerns/discussion_on_diff_spec.rb b/spec/models/concerns/discussion_on_diff_spec.rb
index 2322eb206fb..30572ce9332 100644
--- a/spec/models/concerns/discussion_on_diff_spec.rb
+++ b/spec/models/concerns/discussion_on_diff_spec.rb
@@ -20,6 +20,16 @@ describe DiscussionOnDiff do
expect(truncated_lines).not_to include(be_meta)
end
end
+
+ context "when the diff line does not exist on a legacy diff note" do
+ it "returns an empty array" do
+ legacy_note = LegacyDiffNote.new
+
+ allow(subject).to receive(:first_note).and_return(legacy_note)
+
+ expect(truncated_lines).to eq([])
+ end
+ end
end
describe '#line_code_in_diffs' do
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index cc9d79da708..9840afe6c4e 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -387,13 +387,23 @@ describe WikiPage do
end
describe '#formatted_content' do
- it 'returns processed content of the page', :disable_gitaly do
- subject.create({ title: "RDoc", content: "*bold*", format: "rdoc" })
- page = wiki.find_page('RDoc')
+ shared_examples 'fetching page formatted content' do
+ it 'returns processed content of the page' do
+ subject.create({ title: "RDoc", content: "*bold*", format: "rdoc" })
+ page = wiki.find_page('RDoc')
- expect(page.formatted_content).to eq("\n<p><strong>bold</strong></p>\n")
+ expect(page.formatted_content).to eq("\n<p><strong>bold</strong></p>\n")
- destroy_page('RDoc')
+ destroy_page('RDoc')
+ end
+ end
+
+ context 'when Gitaly wiki_page_formatted_data is enabled' do
+ it_behaves_like 'fetching page formatted content'
+ end
+
+ context 'when Gitaly wiki_page_formatted_data is disabled', :disable_gitaly do
+ it_behaves_like 'fetching page formatted content'
end
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 4dd8deb6404..f8d0b63afec 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -300,44 +300,53 @@ describe API::Jobs do
end
describe 'GET /projects/:id/jobs/:job_id/artifacts' do
- before do
- get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ shared_examples 'downloads artifact' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
+
+ it 'returns specific job artifacts' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.headers).to include(download_headers)
+ expect(response.body).to match_file(job.artifacts_file.file.file)
+ end
end
- context 'job with artifacts' do
- let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ context 'normal authentication' do
+ context 'job with artifacts' do
+ context 'when artifacts are stored locally' do
+ let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
- context 'authorized user' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
- end
+ before do
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
- it 'returns specific job artifacts' do
- expect(response).to have_gitlab_http_status(200)
- expect(response.headers).to include(download_headers)
- expect(response.body).to match_file(job.artifacts_file.file.file)
+ context 'authorized user' do
+ it_behaves_like 'downloads artifact'
+ end
+
+ context 'unauthorized user' do
+ let(:api_user) { nil }
+
+ it 'does not return specific job artifacts' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
end
- end
- context 'when anonymous user is accessing private artifacts' do
- let(:api_user) { nil }
+ it 'does not return job artifacts if not uploaded' do
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
- it 'hides artifacts and rejects request' do
- expect(project).to be_private
expect(response).to have_gitlab_http_status(404)
end
end
end
-
- it 'does not return job artifacts if not uploaded' do
- expect(response).to have_gitlab_http_status(404)
- end
end
describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do
let(:api_user) { reporter }
- let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
job.success
@@ -396,14 +405,16 @@ describe API::Jobs do
context 'find proper job' do
shared_examples 'a valid file' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' =>
- "attachment; filename=#{job.artifacts_file.filename}" }
- end
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' =>
+ "attachment; filename=#{job.artifacts_file.filename}" }
+ end
- it { expect(response).to have_gitlab_http_status(200) }
- it { expect(response.headers).to include(download_headers) }
+ it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response.headers).to include(download_headers) }
+ end
end
context 'with regular branch' do
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index af9e36a3b29..3f92288fef0 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -4,16 +4,18 @@ describe API::V3::Builds do
set(:user) { create(:user) }
let(:api_user) { user }
set(:project) { create(:project, :repository, creator: user, public_builds: false) }
- set(:developer) { create(:project_member, :developer, user: user, project: project) }
- set(:reporter) { create(:project_member, :reporter, project: project) }
- set(:guest) { create(:project_member, :guest, project: project) }
- set(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
- let!(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:developer) { create(:project_member, :developer, user: user, project: project) }
+ let(:reporter) { create(:project_member, :reporter, project: project) }
+ let(:guest) { create(:project_member, :guest, project: project) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
describe 'GET /projects/:id/builds ' do
let(:query) { '' }
before do |example|
+ build
+
create(:ci_build, :skipped, pipeline: pipeline)
unless example.metadata[:skip_before_request]
@@ -110,6 +112,10 @@ describe API::V3::Builds do
end
describe 'GET /projects/:id/repository/commits/:sha/builds' do
+ before do
+ build
+ end
+
context 'when commit does not exist in repository' do
before do
get v3_api("/projects/#{project.id}/repository/commits/1a271fd1/builds", api_user)
@@ -214,18 +220,20 @@ describe API::V3::Builds do
end
context 'job with artifacts' do
- let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ context 'when artifacts are stored locally' do
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
- context 'authorized user' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
- end
+ context 'authorized user' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
- it 'returns specific job artifacts' do
- expect(response).to have_gitlab_http_status(200)
- expect(response.headers).to include(download_headers)
- expect(response.body).to match_file(build.artifacts_file.file.file)
+ it 'returns specific job artifacts' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.headers).to include(download_headers)
+ expect(response.body).to match_file(build.artifacts_file.file.file)
+ end
end
end
@@ -303,14 +311,16 @@ describe API::V3::Builds do
context 'find proper job' do
shared_examples 'a valid file' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' =>
- "attachment; filename=#{build.artifacts_file.filename}" }
- end
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' =>
+ "attachment; filename=#{build.artifacts_file.filename}" }
+ end
- it { expect(response).to have_gitlab_http_status(200) }
- it { expect(response.headers).to include(download_headers) }
+ it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response.headers).to include(download_headers) }
+ end
end
context 'with regular branch' do