Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/projects/issues_controller_spec.rb  |   4
-rw-r--r--  spec/controllers/projects/merge_requests_controller_spec.rb  |   4
-rw-r--r--  spec/frontend/lib/utils/url_utility_spec.js  |  42
-rw-r--r--  spec/frontend/monitoring/charts/time_series_spec.js  |   5
-rw-r--r--  spec/frontend/monitoring/store/actions_spec.js  | 103
-rw-r--r--  spec/frontend/monitoring/store/getters_spec.js  |  12
-rw-r--r--  spec/frontend/monitoring/store/mutations_spec.js  | 149
-rw-r--r--  spec/javascripts/monitoring/components/dashboard_spec.js  |  11
-rw-r--r--  spec/lib/gitlab/ci/config_spec.rb  |   4
-rw-r--r--  spec/lib/gitlab/diff/highlight_spec.rb  |   4
-rw-r--r--  spec/lib/gitlab/gitaly_client_spec.rb  |   5
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb  |   2
-rw-r--r--  spec/lib/gitlab/gpg_spec.rb  |   6
-rw-r--r--  spec/lib/gitlab/import_export/shared_spec.rb  |  21
-rw-r--r--  spec/lib/gitlab/import_export/version_checker_spec.rb  |  12
-rw-r--r--  spec/lib/gitlab/sentry_spec.rb  | 119
-rw-r--r--  spec/models/ci/build_spec.rb  |   2
-rw-r--r--  spec/models/clusters/cluster_spec.rb  |   4
-rw-r--r--  spec/models/concerns/group_descendant_spec.rb  |   4
-rw-r--r--  spec/models/upload_spec.rb  |   3
-rw-r--r--  spec/requests/api/graphql_spec.rb  |   4
-rw-r--r--  spec/requests/api/helpers_spec.rb  |   4
-rw-r--r--  spec/services/ci/archive_trace_service_spec.rb  |   4
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb  |   4
-rw-r--r--  spec/services/ci/prepare_build_service_spec.rb  |   4
-rw-r--r--  spec/services/ci/register_job_service_spec.rb  |   8
-rw-r--r--  spec/support/shared_examples/services/base_helm_service_shared_examples.rb  |  14
-rw-r--r--  spec/workers/ci/archive_traces_cron_worker_spec.rb  |   2
-rw-r--r--  spec/workers/run_pipeline_schedule_worker_spec.rb  |   4
-rw-r--r--  spec/workers/stuck_ci_jobs_worker_spec.rb  |   4
30 files changed, 404 insertions, 164 deletions
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index e4e2487f1e1..68b73150b31 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1159,7 +1159,7 @@ describe Projects::IssuesController do
end
it "prevents deletion if destroy_confirm is not set" do
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
+ expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
@@ -1168,7 +1168,7 @@ describe Projects::IssuesController do
end
it "prevents deletion in JSON format if destroy_confirm is not set" do
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
+ expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid, format: 'json' }
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index ef5f286502b..4b1304a9103 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -620,7 +620,7 @@ describe Projects::MergeRequestsController do
end
it "prevents deletion if destroy_confirm is not set" do
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
+ expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid }
@@ -629,7 +629,7 @@ describe Projects::MergeRequestsController do
end
it "prevents deletion in JSON format if destroy_confirm is not set" do
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
+ expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid, format: 'json' }
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 97f7f05cd85..0e818d6c402 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -323,3 +323,45 @@ describe('URL utility', () => {
});
});
});
+
+describe('setUrlParams', () => {
+ it('adds new params as query string', () => {
+ const url = 'https://gitlab.com/test';
+
+ expect(
+ urlUtils.setUrlParams({ group_id: 'gitlab-org', project_id: 'my-project' }, url),
+ ).toEqual('https://gitlab.com/test?group_id=gitlab-org&project_id=my-project');
+ });
+
+ it('updates an existing parameter', () => {
+ const url = 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project';
+
+ expect(urlUtils.setUrlParams({ project_id: 'gitlab-test' }, url)).toEqual(
+ 'https://gitlab.com/test?group_id=gitlab-org&project_id=gitlab-test',
+ );
+ });
+
+ it("removes the project_id param when it's value is null", () => {
+ const url = 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project';
+
+ expect(urlUtils.setUrlParams({ project_id: null }, url)).toEqual(
+ 'https://gitlab.com/test?group_id=gitlab-org',
+ );
+ });
+
+ it('handles arrays properly', () => {
+ const url = 'https://gitlab.com/test';
+
+ expect(urlUtils.setUrlParams({ label_name: ['foo', 'bar'] }, url)).toEqual(
+ 'https://gitlab.com/test?label_name=foo&label_name=bar',
+ );
+ });
+
+ it('removes all existing URL params and sets a new param when cleanParams=true', () => {
+ const url = 'https://gitlab.com/test?group_id=gitlab-org&project_id=my-project';
+
+ expect(urlUtils.setUrlParams({ foo: 'bar' }, url, true)).toEqual(
+ 'https://gitlab.com/test?foo=bar',
+ );
+ });
+});
diff --git a/spec/frontend/monitoring/charts/time_series_spec.js b/spec/frontend/monitoring/charts/time_series_spec.js
index 15967992374..128c4bc49f1 100644
--- a/spec/frontend/monitoring/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/charts/time_series_spec.js
@@ -46,7 +46,10 @@ describe('Time series component', () => {
store.commit(`monitoringDashboard/${types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS}`, deploymentData);
// Mock data contains 2 panel groups, with 1 and 2 panels respectively
- store.commit(`monitoringDashboard/${types.SET_QUERY_RESULT}`, mockedQueryResultPayload);
+ store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedQueryResultPayload,
+ );
// Pick the second panel group and the first panel in it
[mockGraphData] = store.state.monitoringDashboard.dashboard.panel_groups[1].panels;
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index e41158eb4b0..92d469270c9 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -434,13 +434,11 @@ describe('Monitoring store actions', () => {
start: '2019-08-06T12:40:02.184Z',
end: '2019-08-06T20:40:02.184Z',
};
- let commit;
let metric;
let state;
let data;
beforeEach(() => {
- commit = jest.fn();
state = storeState();
[metric] = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics;
[data] = metricsGroupsAPIResponse[0].panels[0].metrics;
@@ -449,17 +447,31 @@ describe('Monitoring store actions', () => {
it('commits result', done => {
mock.onGet('http://test').reply(200, { data }); // One attempt
- fetchPrometheusMetric({ state, commit }, { metric, params })
- .then(() => {
- expect(commit).toHaveBeenCalledWith(types.SET_QUERY_RESULT, {
- metricId: metric.metric_id,
- result: data.result,
- });
-
+ testAction(
+ fetchPrometheusMetric,
+ { metric, params },
+ state,
+ [
+ {
+ type: types.REQUEST_METRIC_RESULT,
+ payload: {
+ metricId: metric.metric_id,
+ },
+ },
+ {
+ type: types.RECEIVE_METRIC_RESULT_SUCCESS,
+ payload: {
+ metricId: metric.metric_id,
+ result: data.result,
+ },
+ },
+ ],
+ [],
+ () => {
expect(mock.history.get).toHaveLength(1);
done();
- })
- .catch(done.fail);
+ },
+ ).catch(done.fail);
});
it('commits result, when waiting for results', done => {
@@ -469,18 +481,31 @@ describe('Monitoring store actions', () => {
mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
mock.onGet('http://test').reply(200, { data }); // 4th attempt
- const fetch = fetchPrometheusMetric({ state, commit }, { metric, params });
-
- fetch
- .then(() => {
- expect(commit).toHaveBeenCalledWith(types.SET_QUERY_RESULT, {
- metricId: metric.metric_id,
- result: data.result,
- });
+ testAction(
+ fetchPrometheusMetric,
+ { metric, params },
+ state,
+ [
+ {
+ type: types.REQUEST_METRIC_RESULT,
+ payload: {
+ metricId: metric.metric_id,
+ },
+ },
+ {
+ type: types.RECEIVE_METRIC_RESULT_SUCCESS,
+ payload: {
+ metricId: metric.metric_id,
+ result: data.result,
+ },
+ },
+ ],
+ [],
+ () => {
expect(mock.history.get).toHaveLength(4);
done();
- })
- .catch(done.fail);
+ },
+ ).catch(done.fail);
});
it('commits failure, when waiting for results and getting a server error', done => {
@@ -490,15 +515,33 @@ describe('Monitoring store actions', () => {
mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
mock.onGet('http://test').reply(500); // 4th attempt
- fetchPrometheusMetric({ state, commit }, { metric, params })
- .then(() => {
- done.fail();
- })
- .catch(() => {
- expect(commit).not.toHaveBeenCalled();
- expect(mock.history.get).toHaveLength(4);
- done();
- });
+ const error = new Error('Request failed with status code 500');
+
+ testAction(
+ fetchPrometheusMetric,
+ { metric, params },
+ state,
+ [
+ {
+ type: types.REQUEST_METRIC_RESULT,
+ payload: {
+ metricId: metric.metric_id,
+ },
+ },
+ {
+ type: types.RECEIVE_METRIC_RESULT_ERROR,
+ payload: {
+ metricId: metric.metric_id,
+ error,
+ },
+ },
+ ],
+ [],
+ ).catch(e => {
+ expect(mock.history.get).toHaveLength(4);
+ expect(e).toEqual(error);
+ done();
+ });
});
});
});
diff --git a/spec/frontend/monitoring/store/getters_spec.js b/spec/frontend/monitoring/store/getters_spec.js
index 066f927dce7..3b6f33ed8b1 100644
--- a/spec/frontend/monitoring/store/getters_spec.js
+++ b/spec/frontend/monitoring/store/getters_spec.js
@@ -57,22 +57,22 @@ describe('Monitoring store Getters', () => {
it('an empty metric, returns empty', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
- mutations[types.SET_QUERY_RESULT](state, mockedEmptyResult);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyResult);
expect(metricsWithData()).toEqual([]);
});
it('a metric with results, it returns a metric', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
- mutations[types.SET_QUERY_RESULT](state, mockedQueryResultPayload);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
expect(metricsWithData()).toEqual([mockedQueryResultPayload.metricId]);
});
it('multiple metrics with results, it returns multiple metrics', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
- mutations[types.SET_QUERY_RESULT](state, mockedQueryResultPayload);
- mutations[types.SET_QUERY_RESULT](state, mockedQueryResultPayloadCoresTotal);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
expect(metricsWithData()).toEqual([
mockedQueryResultPayload.metricId,
@@ -82,8 +82,8 @@ describe('Monitoring store Getters', () => {
it('multiple metrics with results, it returns metrics filtered by group', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsGroupsAPIResponse);
- mutations[types.SET_QUERY_RESULT](state, mockedQueryResultPayload);
- mutations[types.SET_QUERY_RESULT](state, mockedQueryResultPayloadCoresTotal);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
// First group has no metrics
expect(metricsWithData(state.dashboard.panel_groups[0].key)).toEqual([]);
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 47177180aa1..8da172ec634 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -1,6 +1,9 @@
+import httpStatusCodes from '~/lib/utils/http_status';
+
import mutations from '~/monitoring/stores/mutations';
import * as types from '~/monitoring/stores/mutation_types';
import state from '~/monitoring/stores/state';
+import { metricsErrors } from '~/monitoring/constants';
import {
metricsGroupsAPIResponse,
deploymentData,
@@ -90,7 +93,7 @@ describe('Monitoring mutations', () => {
expect(stateCopy.projectPath).toEqual('/gitlab-org/gitlab-foss');
});
});
- describe('SET_QUERY_RESULT', () => {
+ describe('Individual panel/metric results', () => {
const metricId = '12_system_metrics_kubernetes_container_memory_total';
const result = [
{
@@ -98,31 +101,145 @@ describe('Monitoring mutations', () => {
},
];
const dashboardGroups = metricsDashboardResponse.dashboard.panel_groups;
- const getMetrics = () => stateCopy.dashboard.panel_groups[0].panels[0].metrics;
+ const getMetric = () => stateCopy.dashboard.panel_groups[0].panels[0].metrics[0];
- beforeEach(() => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
+ describe('REQUEST_METRIC_RESULT', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
+ });
+ it('stores a loading state on a metric', () => {
+ expect(stateCopy.showEmptyState).toBe(true);
+
+ mutations[types.REQUEST_METRIC_RESULT](stateCopy, {
+ metricId,
+ result,
+ });
+
+ expect(stateCopy.showEmptyState).toBe(true);
+ expect(getMetric()).toEqual(
+ expect.objectContaining({
+ loading: true,
+ result: null,
+ error: null,
+ }),
+ );
+ });
});
- it('clears empty state', () => {
- expect(stateCopy.showEmptyState).toBe(true);
- mutations[types.SET_QUERY_RESULT](stateCopy, {
- metricId,
- result,
+ describe('RECEIVE_METRIC_RESULT_SUCCESS', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
});
+ it('clears empty state', () => {
+ expect(stateCopy.showEmptyState).toBe(true);
- expect(stateCopy.showEmptyState).toBe(false);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](stateCopy, {
+ metricId,
+ result,
+ });
+
+ expect(stateCopy.showEmptyState).toBe(false);
+ });
+
+ it('adds results to the store', () => {
+ expect(getMetric().result).toBe(undefined);
+
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](stateCopy, {
+ metricId,
+ result,
+ });
+
+ expect(getMetric().result).toHaveLength(result.length);
+ expect(getMetric()).toEqual(
+ expect.objectContaining({
+ loading: false,
+ error: null,
+ }),
+ );
+ });
});
- it('adds results to the store', () => {
- expect(getMetrics()[0].result).toBe(undefined);
+ describe('RECEIVE_METRIC_RESULT_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
+ });
+ it('maintains the loading state when a metric fails', () => {
+ expect(stateCopy.showEmptyState).toBe(true);
+
+ mutations[types.RECEIVE_METRIC_RESULT_ERROR](stateCopy, {
+ metricId,
+ error: 'an error',
+ });
+
+ expect(stateCopy.showEmptyState).toBe(true);
+ });
+
+ it('stores a timeout error in a metric', () => {
+ mutations[types.RECEIVE_METRIC_RESULT_ERROR](stateCopy, {
+ metricId,
+ error: { message: 'BACKOFF_TIMEOUT' },
+ });
+
+ expect(getMetric()).toEqual(
+ expect.objectContaining({
+ loading: false,
+ result: null,
+ error: metricsErrors.TIMEOUT,
+ }),
+ );
+ });
+
+ it('stores a connection failed error in a metric', () => {
+ mutations[types.RECEIVE_METRIC_RESULT_ERROR](stateCopy, {
+ metricId,
+ error: {
+ response: {
+ status: httpStatusCodes.SERVICE_UNAVAILABLE,
+ },
+ },
+ });
+ expect(getMetric()).toEqual(
+ expect.objectContaining({
+ loading: false,
+ result: null,
+ error: metricsErrors.CONNECTION_FAILED,
+ }),
+ );
+ });
- mutations[types.SET_QUERY_RESULT](stateCopy, {
- metricId,
- result,
+ it('stores a bad data error in a metric', () => {
+ mutations[types.RECEIVE_METRIC_RESULT_ERROR](stateCopy, {
+ metricId,
+ error: {
+ response: {
+ status: httpStatusCodes.BAD_REQUEST,
+ },
+ },
+ });
+
+ expect(getMetric()).toEqual(
+ expect.objectContaining({
+ loading: false,
+ result: null,
+ error: metricsErrors.BAD_DATA,
+ }),
+ );
});
- expect(getMetrics()[0].result).toHaveLength(result.length);
+ it('stores an unknown error in a metric', () => {
+ mutations[types.RECEIVE_METRIC_RESULT_ERROR](stateCopy, {
+ metricId,
+ error: null, // no reason in response
+ });
+
+ expect(getMetric()).toEqual(
+ expect.objectContaining({
+ loading: false,
+ result: null,
+ error: metricsErrors.UNKNOWN_ERROR,
+ }),
+ );
+ });
});
});
describe('SET_ALL_DASHBOARDS', () => {
diff --git a/spec/javascripts/monitoring/components/dashboard_spec.js b/spec/javascripts/monitoring/components/dashboard_spec.js
index 7d6a1d7b097..37a811f153f 100644
--- a/spec/javascripts/monitoring/components/dashboard_spec.js
+++ b/spec/javascripts/monitoring/components/dashboard_spec.js
@@ -56,13 +56,16 @@ function setupComponentStore(component) {
);
// Load 3 panels to the dashboard, one with an empty result
- component.$store.commit(`monitoringDashboard/${types.SET_QUERY_RESULT}`, mockedEmptyResult);
component.$store.commit(
- `monitoringDashboard/${types.SET_QUERY_RESULT}`,
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
+ mockedEmptyResult,
+ );
+ component.$store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
mockedQueryResultPayload,
);
component.$store.commit(
- `monitoringDashboard/${types.SET_QUERY_RESULT}`,
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
mockedQueryResultPayloadCoresTotal,
);
@@ -269,7 +272,7 @@ describe('Dashboard', () => {
metricsGroupsAPIResponse,
);
component.$store.commit(
- `monitoringDashboard/${types.SET_QUERY_RESULT}`,
+ `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
mockedQueryResultPayload,
);
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 6594a3dfa8b..b039c572677 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -157,7 +157,7 @@ describe Gitlab::Ci::Config do
describe '.new' do
it 'raises error' do
- expect(Gitlab::Sentry).to receive(:track_exception)
+ expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
expect { config }.to raise_error(
described_class::ConfigError,
@@ -367,7 +367,7 @@ describe Gitlab::Ci::Config do
end
it 'raises error TimeoutError' do
- expect(Gitlab::Sentry).to receive(:track_exception)
+ expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
expect { config }.to raise_error(
described_class::ConfigError,
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index f5d3d14ccc5..1a14f6d4f67 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -105,7 +105,7 @@ describe Gitlab::Diff::Highlight do
end
it 'keeps the original rich line' do
- allow(Gitlab::Sentry).to receive(:track_exception)
+ allow(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
code = %q{+ raise RuntimeError, "System commands must be given as an array of strings"}
@@ -114,7 +114,7 @@ describe Gitlab::Diff::Highlight do
end
it 'reports to Sentry if configured' do
- expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
+ expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception).and_call_original
expect { subject }. to raise_exception(RangeError)
end
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index 80c1493d01b..b7ad6cebf81 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -86,12 +86,11 @@ describe Gitlab::GitalyClient do
describe '.stub_certs' do
it 'skips certificates if OpenSSLError is raised and report it' do
- expect(Rails.logger).to receive(:error).at_least(:once)
expect(Gitlab::Sentry)
- .to receive(:track_exception)
+ .to receive(:track_and_raise_for_dev_exception)
.with(
a_kind_of(OpenSSL::X509::CertificateError),
- extra: { cert_file: a_kind_of(String) }).at_least(:once)
+ cert_file: a_kind_of(String)).at_least(:once)
expect(OpenSSL::X509::Certificate)
.to receive(:new)
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
index 30d23fcf9d4..7065b3f9bcb 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -301,7 +301,7 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
it 'ignores Git command errors when creating a branch' do
expect(project.repository).to receive(:add_branch).and_raise(Gitlab::Git::CommandError)
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
+ expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
mr = insert_git_data
diff --git a/spec/lib/gitlab/gpg_spec.rb b/spec/lib/gitlab/gpg_spec.rb
index 5d43023502c..c073d86b4a5 100644
--- a/spec/lib/gitlab/gpg_spec.rb
+++ b/spec/lib/gitlab/gpg_spec.rb
@@ -182,17 +182,17 @@ describe Gitlab::Gpg do
expected_tmp_dir = nil
expect(described_class).to receive(:cleanup_tmp_dir).and_raise(expected_exception)
- allow(Gitlab::Sentry).to receive(:track_exception)
+ allow(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
described_class.using_tmp_keychain do
expected_tmp_dir = described_class.current_home_dir
FileUtils.touch(File.join(expected_tmp_dir, 'dummy.file'))
end
- expect(Gitlab::Sentry).to have_received(:track_exception).with(
+ expect(Gitlab::Sentry).to have_received(:track_and_raise_for_dev_exception).with(
expected_exception,
issue_url: 'https://gitlab.com/gitlab-org/gitlab/issues/20918',
- extra: { tmp_dir: expected_tmp_dir, contents: ['dummy.file'] }
+ tmp_dir: expected_tmp_dir, contents: ['dummy.file']
)
end
diff --git a/spec/lib/gitlab/import_export/shared_spec.rb b/spec/lib/gitlab/import_export/shared_spec.rb
index 8d522252e2d..9a2339e576a 100644
--- a/spec/lib/gitlab/import_export/shared_spec.rb
+++ b/spec/lib/gitlab/import_export/shared_spec.rb
@@ -49,24 +49,9 @@ describe Gitlab::ImportExport::Shared do
it 'updates the import JID' do
import_state = create(:import_state, project: project, jid: 'jid-test')
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:error).with(hash_including(import_jid: import_state.jid))
- end
-
- subject.error(error)
- end
-
- it 'calls the error logger without a backtrace' do
- expect(subject).to receive(:log_error).with(message: error.message)
-
- subject.error(error)
- end
-
- it 'calls the error logger with the full message' do
- backtrace = caller
- allow(error).to receive(:backtrace).and_return(caller)
-
- expect(subject).to receive(:log_error).with(message: error.message, error_backtrace: Gitlab::Profiler.clean_backtrace(backtrace))
+ expect(Gitlab::Sentry)
+ .to receive(:track_exception)
+ .with(error, hash_including(import_jid: import_state.jid))
subject.error(error)
end
diff --git a/spec/lib/gitlab/import_export/version_checker_spec.rb b/spec/lib/gitlab/import_export/version_checker_spec.rb
index 6a626d864a9..befbd1b4c19 100644
--- a/spec/lib/gitlab/import_export/version_checker_spec.rb
+++ b/spec/lib/gitlab/import_export/version_checker_spec.rb
@@ -8,10 +8,20 @@ describe Gitlab::ImportExport::VersionChecker do
describe 'bundle a project Git repo' do
let(:version) { Gitlab::ImportExport.version }
+ let(:version_file) { Tempfile.new('VERSION') }
before do
allow_any_instance_of(Gitlab::ImportExport::Shared).to receive(:relative_archive_path).and_return('')
- allow(File).to receive(:open).and_return(version)
+
+ version_file.write(version)
+ version_file.rewind
+
+ allow_any_instance_of(described_class).to receive(:version_file).and_return(version_file.path)
+ end
+
+ after do
+ version_file.close
+ version_file.unlink
end
it 'returns true if Import/Export have the same version' do
diff --git a/spec/lib/gitlab/sentry_spec.rb b/spec/lib/gitlab/sentry_spec.rb
index 024ac733a07..6d05241e72d 100644
--- a/spec/lib/gitlab/sentry_spec.rb
+++ b/spec/lib/gitlab/sentry_spec.rb
@@ -3,12 +3,31 @@
require 'spec_helper'
describe Gitlab::Sentry do
- describe '.context' do
- it 'adds the expected tags' do
- expect(described_class).to receive(:enabled?).and_return(true)
- allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
+ let(:exception) { RuntimeError.new('boom') }
+ let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
+
+ let(:expected_payload_includes) do
+ [
+ { 'exception.class' => 'RuntimeError' },
+ { 'exception.message' => 'boom' },
+ { 'tags.correlation_id' => 'cid' },
+ { 'extra.some_other_info' => 'info' },
+ { 'extra.issue_url' => 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
+ ]
+ end
+
+ before do
+ stub_sentry_settings
- described_class.context(nil)
+ allow(described_class).to receive(:sentry_dsn).and_return(Gitlab.config.sentry.dsn)
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
+
+ described_class.configure
+ end
+
+ describe '.with_context' do
+ it 'adds the expected tags' do
+ described_class.with_context {}
expect(Raven.tags_context[:locale].to_s).to eq(I18n.locale.to_s)
expect(Raven.tags_context[Labkit::Correlation::CorrelationId::LOG_KEY.to_sym].to_s)
@@ -16,23 +35,23 @@ describe Gitlab::Sentry do
end
end
- describe '.track_exception' do
- let(:exception) { RuntimeError.new('boom') }
+ describe '.track_and_raise_for_dev_exception' do
+ context 'when exceptions for dev should be raised' do
+ before do
+ expect(described_class).to receive(:should_raise_for_dev?).and_return(true)
+ end
- before do
- allow(described_class).to receive(:enabled?).and_return(true)
- end
+ it 'raises the exception' do
+ expect(Raven).to receive(:capture_exception)
- it 'raises the exception if it should' do
- expect(described_class).to receive(:should_raise_for_dev?).and_return(true)
- expect { described_class.track_exception(exception) }
- .to raise_error(RuntimeError)
+ expect { described_class.track_and_raise_for_dev_exception(exception) }
+ .to raise_error(RuntimeError)
+ end
end
- context 'when exceptions should not be raised' do
+ context 'when exceptions for dev should not be raised' do
before do
- allow(described_class).to receive(:should_raise_for_dev?).and_return(false)
- allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
+ expect(described_class).to receive(:should_raise_for_dev?).and_return(false)
end
it 'logs the exception with all attributes passed' do
@@ -50,30 +69,49 @@ describe Gitlab::Sentry do
tags: a_hash_including(expected_tags),
extra: a_hash_including(expected_extras))
- described_class.track_exception(
+ described_class.track_and_raise_for_dev_exception(
exception,
- issue_url: 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1',
- extra: { some_other_info: 'info' }
+ issue_url: issue_url,
+ some_other_info: 'info'
)
end
- it 'sets the context' do
- expect(described_class).to receive(:context)
+ it 'calls Gitlab::Sentry::Logger.error with formatted payload' do
+ expect(Gitlab::Sentry::Logger).to receive(:error)
+ .with(a_hash_including(*expected_payload_includes))
- described_class.track_exception(exception)
+ described_class.track_and_raise_for_dev_exception(
+ exception,
+ issue_url: issue_url,
+ some_other_info: 'info'
+ )
end
end
end
- context '.track_acceptable_exception' do
- let(:exception) { RuntimeError.new('boom') }
- let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
+ describe '.track_and_raise_exception' do
+ it 'always raises the exception' do
+ expect(Raven).to receive(:capture_exception)
+
+ expect { described_class.track_and_raise_exception(exception) }
+ .to raise_error(RuntimeError)
+ end
- before do
- allow(described_class).to receive(:enabled?).and_return(true)
- allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
+ it 'calls Gitlab::Sentry::Logger.error with formatted payload' do
+ expect(Gitlab::Sentry::Logger).to receive(:error)
+ .with(a_hash_including(*expected_payload_includes))
+
+ expect do
+ described_class.track_and_raise_exception(
+ exception,
+ issue_url: issue_url,
+ some_other_info: 'info'
+ )
+ end.to raise_error(RuntimeError)
end
+ end
+ describe '.track_exception' do
it 'calls Raven.capture_exception' do
expected_extras = {
some_other_info: 'info',
@@ -89,34 +127,45 @@ describe Gitlab::Sentry do
tags: a_hash_including(expected_tags),
extra: a_hash_including(expected_extras))
- described_class.track_acceptable_exception(
+ described_class.track_exception(
+ exception,
+ issue_url: issue_url,
+ some_other_info: 'info'
+ )
+ end
+
+ it 'calls Gitlab::Sentry::Logger.error with formatted payload' do
+ expect(Gitlab::Sentry::Logger).to receive(:error)
+ .with(a_hash_including(*expected_payload_includes))
+
+ described_class.track_exception(
exception,
issue_url: issue_url,
- extra: { some_other_info: 'info' }
+ some_other_info: 'info'
)
end
context 'the exception implements :sentry_extra_data' do
let(:extra_info) { { event: 'explosion', size: :massive } }
- let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info) }
+ let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info, backtrace: caller) }
it 'includes the extra data from the exception in the tracking information' do
expect(Raven).to receive(:capture_exception)
.with(exception, a_hash_including(extra: a_hash_including(extra_info)))
- described_class.track_acceptable_exception(exception)
+ described_class.track_exception(exception)
end
end
context 'the exception implements :sentry_extra_data, which returns nil' do
- let(:exception) { double(message: 'bang!', sentry_extra_data: nil) }
+ let(:exception) { double(message: 'bang!', sentry_extra_data: nil, backtrace: caller) }
it 'just includes the other extra info' do
extra_info = { issue_url: issue_url }
expect(Raven).to receive(:capture_exception)
.with(exception, a_hash_including(extra: a_hash_including(extra_info)))
- described_class.track_acceptable_exception(exception, extra_info)
+ described_class.track_exception(exception, extra_info)
end
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index bcab621ab6a..dde37e62e6c 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -3522,7 +3522,7 @@ describe Ci::Build do
end
it 'can drop the build' do
- expect(Gitlab::Sentry).to receive(:track_exception)
+ expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
expect { build.drop! }.not_to raise_error
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 01abaea7399..036b6c7902f 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -1004,8 +1004,8 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to eq(connection_status: :unknown_failure) }
it 'notifies Sentry' do
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
- .with(instance_of(StandardError), hash_including(extra: { cluster_id: cluster.id }))
+ expect(Gitlab::Sentry).to receive(:track_exception)
+ .with(instance_of(StandardError), hash_including(cluster_id: cluster.id))
subject
end
diff --git a/spec/models/concerns/group_descendant_spec.rb b/spec/models/concerns/group_descendant_spec.rb
index 192e884f3e8..6af57283a2a 100644
--- a/spec/models/concerns/group_descendant_spec.rb
+++ b/spec/models/concerns/group_descendant_spec.rb
@@ -82,7 +82,7 @@ describe GroupDescendant do
end
it 'tracks the exception when a parent was not preloaded' do
- expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
+ expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception).and_call_original
expect { described_class.build_hierarchy([subsub_group]) }.to raise_error(ArgumentError)
end
@@ -91,7 +91,7 @@ describe GroupDescendant do
expected_hierarchy = { parent => { subgroup => subsub_group } }
# this does not raise in production, so stubbing it here.
- allow(Gitlab::Sentry).to receive(:track_exception)
+ allow(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
expect(described_class.build_hierarchy([subsub_group])).to eq(expected_hierarchy)
end
diff --git a/spec/models/upload_spec.rb b/spec/models/upload_spec.rb
index 03434c95218..6128e7d2a24 100644
--- a/spec/models/upload_spec.rb
+++ b/spec/models/upload_spec.rb
@@ -171,8 +171,7 @@ describe Upload do
it 'sends a message to Sentry' do
upload = create(:upload, :issuable_upload)
- expect(Gitlab::Sentry).to receive(:enabled?).and_return(true)
- expect(Raven).to receive(:capture_message).with("Upload file does not exist", extra: upload.attributes)
+ expect(Gitlab::Sentry).to receive(:track_exception).with(instance_of(RuntimeError), upload.attributes)
upload.exist?
end
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index 54401ec4085..bfdfd034cca 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -46,7 +46,7 @@ describe 'GraphQL' do
end
it 'logs the exception in Sentry and continues with the request' do
- expect(Gitlab::Sentry).to receive(:track_exception).at_least(1).times
+ expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception).at_least(1).times
expect(Gitlab::GraphqlLogger).to receive(:info)
post_graphql(query, variables: {})
@@ -146,7 +146,7 @@ describe 'GraphQL' do
end
it "logs a warning that the 'calls_gitaly' field declaration is missing" do
- expect(Gitlab::Sentry).to receive(:track_exception).once
+ expect(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception).once
post_graphql(query, current_user: user)
end
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 0c53c04ba40..1bec860bf0e 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -226,11 +226,11 @@ describe API::Helpers do
describe '.handle_api_exception' do
before do
allow_any_instance_of(self.class).to receive(:rack_response)
- allow(Gitlab::Sentry).to receive(:enabled?).and_return(true)
stub_sentry_settings
- configure_sentry
+ expect(Gitlab::Sentry).to receive(:sentry_dsn).and_return(Gitlab.config.sentry.dsn)
+ Gitlab::Sentry.configure
Raven.client.configuration.encoding = 'json'
end
diff --git a/spec/services/ci/archive_trace_service_spec.rb b/spec/services/ci/archive_trace_service_spec.rb
index 47bc26c0521..64fa74ccce5 100644
--- a/spec/services/ci/archive_trace_service_spec.rb
+++ b/spec/services/ci/archive_trace_service_spec.rb
@@ -60,10 +60,10 @@ describe Ci::ArchiveTraceService, '#execute' do
it 'increments Prometheus counter, sends crash report to Sentry and ignore an error for continuing to archive' do
expect(Gitlab::Sentry)
- .to receive(:track_exception)
+ .to receive(:track_and_raise_for_dev_exception)
.with(::Gitlab::Ci::Trace::ArchiveError,
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/51502',
- extra: { job_id: job.id } ).once
+ job_id: job.id).once
expect(Sidekiq.logger).to receive(:warn).with(
class: ArchiveTraceWorker.name,
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 4c83f9229d9..c4274f0bd17 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -528,7 +528,7 @@ describe Ci::CreatePipelineService do
end
it 'logs error' do
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
+ expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
execute_service
end
@@ -613,7 +613,7 @@ describe Ci::CreatePipelineService do
end
it 'logs error' do
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception).and_call_original
+ expect(Gitlab::Sentry).to receive(:track_exception).and_call_original
execute_service
end
diff --git a/spec/services/ci/prepare_build_service_spec.rb b/spec/services/ci/prepare_build_service_spec.rb
index 2d027f13e52..87061b3b15a 100644
--- a/spec/services/ci/prepare_build_service_spec.rb
+++ b/spec/services/ci/prepare_build_service_spec.rb
@@ -51,8 +51,8 @@ describe Ci::PrepareBuildService do
it 'drops the build and notifies Sentry' do
expect(build).to receive(:drop).with(:unmet_prerequisites).once
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
- .with(instance_of(Kubeclient::HttpError), hash_including(extra: { build_id: build.id }))
+ expect(Gitlab::Sentry).to receive(:track_exception)
+ .with(instance_of(Kubeclient::HttpError), hash_including(build_id: build.id))
subject
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 04334fb8915..aa31d98c4fb 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -514,8 +514,8 @@ module Ci
subject { execute(specific_runner, {}) }
it 'does drop the build and logs both failures' do
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
- .with(anything, a_hash_including(extra: a_hash_including(build_id: pending_job.id)))
+ expect(Gitlab::Sentry).to receive(:track_exception)
+ .with(anything, a_hash_including(build_id: pending_job.id))
.twice
.and_call_original
@@ -540,8 +540,8 @@ module Ci
subject { execute(specific_runner, {}) }
it 'does drop the build and logs failure' do
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
- .with(anything, a_hash_including(extra: a_hash_including(build_id: pending_job.id)))
+ expect(Gitlab::Sentry).to receive(:track_exception)
+ .with(anything, a_hash_including(build_id: pending_job.id))
.once
.and_call_original
diff --git a/spec/support/shared_examples/services/base_helm_service_shared_examples.rb b/spec/support/shared_examples/services/base_helm_service_shared_examples.rb
index fa76b95f768..f867cb69cfd 100644
--- a/spec/support/shared_examples/services/base_helm_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/base_helm_service_shared_examples.rb
@@ -11,20 +11,10 @@ shared_examples 'logs kubernetes errors' do
}
end
- let(:logger_hash) do
- error_hash.merge(
- exception: error_name,
- message: error_message,
- backtrace: instance_of(Array)
- )
- end
-
it 'logs into kubernetes.log and Sentry' do
- expect(service.send(:logger)).to receive(:error).with(hash_including(logger_hash))
-
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception).with(
+ expect(Gitlab::Sentry).to receive(:track_exception).with(
error,
- extra: hash_including(error_hash)
+ hash_including(error_hash)
)
service.execute
diff --git a/spec/workers/ci/archive_traces_cron_worker_spec.rb b/spec/workers/ci/archive_traces_cron_worker_spec.rb
index 01232e2a58b..291fe54c4e3 100644
--- a/spec/workers/ci/archive_traces_cron_worker_spec.rb
+++ b/spec/workers/ci/archive_traces_cron_worker_spec.rb
@@ -63,7 +63,7 @@ describe Ci::ArchiveTracesCronWorker do
let!(:build) { create(:ci_build, :success, :trace_live, finished_at: finished_at) }
before do
- allow(Gitlab::Sentry).to receive(:track_exception)
+ allow(Gitlab::Sentry).to receive(:track_and_raise_for_dev_exception)
allow_any_instance_of(Gitlab::Ci::Trace).to receive(:archive!).and_raise('Unexpected error')
end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 2bf1d470b09..1d376e1f02a 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -43,10 +43,10 @@ describe RunPipelineScheduleWorker do
allow(Ci::CreatePipelineService).to receive(:new) { raise ActiveRecord::StatementInvalid }
expect(Gitlab::Sentry)
- .to receive(:track_exception)
+ .to receive(:track_and_raise_for_dev_exception)
.with(ActiveRecord::StatementInvalid,
issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/41231',
- extra: { schedule_id: pipeline_schedule.id } ).once
+ schedule_id: pipeline_schedule.id).once
end
it 'increments Prometheus counter' do
diff --git a/spec/workers/stuck_ci_jobs_worker_spec.rb b/spec/workers/stuck_ci_jobs_worker_spec.rb
index 59707409b5a..48a20f498b7 100644
--- a/spec/workers/stuck_ci_jobs_worker_spec.rb
+++ b/spec/workers/stuck_ci_jobs_worker_spec.rb
@@ -30,8 +30,8 @@ describe StuckCiJobsWorker do
it "does drop the job and logs the reason" do
job.update_columns(yaml_variables: '[{"key" => "value"}]')
- expect(Gitlab::Sentry).to receive(:track_acceptable_exception)
- .with(anything, a_hash_including(extra: a_hash_including(build_id: job.id)))
+ expect(Gitlab::Sentry).to receive(:track_exception)
+ .with(anything, a_hash_including(build_id: job.id))
.once
.and_call_original