summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/application_controller_spec.rb14
-rw-r--r--spec/controllers/uploads_controller_spec.rb24
-rw-r--r--spec/features/issues/user_toggles_subscription_spec.rb1
-rw-r--r--spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json2
-rw-r--r--spec/frontend/issuables_list/components/issuable_spec.js7
-rw-r--r--spec/frontend/monitoring/mock_data.js135
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js367
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js (renamed from spec/javascripts/monitoring/store/mutations_spec.js)45
-rw-r--r--spec/frontend/monitoring/store/utils_spec.js (renamed from spec/javascripts/monitoring/store/utils_spec.js)0
-rw-r--r--spec/javascripts/monitoring/mock_data.js146
-rw-r--r--spec/javascripts/monitoring/store/actions_spec.js335
-rw-r--r--spec/javascripts/sidebar/subscriptions_spec.js21
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml2
-rw-r--r--spec/lib/gitlab/import_export/group_tree_saver_spec.rb20
-rw-r--r--spec/models/environment_spec.rb6
-rw-r--r--spec/requests/user_avatar_spec.rb36
-rw-r--r--spec/serializers/issuable_sidebar_extras_entity_spec.rb20
-rw-r--r--spec/services/merge_requests/ff_merge_service_spec.rb50
-rw-r--r--spec/spec_helper.rb5
19 files changed, 662 insertions, 574 deletions
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 04bbffc587f..4a10e7b5325 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -90,16 +90,18 @@ describe ApplicationController do
let(:format) { :html }
it_behaves_like 'setting gon variables'
- end
- context 'with json format' do
- let(:format) { :json }
+ context 'for peek requests' do
+ before do
+ request.path = '/-/peek'
+ end
- it_behaves_like 'not setting gon variables'
+ it_behaves_like 'not setting gon variables'
+ end
end
- context 'with atom format' do
- let(:format) { :atom }
+ context 'with json format' do
+ let(:format) { :json }
it_behaves_like 'not setting gon variables'
end
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index f35babc1b56..1bcf3bb106b 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -228,10 +228,10 @@ describe UploadsController do
user.block
end
- it "responds with status 401" do
+ it "redirects to the sign in page" do
get :show, params: { model: "user", mounted_as: "avatar", id: user.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to redirect_to(new_user_session_path)
end
end
@@ -320,10 +320,10 @@ describe UploadsController do
end
context "when not signed in" do
- it "responds with status 401" do
+ it "redirects to the sign in page" do
get :show, params: { model: "project", mounted_as: "avatar", id: project.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to redirect_to(new_user_session_path)
end
end
@@ -343,10 +343,10 @@ describe UploadsController do
project.add_maintainer(user)
end
- it "responds with status 401" do
+ it "redirects to the sign in page" do
get :show, params: { model: "project", mounted_as: "avatar", id: project.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to redirect_to(new_user_session_path)
end
end
@@ -439,10 +439,10 @@ describe UploadsController do
user.block
end
- it "responds with status 401" do
+ it "redirects to the sign in page" do
get :show, params: { model: "group", mounted_as: "avatar", id: group.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to redirect_to(new_user_session_path)
end
end
@@ -526,10 +526,10 @@ describe UploadsController do
end
context "when not signed in" do
- it "responds with status 401" do
+ it "redirects to the sign in page" do
get :show, params: { model: "note", mounted_as: "attachment", id: note.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to redirect_to(new_user_session_path)
end
end
@@ -549,10 +549,10 @@ describe UploadsController do
project.add_maintainer(user)
end
- it "responds with status 401" do
+ it "redirects to the sign in page" do
get :show, params: { model: "note", mounted_as: "attachment", id: note.id, filename: "dk.png" }
- expect(response).to have_gitlab_http_status(401)
+ expect(response).to redirect_to(new_user_session_path)
end
end
diff --git a/spec/features/issues/user_toggles_subscription_spec.rb b/spec/features/issues/user_toggles_subscription_spec.rb
index 165d41950da..ba167362511 100644
--- a/spec/features/issues/user_toggles_subscription_spec.rb
+++ b/spec/features/issues/user_toggles_subscription_spec.rb
@@ -33,7 +33,6 @@ describe "User toggles subscription", :js do
it 'is disabled' do
expect(page).to have_content('Notifications have been disabled by the project or group owner')
- expect(page).to have_selector('.js-emails-disabled', visible: true)
expect(page).not_to have_selector('.js-issuable-subscribe-button')
end
end
diff --git a/spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json b/spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json
index 682e345d5f5..11076ec73de 100644
--- a/spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json
+++ b/spec/fixtures/api/schemas/entities/merge_request_sidebar_extras.json
@@ -3,6 +3,8 @@
"properties" : {
"id": { "type": "integer" },
"iid": { "type": "integer" },
+ "project_emails_disabled": { "type": "boolean" },
+ "subscribe_disabled_description": { "type": "string" },
"subscribed": { "type": "boolean" },
"time_estimate": { "type": "integer" },
"total_time_spent": { "type": "integer" },
diff --git a/spec/frontend/issuables_list/components/issuable_spec.js b/spec/frontend/issuables_list/components/issuable_spec.js
index 915e908dd81..6148f3c68f2 100644
--- a/spec/frontend/issuables_list/components/issuable_spec.js
+++ b/spec/frontend/issuables_list/components/issuable_spec.js
@@ -196,6 +196,13 @@ describe('Issuable component', () => {
`${formatDate(dueDate, DATE_FORMAT)} (${expectedTooltipPart})`,
);
});
+
+ it('renders milestone with the correct href', () => {
+ const { title } = issuable.milestone;
+ const expected = mergeUrlParams({ milestone_title: title }, TEST_BASE_URL);
+
+ expect(findMilestone().attributes('href')).toBe(expected);
+ });
});
describe.each`
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 74e2d079d9b..c42366ab484 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -328,3 +328,138 @@ export const metricsGroupsAPIResponse = [
],
},
];
+
+export const environmentData = [
+ {
+ id: 34,
+ name: 'production',
+ state: 'available',
+ external_url: 'http://root-autodevops-deploy.my-fake-domain.com',
+ environment_type: null,
+ stop_action: false,
+ metrics_path: '/root/hello-prometheus/environments/34/metrics',
+ environment_path: '/root/hello-prometheus/environments/34',
+ stop_path: '/root/hello-prometheus/environments/34/stop',
+ terminal_path: '/root/hello-prometheus/environments/34/terminal',
+ folder_path: '/root/hello-prometheus/environments/folders/production',
+ created_at: '2018-06-29T16:53:38.301Z',
+ updated_at: '2018-06-29T16:57:09.825Z',
+ last_deployment: {
+ id: 127,
+ },
+ },
+ {
+ id: 35,
+ name: 'review/noop-branch',
+ state: 'available',
+ external_url: 'http://root-autodevops-deploy-review-noop-branc-die93w.my-fake-domain.com',
+ environment_type: 'review',
+ stop_action: true,
+ metrics_path: '/root/hello-prometheus/environments/35/metrics',
+ environment_path: '/root/hello-prometheus/environments/35',
+ stop_path: '/root/hello-prometheus/environments/35/stop',
+ terminal_path: '/root/hello-prometheus/environments/35/terminal',
+ folder_path: '/root/hello-prometheus/environments/folders/review',
+ created_at: '2018-07-03T18:39:41.702Z',
+ updated_at: '2018-07-03T18:44:54.010Z',
+ last_deployment: {
+ id: 128,
+ },
+ },
+ {
+ id: 36,
+ name: 'no-deployment/noop-branch',
+ state: 'available',
+ created_at: '2018-07-04T18:39:41.702Z',
+ updated_at: '2018-07-04T18:44:54.010Z',
+ },
+];
+
+export const metricsDashboardResponse = {
+ dashboard: {
+ dashboard: 'Environment metrics',
+ priority: 1,
+ panel_groups: [
+ {
+ group: 'System metrics (Kubernetes)',
+ priority: 5,
+ panels: [
+ {
+ title: 'Memory Usage (Total)',
+ type: 'area-chart',
+ y_label: 'Total Memory Used',
+ weight: 4,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_memory_total',
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
+ label: 'Total',
+ unit: 'GB',
+ metric_id: 12,
+ prometheus_endpoint_path: 'http://test',
+ },
+ ],
+ },
+ {
+ title: 'Core Usage (Total)',
+ type: 'area-chart',
+ y_label: 'Total Cores',
+ weight: 3,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_cores_total',
+ query_range:
+ 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
+ label: 'Total',
+ unit: 'cores',
+ metric_id: 13,
+ },
+ ],
+ },
+ {
+ title: 'Memory Usage (Pod average)',
+ type: 'line-chart',
+ y_label: 'Memory Used per Pod',
+ weight: 2,
+ metrics: [
+ {
+ id: 'system_metrics_kubernetes_container_memory_average',
+ query_range:
+ 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
+ label: 'Pod average',
+ unit: 'MB',
+ metric_id: 14,
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ },
+ status: 'success',
+};
+
+export const dashboardGitResponse = [
+ {
+ default: true,
+ display_name: 'Default',
+ can_edit: false,
+ project_blob_path: null,
+ path: 'config/prometheus/common_metrics.yml',
+ },
+ {
+ default: false,
+ display_name: 'Custom Dashboard 1',
+ can_edit: true,
+ project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_1.yml`,
+ path: '.gitlab/dashboards/dashboard_1.yml',
+ },
+ {
+ default: false,
+ display_name: 'Custom Dashboard 2',
+ can_edit: true,
+ project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_2.yml`,
+ path: '.gitlab/dashboards/dashboard_2.yml',
+ },
+];
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index 513a0e0d103..d4bc613ffea 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -1,12 +1,44 @@
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
-import { backOffRequest } from '~/monitoring/stores/actions';
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import { backOff } from '~/lib/utils/common_utils';
+import store from '~/monitoring/stores';
+import * as types from '~/monitoring/stores/mutation_types';
+import {
+ backOffRequest,
+ fetchDashboard,
+ receiveMetricsDashboardSuccess,
+ receiveMetricsDashboardFailure,
+ fetchDeploymentsData,
+ fetchEnvironmentsData,
+ fetchPrometheusMetrics,
+ fetchPrometheusMetric,
+ requestMetricsData,
+ setEndpoints,
+ setGettingStartedEmptyState,
+} from '~/monitoring/stores/actions';
+import storeState from '~/monitoring/stores/state';
+import {
+ deploymentData,
+ environmentData,
+ metricsDashboardResponse,
+ metricsGroupsAPIResponse,
+ dashboardGitResponse,
+} from '../mock_data';
+
jest.mock('~/lib/utils/common_utils');
+const resetStore = str => {
+ str.replaceState({
+ showEmptyState: true,
+ emptyState: 'loading',
+ groups: [],
+ });
+};
+
const MAX_REQUESTS = 3;
describe('Monitoring store helpers', () => {
@@ -51,3 +83,334 @@ describe('Monitoring store helpers', () => {
});
});
});
+
+describe('Monitoring store actions', () => {
+ let mock;
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+ afterEach(() => {
+ resetStore(store);
+ mock.restore();
+ });
+ describe('requestMetricsData', () => {
+ it('sets emptyState to loading', () => {
+ const commit = jest.fn();
+ const { state } = store;
+ requestMetricsData({
+ state,
+ commit,
+ });
+ expect(commit).toHaveBeenCalledWith(types.REQUEST_METRICS_DATA);
+ });
+ });
+ describe('fetchDeploymentsData', () => {
+ it('commits RECEIVE_DEPLOYMENTS_DATA_SUCCESS on success', done => {
+ const dispatch = jest.fn();
+ const { state } = store;
+ state.deploymentsEndpoint = '/success';
+ mock.onGet(state.deploymentsEndpoint).reply(200, {
+ deployments: deploymentData,
+ });
+ fetchDeploymentsData({
+ state,
+ dispatch,
+ })
+ .then(() => {
+ expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataSuccess', deploymentData);
+ done();
+ })
+ .catch(done.fail);
+ });
+ it('commits RECEIVE_DEPLOYMENTS_DATA_FAILURE on error', done => {
+ const dispatch = jest.fn();
+ const { state } = store;
+ state.deploymentsEndpoint = '/error';
+ mock.onGet(state.deploymentsEndpoint).reply(500);
+ fetchDeploymentsData({
+ state,
+ dispatch,
+ })
+ .then(() => {
+ expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataFailure');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+ describe('fetchEnvironmentsData', () => {
+ it('commits RECEIVE_ENVIRONMENTS_DATA_SUCCESS on success', done => {
+ const dispatch = jest.fn();
+ const { state } = store;
+ state.environmentsEndpoint = '/success';
+ mock.onGet(state.environmentsEndpoint).reply(200, {
+ environments: environmentData,
+ });
+ fetchEnvironmentsData({
+ state,
+ dispatch,
+ })
+ .then(() => {
+ expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataSuccess', environmentData);
+ done();
+ })
+ .catch(done.fail);
+ });
+ it('commits RECEIVE_ENVIRONMENTS_DATA_FAILURE on error', done => {
+ const dispatch = jest.fn();
+ const { state } = store;
+ state.environmentsEndpoint = '/error';
+ mock.onGet(state.environmentsEndpoint).reply(500);
+ fetchEnvironmentsData({
+ state,
+ dispatch,
+ })
+ .then(() => {
+ expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataFailure');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+ describe('Set endpoints', () => {
+ let mockedState;
+ beforeEach(() => {
+ mockedState = storeState();
+ });
+ it('should commit SET_ENDPOINTS mutation', done => {
+ testAction(
+ setEndpoints,
+ {
+ metricsEndpoint: 'additional_metrics.json',
+ deploymentsEndpoint: 'deployments.json',
+ environmentsEndpoint: 'deployments.json',
+ },
+ mockedState,
+ [
+ {
+ type: types.SET_ENDPOINTS,
+ payload: {
+ metricsEndpoint: 'additional_metrics.json',
+ deploymentsEndpoint: 'deployments.json',
+ environmentsEndpoint: 'deployments.json',
+ },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ describe('Set empty states', () => {
+ let mockedState;
+ beforeEach(() => {
+ mockedState = storeState();
+ });
+ it('should commit SET_GETTING_STARTED_EMPTY_STATE mutation', done => {
+ testAction(
+ setGettingStartedEmptyState,
+ null,
+ mockedState,
+ [
+ {
+ type: types.SET_GETTING_STARTED_EMPTY_STATE,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ describe('fetchDashboard', () => {
+ let dispatch;
+ let state;
+ const response = metricsDashboardResponse;
+ beforeEach(() => {
+ dispatch = jest.fn();
+ state = storeState();
+ state.dashboardEndpoint = '/dashboard';
+ });
+ it('dispatches receive and success actions', done => {
+ const params = {};
+ mock.onGet(state.dashboardEndpoint).reply(200, response);
+ fetchDashboard(
+ {
+ state,
+ dispatch,
+ },
+ params,
+ )
+ .then(() => {
+ expect(dispatch).toHaveBeenCalledWith('requestMetricsDashboard');
+ expect(dispatch).toHaveBeenCalledWith('receiveMetricsDashboardSuccess', {
+ response,
+ params,
+ });
+ done();
+ })
+ .catch(done.fail);
+ });
+ it('dispatches failure action', done => {
+ const params = {};
+ mock.onGet(state.dashboardEndpoint).reply(500);
+ fetchDashboard(
+ {
+ state,
+ dispatch,
+ },
+ params,
+ )
+ .then(() => {
+ expect(dispatch).toHaveBeenCalledWith(
+ 'receiveMetricsDashboardFailure',
+ new Error('Request failed with status code 500'),
+ );
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+ describe('receiveMetricsDashboardSuccess', () => {
+ let commit;
+ let dispatch;
+ let state;
+ beforeEach(() => {
+ commit = jest.fn();
+ dispatch = jest.fn();
+ state = storeState();
+ });
+ it('stores groups', () => {
+ const params = {};
+ const response = metricsDashboardResponse;
+ receiveMetricsDashboardSuccess(
+ {
+ state,
+ commit,
+ dispatch,
+ },
+ {
+ response,
+ params,
+ },
+ );
+ expect(commit).toHaveBeenCalledWith(
+ types.RECEIVE_METRICS_DATA_SUCCESS,
+ metricsDashboardResponse.dashboard.panel_groups,
+ );
+ expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics', params);
+ });
+ it('sets the dashboards loaded from the repository', () => {
+ const params = {};
+ const response = metricsDashboardResponse;
+ response.all_dashboards = dashboardGitResponse;
+ receiveMetricsDashboardSuccess(
+ {
+ state,
+ commit,
+ dispatch,
+ },
+ {
+ response,
+ params,
+ },
+ );
+ expect(commit).toHaveBeenCalledWith(types.SET_ALL_DASHBOARDS, dashboardGitResponse);
+ });
+ });
+ describe('receiveMetricsDashboardFailure', () => {
+ let commit;
+ beforeEach(() => {
+ commit = jest.fn();
+ });
+ it('commits failure action', () => {
+ receiveMetricsDashboardFailure({
+ commit,
+ });
+ expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, undefined);
+ });
+ it('commits failure action with error', () => {
+ receiveMetricsDashboardFailure(
+ {
+ commit,
+ },
+ 'uh-oh',
+ );
+ expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, 'uh-oh');
+ });
+ });
+ describe('fetchPrometheusMetrics', () => {
+ let commit;
+ let dispatch;
+ beforeEach(() => {
+ commit = jest.fn();
+ dispatch = jest.fn();
+ });
+ it('commits empty state when state.groups is empty', done => {
+ const state = storeState();
+ const params = {};
+ fetchPrometheusMetrics(
+ {
+ state,
+ commit,
+ dispatch,
+ },
+ params,
+ )
+ .then(() => {
+ expect(commit).toHaveBeenCalledWith(types.SET_NO_DATA_EMPTY_STATE);
+ expect(dispatch).not.toHaveBeenCalled();
+ done();
+ })
+ .catch(done.fail);
+ });
+ it('dispatches fetchPrometheusMetric for each panel query', done => {
+ const params = {};
+ const state = storeState();
+ state.dashboard.panel_groups = metricsDashboardResponse.dashboard.panel_groups;
+ const metric = state.dashboard.panel_groups[0].panels[0].metrics[0];
+ fetchPrometheusMetrics(
+ {
+ state,
+ commit,
+ dispatch,
+ },
+ params,
+ )
+ .then(() => {
+ expect(dispatch).toHaveBeenCalledTimes(3);
+ expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
+ metric,
+ params,
+ });
+ done();
+ })
+ .catch(done.fail);
+ done();
+ });
+ });
+ describe('fetchPrometheusMetric', () => {
+ it('commits prometheus query result', done => {
+ const commit = jest.fn();
+ const params = {
+ start: '2019-08-06T12:40:02.184Z',
+ end: '2019-08-06T20:40:02.184Z',
+ };
+ const metric = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics[0];
+ const state = storeState();
+ const data = metricsGroupsAPIResponse[0].panels[0].metrics[0];
+ const response = {
+ data,
+ };
+ mock.onGet('http://test').reply(200, response);
+ fetchPrometheusMetric({ state, commit }, { metric, params })
+ .then(() => {
+ expect(commit).toHaveBeenCalledWith(types.SET_QUERY_RESULT, {
+ metricId: metric.metric_id,
+ result: data.result,
+ });
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 91948b83eec..fdad290a8d6 100644
--- a/spec/javascripts/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -11,81 +11,62 @@ import { uniqMetricsId } from '~/monitoring/stores/utils';
describe('Monitoring mutations', () => {
let stateCopy;
-
beforeEach(() => {
stateCopy = state();
});
-
- describe(types.RECEIVE_METRICS_DATA_SUCCESS, () => {
+ describe('RECEIVE_METRICS_DATA_SUCCESS', () => {
let groups;
-
beforeEach(() => {
stateCopy.dashboard.panel_groups = [];
groups = metricsGroupsAPIResponse;
});
-
it('adds a key to the group', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
-
expect(stateCopy.dashboard.panel_groups[0].key).toBe('system-metrics-kubernetes--0');
});
-
it('normalizes values', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
-
const expectedLabel = 'Pod average';
const { label, query_range } = stateCopy.dashboard.panel_groups[0].metrics[0].metrics[0];
-
expect(label).toEqual(expectedLabel);
expect(query_range.length).toBeGreaterThan(0);
});
-
it('contains one group, which it has two panels and one metrics property', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
-
expect(stateCopy.dashboard.panel_groups).toBeDefined();
expect(stateCopy.dashboard.panel_groups.length).toEqual(1);
expect(stateCopy.dashboard.panel_groups[0].panels.length).toEqual(2);
expect(stateCopy.dashboard.panel_groups[0].panels[0].metrics.length).toEqual(1);
expect(stateCopy.dashboard.panel_groups[0].panels[1].metrics.length).toEqual(1);
});
-
it('assigns queries a metric id', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
-
expect(stateCopy.dashboard.panel_groups[0].metrics[0].queries[0].metricId).toEqual(
'17_system_metrics_kubernetes_container_memory_average',
);
});
-
describe('dashboard endpoint', () => {
const dashboardGroups = metricsDashboardResponse.dashboard.panel_groups;
-
it('aliases group panels to metrics for backwards compatibility', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
-
expect(stateCopy.dashboard.panel_groups[0].metrics[0]).toBeDefined();
});
-
it('aliases panel metrics to queries for backwards compatibility', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
-
expect(stateCopy.dashboard.panel_groups[0].metrics[0].queries).toBeDefined();
});
});
});
- describe(types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS, () => {
+ describe('RECEIVE_DEPLOYMENTS_DATA_SUCCESS', () => {
it('stores the deployment data', () => {
stateCopy.deploymentData = [];
mutations[types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS](stateCopy, deploymentData);
-
expect(stateCopy.deploymentData).toBeDefined();
expect(stateCopy.deploymentData.length).toEqual(3);
expect(typeof stateCopy.deploymentData[0]).toEqual('object');
});
});
-
describe('SET_ENDPOINTS', () => {
it('should set all the endpoints', () => {
mutations[types.SET_ENDPOINTS](stateCopy, {
@@ -95,7 +76,6 @@ describe('Monitoring mutations', () => {
dashboardEndpoint: 'dashboard.json',
projectPath: '/gitlab-org/gitlab-foss',
});
-
expect(stateCopy.metricsEndpoint).toEqual('additional_metrics.json');
expect(stateCopy.environmentsEndpoint).toEqual('environments.json');
expect(stateCopy.deploymentsEndpoint).toEqual('deployments.json');
@@ -103,46 +83,44 @@ describe('Monitoring mutations', () => {
expect(stateCopy.projectPath).toEqual('/gitlab-org/gitlab-foss');
});
});
-
describe('SET_QUERY_RESULT', () => {
const metricId = 12;
const id = 'system_metrics_kubernetes_container_memory_total';
- const result = [{ values: [[0, 1], [1, 1], [1, 3]] }];
-
+ const result = [
+ {
+ values: [[0, 1], [1, 1], [1, 3]],
+ },
+ ];
beforeEach(() => {
const dashboardGroups = metricsDashboardResponse.dashboard.panel_groups;
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
});
-
it('clears empty state', () => {
mutations[types.SET_QUERY_RESULT](stateCopy, {
metricId,
result,
});
-
expect(stateCopy.showEmptyState).toBe(false);
});
-
it('sets metricsWithData value', () => {
- const uniqId = uniqMetricsId({ metric_id: metricId, id });
+ const uniqId = uniqMetricsId({
+ metric_id: metricId,
+ id,
+ });
mutations[types.SET_QUERY_RESULT](stateCopy, {
metricId: uniqId,
result,
});
-
expect(stateCopy.metricsWithData).toEqual([uniqId]);
});
-
it('does not store empty results', () => {
mutations[types.SET_QUERY_RESULT](stateCopy, {
metricId,
result: [],
});
-
expect(stateCopy.metricsWithData).toEqual([]);
});
});
-
describe('SET_ALL_DASHBOARDS', () => {
it('stores `undefined` dashboards as an empty array', () => {
mutations[types.SET_ALL_DASHBOARDS](stateCopy, undefined);
@@ -158,7 +136,6 @@ describe('Monitoring mutations', () => {
it('stores dashboards loaded from the git repository', () => {
mutations[types.SET_ALL_DASHBOARDS](stateCopy, dashboardGitResponse);
-
expect(stateCopy.allDashboards).toEqual(dashboardGitResponse);
});
});
diff --git a/spec/javascripts/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index 98388ac19f8..98388ac19f8 100644
--- a/spec/javascripts/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js
index b1cd27f49be..f9cc839bde6 100644
--- a/spec/javascripts/monitoring/mock_data.js
+++ b/spec/javascripts/monitoring/mock_data.js
@@ -1,20 +1,17 @@
import {
anomalyMockGraphData as importedAnomalyMockGraphData,
- deploymentData as importedDeploymentData,
- metricsNewGroupsAPIResponse as importedMetricsNewGroupsAPIResponse,
metricsGroupsAPIResponse as importedMetricsGroupsAPIResponse,
+ environmentData as importedEnvironmentData,
+ dashboardGitResponse as importedDashboardGitResponse,
} from '../../frontend/monitoring/mock_data';
-// TODO Check if these exports are still needed
export const anomalyMockGraphData = importedAnomalyMockGraphData;
-export const deploymentData = importedDeploymentData;
-export const metricsNewGroupsAPIResponse = importedMetricsNewGroupsAPIResponse;
export const metricsGroupsAPIResponse = importedMetricsGroupsAPIResponse;
+export const environmentData = importedEnvironmentData;
+export const dashboardGitResponse = importedDashboardGitResponse;
export const mockApiEndpoint = `${gl.TEST_HOST}/monitoring/mock`;
-export const mockProjectPath = '/frontend-fixtures/environments-project';
-
export const mockedQueryResultPayload = {
metricId: '17_system_metrics_kubernetes_container_memory_average',
result: [
@@ -101,141 +98,6 @@ export const mockedQueryResultPayloadCoresTotal = {
],
};
-export const environmentData = [
- {
- id: 34,
- name: 'production',
- state: 'available',
- external_url: 'http://root-autodevops-deploy.my-fake-domain.com',
- environment_type: null,
- stop_action: false,
- metrics_path: '/root/hello-prometheus/environments/34/metrics',
- environment_path: '/root/hello-prometheus/environments/34',
- stop_path: '/root/hello-prometheus/environments/34/stop',
- terminal_path: '/root/hello-prometheus/environments/34/terminal',
- folder_path: '/root/hello-prometheus/environments/folders/production',
- created_at: '2018-06-29T16:53:38.301Z',
- updated_at: '2018-06-29T16:57:09.825Z',
- last_deployment: {
- id: 127,
- },
- },
- {
- id: 35,
- name: 'review/noop-branch',
- state: 'available',
- external_url: 'http://root-autodevops-deploy-review-noop-branc-die93w.my-fake-domain.com',
- environment_type: 'review',
- stop_action: true,
- metrics_path: '/root/hello-prometheus/environments/35/metrics',
- environment_path: '/root/hello-prometheus/environments/35',
- stop_path: '/root/hello-prometheus/environments/35/stop',
- terminal_path: '/root/hello-prometheus/environments/35/terminal',
- folder_path: '/root/hello-prometheus/environments/folders/review',
- created_at: '2018-07-03T18:39:41.702Z',
- updated_at: '2018-07-03T18:44:54.010Z',
- last_deployment: {
- id: 128,
- },
- },
- {
- id: 36,
- name: 'no-deployment/noop-branch',
- state: 'available',
- created_at: '2018-07-04T18:39:41.702Z',
- updated_at: '2018-07-04T18:44:54.010Z',
- },
-];
-
-export const metricsDashboardResponse = {
- dashboard: {
- dashboard: 'Environment metrics',
- priority: 1,
- panel_groups: [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Memory Used',
- weight: 4,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_total',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
- label: 'Total',
- unit: 'GB',
- metric_id: 12,
- prometheus_endpoint_path: 'http://test',
- },
- ],
- },
- {
- title: 'Core Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- query_range:
- 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
- label: 'Total',
- unit: 'cores',
- metric_id: 13,
- },
- ],
- },
- {
- title: 'Memory Usage (Pod average)',
- type: 'line-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 14,
- },
- ],
- },
- ],
- },
- ],
- },
- status: 'success',
-};
-
-export const dashboardGitResponse = [
- {
- default: true,
- display_name: 'Default',
- can_edit: false,
- project_blob_path: null,
- path: 'config/prometheus/common_metrics.yml',
- },
- {
- default: false,
- display_name: 'Custom Dashboard 1',
- can_edit: true,
- project_blob_path: `${mockProjectPath}/blob/master/dashboards/.gitlab/dashboards/dashboard_1.yml`,
- path: '.gitlab/dashboards/dashboard_1.yml',
- },
- {
- default: false,
- display_name: 'Custom Dashboard 2',
- can_edit: true,
- project_blob_path: `${mockProjectPath}/blob/master/dashboards/.gitlab/dashboards/dashboard_2.yml`,
- path: '.gitlab/dashboards/dashboard_2.yml',
- },
-];
-
export const graphDataPrometheusQuery = {
title: 'Super Chart A2',
type: 'single-stat',
diff --git a/spec/javascripts/monitoring/store/actions_spec.js b/spec/javascripts/monitoring/store/actions_spec.js
deleted file mode 100644
index 684e26641c7..00000000000
--- a/spec/javascripts/monitoring/store/actions_spec.js
+++ /dev/null
@@ -1,335 +0,0 @@
-import axios from '~/lib/utils/axios_utils';
-import MockAdapter from 'axios-mock-adapter';
-import store from '~/monitoring/stores';
-import * as types from '~/monitoring/stores/mutation_types';
-import {
- fetchDashboard,
- receiveMetricsDashboardSuccess,
- receiveMetricsDashboardFailure,
- fetchDeploymentsData,
- fetchEnvironmentsData,
- fetchPrometheusMetrics,
- fetchPrometheusMetric,
- requestMetricsData,
- setEndpoints,
- setGettingStartedEmptyState,
-} from '~/monitoring/stores/actions';
-import storeState from '~/monitoring/stores/state';
-import testAction from 'spec/helpers/vuex_action_helper';
-import { resetStore } from '../helpers';
-import {
- deploymentData,
- environmentData,
- metricsDashboardResponse,
- metricsGroupsAPIResponse,
- dashboardGitResponse,
-} from '../mock_data';
-
-describe('Monitoring store actions', () => {
- let mock;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- resetStore(store);
- mock.restore();
- });
-
- describe('requestMetricsData', () => {
- it('sets emptyState to loading', () => {
- const commit = jasmine.createSpy();
- const { state } = store;
-
- requestMetricsData({ state, commit });
-
- expect(commit).toHaveBeenCalledWith(types.REQUEST_METRICS_DATA);
- });
- });
-
- describe('fetchDeploymentsData', () => {
- it('commits RECEIVE_DEPLOYMENTS_DATA_SUCCESS on error', done => {
- const dispatch = jasmine.createSpy();
- const { state } = store;
- state.deploymentsEndpoint = '/success';
-
- mock.onGet(state.deploymentsEndpoint).reply(200, {
- deployments: deploymentData,
- });
-
- fetchDeploymentsData({ state, dispatch })
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataSuccess', deploymentData);
- done();
- })
- .catch(done.fail);
- });
-
- it('commits RECEIVE_DEPLOYMENTS_DATA_FAILURE on error', done => {
- const dispatch = jasmine.createSpy();
- const { state } = store;
- state.deploymentsEndpoint = '/error';
-
- mock.onGet(state.deploymentsEndpoint).reply(500);
-
- fetchDeploymentsData({ state, dispatch })
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataFailure');
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('fetchEnvironmentsData', () => {
- it('commits RECEIVE_ENVIRONMENTS_DATA_SUCCESS on error', done => {
- const dispatch = jasmine.createSpy();
- const { state } = store;
- state.environmentsEndpoint = '/success';
-
- mock.onGet(state.environmentsEndpoint).reply(200, {
- environments: environmentData,
- });
-
- fetchEnvironmentsData({ state, dispatch })
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataSuccess', environmentData);
- done();
- })
- .catch(done.fail);
- });
-
- it('commits RECEIVE_ENVIRONMENTS_DATA_FAILURE on error', done => {
- const dispatch = jasmine.createSpy();
- const { state } = store;
- state.environmentsEndpoint = '/error';
-
- mock.onGet(state.environmentsEndpoint).reply(500);
-
- fetchEnvironmentsData({ state, dispatch })
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataFailure');
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('Set endpoints', () => {
- let mockedState;
-
- beforeEach(() => {
- mockedState = storeState();
- });
-
- it('should commit SET_ENDPOINTS mutation', done => {
- testAction(
- setEndpoints,
- {
- metricsEndpoint: 'additional_metrics.json',
- deploymentsEndpoint: 'deployments.json',
- environmentsEndpoint: 'deployments.json',
- },
- mockedState,
- [
- {
- type: types.SET_ENDPOINTS,
- payload: {
- metricsEndpoint: 'additional_metrics.json',
- deploymentsEndpoint: 'deployments.json',
- environmentsEndpoint: 'deployments.json',
- },
- },
- ],
- [],
- done,
- );
- });
- });
-
- describe('Set empty states', () => {
- let mockedState;
-
- beforeEach(() => {
- mockedState = storeState();
- });
-
- it('should commit SET_METRICS_ENDPOINT mutation', done => {
- testAction(
- setGettingStartedEmptyState,
- null,
- mockedState,
- [{ type: types.SET_GETTING_STARTED_EMPTY_STATE }],
- [],
- done,
- );
- });
- });
-
- describe('fetchDashboard', () => {
- let dispatch;
- let state;
- const response = metricsDashboardResponse;
-
- beforeEach(() => {
- dispatch = jasmine.createSpy();
- state = storeState();
- state.dashboardEndpoint = '/dashboard';
- });
-
- it('dispatches receive and success actions', done => {
- const params = {};
- mock.onGet(state.dashboardEndpoint).reply(200, response);
-
- fetchDashboard({ state, dispatch }, params)
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('requestMetricsDashboard');
- expect(dispatch).toHaveBeenCalledWith('receiveMetricsDashboardSuccess', {
- response,
- params,
- });
- done();
- })
- .catch(done.fail);
- });
-
- it('dispatches failure action', done => {
- const params = {};
- mock.onGet(state.dashboardEndpoint).reply(500);
-
- fetchDashboard({ state, dispatch }, params)
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith(
- 'receiveMetricsDashboardFailure',
- new Error('Request failed with status code 500'),
- );
- done();
- })
- .catch(done.fail);
- });
- });
-
- describe('receiveMetricsDashboardSuccess', () => {
- let commit;
- let dispatch;
- let state;
-
- beforeEach(() => {
- commit = jasmine.createSpy();
- dispatch = jasmine.createSpy();
- state = storeState();
- });
-
- it('stores groups ', () => {
- const params = {};
- const response = metricsDashboardResponse;
-
- receiveMetricsDashboardSuccess({ state, commit, dispatch }, { response, params });
-
- expect(commit).toHaveBeenCalledWith(
- types.RECEIVE_METRICS_DATA_SUCCESS,
- metricsDashboardResponse.dashboard.panel_groups,
- );
-
- expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics', params);
- });
-
- it('sets the dashboards loaded from the repository', () => {
- const params = {};
- const response = metricsDashboardResponse;
-
- response.all_dashboards = dashboardGitResponse;
- receiveMetricsDashboardSuccess({ state, commit, dispatch }, { response, params });
-
- expect(commit).toHaveBeenCalledWith(types.SET_ALL_DASHBOARDS, dashboardGitResponse);
- });
- });
-
- describe('receiveMetricsDashboardFailure', () => {
- let commit;
-
- beforeEach(() => {
- commit = jasmine.createSpy();
- });
-
- it('commits failure action', () => {
- receiveMetricsDashboardFailure({ commit });
-
- expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, undefined);
- });
-
- it('commits failure action with error', () => {
- receiveMetricsDashboardFailure({ commit }, 'uh-oh');
-
- expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, 'uh-oh');
- });
- });
-
- describe('fetchPrometheusMetrics', () => {
- let commit;
- let dispatch;
-
- beforeEach(() => {
- commit = jasmine.createSpy();
- dispatch = jasmine.createSpy();
- });
-
- it('commits empty state when state.groups is empty', done => {
- const state = storeState();
- const params = {};
-
- fetchPrometheusMetrics({ state, commit, dispatch }, params)
- .then(() => {
- expect(commit).toHaveBeenCalledWith(types.SET_NO_DATA_EMPTY_STATE);
- expect(dispatch).not.toHaveBeenCalled();
- done();
- })
- .catch(done.fail);
- });
-
- it('dispatches fetchPrometheusMetric for each panel query', done => {
- const params = {};
- const state = storeState();
- state.dashboard.panel_groups = metricsDashboardResponse.dashboard.panel_groups;
-
- const metric = state.dashboard.panel_groups[0].panels[0].metrics[0];
-
- fetchPrometheusMetrics({ state, commit, dispatch }, params)
- .then(() => {
- expect(dispatch.calls.count()).toEqual(3);
- expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', { metric, params });
- done();
- })
- .catch(done.fail);
-
- done();
- });
- });
-
- describe('fetchPrometheusMetric', () => {
- it('commits prometheus query result', done => {
- const commit = jasmine.createSpy();
- const params = {
- start: '2019-08-06T12:40:02.184Z',
- end: '2019-08-06T20:40:02.184Z',
- };
- const metric = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics[0];
- const state = storeState();
-
- const data = metricsGroupsAPIResponse[0].panels[0].metrics[0];
- const response = { data };
- mock.onGet('http://test').reply(200, response);
-
- fetchPrometheusMetric({ state, commit }, { metric, params });
-
- setTimeout(() => {
- expect(commit).toHaveBeenCalledWith(types.SET_QUERY_RESULT, {
- metricId: metric.metric_id,
- result: data.result,
- });
- done();
- });
- });
- });
-});
diff --git a/spec/javascripts/sidebar/subscriptions_spec.js b/spec/javascripts/sidebar/subscriptions_spec.js
index a97608d6b8a..1256852c472 100644
--- a/spec/javascripts/sidebar/subscriptions_spec.js
+++ b/spec/javascripts/sidebar/subscriptions_spec.js
@@ -76,4 +76,25 @@ describe('Subscriptions', function() {
expect(vm.$emit).toHaveBeenCalledWith('toggleSidebar');
});
+
+ describe('given project emails are disabled', () => {
+ const subscribeDisabledDescription = 'Notifications have been disabled';
+
+ beforeEach(() => {
+ vm = mountComponent(Subscriptions, {
+ subscribed: false,
+ projectEmailsDisabled: true,
+ subscribeDisabledDescription,
+ });
+ });
+
+ it('sets the correct display text', () => {
+ expect(vm.$el.textContent).toContain(subscribeDisabledDescription);
+ expect(vm.$refs.tooltip.dataset.originalTitle).toBe(subscribeDisabledDescription);
+ });
+
+ it('does not render the toggle button', () => {
+ expect(vm.$refs.toggleButton).toBeUndefined();
+ });
+ });
});
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 2339a090ccd..8f627fcc24d 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -30,6 +30,8 @@ issues:
- prometheus_alert_events
- self_managed_prometheus_alert_events
- zoom_meetings
+- vulnerability_links
+- related_vulnerabilities
events:
- author
- project
diff --git a/spec/lib/gitlab/import_export/group_tree_saver_spec.rb b/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
index c752c557d99..b856441981a 100644
--- a/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/group_tree_saver_spec.rb
@@ -39,12 +39,16 @@ describe Gitlab::ImportExport::GroupTreeSaver do
end
context 'when :export_fast_serialize feature is enabled' do
+ let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
+
before do
stub_feature_flags(export_fast_serialize: true)
+
+ expect(Gitlab::ImportExport::FastHashSerializer).to receive(:new).with(group, group_tree).and_return(serializer)
end
it 'uses FastHashSerializer' do
- expect_any_instance_of(Gitlab::ImportExport::FastHashSerializer).to receive(:execute).and_call_original
+ expect(serializer).to receive(:execute)
group_tree_saver.save
end
@@ -103,6 +107,18 @@ describe Gitlab::ImportExport::GroupTreeSaver do
expect(saved_group_json['badges']).not_to be_empty
end
+ context 'group children' do
+ let(:children) { group.children }
+
+ it 'exports group children' do
+ expect(saved_group_json['children'].length).to eq(children.count)
+ end
+
+ it 'exports group children of children' do
+ expect(saved_group_json['children'].first['children'].length).to eq(children.first.children.count)
+ end
+ end
+
context 'group members' do
let(:user2) { create(:user, email: 'group@member.com') }
let(:member_emails) do
@@ -146,6 +162,8 @@ describe Gitlab::ImportExport::GroupTreeSaver do
def setup_group
group = create(:group, description: 'description')
+ sub_group = create(:group, description: 'description', parent: group)
+ create(:group, description: 'description', parent: sub_group)
create(:milestone, group: group)
create(:group_badge, group: group)
group_label = create(:group_label, group: group)
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 8f3f45e159d..47e39e5fbe5 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -744,6 +744,12 @@ describe Environment, :use_clean_rails_memory_store_caching do
allow(environment).to receive(:deployment_platform).and_return(double)
end
+ context 'reactive cache configuration' do
+ it 'does not continue to spawn jobs' do
+ expect(described_class.reactive_cache_lifetime).to be < described_class.reactive_cache_refresh_interval
+ end
+ end
+
context 'reactive cache is empty' do
before do
stub_reactive_cache(environment, nil)
diff --git a/spec/requests/user_avatar_spec.rb b/spec/requests/user_avatar_spec.rb
deleted file mode 100644
index 9451674161c..00000000000
--- a/spec/requests/user_avatar_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe 'Loading a user avatar' do
- let(:user) { create(:user, :with_avatar) }
-
- context 'when logged in' do
- # The exact query count will vary depending on the 2FA settings of the
- # instance, group, and user. Removing those extra 2FA queries in this case
- # may not be a good idea, so we just set up the ideal case.
- before do
- stub_application_setting(require_two_factor_authentication: true)
-
- login_as(create(:user, :two_factor))
- end
-
- # One each for: current user, avatar user, and upload record
- it 'only performs three SQL queries' do
- get user.avatar_url # Skip queries on first application load
-
- expect(response).to have_gitlab_http_status(200)
- expect { get user.avatar_url }.not_to exceed_query_limit(3)
- end
- end
-
- context 'when logged out' do
- # One each for avatar user and upload record
- it 'only performs two SQL queries' do
- get user.avatar_url # Skip queries on first application load
-
- expect(response).to have_gitlab_http_status(200)
- expect { get user.avatar_url }.not_to exceed_query_limit(2)
- end
- end
-end
diff --git a/spec/serializers/issuable_sidebar_extras_entity_spec.rb b/spec/serializers/issuable_sidebar_extras_entity_spec.rb
new file mode 100644
index 00000000000..a1a7c554b49
--- /dev/null
+++ b/spec/serializers/issuable_sidebar_extras_entity_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IssuableSidebarExtrasEntity do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:resource) { create(:issue, project: project) }
+ let(:request) { double('request', current_user: user) }
+
+ subject { described_class.new(resource, request: request).as_json }
+
+ it 'have subscribe attributes' do
+ expect(subject).to include(:participants,
+ :project_emails_disabled,
+ :subscribe_disabled_description,
+ :subscribed,
+ :assignees)
+ end
+end
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
index c724a1a47b4..87fcd70a298 100644
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -24,33 +24,63 @@ describe MergeRequests::FfMergeService do
context 'valid params' do
let(:service) { described_class.new(project, user, valid_merge_params) }
- before do
- allow(service).to receive(:execute_hooks)
-
+ def execute_ff_merge
perform_enqueued_jobs do
service.execute(merge_request)
end
end
+ before do
+ allow(service).to receive(:execute_hooks)
+ end
+
it "does not create merge commit" do
+ execute_ff_merge
+
source_branch_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
target_branch_sha = merge_request.target_project.repository.commit(merge_request.target_branch).sha
+
expect(source_branch_sha).to eq(target_branch_sha)
end
- it { expect(merge_request).to be_valid }
- it { expect(merge_request).to be_merged }
+ it 'keeps the merge request valid' do
+ expect { execute_ff_merge }
+ .not_to change { merge_request.valid? }
+ end
+
+ it 'updates the merge request to merged' do
+ expect { execute_ff_merge }
+ .to change { merge_request.merged? }
+ .from(false)
+ .to(true)
+ end
it 'sends email to user2 about merge of new merge_request' do
+ execute_ff_merge
+
email = ActionMailer::Base.deliveries.last
expect(email.to.first).to eq(user2.email)
expect(email.subject).to include(merge_request.title)
end
it 'creates system note about merge_request merge' do
+ execute_ff_merge
+
note = merge_request.notes.last
expect(note.note).to include 'merged'
end
+
+ it 'does not update squash_commit_sha if it is not a squash' do
+ expect { execute_ff_merge }.not_to change { merge_request.squash_commit_sha }
+ end
+
+ it 'updates squash_commit_sha if it is a squash' do
+ merge_request.update!(squash: true)
+
+ expect { execute_ff_merge }
+ .to change { merge_request.squash_commit_sha }
+ .from(nil)
+ end
end
context 'error handling' do
@@ -83,6 +113,16 @@ describe MergeRequests::FfMergeService do
expect(merge_request.merge_error).to include(error_message)
expect(Rails.logger).to have_received(:error).with(a_string_matching(error_message))
end
+
+ it 'does not update squash_commit_sha if squash merge is not successful' do
+ merge_request.update!(squash: true)
+
+ expect(project.repository.raw).to receive(:ff_merge) do
+ raise 'Merge error'
+ end
+
+ expect { service.execute(merge_request) }.not_to change { merge_request.squash_commit_sha }
+ end
end
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 7a5e570558e..d7533f99683 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -66,6 +66,11 @@ RSpec.configure do |config|
config.infer_spec_type_from_file_location!
config.full_backtrace = !!ENV['CI']
+ unless ENV['CI']
+ # Re-run failures locally with `--only-failures`
+ config.example_status_persistence_file_path = './spec/examples.txt'
+ end
+
config.define_derived_metadata(file_path: %r{(ee)?/spec/.+_spec\.rb\z}) do |metadata|
location = metadata[:location]