author    GitLab Bot <gitlab-bot@gitlab.com>  2020-04-20 18:38:24 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-04-20 18:38:24 +0000
commit    983a0bba5d2a042c4a3bbb22432ec192c7501d82 (patch)
tree      b153cd387c14ba23bd5a07514c7c01fddf6a78a0 /spec/frontend/monitoring
parent    a2bddee2cdb38673df0e004d5b32d9f77797de64 (diff)
download  gitlab-ce-983a0bba5d2a042c4a3bbb22432ec192c7501d82.tar.gz
Add latest changes from gitlab-org/gitlab@12-10-stable-ee
Diffstat (limited to 'spec/frontend/monitoring')
-rw-r--r--  spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap | 169
-rw-r--r--  spec/frontend/monitoring/components/charts/annotations_spec.js                    |  11
-rw-r--r--  spec/frontend/monitoring/components/charts/options_spec.js                        |  29
-rw-r--r--  spec/frontend/monitoring/components/charts/time_series_spec.js                    |  55
-rw-r--r--  spec/frontend/monitoring/components/dashboard_spec.js                             | 132
-rw-r--r--  spec/frontend/monitoring/components/dashboard_template_spec.js                    |   2
-rw-r--r--  spec/frontend/monitoring/components/dashboard_url_time_spec.js                    |   3
-rw-r--r--  spec/frontend/monitoring/components/panel_type_spec.js                            |  93
-rw-r--r--  spec/frontend/monitoring/fixture_data.js                                          |  49
-rw-r--r--  spec/frontend/monitoring/init_utils.js                                            |  57
-rw-r--r--  spec/frontend/monitoring/mock_data.js                                             | 348
-rw-r--r--  spec/frontend/monitoring/store/actions_spec.js                                    |  75
-rw-r--r--  spec/frontend/monitoring/store/getters_spec.js                                    |  90
-rw-r--r--  spec/frontend/monitoring/store/mutations_spec.js                                  |   7
-rw-r--r--  spec/frontend/monitoring/store/utils_spec.js                                      |  31
-rw-r--r--  spec/frontend/monitoring/store_utils.js                                           |  34
-rw-r--r--  spec/frontend/monitoring/utils_spec.js                                            |  11
17 files changed, 546 insertions(+), 650 deletions(-)
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index d968b042ff1..1906ad7c6ed 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -6,101 +6,106 @@ exports[`Dashboard template matches the default snapshot 1`] = `
data-qa-selector="prometheus_graphs"
>
<div
- class="prometheus-graphs-header gl-p-3 pb-0 border-bottom bg-gray-light"
+ class="prometheus-graphs-header d-sm-flex flex-sm-wrap pt-2 pr-1 pb-0 pl-2 border-bottom bg-gray-light"
>
<div
- class="row"
+ class="mb-2 pr-2 d-flex d-sm-block"
>
- <gl-form-group-stub
- class="col-sm-12 col-md-6 col-lg-2"
- label="Dashboard"
- label-for="monitor-dashboards-dropdown"
- label-size="sm"
- >
- <dashboards-dropdown-stub
- class="mb-0 d-flex"
- data-qa-selector="dashboards_filter_dropdown"
- defaultbranch="master"
- id="monitor-dashboards-dropdown"
- selecteddashboard="[object Object]"
- toggle-class="dropdown-menu-toggle"
- />
- </gl-form-group-stub>
-
- <gl-form-group-stub
- class="col-sm-6 col-md-6 col-lg-2"
- label="Environment"
- label-for="monitor-environments-dropdown"
- label-size="sm"
+ <dashboards-dropdown-stub
+ class="flex-grow-1"
+ data-qa-selector="dashboards_filter_dropdown"
+ defaultbranch="master"
+ id="monitor-dashboards-dropdown"
+ selecteddashboard="[object Object]"
+ toggle-class="dropdown-menu-toggle"
+ />
+ </div>
+
+ <div
+ class="mb-2 pr-2 d-flex d-sm-block"
+ >
+ <gl-dropdown-stub
+ class="flex-grow-1"
+ data-qa-selector="environments_dropdown"
+ id="monitor-environments-dropdown"
+ menu-class="monitor-environment-dropdown-menu"
+ text="production"
+ toggle-class="dropdown-menu-toggle"
>
- <gl-dropdown-stub
- class="mb-0 d-flex"
- data-qa-selector="environments_dropdown"
- id="monitor-environments-dropdown"
- menu-class="monitor-environment-dropdown-menu"
- text="production"
- toggle-class="dropdown-menu-toggle"
+ <div
+ class="d-flex flex-column overflow-hidden"
>
+ <gl-dropdown-header-stub
+ class="monitor-environment-dropdown-header text-center"
+ >
+
+ Environment
+
+ </gl-dropdown-header-stub>
+
+ <gl-dropdown-divider-stub />
+
+ <gl-search-box-by-type-stub
+ class="m-2"
+ clearbuttontitle="Clear"
+ value=""
+ />
+
+ <div
+ class="flex-fill overflow-auto"
+ />
+
<div
- class="d-flex flex-column overflow-hidden"
+ class="text-secondary no-matches-message"
>
- <gl-dropdown-header-stub
- class="monitor-environment-dropdown-header text-center"
- >
- Environment
- </gl-dropdown-header-stub>
-
- <gl-dropdown-divider-stub />
-
- <gl-search-box-by-type-stub
- class="m-2"
- clearbuttontitle="Clear"
- value=""
- />
-
- <div
- class="flex-fill overflow-auto"
- />
-
- <div
- class="text-secondary no-matches-message"
- >
-
- No matching results
- </div>
+ No matching results
+
</div>
- </gl-dropdown-stub>
- </gl-form-group-stub>
-
- <gl-form-group-stub
- class="col-sm-auto col-md-auto col-lg-auto"
+ </div>
+ </gl-dropdown-stub>
+ </div>
+
+ <div
+ class="mb-2 pr-2 d-flex d-sm-block"
+ >
+ <date-time-picker-stub
+ class="flex-grow-1 show-last-dropdown"
+ customenabled="true"
data-qa-selector="show_last_dropdown"
- label="Show last"
- label-for="monitor-time-window-dropdown"
- label-size="sm"
+ options="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
+ value="[object Object]"
+ />
+ </div>
+
+ <div
+ class="mb-2 pr-2 d-flex d-sm-block"
+ >
+ <gl-deprecated-button-stub
+ class="flex-grow-1"
+ size="md"
+ title="Refresh dashboard"
+ variant="default"
>
- <date-time-picker-stub
- customenabled="true"
- options="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
- value="[object Object]"
+ <icon-stub
+ name="retry"
+ size="16"
/>
- </gl-form-group-stub>
+ </gl-deprecated-button-stub>
+ </div>
+
+ <div
+ class="flex-grow-1"
+ />
+
+ <div
+ class="d-sm-flex"
+ >
+ <!---->
- <gl-form-group-stub
- class="col-sm-2 col-md-2 col-lg-1 refresh-dashboard-button"
- >
- <gl-deprecated-button-stub
- size="md"
- title="Refresh dashboard"
- variant="default"
- >
- <icon-stub
- name="retry"
- size="16"
- />
- </gl-deprecated-button-stub>
- </gl-form-group-stub>
+ <!---->
+
+ <!---->
<!---->
</div>
diff --git a/spec/frontend/monitoring/components/charts/annotations_spec.js b/spec/frontend/monitoring/components/charts/annotations_spec.js
index 69bf1fe4ced..fc90175d307 100644
--- a/spec/frontend/monitoring/components/charts/annotations_spec.js
+++ b/spec/frontend/monitoring/components/charts/annotations_spec.js
@@ -54,6 +54,7 @@ describe('annotations spec', () => {
yAxisIndex: 1,
data: expect.any(Array),
markLine: expect.any(Object),
+ markPoint: expect.any(Object),
}),
);
@@ -61,11 +62,12 @@ describe('annotations spec', () => {
expect(annotation).toEqual(expect.any(Object));
});
- expect(annotations.data).toHaveLength(annotationsData.length);
+ expect(annotations.data).toHaveLength(0);
expect(annotations.markLine.data).toHaveLength(annotationsData.length);
+ expect(annotations.markPoint.data).toHaveLength(annotationsData.length);
});
- it('when deploments and annotations data is passed', () => {
+ it('when deployments and annotations data is passed', () => {
const annotations = generateAnnotationsSeries({
deployments: deploymentData,
annotations: annotationsData,
@@ -77,6 +79,7 @@ describe('annotations spec', () => {
yAxisIndex: 1,
data: expect.any(Array),
markLine: expect.any(Object),
+ markPoint: expect.any(Object),
}),
);
@@ -84,7 +87,9 @@ describe('annotations spec', () => {
expect(annotation).toEqual(expect.any(Object));
});
- expect(annotations.data).toHaveLength(deploymentData.length + annotationsData.length);
+ expect(annotations.data).toHaveLength(deploymentData.length);
+ expect(annotations.markLine.data).toHaveLength(annotationsData.length);
+ expect(annotations.markPoint.data).toHaveLength(annotationsData.length);
});
});
});
diff --git a/spec/frontend/monitoring/components/charts/options_spec.js b/spec/frontend/monitoring/components/charts/options_spec.js
index d219a6627bf..1c8fdc01e3e 100644
--- a/spec/frontend/monitoring/components/charts/options_spec.js
+++ b/spec/frontend/monitoring/components/charts/options_spec.js
@@ -31,7 +31,32 @@ describe('options spec', () => {
});
});
- it('formatter options', () => {
+ it('formatter options defaults to engineering notation', () => {
+ const options = getYAxisOptions();
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(3002.1)).toBe('3k');
+ });
+
+ it('formatter options allows for precision to be set explicitly', () => {
+ const options = getYAxisOptions({
+ precision: 4,
+ });
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(5002.1)).toBe('5.0021k');
+ });
+
+ it('formatter options allows for overrides in milliseconds', () => {
+ const options = getYAxisOptions({
+ format: SUPPORTED_FORMATS.milliseconds,
+ });
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(1.1234)).toBe('1.12ms');
+ });
+
+ it('formatter options allows for overrides in bytes', () => {
const options = getYAxisOptions({
format: SUPPORTED_FORMATS.bytes,
});
@@ -46,7 +71,7 @@ describe('options spec', () => {
const formatter = getTooltipFormatter();
expect(formatter).toEqual(expect.any(Function));
- expect(formatter(1)).toBe('1.000');
+ expect(formatter(0.11111)).toBe('111.1m');
});
it('defined format', () => {
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 870e47edde0..5ac716b0c63 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -1,6 +1,7 @@
import { mount } from '@vue/test-utils';
import { setTestTimeout } from 'helpers/timeout';
import { GlLink } from '@gitlab/ui';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import {
GlAreaChart,
GlLineChart,
@@ -12,23 +13,16 @@ import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
import { createStore } from '~/monitoring/stores';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import * as types from '~/monitoring/stores/mutation_types';
+import { deploymentData, mockProjectDir, annotationsData } from '../../mock_data';
import {
- deploymentData,
- mockedQueryResultFixture,
+ metricsDashboardPayload,
metricsDashboardViewModel,
- mockProjectDir,
- mockHost,
-} from '../../mock_data';
+ metricResultStatus,
+} from '../../fixture_data';
import * as iconUtils from '~/lib/utils/icon_utils';
-import { getJSONFixture } from '../../../helpers/fixtures';
const mockSvgPathContent = 'mockSvgPathContent';
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
-
jest.mock('lodash/throttle', () =>
// this throttle mock executes immediately
jest.fn(func => {
@@ -51,7 +45,7 @@ describe('Time series component', () => {
graphData: { ...graphData, type },
deploymentData: store.state.monitoringDashboard.deploymentData,
annotations: store.state.monitoringDashboard.annotations,
- projectPath: `${mockHost}${mockProjectDir}`,
+ projectPath: `${TEST_HOST}${mockProjectDir}`,
},
store,
stubs: {
@@ -74,7 +68,7 @@ describe('Time series component', () => {
store.commit(
`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultFixture,
+ metricResultStatus,
);
// dashboard is a dynamically generated fixture and stored at environment_metrics_dashboard.json
[mockGraphData] = store.state.monitoringDashboard.dashboard.panelGroups[1].panels;
@@ -284,6 +278,33 @@ describe('Time series component', () => {
});
});
+ describe('formatAnnotationsTooltipText', () => {
+ const annotationsMetadata = {
+ name: 'annotations',
+ xAxis: annotationsData[0].from,
+ yAxis: 0,
+ tooltipData: {
+ title: '2020/02/19 10:01:41',
+ content: annotationsData[0].description,
+ },
+ };
+
+ const mockMarkPoint = {
+ componentType: 'markPoint',
+ name: 'annotations',
+ value: undefined,
+ data: annotationsMetadata,
+ };
+
+ it('formats tooltip title and sets tooltip content', () => {
+ const formattedTooltipData = timeSeriesChart.vm.formatAnnotationsTooltipText(
+ mockMarkPoint,
+ );
+ expect(formattedTooltipData.title).toBe('19 Feb 2020, 10:01AM');
+ expect(formattedTooltipData.content).toBe(annotationsMetadata.tooltipData.content);
+ });
+ });
+
describe('setSvg', () => {
const mockSvgName = 'mockSvgName';
@@ -386,6 +407,8 @@ describe('Time series component', () => {
series: [
{
name: mockSeriesName,
+ type: 'line',
+ data: [],
},
],
},
@@ -448,8 +471,8 @@ describe('Time series component', () => {
deploymentFormatter = getChartOptions().yAxis[1].axisLabel.formatter;
});
- it('formats and rounds to 2 decimal places', () => {
- expect(dataFormatter(0.88888)).toBe('0.89');
+ it('formats by default to precision notation', () => {
+ expect(dataFormatter(0.88888)).toBe('889m');
});
it('deployment formatter is set as is required to display a tooltip', () => {
@@ -606,7 +629,7 @@ describe('Time series component', () => {
store = createStore();
const graphData = cloneDeep(metricsDashboardViewModel.panelGroups[0].panels[3]);
graphData.metrics.forEach(metric =>
- Object.assign(metric, { result: mockedQueryResultFixture.result }),
+ Object.assign(metric, { result: metricResultStatus.result }),
);
timeSeriesChart = makeTimeSeriesChart(graphData, 'area-chart');
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index f0b510a01f4..8b6ee9b3bf6 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -1,34 +1,23 @@
-import { shallowMount, createLocalVue, mount } from '@vue/test-utils';
-import { GlDropdownItem, GlDeprecatedButton } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
+import Tracking from '~/tracking';
+import { GlModal, GlDropdownItem, GlDeprecatedButton } from '@gitlab/ui';
import VueDraggable from 'vuedraggable';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import { metricStates } from '~/monitoring/constants';
import Dashboard from '~/monitoring/components/dashboard.vue';
-import { getJSONFixture } from '../../../../spec/frontend/helpers/fixtures';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import CustomMetricsFormFields from '~/custom_metrics/components/custom_metrics_form_fields.vue';
import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
import { createStore } from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
-import { setupComponentStore, propsData } from '../init_utils';
-import {
- metricsDashboardViewModel,
- environmentData,
- dashboardGitResponse,
- mockedQueryResultFixture,
-} from '../mock_data';
-
-const localVue = createLocalVue();
-const expectedPanelCount = 4;
-
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+import { setupStoreWithDashboard, setMetricResult, setupStoreWithData } from '../store_utils';
+import { environmentData, dashboardGitResponse, propsData } from '../mock_data';
+import { metricsDashboardViewModel, metricsDashboardPanelCount } from '../fixture_data';
describe('Dashboard', () => {
let store;
@@ -43,7 +32,6 @@ describe('Dashboard', () => {
const createShallowWrapper = (props = {}, options = {}) => {
wrapper = shallowMount(Dashboard, {
- localVue,
propsData: { ...propsData, ...props },
methods: {
fetchData: jest.fn(),
@@ -55,7 +43,6 @@ describe('Dashboard', () => {
const createMountedWrapper = (props = {}, options = {}) => {
wrapper = mount(Dashboard, {
- localVue,
propsData: { ...propsData, ...props },
methods: {
fetchData: jest.fn(),
@@ -144,7 +131,7 @@ describe('Dashboard', () => {
{ stubs: ['graph-group', 'panel-type'] },
);
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.showEmptyState).toEqual(false);
@@ -172,7 +159,7 @@ describe('Dashboard', () => {
beforeEach(() => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
@@ -201,14 +188,7 @@ describe('Dashboard', () => {
it('hides the environments dropdown list when there is no environments', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
- metricsDashboardPayload,
- );
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultFixture,
- );
+ setupStoreWithDashboard(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
expect(findAllEnvironmentsDropdownItems()).toHaveLength(0);
@@ -218,7 +198,7 @@ describe('Dashboard', () => {
it('renders the datetimepicker dropdown', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.find(DateTimePicker).exists()).toBe(true);
@@ -228,7 +208,7 @@ describe('Dashboard', () => {
it('renders the refresh dashboard button', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
const refreshBtn = wrapper.findAll({ ref: 'refreshDashboardBtn' });
@@ -241,7 +221,11 @@ describe('Dashboard', () => {
describe('when one of the metrics is missing', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
- setupComponentStore(wrapper);
+
+ const { $store } = wrapper.vm;
+
+ setupStoreWithDashboard($store);
+ setMetricResult({ $store, result: [], panel: 2 });
return wrapper.vm.$nextTick();
});
@@ -273,7 +257,7 @@ describe('Dashboard', () => {
},
);
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
@@ -348,14 +332,14 @@ describe('Dashboard', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
it('wraps vuedraggable', () => {
expect(findDraggablePanels().exists()).toBe(true);
- expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+ expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
});
it('is disabled by default', () => {
@@ -411,11 +395,11 @@ describe('Dashboard', () => {
it('shows a remove button, which removes a panel', () => {
expect(findFirstDraggableRemoveButton().isEmpty()).toBe(false);
- expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+ expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
findFirstDraggableRemoveButton().trigger('click');
return wrapper.vm.$nextTick(() => {
- expect(findDraggablePanels().length).toEqual(expectedPanelCount - 1);
+ expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount - 1);
});
});
@@ -534,7 +518,7 @@ describe('Dashboard', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true, currentDashboard });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
@@ -564,4 +548,74 @@ describe('Dashboard', () => {
});
});
});
+
+ describe('add custom metrics', () => {
+ const findAddMetricButton = () => wrapper.vm.$refs.addMetricBtn;
+ describe('when not available', () => {
+ beforeEach(() => {
+ createShallowWrapper({
+ hasMetrics: true,
+ customMetricsPath: '/endpoint',
+ });
+ });
+ it('does not render add button on the dashboard', () => {
+ expect(findAddMetricButton()).toBeUndefined();
+ });
+ });
+
+ describe('when available', () => {
+ let origPage;
+ beforeEach(done => {
+ jest.spyOn(Tracking, 'event').mockReturnValue();
+ createShallowWrapper({
+ hasMetrics: true,
+ customMetricsPath: '/endpoint',
+ customMetricsAvailable: true,
+ });
+ setupStoreWithData(wrapper.vm.$store);
+
+ origPage = document.body.dataset.page;
+ document.body.dataset.page = 'projects:environments:metrics';
+
+ wrapper.vm.$nextTick(done);
+ });
+ afterEach(() => {
+ document.body.dataset.page = origPage;
+ });
+
+ it('renders add button on the dashboard', () => {
+ expect(findAddMetricButton()).toBeDefined();
+ });
+
+ it('uses modal for custom metrics form', () => {
+ expect(wrapper.find(GlModal).exists()).toBe(true);
+ expect(wrapper.find(GlModal).attributes().modalid).toBe('add-metric');
+ });
+ it('adding new metric is tracked', done => {
+ const submitButton = wrapper.vm.$refs.submitCustomMetricsFormBtn;
+ wrapper.setData({
+ formIsValid: true,
+ });
+ wrapper.vm.$nextTick(() => {
+ submitButton.$el.click();
+ wrapper.vm.$nextTick(() => {
+ expect(Tracking.event).toHaveBeenCalledWith(
+ document.body.dataset.page,
+ 'click_button',
+ {
+ label: 'add_new_metric',
+ property: 'modal',
+ value: undefined,
+ },
+ );
+ done();
+ });
+ });
+ });
+
+ it('renders custom metrics form fields', () => {
+ expect(wrapper.find(CustomMetricsFormFields).exists()).toBe(true);
+ });
+ });
+ });
});
diff --git a/spec/frontend/monitoring/components/dashboard_template_spec.js b/spec/frontend/monitoring/components/dashboard_template_spec.js
index 38523ab82bc..d1790df4189 100644
--- a/spec/frontend/monitoring/components/dashboard_template_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_template_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
-import { propsData } from '../init_utils';
+import { propsData } from '../mock_data';
jest.mock('~/lib/utils/url_utility');
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index ebfa09874fa..65e9d036d1a 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -9,12 +9,11 @@ import {
updateHistory,
} from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
-import { mockProjectDir } from '../mock_data';
+import { mockProjectDir, propsData } from '../mock_data';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
import { defaultTimeRange } from '~/vue_shared/constants';
-import { propsData } from '../init_utils';
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility');
diff --git a/spec/frontend/monitoring/components/panel_type_spec.js b/spec/frontend/monitoring/components/panel_type_spec.js
index 02511ac46ea..819b5235284 100644
--- a/spec/frontend/monitoring/components/panel_type_spec.js
+++ b/spec/frontend/monitoring/components/panel_type_spec.js
@@ -10,17 +10,17 @@ import TimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
import AnomalyChart from '~/monitoring/components/charts/anomaly.vue';
import {
anomalyMockGraphData,
- graphDataPrometheusQueryRange,
mockLogsHref,
mockLogsPath,
mockNamespace,
mockNamespacedData,
mockTimeRange,
-} from 'jest/monitoring/mock_data';
+} from '../mock_data';
+
+import { graphData, graphDataEmpty } from '../fixture_data';
import { createStore, monitoringDashboard } from '~/monitoring/stores';
import { createStore as createEmbedGroupStore } from '~/monitoring/stores/embed_group';
-global.IS_EE = true;
global.URL.createObjectURL = jest.fn();
const mocks = {
@@ -39,10 +39,13 @@ describe('Panel Type component', () => {
const findCopyLink = () => wrapper.find({ ref: 'copyChartLink' });
const findTimeChart = () => wrapper.find({ ref: 'timeChart' });
+ const findTitle = () => wrapper.find({ ref: 'graphTitle' });
+ const findContextualMenu = () => wrapper.find({ ref: 'contextualMenu' });
const createWrapper = props => {
wrapper = shallowMount(PanelType, {
propsData: {
+ graphData,
...props,
},
store,
@@ -64,14 +67,9 @@ describe('Panel Type component', () => {
});
describe('When no graphData is available', () => {
- let glEmptyChart;
- // Deep clone object before modifying
- const graphDataNoResult = JSON.parse(JSON.stringify(graphDataPrometheusQueryRange));
- graphDataNoResult.metrics[0].result = [];
-
beforeEach(() => {
createWrapper({
- graphData: graphDataNoResult,
+ graphData: graphDataEmpty,
});
});
@@ -80,12 +78,8 @@ describe('Panel Type component', () => {
});
describe('Empty Chart component', () => {
- beforeEach(() => {
- glEmptyChart = wrapper.find(EmptyChart);
- });
-
it('renders the chart title', () => {
- expect(wrapper.find({ ref: 'graphTitle' }).text()).toBe(graphDataNoResult.title);
+ expect(findTitle().text()).toBe(graphDataEmpty.title);
});
it('renders the no download csv link', () => {
@@ -93,26 +87,19 @@ describe('Panel Type component', () => {
});
it('does not contain graph widgets', () => {
- expect(wrapper.find('.js-graph-widgets').exists()).toBe(false);
+ expect(findContextualMenu().exists()).toBe(false);
});
it('is a Vue instance', () => {
- expect(glEmptyChart.isVueInstance()).toBe(true);
- });
-
- it('it receives a graph title', () => {
- const props = glEmptyChart.props();
-
- expect(props.graphTitle).toBe(wrapper.vm.graphData.title);
+ expect(wrapper.find(EmptyChart).exists()).toBe(true);
+ expect(wrapper.find(EmptyChart).isVueInstance()).toBe(true);
});
});
});
describe('when graph data is available', () => {
beforeEach(() => {
- createWrapper({
- graphData: graphDataPrometheusQueryRange,
- });
+ createWrapper();
});
afterEach(() => {
@@ -120,11 +107,11 @@ describe('Panel Type component', () => {
});
it('renders the chart title', () => {
- expect(wrapper.find({ ref: 'graphTitle' }).text()).toBe(graphDataPrometheusQueryRange.title);
+ expect(findTitle().text()).toBe(graphData.title);
});
it('contains graph widgets', () => {
- expect(wrapper.find('.js-graph-widgets').exists()).toBe(true);
+ expect(findContextualMenu().exists()).toBe(true);
expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(true);
});
@@ -177,11 +164,7 @@ describe('Panel Type component', () => {
const findEditCustomMetricLink = () => wrapper.find({ ref: 'editMetricLink' });
beforeEach(() => {
- createWrapper({
- graphData: {
- ...graphDataPrometheusQueryRange,
- },
- });
+ createWrapper();
return wrapper.vm.$nextTick();
});
@@ -193,10 +176,10 @@ describe('Panel Type component', () => {
it('is present when the panel contains an edit_path property', () => {
wrapper.setProps({
graphData: {
- ...graphDataPrometheusQueryRange,
+ ...graphData,
metrics: [
{
- ...graphDataPrometheusQueryRange.metrics[0],
+ ...graphData.metrics[0],
edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
},
],
@@ -205,23 +188,6 @@ describe('Panel Type component', () => {
return wrapper.vm.$nextTick(() => {
expect(findEditCustomMetricLink().exists()).toBe(true);
- });
- });
-
- it('shows an "Edit metric" link for a panel with a single metric', () => {
- wrapper.setProps({
- graphData: {
- ...graphDataPrometheusQueryRange,
- metrics: [
- {
- ...graphDataPrometheusQueryRange.metrics[0],
- edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
- },
- ],
- },
- });
-
- return wrapper.vm.$nextTick(() => {
expect(findEditCustomMetricLink().text()).toBe('Edit metric');
});
});
@@ -229,14 +195,14 @@ describe('Panel Type component', () => {
it('shows an "Edit metrics" link for a panel with multiple metrics', () => {
wrapper.setProps({
graphData: {
- ...graphDataPrometheusQueryRange,
+ ...graphData,
metrics: [
{
- ...graphDataPrometheusQueryRange.metrics[0],
+ ...graphData.metrics[0],
edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
},
{
- ...graphDataPrometheusQueryRange.metrics[0],
+ ...graphData.metrics[0],
edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
},
],
@@ -253,9 +219,7 @@ describe('Panel Type component', () => {
const findViewLogsLink = () => wrapper.find({ ref: 'viewLogsLink' });
beforeEach(() => {
- createWrapper({
- graphData: graphDataPrometheusQueryRange,
- });
+ createWrapper();
return wrapper.vm.$nextTick();
});
@@ -327,7 +291,6 @@ describe('Panel Type component', () => {
beforeEach(() => {
createWrapper({
clipboardText,
- graphData: graphDataPrometheusQueryRange,
});
});
@@ -353,11 +316,13 @@ describe('Panel Type component', () => {
describe('when downloading metrics data as CSV', () => {
beforeEach(() => {
- graphDataPrometheusQueryRange.y_label = 'metric';
wrapper = shallowMount(PanelType, {
propsData: {
clipboardText: exampleText,
- graphData: graphDataPrometheusQueryRange,
+ graphData: {
+ y_label: 'metric',
+ ...graphData,
+ },
},
store,
});
@@ -370,12 +335,12 @@ describe('Panel Type component', () => {
describe('csvText', () => {
it('converts metrics data from json to csv', () => {
- const header = `timestamp,${graphDataPrometheusQueryRange.y_label}`;
- const data = graphDataPrometheusQueryRange.metrics[0].result[0].values;
+ const header = `timestamp,${graphData.y_label}`;
+ const data = graphData.metrics[0].result[0].values;
const firstRow = `${data[0][0]},${data[0][1]}`;
const secondRow = `${data[1][0]},${data[1][1]}`;
- expect(wrapper.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
+ expect(wrapper.vm.csvText).toMatch(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
});
});
@@ -402,7 +367,7 @@ describe('Panel Type component', () => {
wrapper = shallowMount(PanelType, {
propsData: {
- graphData: graphDataPrometheusQueryRange,
+ graphData,
namespace: mockNamespace,
},
store,
diff --git a/spec/frontend/monitoring/fixture_data.js b/spec/frontend/monitoring/fixture_data.js
new file mode 100644
index 00000000000..b7b72a15992
--- /dev/null
+++ b/spec/frontend/monitoring/fixture_data.js
@@ -0,0 +1,49 @@
+import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
+import { metricStates } from '~/monitoring/constants';
+
+import { metricsResult } from './mock_data';
+
+// Use globally available `getJSONFixture` so this file can be imported by both karma and jest specs
+export const metricsDashboardResponse = getJSONFixture(
+ 'metrics_dashboard/environment_metrics_dashboard.json',
+);
+export const metricsDashboardPayload = metricsDashboardResponse.dashboard;
+export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
+
+export const metricsDashboardPanelCount = 22;
+export const metricResultStatus = {
+ // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
+ metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
+ result: metricsResult,
+};
+export const metricResultPods = {
+ // Second metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
+ metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
+ result: metricsResult,
+};
+export const metricResultEmpty = {
+ metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
+ result: [],
+};
+
+// Graph data
+
+const firstPanel = metricsDashboardViewModel.panelGroups[0].panels[0];
+
+export const graphData = {
+ ...firstPanel,
+ metrics: firstPanel.metrics.map(metric => ({
+ ...metric,
+ result: metricsResult,
+ state: metricStates.OK,
+ })),
+};
+
+export const graphDataEmpty = {
+ ...firstPanel,
+ metrics: firstPanel.metrics.map(metric => ({
+ ...metric,
+ result: [],
+ state: metricStates.NO_DATA,
+ })),
+};
diff --git a/spec/frontend/monitoring/init_utils.js b/spec/frontend/monitoring/init_utils.js
deleted file mode 100644
index 55b6199fdfc..00000000000
--- a/spec/frontend/monitoring/init_utils.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import * as types from '~/monitoring/stores/mutation_types';
-import {
- metricsDashboardPayload,
- mockedEmptyResult,
- mockedQueryResultPayload,
- mockedQueryResultPayloadCoresTotal,
- mockApiEndpoint,
- environmentData,
-} from './mock_data';
-
-export const propsData = {
- hasMetrics: false,
- documentationPath: '/path/to/docs',
- settingsPath: '/path/to/settings',
- clustersPath: '/path/to/clusters',
- tagsPath: '/path/to/tags',
- projectPath: '/path/to/project',
- logsPath: '/path/to/logs',
- defaultBranch: 'master',
- metricsEndpoint: mockApiEndpoint,
- deploymentsEndpoint: null,
- emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
- emptyLoadingSvgPath: '/path/to/loading.svg',
- emptyNoDataSvgPath: '/path/to/no-data.svg',
- emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
- emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
- currentEnvironmentName: 'production',
- customMetricsAvailable: false,
- customMetricsPath: '',
- validateQueryPath: '',
-};
-
-export const setupComponentStore = wrapper => {
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
- metricsDashboardPayload,
- );
-
- // Load 3 panels to the dashboard, one with an empty result
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedEmptyResult,
- );
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayload,
- );
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayloadCoresTotal,
- );
-
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
-};
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 84dd0b70e71..56236918c68 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -1,13 +1,47 @@
-import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
-
// This import path needs to be relative for now because this mock data is used in
// Karma specs too, where the helpers/test_constants alias can not be resolved
import { TEST_HOST } from '../helpers/test_constants';
-export const mockHost = 'http://test.host';
export const mockProjectDir = '/frontend-fixtures/environments-project';
export const mockApiEndpoint = `${TEST_HOST}/monitoring/mock`;
+export const propsData = {
+ hasMetrics: false,
+ documentationPath: '/path/to/docs',
+ settingsPath: '/path/to/settings',
+ clustersPath: '/path/to/clusters',
+ tagsPath: '/path/to/tags',
+ projectPath: '/path/to/project',
+ logsPath: '/path/to/logs',
+ defaultBranch: 'master',
+ metricsEndpoint: mockApiEndpoint,
+ deploymentsEndpoint: null,
+ emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
+ emptyLoadingSvgPath: '/path/to/loading.svg',
+ emptyNoDataSvgPath: '/path/to/no-data.svg',
+ emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
+ emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
+ currentEnvironmentName: 'production',
+ customMetricsAvailable: false,
+ customMetricsPath: '',
+ validateQueryPath: '',
+};
+
+const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
+ default: false,
+ display_name: `Custom Dashboard ${idx}`,
+ can_edit: true,
+ system_dashboard: false,
+ project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
+ path: `.gitlab/dashboards/dashboard_${idx}.yml`,
+}));
+
+export const mockDashboardsErrorResponse = {
+ all_dashboards: customDashboardsData,
+ message: "Each 'panel_group' must define an array :panels",
+ status: 'error',
+};
+
export const anomalyDeploymentData = [
{
id: 111,
@@ -213,130 +247,27 @@ export const deploymentData = [
export const annotationsData = [
{
id: 'gid://gitlab/Metrics::Dashboard::Annotation/1',
- starting_at: '2020-04-01T12:51:58.373Z',
- ending_at: null,
+ startingAt: '2020-04-12 12:51:53 UTC',
+ endingAt: null,
panelId: null,
description: 'This is a test annotation',
},
{
id: 'gid://gitlab/Metrics::Dashboard::Annotation/2',
description: 'test annotation 2',
- starting_at: '2020-04-02T12:51:58.373Z',
- ending_at: null,
+ startingAt: '2020-04-13 12:51:53 UTC',
+ endingAt: null,
panelId: null,
},
{
id: 'gid://gitlab/Metrics::Dashboard::Annotation/3',
description: 'test annotation 3',
- starting_at: '2020-04-04T12:51:58.373Z',
- ending_at: null,
+ startingAt: '2020-04-16 12:51:53 UTC',
+ endingAt: null,
panelId: null,
},
];
-export const metricsNewGroupsAPIResponse = [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Pod average)',
- type: 'area-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 17,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
- appearance: {
- line: {
- width: 2,
- },
- },
- },
- ],
- },
- ],
- },
-];
-
-const metricsResult = [
- {
- metric: {},
- values: [
- [1563272065.589, '10.396484375'],
- [1563272125.589, '10.333984375'],
- [1563272185.589, '10.333984375'],
- [1563272245.589, '10.333984375'],
- [1563272305.589, '10.333984375'],
- [1563272365.589, '10.333984375'],
- [1563272425.589, '10.38671875'],
- [1563272485.589, '10.333984375'],
- [1563272545.589, '10.333984375'],
- [1563272605.589, '10.333984375'],
- [1563272665.589, '10.333984375'],
- [1563272725.589, '10.333984375'],
- [1563272785.589, '10.396484375'],
- [1563272845.589, '10.333984375'],
- [1563272905.589, '10.333984375'],
- [1563272965.589, '10.3984375'],
- [1563273025.589, '10.337890625'],
- [1563273085.589, '10.34765625'],
- [1563273145.589, '10.337890625'],
- [1563273205.589, '10.337890625'],
- [1563273265.589, '10.337890625'],
- [1563273325.589, '10.337890625'],
- [1563273385.589, '10.337890625'],
- [1563273445.589, '10.337890625'],
- [1563273505.589, '10.337890625'],
- [1563273565.589, '10.337890625'],
- [1563273625.589, '10.337890625'],
- [1563273685.589, '10.337890625'],
- [1563273745.589, '10.337890625'],
- [1563273805.589, '10.337890625'],
- [1563273865.589, '10.390625'],
- [1563273925.589, '10.390625'],
- ],
- },
-];
-
-export const mockedEmptyResult = {
- metricId: '1_response_metrics_nginx_ingress_throughput_status_code',
- result: [],
-};
-
-export const mockedEmptyThroughputResult = {
- metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
- result: [],
-};
-
-export const mockedQueryResultPayload = {
- metricId: '12_system_metrics_kubernetes_container_memory_total',
- result: metricsResult,
-};
-
-export const mockedQueryResultPayloadCoresTotal = {
- metricId: '13_system_metrics_kubernetes_container_cores_total',
- result: metricsResult,
-};
-
-export const mockedQueryResultFixture = {
- // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
- metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
- result: metricsResult,
-};
-
-export const mockedQueryResultFixtureStatusCode = {
- metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
- result: metricsResult,
-};
-
const extraEnvironmentData = new Array(15).fill(null).map((_, idx) => ({
id: `gid://gitlab/Environments/${150 + idx}`,
name: `no-deployment/noop-branch-${idx}`,
@@ -384,158 +315,6 @@ export const environmentData = [
},
].concat(extraEnvironmentData);
-export const metricsDashboardPayload = {
- dashboard: 'Environment metrics',
- priority: 1,
- panel_groups: [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Memory Used',
- weight: 4,
- y_axis: {
- format: 'megabytes',
- },
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_total',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1000/1000',
- label: 'Total',
- unit: 'MB',
- metric_id: 12,
- prometheus_endpoint_path: 'http://test',
- },
- ],
- },
- {
- title: 'Core Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- query_range:
- 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
- label: 'Total',
- unit: 'cores',
- metric_id: 13,
- },
- ],
- },
- {
- title: 'Memory Usage (Pod average)',
- type: 'line-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 14,
- },
- ],
- },
- {
- title: 'memories',
- type: 'area-chart',
- y_label: 'memories',
- metrics: [
- {
- id: 'metric_of_ages_1000',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 20,
- },
- {
- id: 'metric_of_ages_1001',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 21,
- },
- {
- id: 'metric_of_ages_1002',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 22,
- },
- {
- id: 'metric_of_ages_1003',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 23,
- },
- {
- id: 'metric_of_ages_1004',
- label: 'memory_1004',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 24,
- },
- ],
- },
- ],
- },
- {
- group: 'Response metrics (NGINX Ingress VTS)',
- priority: 10,
- panels: [
- {
- metrics: [
- {
- id: 'response_metrics_nginx_ingress_throughput_status_code',
- label: 'Status Code',
- metric_id: 1,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
- query_range:
- 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
- unit: 'req / sec',
- },
- ],
- title: 'Throughput',
- type: 'area-chart',
- weight: 1,
- y_label: 'Requests / Sec',
- },
- ],
- },
- ],
-};
-
-/**
- * Mock of response of metrics_dashboard.json
- */
-export const metricsDashboardResponse = {
- all_dashboards: [],
- dashboard: metricsDashboardPayload,
- metrics_data: {},
- status: 'success',
-};
-
-export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
-
-const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
- default: false,
- display_name: `Custom Dashboard ${idx}`,
- can_edit: true,
- system_dashboard: false,
- project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
- path: `.gitlab/dashboards/dashboard_${idx}.yml`,
-}));
-
export const dashboardGitResponse = [
{
default: true,
@@ -548,11 +327,19 @@ export const dashboardGitResponse = [
...customDashboardsData,
];
-export const mockDashboardsErrorResponse = {
- all_dashboards: customDashboardsData,
- message: "Each 'panel_group' must define an array :panels",
- status: 'error',
-};
+// Metrics mocks
+
+export const metricsResult = [
+ {
+ metric: {},
+ values: [
+ [1563272065.589, '10.396484375'],
+ [1563272125.589, '10.333984375'],
+ [1563272185.589, '10.333984375'],
+ [1563272245.589, '10.333984375'],
+ ],
+ },
+];
export const graphDataPrometheusQuery = {
title: 'Super Chart A2',
@@ -578,29 +365,6 @@ export const graphDataPrometheusQuery = {
],
};
-export const graphDataPrometheusQueryRange = {
- title: 'Super Chart A1',
- type: 'area-chart',
- weight: 2,
- metrics: [
- {
- metricId: '2_metric_a',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
- unit: 'MB',
- label: 'Total Consumption',
- prometheus_endpoint_path:
- '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024',
- result: [
- {
- metric: {},
- values: [[1495700554.925, '8.0390625'], [1495700614.925, '8.0390625']],
- },
- ],
- },
- ],
-};
-
export const graphDataPrometheusQueryRangeMultiTrack = {
title: 'Super Chart A3',
type: 'heatmap',
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index c34a5afceb0..f312aa1fd34 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -23,7 +23,11 @@ import {
setGettingStartedEmptyState,
duplicateSystemDashboard,
} from '~/monitoring/stores/actions';
-import { gqClient, parseEnvironmentsResponse } from '~/monitoring/stores/utils';
+import {
+ gqClient,
+ parseEnvironmentsResponse,
+ parseAnnotationsResponse,
+} from '~/monitoring/stores/utils';
import getEnvironments from '~/monitoring/queries/getEnvironments.query.graphql';
import getAnnotations from '~/monitoring/queries/getAnnotations.query.graphql';
import storeState from '~/monitoring/stores/state';
@@ -31,11 +35,14 @@ import {
deploymentData,
environmentData,
annotationsData,
- metricsDashboardResponse,
- metricsDashboardViewModel,
dashboardGitResponse,
mockDashboardsErrorResponse,
} from '../mock_data';
+import {
+ metricsDashboardResponse,
+ metricsDashboardViewModel,
+ metricsDashboardPanelCount,
+} from '../fixture_data';
jest.mock('~/flash');
@@ -221,6 +228,10 @@ describe('Monitoring store actions', () => {
describe('fetchAnnotations', () => {
const { state } = store;
+ state.timeRange = {
+ start: '2020-04-15T12:54:32.137Z',
+ end: '2020-08-15T12:54:32.137Z',
+ };
state.projectPath = 'gitlab-org/gitlab-test';
state.currentEnvironmentName = 'production';
state.currentDashboard = '.gitlab/dashboards/custom_dashboard.yml';
@@ -236,17 +247,25 @@ describe('Monitoring store actions', () => {
variables: {
projectPath: state.projectPath,
environmentName: state.currentEnvironmentName,
- dashboardId: state.currentDashboard,
+ dashboardPath: state.currentDashboard,
+ startingFrom: state.timeRange.start,
},
};
+ const parsedResponse = parseAnnotationsResponse(annotationsData);
mockMutate.mockResolvedValue({
data: {
project: {
- environment: {
- metricDashboard: {
- annotations: annotationsData,
- },
+ environments: {
+ nodes: [
+ {
+ metricsDashboard: {
+ annotations: {
+ nodes: parsedResponse,
+ },
+ },
+ },
+ ],
},
},
},
@@ -257,10 +276,7 @@ describe('Monitoring store actions', () => {
null,
state,
[],
- [
- { type: 'requestAnnotations' },
- { type: 'receiveAnnotationsSuccess', payload: annotationsData },
- ],
+ [{ type: 'receiveAnnotationsSuccess', payload: parsedResponse }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
@@ -274,7 +290,8 @@ describe('Monitoring store actions', () => {
variables: {
projectPath: state.projectPath,
environmentName: state.currentEnvironmentName,
- dashboardId: state.currentDashboard,
+ dashboardPath: state.currentDashboard,
+ startingFrom: state.timeRange.start,
},
};
@@ -285,7 +302,7 @@ describe('Monitoring store actions', () => {
null,
state,
[],
- [{ type: 'requestAnnotations' }, { type: 'receiveAnnotationsFailure' }],
+ [{ type: 'receiveAnnotationsFailure' }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
@@ -553,7 +570,7 @@ describe('Monitoring store actions', () => {
fetchDashboardData({ state, commit, dispatch })
.then(() => {
- expect(dispatch).toHaveBeenCalledTimes(10); // one per metric plus 1 for deployments
+ expect(dispatch).toHaveBeenCalledTimes(metricsDashboardPanelCount + 1); // plus 1 for deployments
expect(dispatch).toHaveBeenCalledWith('fetchDeploymentsData');
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
metric,
@@ -581,11 +598,13 @@ describe('Monitoring store actions', () => {
let metric;
let state;
let data;
+ let prometheusEndpointPath;
beforeEach(() => {
state = storeState();
- [metric] = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics;
- metric = convertObjectPropsToCamelCase(metric, { deep: true });
+ [metric] = metricsDashboardViewModel.panelGroups[0].panels[0].metrics;
+
+ prometheusEndpointPath = metric.prometheusEndpointPath;
data = {
metricId: metric.metricId,
@@ -594,7 +613,7 @@ describe('Monitoring store actions', () => {
});
it('commits result', done => {
- mock.onGet('http://test').reply(200, { data }); // One attempt
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
fetchPrometheusMetric,
@@ -631,7 +650,7 @@ describe('Monitoring store actions', () => {
};
it('uses calculated step', done => {
- mock.onGet('http://test').reply(200, { data }); // One attempt
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
fetchPrometheusMetric,
@@ -673,7 +692,7 @@ describe('Monitoring store actions', () => {
};
it('uses metric step', done => {
- mock.onGet('http://test').reply(200, { data }); // One attempt
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
fetchPrometheusMetric,
@@ -705,10 +724,10 @@ describe('Monitoring store actions', () => {
it('commits result, when waiting for results', done => {
// Mock multiple attempts while the cache is filling up
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').reply(200, { data }); // 4th attempt
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // 4th attempt
testAction(
fetchPrometheusMetric,
@@ -739,10 +758,10 @@ describe('Monitoring store actions', () => {
it('commits failure, when waiting for results and getting a server error', done => {
// Mock multiple attempts while the cache is filling up and fails
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').reply(500); // 4th attempt
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).reply(500); // 4th attempt
const error = new Error('Request failed with status code 500');
diff --git a/spec/frontend/monitoring/store/getters_spec.js b/spec/frontend/monitoring/store/getters_spec.js
index 40341d32cf5..f040876b832 100644
--- a/spec/frontend/monitoring/store/getters_spec.js
+++ b/spec/frontend/monitoring/store/getters_spec.js
@@ -3,18 +3,13 @@ import * as getters from '~/monitoring/stores/getters';
import mutations from '~/monitoring/stores/mutations';
import * as types from '~/monitoring/stores/mutation_types';
import { metricStates } from '~/monitoring/constants';
+import { environmentData, metricsResult } from '../mock_data';
import {
- environmentData,
- mockedEmptyThroughputResult,
- mockedQueryResultFixture,
- mockedQueryResultFixtureStatusCode,
-} from '../mock_data';
-import { getJSONFixture } from '../../helpers/fixtures';
-
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+ metricsDashboardPayload,
+ metricResultStatus,
+ metricResultPods,
+ metricResultEmpty,
+} from '../fixture_data';
describe('Monitoring store Getters', () => {
describe('getMetricStates', () => {
@@ -22,6 +17,21 @@ describe('Monitoring store Getters', () => {
let state;
let getMetricStates;
+ const setMetricSuccess = ({ result = metricsResult, group = 0, panel = 0, metric = 0 }) => {
+ const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, {
+ metricId,
+ result,
+ });
+ };
+
+ const setMetricFailure = ({ group = 0, panel = 0, metric = 0 }) => {
+ const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
+ mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
+ metricId,
+ });
+ };
+
beforeEach(() => {
setupState = (initState = {}) => {
state = initState;
@@ -61,31 +71,30 @@ describe('Monitoring store Getters', () => {
it('on an empty metric with no result, returns NO_DATA', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
+ setMetricSuccess({ result: [], group: 2 });
expect(getMetricStates()).toEqual([metricStates.NO_DATA]);
});
it('on a metric with a result, returns OK', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ setMetricSuccess({ group: 1 });
expect(getMetricStates()).toEqual([metricStates.OK]);
});
it('on a metric with an error, returns an error', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
- });
+ setMetricFailure({});
expect(getMetricStates()).toEqual([metricStates.UNKNOWN_ERROR]);
});
it('on multiple metrics with results, returns OK', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+
+ setMetricSuccess({ group: 1 });
+ setMetricSuccess({ group: 1, panel: 1 });
expect(getMetricStates()).toEqual([metricStates.OK]);
@@ -96,15 +105,8 @@ describe('Monitoring store Getters', () => {
it('on multiple metrics errors', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
- });
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
- });
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[1].panels[0].metrics[0].metricId,
- });
+ setMetricFailure({});
+ setMetricFailure({ group: 1 });
// Entire dashboard fails
expect(getMetricStates()).toEqual([metricStates.UNKNOWN_ERROR]);
@@ -116,14 +118,11 @@ describe('Monitoring store Getters', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
// An success in 1 group
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ setMetricSuccess({ group: 1 });
+
// An error in 2 groups
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[1].panels[1].metrics[0].metricId,
- });
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[2].panels[0].metrics[0].metricId,
- });
+ setMetricFailure({ group: 1, panel: 1 });
+ setMetricFailure({ group: 2, panel: 0 });
expect(getMetricStates()).toEqual([metricStates.OK, metricStates.UNKNOWN_ERROR]);
expect(getMetricStates(groups[1].key)).toEqual([
@@ -182,38 +181,35 @@ describe('Monitoring store Getters', () => {
it('an empty metric, returns empty', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultEmpty);
expect(metricsWithData()).toEqual([]);
});
it('a metric with results, it returns a metric', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
- expect(metricsWithData()).toEqual([mockedQueryResultFixture.metricId]);
+ expect(metricsWithData()).toEqual([metricResultStatus.metricId]);
});
    it('multiple metrics with results, it returns multiple metrics', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);
- expect(metricsWithData()).toEqual([
- mockedQueryResultFixture.metricId,
- mockedQueryResultFixtureStatusCode.metricId,
- ]);
+ expect(metricsWithData()).toEqual([metricResultStatus.metricId, metricResultPods.metricId]);
});
it('multiple metrics with results, it returns metrics filtered by group', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);
// First group has metrics
expect(metricsWithData(state.dashboard.panelGroups[1].key)).toEqual([
- mockedQueryResultFixture.metricId,
- mockedQueryResultFixtureStatusCode.metricId,
+ metricResultStatus.metricId,
+ metricResultPods.metricId,
]);
// Second group has no metrics
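
For orientation, the getters refactor above replaces named result fixtures with position-based helpers that look a metricId up in the dashboard payload before committing a result for it. A minimal sketch of the resulting pattern, assuming the spec's usual wiring in which the getter is curried over state (imports mirror those visible in the diff; this sketch is not the real spec file):

import mutations from '~/monitoring/stores/mutations';
import * as getters from '~/monitoring/stores/getters';
import * as types from '~/monitoring/stores/mutation_types';
import createState from '~/monitoring/stores/state';
import { metricStates } from '~/monitoring/constants';
import { metricsResult } from '../mock_data';
import { metricsDashboardPayload } from '../fixture_data';

describe('getMetricStates (sketch)', () => {
  let state;

  beforeEach(() => {
    state = createState();
  });

  // Mirrors the helper above: resolve a metricId by its position in the
  // dashboard payload, then commit a successful result for it.
  const setMetricSuccess = ({ result = metricsResult, group = 0, panel = 0, metric = 0 } = {}) => {
    const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
    mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, { metricId, result });
  };

  it('reports OK once a metric in the second group has data', () => {
    mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
    setMetricSuccess({ group: 1 });

    // Assumes the curried getter signature used by the spec: state => groupKey => [...]
    expect(getters.getMetricStates(state)()).toEqual([metricStates.OK]);
  });
});
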
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 34d224e13b0..1452e9bc491 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -6,12 +6,7 @@ import state from '~/monitoring/stores/state';
import { metricStates } from '~/monitoring/constants';
import { deploymentData, dashboardGitResponse } from '../mock_data';
-import { getJSONFixture } from '../../helpers/fixtures';
-
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+import { metricsDashboardPayload } from '../fixture_data';
describe('Monitoring mutations', () => {
let stateCopy;
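
The specs above and below now import their dashboard payload from a shared ../fixture_data module, which is new in this commit but not shown in this diff. A plausible sketch of that module, assuming it simply centralises the getJSONFixture call each spec used to make on its own; the export names match the imports visible in the diffs, the relative helpers path is inferred from the module's location, and the per-metric result exports are only described rather than reproduced:

import { getJSONFixture } from '../helpers/fixtures';

export const metricsDashboardFixture = getJSONFixture(
  'metrics_dashboard/environment_metrics_dashboard.json',
);
export const metricsDashboardPayload = metricsDashboardFixture.dashboard;

// The real module also exports per-metric result payloads (metricResultStatus,
// metricResultPods, metricResultEmpty) and a graphData panel used by the
// getters and utils specs; their exact shapes live in the file itself.
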
diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index f46409e8e32..7ee2a16b4bd 100644
--- a/spec/frontend/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
@@ -2,9 +2,11 @@ import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
import {
uniqMetricsId,
parseEnvironmentsResponse,
+ parseAnnotationsResponse,
removeLeadingSlash,
mapToDashboardViewModel,
} from '~/monitoring/stores/utils';
+import { annotationsData } from '../mock_data';
import { NOT_IN_DB_PREFIX } from '~/monitoring/constants';
const projectPath = 'gitlab-org/gitlab-test';
@@ -56,7 +58,7 @@ describe('mapToDashboardViewModel', () => {
y_label: 'Y Label A',
yAxis: {
name: 'Y Label A',
- format: 'number',
+ format: 'engineering',
precision: 2,
},
metrics: [],
@@ -138,7 +140,7 @@ describe('mapToDashboardViewModel', () => {
y_label: '',
yAxis: {
name: '',
- format: SUPPORTED_FORMATS.number,
+ format: SUPPORTED_FORMATS.engineering,
precision: 2,
},
metrics: [],
@@ -159,7 +161,7 @@ describe('mapToDashboardViewModel', () => {
},
yAxis: {
name: '',
- format: SUPPORTED_FORMATS.number,
+ format: SUPPORTED_FORMATS.engineering,
precision: 2,
},
metrics: [],
@@ -219,7 +221,7 @@ describe('mapToDashboardViewModel', () => {
},
});
- expect(getMappedPanel().yAxis.format).toBe(SUPPORTED_FORMATS.number);
+ expect(getMappedPanel().yAxis.format).toBe(SUPPORTED_FORMATS.engineering);
});
// This property allows single_stat panels to render percentile values
@@ -376,6 +378,27 @@ describe('parseEnvironmentsResponse', () => {
});
});
+describe('parseAnnotationsResponse', () => {
+ const parsedAnnotationResponse = [
+ {
+ description: 'This is a test annotation',
+ endingAt: null,
+ id: 'gid://gitlab/Metrics::Dashboard::Annotation/1',
+ panelId: null,
+ startingAt: new Date('2020-04-12T12:51:53.000Z'),
+ },
+ ];
+ it.each`
+ case | input | expected
+ ${'Returns empty array for null input'} | ${null} | ${[]}
+ ${'Returns empty array for undefined input'} | ${undefined} | ${[]}
+ ${'Returns empty array for empty input'} | ${[]} | ${[]}
+ ${'Returns parsed responses for annotations data'} | ${[annotationsData[0]]} | ${parsedAnnotationResponse}
+ `('$case', ({ input, expected }) => {
+ expect(parseAnnotationsResponse(input)).toEqual(expected);
+ });
+});
+
describe('removeLeadingSlash', () => {
[
{ input: null, output: '' },
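
The parseAnnotationsResponse block added above uses Jest's tagged-template form of it.each, while the older removeLeadingSlash cases just below it still iterate over a plain array. As a quick reference, a sketch of the same idea in table form — the null row is taken from the array above, the other rows are assumed from the function's name, and the sketch is not part of the spec:

import { removeLeadingSlash } from '~/monitoring/stores/utils';

// Tagged-template it.each: the first row names the columns, every later row
// becomes one test case, and column names can be interpolated into the title.
describe('removeLeadingSlash (table sketch)', () => {
  it.each`
    input        | output
    ${null}      | ${''}
    ${'/gitlab'} | ${'gitlab'}
    ${'gitlab'}  | ${'gitlab'}
  `('turns $input into $output', ({ input, output }) => {
    expect(removeLeadingSlash(input)).toBe(output);
  });
});
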
diff --git a/spec/frontend/monitoring/store_utils.js b/spec/frontend/monitoring/store_utils.js
new file mode 100644
index 00000000000..d764a79ccc3
--- /dev/null
+++ b/spec/frontend/monitoring/store_utils.js
@@ -0,0 +1,34 @@
+import * as types from '~/monitoring/stores/mutation_types';
+import { metricsResult, environmentData } from './mock_data';
+import { metricsDashboardPayload } from './fixture_data';
+
+export const setMetricResult = ({ $store, result, group = 0, panel = 0, metric = 0 }) => {
+ const { dashboard } = $store.state.monitoringDashboard;
+ const { metricId } = dashboard.panelGroups[group].panels[panel].metrics[metric];
+
+ $store.commit(`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`, {
+ metricId,
+ result,
+ });
+};
+
+const setEnvironmentData = $store => {
+ $store.commit(`monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`, environmentData);
+};
+
+export const setupStoreWithDashboard = $store => {
+ $store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
+ metricsDashboardPayload,
+ );
+};
+
+export const setupStoreWithData = $store => {
+ setupStoreWithDashboard($store);
+
+ setMetricResult({ $store, result: [], panel: 0 });
+ setMetricResult({ $store, result: metricsResult, panel: 1 });
+ setMetricResult({ $store, result: metricsResult, panel: 2 });
+
+ setEnvironmentData($store);
+};
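
The new store_utils.js above is the shared seam the component specs in this commit lean on. A minimal sketch of how a component spec might consume it, assuming a root store created through createStore from ~/monitoring/stores (which registers the monitoringDashboard module the helpers commit to); the component, the omitted props, and the assertion are illustrative rather than copied from the real specs:

import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
import { createStore } from '~/monitoring/stores';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { setupStoreWithData } from '../store_utils';

const localVue = createLocalVue();
localVue.use(Vuex);

describe('dashboard with a seeded store (sketch)', () => {
  let store;
  let wrapper;

  beforeEach(() => {
    store = createStore();
    // Commits the dashboard fixture, three metric results and the environments list.
    setupStoreWithData(store);
    // Required dashboard props are omitted here for brevity.
    wrapper = shallowMount(Dashboard, { localVue, store });
  });

  afterEach(() => {
    wrapper.destroy();
  });

  it('exposes panel groups on the store after seeding', () => {
    expect(store.state.monitoringDashboard.dashboard.panelGroups.length).toBeGreaterThan(0);
  });
});
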
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index 262b8b985cc..0bb1b987b2e 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -1,17 +1,17 @@
import * as monitoringUtils from '~/monitoring/utils';
import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import {
- mockHost,
mockProjectDir,
graphDataPrometheusQuery,
- graphDataPrometheusQueryRange,
anomalyMockGraphData,
barMockData,
} from './mock_data';
+import { graphData } from './fixture_data';
jest.mock('~/lib/utils/url_utility');
-const mockPath = `${mockHost}${mockProjectDir}/-/environments/29/metrics`;
+const mockPath = `${TEST_HOST}${mockProjectDir}/-/environments/29/metrics`;
const generatedLink = 'http://chart.link.com';
@@ -101,10 +101,7 @@ describe('monitoring/utils', () => {
* the validator will look for the `values` key instead of `value`
*/
it('validates data with the query_range format', () => {
- const validGraphData = monitoringUtils.graphDataValidatorForValues(
- false,
- graphDataPrometheusQueryRange,
- );
+ const validGraphData = monitoringUtils.graphDataValidatorForValues(false, graphData);
expect(validGraphData).toBe(true);
});