summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-04-16 06:09:39 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2020-04-16 06:09:39 +0000
commit1ab35c9208287638a097abc895738fbc0c0860e6 (patch)
tree4ab489a19f051aa1278030b7efa7befeb60ce194
parent7a5662b39bf5d80937dcc5a84a76ce15ff88cce7 (diff)
downloadgitlab-ce-1ab35c9208287638a097abc895738fbc0c0860e6.tar.gz
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--app/assets/javascripts/monitoring/stores/actions.js7
-rw-r--r--app/assets/javascripts/monitoring/stores/mutation_types.js1
-rw-r--r--app/models/ci/bridge.rb1
-rw-r--r--app/models/ci/build.rb9
-rw-r--r--app/models/ci/processable.rb6
-rw-r--r--app/models/concerns/ci/has_ref.rb2
-rw-r--r--app/models/concerns/ci/pipeline_delegator.rb20
-rw-r--r--changelogs/unreleased/update-ci-build-merge-request.yml5
-rw-r--r--danger/karma/Dangerfile1
-rw-r--r--doc/api/graphql/reference/gitlab_schema.graphql5
-rw-r--r--doc/api/graphql/reference/gitlab_schema.json14
-rw-r--r--doc/api/graphql/reference/index.md1
-rw-r--r--doc/ci/docker/using_docker_build.md24
-rw-r--r--doc/user/group/saml_sso/index.md2
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js22
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js58
-rw-r--r--spec/frontend/monitoring/components/dashboard_template_spec.js2
-rw-r--r--spec/frontend/monitoring/components/dashboard_url_time_spec.js3
-rw-r--r--spec/frontend/monitoring/fixture_data.js25
-rw-r--r--spec/frontend/monitoring/init_utils.js57
-rw-r--r--spec/frontend/monitoring/mock_data.js309
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js44
-rw-r--r--spec/frontend/monitoring/store/getters_spec.js90
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js7
-rw-r--r--spec/frontend/monitoring/store_utils.js34
-rw-r--r--spec/frontend/monitoring/utils_spec.js4
-rw-r--r--spec/frontend/sidebar/sidebar_assignees_spec.js74
-rw-r--r--spec/javascripts/monitoring/components/dashboard_resize_spec.js61
-rw-r--r--spec/javascripts/monitoring/fixture_data.js1
-rw-r--r--spec/javascripts/monitoring/store_utils.js1
-rw-r--r--spec/javascripts/sidebar/sidebar_assignees_spec.js64
-rw-r--r--spec/models/ci/bridge_spec.rb2
-rw-r--r--spec/models/ci/build_spec.rb91
-rw-r--r--spec/models/ci/processable_spec.rb78
34 files changed, 488 insertions, 637 deletions
diff --git a/app/assets/javascripts/monitoring/stores/actions.js b/app/assets/javascripts/monitoring/stores/actions.js
index 5b2bd1f1493..3201f1d4584 100644
--- a/app/assets/javascripts/monitoring/stores/actions.js
+++ b/app/assets/javascripts/monitoring/stores/actions.js
@@ -90,7 +90,7 @@ export const fetchData = ({ dispatch }) => {
* ready after the BE piece is implemented.
* https://gitlab.com/gitlab-org/gitlab/-/issues/211330
*/
- if (isFeatureFlagEnabled('metrics_dashboard_annotations')) {
+ if (isFeatureFlagEnabled('metricsDashboardAnnotations')) {
dispatch('fetchAnnotations');
}
};
@@ -283,8 +283,6 @@ export const receiveEnvironmentsDataFailure = ({ commit }) => {
};
export const fetchAnnotations = ({ state, dispatch }) => {
- dispatch('requestAnnotations');
-
return gqClient
.mutate({
mutation: getAnnotations,
@@ -309,9 +307,6 @@ export const fetchAnnotations = ({ state, dispatch }) => {
});
};
-// While this commit does not update the state it will
-// eventually be useful to show a loading state
-export const requestAnnotations = ({ commit }) => commit(types.REQUEST_ANNOTATIONS);
export const receiveAnnotationsSuccess = ({ commit }, data) =>
commit(types.RECEIVE_ANNOTATIONS_SUCCESS, data);
export const receiveAnnotationsFailure = ({ commit }) => commit(types.RECEIVE_ANNOTATIONS_FAILURE);
diff --git a/app/assets/javascripts/monitoring/stores/mutation_types.js b/app/assets/javascripts/monitoring/stores/mutation_types.js
index 2f9955da1b1..27a9a67edaa 100644
--- a/app/assets/javascripts/monitoring/stores/mutation_types.js
+++ b/app/assets/javascripts/monitoring/stores/mutation_types.js
@@ -4,7 +4,6 @@ export const RECEIVE_METRICS_DASHBOARD_SUCCESS = 'RECEIVE_METRICS_DASHBOARD_SUCC
export const RECEIVE_METRICS_DASHBOARD_FAILURE = 'RECEIVE_METRICS_DASHBOARD_FAILURE';
// Annotations
-export const REQUEST_ANNOTATIONS = 'REQUEST_ANNOTATIONS';
export const RECEIVE_ANNOTATIONS_SUCCESS = 'RECEIVE_ANNOTATIONS_SUCCESS';
export const RECEIVE_ANNOTATIONS_FAILURE = 'RECEIVE_ANNOTATIONS_FAILURE';
diff --git a/app/models/ci/bridge.rb b/app/models/ci/bridge.rb
index fa0619f35b0..76882dfcb0d 100644
--- a/app/models/ci/bridge.rb
+++ b/app/models/ci/bridge.rb
@@ -3,7 +3,6 @@
module Ci
class Bridge < Ci::Processable
include Ci::Contextable
- include Ci::PipelineDelegator
include Ci::Metadatable
include Importable
include AfterCommitQueue
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 74a329dccf4..cbd7945a8b5 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -4,7 +4,6 @@ module Ci
class Build < Ci::Processable
include Ci::Metadatable
include Ci::Contextable
- include Ci::PipelineDelegator
include TokenAuthenticatable
include AfterCommitQueue
include ObjectStorage::BackgroundMove
@@ -591,13 +590,7 @@ module Ci
def merge_request
strong_memoize(:merge_request) do
- merge_requests = MergeRequest.includes(:latest_merge_request_diff)
- .where(source_branch: ref, source_project: pipeline.project)
- .reorder(iid: :desc)
-
- merge_requests.find do |merge_request|
- merge_request.commit_shas.include?(pipeline.sha)
- end
+ pipeline.all_merge_requests.order(iid: :asc).first
end
end
diff --git a/app/models/ci/processable.rb b/app/models/ci/processable.rb
index 4bc8f26ec92..c123bd7c33b 100644
--- a/app/models/ci/processable.rb
+++ b/app/models/ci/processable.rb
@@ -51,6 +51,12 @@ module Ci
validates :type, presence: true
validates :scheduling_type, presence: true, on: :create, if: :validate_scheduling_type?
+ delegate :merge_request?,
+ :merge_request_ref?,
+ :legacy_detached_merge_request_pipeline?,
+ :merge_train_pipeline?,
+ to: :pipeline
+
def aggregated_needs_names
read_attribute(:aggregated_needs_names)
end
diff --git a/app/models/concerns/ci/has_ref.rb b/app/models/concerns/ci/has_ref.rb
index cf57ff47743..e2d459ea70e 100644
--- a/app/models/concerns/ci/has_ref.rb
+++ b/app/models/concerns/ci/has_ref.rb
@@ -2,7 +2,7 @@
##
# We will disable `ref` and `sha` attributes in `Ci::Build` in the future
-# and remove this module in favor of Ci::PipelineDelegator.
+# and remove this module in favor of Ci::Processable.
module Ci
module HasRef
extend ActiveSupport::Concern
diff --git a/app/models/concerns/ci/pipeline_delegator.rb b/app/models/concerns/ci/pipeline_delegator.rb
deleted file mode 100644
index 68ad0fcee31..00000000000
--- a/app/models/concerns/ci/pipeline_delegator.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-##
-# This module is mainly used by child associations of `Ci::Pipeline` that needs to look up
-# single source of truth. For example, `Ci::Build` has `git_ref` method, which behaves
-# slightly different from `Ci::Pipeline`'s `git_ref`. This is very confusing as
-# the system could behave differently time to time.
-# We should have a single interface in `Ci::Pipeline` and access the method always.
-module Ci
- module PipelineDelegator
- extend ActiveSupport::Concern
-
- included do
- delegate :merge_request?,
- :merge_request_ref?,
- :legacy_detached_merge_request_pipeline?,
- :merge_train_pipeline?, to: :pipeline
- end
- end
-end
diff --git a/changelogs/unreleased/update-ci-build-merge-request.yml b/changelogs/unreleased/update-ci-build-merge-request.yml
new file mode 100644
index 00000000000..13cc098af4e
--- /dev/null
+++ b/changelogs/unreleased/update-ci-build-merge-request.yml
@@ -0,0 +1,5 @@
+---
+title: Use Ci::Pipeline#all_merge_requests.first as Ci::Build#merge_request
+merge_request: 27968
+author:
+type: fixed
diff --git a/danger/karma/Dangerfile b/danger/karma/Dangerfile
index 6d692a89e13..cededff5f15 100644
--- a/danger/karma/Dangerfile
+++ b/danger/karma/Dangerfile
@@ -4,6 +4,7 @@
def get_karma_files(files)
files.select do |file|
file.start_with?('ee/spec/javascripts', 'spec/javascripts') &&
+ file.end_with?('_spec.js') &&
!file.end_with?('browser_spec.js')
end
end
diff --git a/doc/api/graphql/reference/gitlab_schema.graphql b/doc/api/graphql/reference/gitlab_schema.graphql
index 91d6717de7e..2e2b1d316c3 100644
--- a/doc/api/graphql/reference/gitlab_schema.graphql
+++ b/doc/api/graphql/reference/gitlab_schema.graphql
@@ -9561,6 +9561,11 @@ type Vulnerability {
location: JSON
"""
+ The project on which the vulnerability was found
+ """
+ project: Project
+
+ """
Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST)
"""
reportType: VulnerabilityReportType
diff --git a/doc/api/graphql/reference/gitlab_schema.json b/doc/api/graphql/reference/gitlab_schema.json
index fb642b1222f..f32cee266fe 100644
--- a/doc/api/graphql/reference/gitlab_schema.json
+++ b/doc/api/graphql/reference/gitlab_schema.json
@@ -28864,6 +28864,20 @@
"deprecationReason": null
},
{
+ "name": "project",
+ "description": "The project on which the vulnerability was found",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Project",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "reportType",
"description": "Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST)",
"args": [
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index d1ea825bef3..3f4fc735c96 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -1502,6 +1502,7 @@ Represents a vulnerability.
| `description` | String | Description of the vulnerability |
| `id` | ID! | GraphQL ID of the vulnerability |
| `location` | JSON | The JSON location metadata for the vulnerability. Its format depends on the type of the security scan that found the vulnerability |
+| `project` | Project | The project on which the vulnerability was found |
| `reportType` | VulnerabilityReportType | Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST) |
| `severity` | VulnerabilitySeverity | Severity of the vulnerability (INFO, UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL) |
| `state` | VulnerabilityState | State of the vulnerability (DETECTED, DISMISSED, RESOLVED, CONFIRMED) |
diff --git a/doc/ci/docker/using_docker_build.md b/doc/ci/docker/using_docker_build.md
index 0ae7a10a290..427f61deb29 100644
--- a/doc/ci/docker/using_docker_build.md
+++ b/doc/ci/docker/using_docker_build.md
@@ -118,7 +118,7 @@ not without its own challenges:
instance of Docker engine so they won't conflict with each other. But this
also means that jobs can be slower because there's no caching of layers.
- By default, Docker 17.09 and higher uses `--storage-driver overlay2` which is
- the recommended storage driver. See [Using the overlayfs driver](#using-the-overlayfs-driver)
+ the recommended storage driver. See [Using the overlayfs driver](#use-the-overlayfs-driver)
for details.
- Since the `docker:19.03.8-dind` container and the Runner container don't share their
root filesystem, the job's working directory can be used as a mount point for
@@ -448,7 +448,7 @@ The steps in the `script` section for the `build` stage can be summed up to:
1. The last two commands push the tagged Docker images to the container registry
so that they may also be used as cache for subsequent builds.
-## Using the OverlayFS driver
+## Use the OverlayFS driver
NOTE: **Note:**
The shared Runners on GitLab.com use the `overlay2` driver by default.
@@ -480,18 +480,22 @@ which can be avoided if a different driver is used, for example `overlay2`.
overlay
```
-### Use driver per project
+### Use the OverlayFS driver per project
-You can enable the driver for each project individually by editing the project's `.gitlab-ci.yml`:
+You can enable the driver for each project individually by using the `DOCKER_DRIVER`
+environment [variable](../yaml/README.md#variables) in `.gitlab-ci.yml`:
```yaml
variables:
DOCKER_DRIVER: overlay2
```
-### Use driver for every project
+### Use the OverlayFS driver for every project
-To enable the driver for every project, you can set the environment variable for every build by adding `environment` in the `[[runners]]` section of `config.toml`:
+If you use your own [GitLab Runners](https://docs.gitlab.com/runner/), you
+can enable the driver for every project by setting the `DOCKER_DRIVER`
+environment variable in the
+[`[[runners]]` section of `config.toml`](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runners-section):
```toml
environment = ["DOCKER_DRIVER=overlay2"]
@@ -499,11 +503,9 @@ environment = ["DOCKER_DRIVER=overlay2"]
If you're running multiple Runners you will have to modify all configuration files.
-> **Notes:**
->
-> - More information about the Runner configuration is available in the [Runner documentation](https://docs.gitlab.com/runner/configuration/).
-> - For more information about using OverlayFS with Docker, you can read
-> [Use the OverlayFS storage driver](https://docs.docker.com/engine/userguide/storagedriver/overlayfs-driver/).
+NOTE: **Note:**
+Read more about the [Runner configuration](https://docs.gitlab.com/runner/configuration/)
+and [using the OverlayFS storage driver](https://docs.docker.com/engine/userguide/storagedriver/overlayfs-driver/).
## Using the GitLab Container Registry
diff --git a/doc/user/group/saml_sso/index.md b/doc/user/group/saml_sso/index.md
index e78a894d987..f49dd225146 100644
--- a/doc/user/group/saml_sso/index.md
+++ b/doc/user/group/saml_sso/index.md
@@ -242,7 +242,7 @@ For a demo of the Azure SAML setup including SCIM, see [SCIM Provisioning on Azu
|--------------|----------------|
| Identifier | Identifier (Entity ID) |
| Assertion consumer service URL | Reply URL (Assertion Consumer Service URL) |
-| Identity provider single sign on URL | Login URL |
+| Identity provider single sign on URL | Sign on URL |
| Certificate fingerprint | Thumbprint |
We recommend:
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 870e47edde0..53463aa3358 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -1,6 +1,7 @@
import { mount } from '@vue/test-utils';
import { setTestTimeout } from 'helpers/timeout';
import { GlLink } from '@gitlab/ui';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import {
GlAreaChart,
GlLineChart,
@@ -12,23 +13,16 @@ import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
import { createStore } from '~/monitoring/stores';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import * as types from '~/monitoring/stores/mutation_types';
+import { deploymentData, mockProjectDir } from '../../mock_data';
import {
- deploymentData,
- mockedQueryResultFixture,
+ metricsDashboardPayload,
metricsDashboardViewModel,
- mockProjectDir,
- mockHost,
-} from '../../mock_data';
+ metricResultStatus,
+} from '../../fixture_data';
import * as iconUtils from '~/lib/utils/icon_utils';
-import { getJSONFixture } from '../../../helpers/fixtures';
const mockSvgPathContent = 'mockSvgPathContent';
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
-
jest.mock('lodash/throttle', () =>
// this throttle mock executes immediately
jest.fn(func => {
@@ -51,7 +45,7 @@ describe('Time series component', () => {
graphData: { ...graphData, type },
deploymentData: store.state.monitoringDashboard.deploymentData,
annotations: store.state.monitoringDashboard.annotations,
- projectPath: `${mockHost}${mockProjectDir}`,
+ projectPath: `${TEST_HOST}${mockProjectDir}`,
},
store,
stubs: {
@@ -74,7 +68,7 @@ describe('Time series component', () => {
store.commit(
`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultFixture,
+ metricResultStatus,
);
// dashboard is a dynamically generated fixture and stored at environment_metrics_dashboard.json
[mockGraphData] = store.state.monitoringDashboard.dashboard.panelGroups[1].panels;
@@ -606,7 +600,7 @@ describe('Time series component', () => {
store = createStore();
const graphData = cloneDeep(metricsDashboardViewModel.panelGroups[0].panels[3]);
graphData.metrics.forEach(metric =>
- Object.assign(metric, { result: mockedQueryResultFixture.result }),
+ Object.assign(metric, { result: metricResultStatus.result }),
);
timeSeriesChart = makeTimeSeriesChart(graphData, 'area-chart');
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index f0b510a01f4..92e0320f516 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount, createLocalVue, mount } from '@vue/test-utils';
+import { shallowMount, mount } from '@vue/test-utils';
import { GlDropdownItem, GlDeprecatedButton } from '@gitlab/ui';
import VueDraggable from 'vuedraggable';
import MockAdapter from 'axios-mock-adapter';
@@ -6,7 +6,6 @@ import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import { metricStates } from '~/monitoring/constants';
import Dashboard from '~/monitoring/components/dashboard.vue';
-import { getJSONFixture } from '../../../../spec/frontend/helpers/fixtures';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
@@ -14,21 +13,9 @@ import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
import { createStore } from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
-import { setupComponentStore, propsData } from '../init_utils';
-import {
- metricsDashboardViewModel,
- environmentData,
- dashboardGitResponse,
- mockedQueryResultFixture,
-} from '../mock_data';
-
-const localVue = createLocalVue();
-const expectedPanelCount = 4;
-
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+import { setupStoreWithDashboard, setMetricResult, setupStoreWithData } from '../store_utils';
+import { environmentData, dashboardGitResponse, propsData } from '../mock_data';
+import { metricsDashboardViewModel, metricsDashboardPanelCount } from '../fixture_data';
describe('Dashboard', () => {
let store;
@@ -43,7 +30,6 @@ describe('Dashboard', () => {
const createShallowWrapper = (props = {}, options = {}) => {
wrapper = shallowMount(Dashboard, {
- localVue,
propsData: { ...propsData, ...props },
methods: {
fetchData: jest.fn(),
@@ -55,7 +41,6 @@ describe('Dashboard', () => {
const createMountedWrapper = (props = {}, options = {}) => {
wrapper = mount(Dashboard, {
- localVue,
propsData: { ...propsData, ...props },
methods: {
fetchData: jest.fn(),
@@ -144,7 +129,7 @@ describe('Dashboard', () => {
{ stubs: ['graph-group', 'panel-type'] },
);
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.showEmptyState).toEqual(false);
@@ -172,7 +157,7 @@ describe('Dashboard', () => {
beforeEach(() => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
@@ -201,14 +186,7 @@ describe('Dashboard', () => {
it('hides the environments dropdown list when there is no environments', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
- metricsDashboardPayload,
- );
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultFixture,
- );
+ setupStoreWithDashboard(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
expect(findAllEnvironmentsDropdownItems()).toHaveLength(0);
@@ -218,7 +196,7 @@ describe('Dashboard', () => {
it('renders the datetimepicker dropdown', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.find(DateTimePicker).exists()).toBe(true);
@@ -228,7 +206,7 @@ describe('Dashboard', () => {
it('renders the refresh dashboard button', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick().then(() => {
const refreshBtn = wrapper.findAll({ ref: 'refreshDashboardBtn' });
@@ -241,7 +219,11 @@ describe('Dashboard', () => {
describe('when one of the metrics is missing', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
- setupComponentStore(wrapper);
+
+ const { $store } = wrapper.vm;
+
+ setupStoreWithDashboard($store);
+ setMetricResult({ $store, result: [], panel: 2 });
return wrapper.vm.$nextTick();
});
@@ -273,7 +255,7 @@ describe('Dashboard', () => {
},
);
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
@@ -348,14 +330,14 @@ describe('Dashboard', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
it('wraps vuedraggable', () => {
expect(findDraggablePanels().exists()).toBe(true);
- expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+ expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
});
it('is disabled by default', () => {
@@ -411,11 +393,11 @@ describe('Dashboard', () => {
it('shows a remove button, which removes a panel', () => {
expect(findFirstDraggableRemoveButton().isEmpty()).toBe(false);
- expect(findDraggablePanels().length).toEqual(expectedPanelCount);
+ expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount);
findFirstDraggableRemoveButton().trigger('click');
return wrapper.vm.$nextTick(() => {
- expect(findDraggablePanels().length).toEqual(expectedPanelCount - 1);
+ expect(findDraggablePanels().length).toEqual(metricsDashboardPanelCount - 1);
});
});
@@ -534,7 +516,7 @@ describe('Dashboard', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true, currentDashboard });
- setupComponentStore(wrapper);
+ setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
diff --git a/spec/frontend/monitoring/components/dashboard_template_spec.js b/spec/frontend/monitoring/components/dashboard_template_spec.js
index 38523ab82bc..d1790df4189 100644
--- a/spec/frontend/monitoring/components/dashboard_template_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_template_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
-import { propsData } from '../init_utils';
+import { propsData } from '../mock_data';
jest.mock('~/lib/utils/url_utility');
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index ebfa09874fa..65e9d036d1a 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -9,12 +9,11 @@ import {
updateHistory,
} from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
-import { mockProjectDir } from '../mock_data';
+import { mockProjectDir, propsData } from '../mock_data';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
import { defaultTimeRange } from '~/vue_shared/constants';
-import { propsData } from '../init_utils';
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility');
diff --git a/spec/frontend/monitoring/fixture_data.js b/spec/frontend/monitoring/fixture_data.js
new file mode 100644
index 00000000000..76045baa632
--- /dev/null
+++ b/spec/frontend/monitoring/fixture_data.js
@@ -0,0 +1,25 @@
+import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
+import { metricsResult } from './mock_data';
+
+// Use globally available `getJSONFixture` so this file can be imported by both karma and jest specs
+export const metricsDashboardResponse = getJSONFixture(
+ 'metrics_dashboard/environment_metrics_dashboard.json',
+);
+export const metricsDashboardPayload = metricsDashboardResponse.dashboard;
+export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
+
+export const metricsDashboardPanelCount = 22;
+export const metricResultStatus = {
+ // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
+ metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
+ result: metricsResult,
+};
+export const metricResultPods = {
+ // Second metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
+ metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
+ result: metricsResult,
+};
+export const metricResultEmpty = {
+ metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
+ result: [],
+};
diff --git a/spec/frontend/monitoring/init_utils.js b/spec/frontend/monitoring/init_utils.js
deleted file mode 100644
index 55b6199fdfc..00000000000
--- a/spec/frontend/monitoring/init_utils.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import * as types from '~/monitoring/stores/mutation_types';
-import {
- metricsDashboardPayload,
- mockedEmptyResult,
- mockedQueryResultPayload,
- mockedQueryResultPayloadCoresTotal,
- mockApiEndpoint,
- environmentData,
-} from './mock_data';
-
-export const propsData = {
- hasMetrics: false,
- documentationPath: '/path/to/docs',
- settingsPath: '/path/to/settings',
- clustersPath: '/path/to/clusters',
- tagsPath: '/path/to/tags',
- projectPath: '/path/to/project',
- logsPath: '/path/to/logs',
- defaultBranch: 'master',
- metricsEndpoint: mockApiEndpoint,
- deploymentsEndpoint: null,
- emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
- emptyLoadingSvgPath: '/path/to/loading.svg',
- emptyNoDataSvgPath: '/path/to/no-data.svg',
- emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
- emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
- currentEnvironmentName: 'production',
- customMetricsAvailable: false,
- customMetricsPath: '',
- validateQueryPath: '',
-};
-
-export const setupComponentStore = wrapper => {
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
- metricsDashboardPayload,
- );
-
- // Load 3 panels to the dashboard, one with an empty result
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedEmptyResult,
- );
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayload,
- );
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayloadCoresTotal,
- );
-
- wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
-};
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 84dd0b70e71..700fe4086f8 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -1,13 +1,47 @@
-import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
-
// This import path needs to be relative for now because this mock data is used in
// Karma specs too, where the helpers/test_constants alias can not be resolved
import { TEST_HOST } from '../helpers/test_constants';
-export const mockHost = 'http://test.host';
export const mockProjectDir = '/frontend-fixtures/environments-project';
export const mockApiEndpoint = `${TEST_HOST}/monitoring/mock`;
+export const propsData = {
+ hasMetrics: false,
+ documentationPath: '/path/to/docs',
+ settingsPath: '/path/to/settings',
+ clustersPath: '/path/to/clusters',
+ tagsPath: '/path/to/tags',
+ projectPath: '/path/to/project',
+ logsPath: '/path/to/logs',
+ defaultBranch: 'master',
+ metricsEndpoint: mockApiEndpoint,
+ deploymentsEndpoint: null,
+ emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
+ emptyLoadingSvgPath: '/path/to/loading.svg',
+ emptyNoDataSvgPath: '/path/to/no-data.svg',
+ emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
+ emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
+ currentEnvironmentName: 'production',
+ customMetricsAvailable: false,
+ customMetricsPath: '',
+ validateQueryPath: '',
+};
+
+const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
+ default: false,
+ display_name: `Custom Dashboard ${idx}`,
+ can_edit: true,
+ system_dashboard: false,
+ project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
+ path: `.gitlab/dashboards/dashboard_${idx}.yml`,
+}));
+
+export const mockDashboardsErrorResponse = {
+ all_dashboards: customDashboardsData,
+ message: "Each 'panel_group' must define an array :panels",
+ status: 'error',
+};
+
export const anomalyDeploymentData = [
{
id: 111,
@@ -266,77 +300,6 @@ export const metricsNewGroupsAPIResponse = [
},
];
-const metricsResult = [
- {
- metric: {},
- values: [
- [1563272065.589, '10.396484375'],
- [1563272125.589, '10.333984375'],
- [1563272185.589, '10.333984375'],
- [1563272245.589, '10.333984375'],
- [1563272305.589, '10.333984375'],
- [1563272365.589, '10.333984375'],
- [1563272425.589, '10.38671875'],
- [1563272485.589, '10.333984375'],
- [1563272545.589, '10.333984375'],
- [1563272605.589, '10.333984375'],
- [1563272665.589, '10.333984375'],
- [1563272725.589, '10.333984375'],
- [1563272785.589, '10.396484375'],
- [1563272845.589, '10.333984375'],
- [1563272905.589, '10.333984375'],
- [1563272965.589, '10.3984375'],
- [1563273025.589, '10.337890625'],
- [1563273085.589, '10.34765625'],
- [1563273145.589, '10.337890625'],
- [1563273205.589, '10.337890625'],
- [1563273265.589, '10.337890625'],
- [1563273325.589, '10.337890625'],
- [1563273385.589, '10.337890625'],
- [1563273445.589, '10.337890625'],
- [1563273505.589, '10.337890625'],
- [1563273565.589, '10.337890625'],
- [1563273625.589, '10.337890625'],
- [1563273685.589, '10.337890625'],
- [1563273745.589, '10.337890625'],
- [1563273805.589, '10.337890625'],
- [1563273865.589, '10.390625'],
- [1563273925.589, '10.390625'],
- ],
- },
-];
-
-export const mockedEmptyResult = {
- metricId: '1_response_metrics_nginx_ingress_throughput_status_code',
- result: [],
-};
-
-export const mockedEmptyThroughputResult = {
- metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
- result: [],
-};
-
-export const mockedQueryResultPayload = {
- metricId: '12_system_metrics_kubernetes_container_memory_total',
- result: metricsResult,
-};
-
-export const mockedQueryResultPayloadCoresTotal = {
- metricId: '13_system_metrics_kubernetes_container_cores_total',
- result: metricsResult,
-};
-
-export const mockedQueryResultFixture = {
- // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
- metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
- result: metricsResult,
-};
-
-export const mockedQueryResultFixtureStatusCode = {
- metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
- result: metricsResult,
-};
-
const extraEnvironmentData = new Array(15).fill(null).map((_, idx) => ({
id: `gid://gitlab/Environments/${150 + idx}`,
name: `no-deployment/noop-branch-${idx}`,
@@ -384,158 +347,6 @@ export const environmentData = [
},
].concat(extraEnvironmentData);
-export const metricsDashboardPayload = {
- dashboard: 'Environment metrics',
- priority: 1,
- panel_groups: [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Memory Used',
- weight: 4,
- y_axis: {
- format: 'megabytes',
- },
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_total',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1000/1000',
- label: 'Total',
- unit: 'MB',
- metric_id: 12,
- prometheus_endpoint_path: 'http://test',
- },
- ],
- },
- {
- title: 'Core Usage (Total)',
- type: 'area-chart',
- y_label: 'Total Cores',
- weight: 3,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_cores_total',
- query_range:
- 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
- label: 'Total',
- unit: 'cores',
- metric_id: 13,
- },
- ],
- },
- {
- title: 'Memory Usage (Pod average)',
- type: 'line-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 14,
- },
- ],
- },
- {
- title: 'memories',
- type: 'area-chart',
- y_label: 'memories',
- metrics: [
- {
- id: 'metric_of_ages_1000',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 20,
- },
- {
- id: 'metric_of_ages_1001',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 21,
- },
- {
- id: 'metric_of_ages_1002',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 22,
- },
- {
- id: 'metric_of_ages_1003',
- label: 'memory_1000',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 23,
- },
- {
- id: 'metric_of_ages_1004',
- label: 'memory_1004',
- unit: 'count',
- prometheus_endpoint_path: '/root',
- metric_id: 24,
- },
- ],
- },
- ],
- },
- {
- group: 'Response metrics (NGINX Ingress VTS)',
- priority: 10,
- panels: [
- {
- metrics: [
- {
- id: 'response_metrics_nginx_ingress_throughput_status_code',
- label: 'Status Code',
- metric_id: 1,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
- query_range:
- 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[2m])) by (status_code)',
- unit: 'req / sec',
- },
- ],
- title: 'Throughput',
- type: 'area-chart',
- weight: 1,
- y_label: 'Requests / Sec',
- },
- ],
- },
- ],
-};
-
-/**
- * Mock of response of metrics_dashboard.json
- */
-export const metricsDashboardResponse = {
- all_dashboards: [],
- dashboard: metricsDashboardPayload,
- metrics_data: {},
- status: 'success',
-};
-
-export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
-
-const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
- default: false,
- display_name: `Custom Dashboard ${idx}`,
- can_edit: true,
- system_dashboard: false,
- project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
- path: `.gitlab/dashboards/dashboard_${idx}.yml`,
-}));
-
export const dashboardGitResponse = [
{
default: true,
@@ -548,11 +359,47 @@ export const dashboardGitResponse = [
...customDashboardsData,
];
-export const mockDashboardsErrorResponse = {
- all_dashboards: customDashboardsData,
- message: "Each 'panel_group' must define an array :panels",
- status: 'error',
-};
+// Metrics mocks
+
+export const metricsResult = [
+ {
+ metric: {},
+ values: [
+ [1563272065.589, '10.396484375'],
+ [1563272125.589, '10.333984375'],
+ [1563272185.589, '10.333984375'],
+ [1563272245.589, '10.333984375'],
+ [1563272305.589, '10.333984375'],
+ [1563272365.589, '10.333984375'],
+ [1563272425.589, '10.38671875'],
+ [1563272485.589, '10.333984375'],
+ [1563272545.589, '10.333984375'],
+ [1563272605.589, '10.333984375'],
+ [1563272665.589, '10.333984375'],
+ [1563272725.589, '10.333984375'],
+ [1563272785.589, '10.396484375'],
+ [1563272845.589, '10.333984375'],
+ [1563272905.589, '10.333984375'],
+ [1563272965.589, '10.3984375'],
+ [1563273025.589, '10.337890625'],
+ [1563273085.589, '10.34765625'],
+ [1563273145.589, '10.337890625'],
+ [1563273205.589, '10.337890625'],
+ [1563273265.589, '10.337890625'],
+ [1563273325.589, '10.337890625'],
+ [1563273385.589, '10.337890625'],
+ [1563273445.589, '10.337890625'],
+ [1563273505.589, '10.337890625'],
+ [1563273565.589, '10.337890625'],
+ [1563273625.589, '10.337890625'],
+ [1563273685.589, '10.337890625'],
+ [1563273745.589, '10.337890625'],
+ [1563273805.589, '10.337890625'],
+ [1563273865.589, '10.390625'],
+ [1563273925.589, '10.390625'],
+ ],
+ },
+];
export const graphDataPrometheusQuery = {
title: 'Super Chart A2',
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index c34a5afceb0..4591e110974 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -31,11 +31,14 @@ import {
deploymentData,
environmentData,
annotationsData,
- metricsDashboardResponse,
- metricsDashboardViewModel,
dashboardGitResponse,
mockDashboardsErrorResponse,
} from '../mock_data';
+import {
+ metricsDashboardResponse,
+ metricsDashboardViewModel,
+ metricsDashboardPanelCount,
+} from '../fixture_data';
jest.mock('~/flash');
@@ -257,10 +260,7 @@ describe('Monitoring store actions', () => {
null,
state,
[],
- [
- { type: 'requestAnnotations' },
- { type: 'receiveAnnotationsSuccess', payload: annotationsData },
- ],
+ [{ type: 'receiveAnnotationsSuccess', payload: annotationsData }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
@@ -285,7 +285,7 @@ describe('Monitoring store actions', () => {
null,
state,
[],
- [{ type: 'requestAnnotations' }, { type: 'receiveAnnotationsFailure' }],
+ [{ type: 'receiveAnnotationsFailure' }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
@@ -553,7 +553,7 @@ describe('Monitoring store actions', () => {
fetchDashboardData({ state, commit, dispatch })
.then(() => {
- expect(dispatch).toHaveBeenCalledTimes(10); // one per metric plus 1 for deployments
+ expect(dispatch).toHaveBeenCalledTimes(metricsDashboardPanelCount + 1); // plus 1 for deployments
expect(dispatch).toHaveBeenCalledWith('fetchDeploymentsData');
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
metric,
@@ -581,11 +581,13 @@ describe('Monitoring store actions', () => {
let metric;
let state;
let data;
+ let prometheusEndpointPath;
beforeEach(() => {
state = storeState();
- [metric] = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics;
- metric = convertObjectPropsToCamelCase(metric, { deep: true });
+ [metric] = metricsDashboardViewModel.panelGroups[0].panels[0].metrics;
+
+ prometheusEndpointPath = metric.prometheusEndpointPath;
data = {
metricId: metric.metricId,
@@ -594,7 +596,7 @@ describe('Monitoring store actions', () => {
});
it('commits result', done => {
- mock.onGet('http://test').reply(200, { data }); // One attempt
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
fetchPrometheusMetric,
@@ -631,7 +633,7 @@ describe('Monitoring store actions', () => {
};
it('uses calculated step', done => {
- mock.onGet('http://test').reply(200, { data }); // One attempt
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
fetchPrometheusMetric,
@@ -673,7 +675,7 @@ describe('Monitoring store actions', () => {
};
it('uses metric step', done => {
- mock.onGet('http://test').reply(200, { data }); // One attempt
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // One attempt
testAction(
fetchPrometheusMetric,
@@ -705,10 +707,10 @@ describe('Monitoring store actions', () => {
it('commits result, when waiting for results', done => {
// Mock multiple attempts while the cache is filling up
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').reply(200, { data }); // 4th attempt
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).reply(200, { data }); // 4th attempt
testAction(
fetchPrometheusMetric,
@@ -739,10 +741,10 @@ describe('Monitoring store actions', () => {
it('commits failure, when waiting for results and getting a server error', done => {
// Mock multiple attempts while the cache is filling up and fails
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').replyOnce(statusCodes.NO_CONTENT);
- mock.onGet('http://test').reply(500); // 4th attempt
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpointPath).reply(500); // 4th attempt
const error = new Error('Request failed with status code 500');
diff --git a/spec/frontend/monitoring/store/getters_spec.js b/spec/frontend/monitoring/store/getters_spec.js
index 40341d32cf5..f040876b832 100644
--- a/spec/frontend/monitoring/store/getters_spec.js
+++ b/spec/frontend/monitoring/store/getters_spec.js
@@ -3,18 +3,13 @@ import * as getters from '~/monitoring/stores/getters';
import mutations from '~/monitoring/stores/mutations';
import * as types from '~/monitoring/stores/mutation_types';
import { metricStates } from '~/monitoring/constants';
+import { environmentData, metricsResult } from '../mock_data';
import {
- environmentData,
- mockedEmptyThroughputResult,
- mockedQueryResultFixture,
- mockedQueryResultFixtureStatusCode,
-} from '../mock_data';
-import { getJSONFixture } from '../../helpers/fixtures';
-
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+ metricsDashboardPayload,
+ metricResultStatus,
+ metricResultPods,
+ metricResultEmpty,
+} from '../fixture_data';
describe('Monitoring store Getters', () => {
describe('getMetricStates', () => {
@@ -22,6 +17,21 @@ describe('Monitoring store Getters', () => {
let state;
let getMetricStates;
+ const setMetricSuccess = ({ result = metricsResult, group = 0, panel = 0, metric = 0 }) => {
+ const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, {
+ metricId,
+ result,
+ });
+ };
+
+ const setMetricFailure = ({ group = 0, panel = 0, metric = 0 }) => {
+ const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
+ mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
+ metricId,
+ });
+ };
+
beforeEach(() => {
setupState = (initState = {}) => {
state = initState;
@@ -61,31 +71,30 @@ describe('Monitoring store Getters', () => {
it('on an empty metric with no result, returns NO_DATA', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
+ setMetricSuccess({ result: [], group: 2 });
expect(getMetricStates()).toEqual([metricStates.NO_DATA]);
});
it('on a metric with a result, returns OK', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ setMetricSuccess({ group: 1 });
expect(getMetricStates()).toEqual([metricStates.OK]);
});
it('on a metric with an error, returns an error', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
- });
+ setMetricFailure({});
expect(getMetricStates()).toEqual([metricStates.UNKNOWN_ERROR]);
});
it('on multiple metrics with results, returns OK', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+
+ setMetricSuccess({ group: 1 });
+ setMetricSuccess({ group: 1, panel: 1 });
expect(getMetricStates()).toEqual([metricStates.OK]);
@@ -96,15 +105,8 @@ describe('Monitoring store Getters', () => {
it('on multiple metrics errors', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
- });
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[0].panels[0].metrics[0].metricId,
- });
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[1].panels[0].metrics[0].metricId,
- });
+ setMetricFailure({});
+ setMetricFailure({ group: 1 });
// Entire dashboard fails
expect(getMetricStates()).toEqual([metricStates.UNKNOWN_ERROR]);
@@ -116,14 +118,11 @@ describe('Monitoring store Getters', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
// An success in 1 group
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ setMetricSuccess({ group: 1 });
+
// An error in 2 groups
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[1].panels[1].metrics[0].metricId,
- });
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId: groups[2].panels[0].metrics[0].metricId,
- });
+ setMetricFailure({ group: 1, panel: 1 });
+ setMetricFailure({ group: 2, panel: 0 });
expect(getMetricStates()).toEqual([metricStates.OK, metricStates.UNKNOWN_ERROR]);
expect(getMetricStates(groups[1].key)).toEqual([
@@ -182,38 +181,35 @@ describe('Monitoring store Getters', () => {
it('an empty metric, returns empty', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultEmpty);
expect(metricsWithData()).toEqual([]);
});
it('a metric with results, it returns a metric', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
- expect(metricsWithData()).toEqual([mockedQueryResultFixture.metricId]);
+ expect(metricsWithData()).toEqual([metricResultStatus.metricId]);
});
it('multiple metrics with results, it return multiple metrics', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);
- expect(metricsWithData()).toEqual([
- mockedQueryResultFixture.metricId,
- mockedQueryResultFixtureStatusCode.metricId,
- ]);
+ expect(metricsWithData()).toEqual([metricResultStatus.metricId, metricResultPods.metricId]);
});
it('multiple metrics with results, it returns metrics filtered by group', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);
// First group has metrics
expect(metricsWithData(state.dashboard.panelGroups[1].key)).toEqual([
- mockedQueryResultFixture.metricId,
- mockedQueryResultFixtureStatusCode.metricId,
+ metricResultStatus.metricId,
+ metricResultPods.metricId,
]);
// Second group has no metrics
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 34d224e13b0..1452e9bc491 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -6,12 +6,7 @@ import state from '~/monitoring/stores/state';
import { metricStates } from '~/monitoring/constants';
import { deploymentData, dashboardGitResponse } from '../mock_data';
-import { getJSONFixture } from '../../helpers/fixtures';
-
-const metricsDashboardFixture = getJSONFixture(
- 'metrics_dashboard/environment_metrics_dashboard.json',
-);
-const metricsDashboardPayload = metricsDashboardFixture.dashboard;
+import { metricsDashboardPayload } from '../fixture_data';
describe('Monitoring mutations', () => {
let stateCopy;
diff --git a/spec/frontend/monitoring/store_utils.js b/spec/frontend/monitoring/store_utils.js
new file mode 100644
index 00000000000..d764a79ccc3
--- /dev/null
+++ b/spec/frontend/monitoring/store_utils.js
@@ -0,0 +1,34 @@
+import * as types from '~/monitoring/stores/mutation_types';
+import { metricsResult, environmentData } from './mock_data';
+import { metricsDashboardPayload } from './fixture_data';
+
+export const setMetricResult = ({ $store, result, group = 0, panel = 0, metric = 0 }) => {
+ const { dashboard } = $store.state.monitoringDashboard;
+ const { metricId } = dashboard.panelGroups[group].panels[panel].metrics[metric];
+
+ $store.commit(`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`, {
+ metricId,
+ result,
+ });
+};
+
+const setEnvironmentData = $store => {
+ $store.commit(`monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`, environmentData);
+};
+
+export const setupStoreWithDashboard = $store => {
+ $store.commit(
+ `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
+ metricsDashboardPayload,
+ );
+};
+
+export const setupStoreWithData = $store => {
+ setupStoreWithDashboard($store);
+
+ setMetricResult({ $store, result: [], panel: 0 });
+ setMetricResult({ $store, result: metricsResult, panel: 1 });
+ setMetricResult({ $store, result: metricsResult, panel: 2 });
+
+ setEnvironmentData($store);
+};
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index 262b8b985cc..a9010e2bffa 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -1,7 +1,7 @@
import * as monitoringUtils from '~/monitoring/utils';
import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import {
- mockHost,
mockProjectDir,
graphDataPrometheusQuery,
graphDataPrometheusQueryRange,
@@ -11,7 +11,7 @@ import {
jest.mock('~/lib/utils/url_utility');
-const mockPath = `${mockHost}${mockProjectDir}/-/environments/29/metrics`;
+const mockPath = `${TEST_HOST}${mockProjectDir}/-/environments/29/metrics`;
const generatedLink = 'http://chart.link.com';
diff --git a/spec/frontend/sidebar/sidebar_assignees_spec.js b/spec/frontend/sidebar/sidebar_assignees_spec.js
new file mode 100644
index 00000000000..c1876066a21
--- /dev/null
+++ b/spec/frontend/sidebar/sidebar_assignees_spec.js
@@ -0,0 +1,74 @@
+import { shallowMount } from '@vue/test-utils';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from 'axios';
+import SidebarAssignees from '~/sidebar/components/assignees/sidebar_assignees.vue';
+import Assigness from '~/sidebar/components/assignees/assignees.vue';
+import SidebarMediator from '~/sidebar/sidebar_mediator';
+import SidebarService from '~/sidebar/services/sidebar_service';
+import SidebarStore from '~/sidebar/stores/sidebar_store';
+import Mock from './mock_data';
+
+describe('sidebar assignees', () => {
+ let wrapper;
+ let mediator;
+ let axiosMock;
+
+ const createComponent = () => {
+ wrapper = shallowMount(SidebarAssignees, {
+ propsData: {
+ mediator,
+ field: '',
+ },
+ // Attaching to document is required because this component emits something from the parent element :/
+ attachToDocument: true,
+ });
+ };
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ mediator = new SidebarMediator(Mock.mediator);
+
+ jest.spyOn(mediator, 'saveAssignees');
+ jest.spyOn(mediator, 'assignYourself');
+
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+
+ SidebarService.singleton = null;
+ SidebarStore.singleton = null;
+ SidebarMediator.singleton = null;
+ axiosMock.restore();
+ });
+
+ it('calls the mediator when saves the assignees', () => {
+ expect(mediator.saveAssignees).not.toHaveBeenCalled();
+
+ wrapper.vm.saveAssignees();
+
+ expect(mediator.saveAssignees).toHaveBeenCalled();
+ });
+
+ it('calls the mediator when "assignSelf" method is called', () => {
+ expect(mediator.assignYourself).not.toHaveBeenCalled();
+ expect(mediator.store.assignees.length).toBe(0);
+
+ wrapper.vm.assignSelf();
+
+ expect(mediator.assignYourself).toHaveBeenCalled();
+ expect(mediator.store.assignees.length).toBe(1);
+ });
+
+ it('hides assignees until fetched', () => {
+ expect(wrapper.find(Assigness).exists()).toBe(false);
+
+ wrapper.vm.store.isFetching.assignees = false;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(Assigness).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/components/dashboard_resize_spec.js b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
index 6455346e890..0c3193940e6 100644
--- a/spec/javascripts/monitoring/components/dashboard_resize_spec.js
+++ b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
@@ -2,66 +2,13 @@ import Vue from 'vue';
import { createLocalVue } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Dashboard from '~/monitoring/components/dashboard.vue';
-import * as types from '~/monitoring/stores/mutation_types';
import { createStore } from '~/monitoring/stores';
import axios from '~/lib/utils/axios_utils';
-import {
- metricsDashboardPayload,
- mockedEmptyResult,
- mockedQueryResultPayload,
- mockedQueryResultPayloadCoresTotal,
- mockApiEndpoint,
- environmentData,
-} from '../mock_data';
+import { mockApiEndpoint, propsData } from '../mock_data';
+import { metricsDashboardPayload } from '../fixture_data';
+import { setupStoreWithData } from '../store_utils';
const localVue = createLocalVue();
-const propsData = {
- hasMetrics: false,
- documentationPath: '/path/to/docs',
- settingsPath: '/path/to/settings',
- clustersPath: '/path/to/clusters',
- tagsPath: '/path/to/tags',
- projectPath: '/path/to/project',
- defaultBranch: 'master',
- metricsEndpoint: mockApiEndpoint,
- deploymentsEndpoint: null,
- emptyGettingStartedSvgPath: '/path/to/getting-started.svg',
- emptyLoadingSvgPath: '/path/to/loading.svg',
- emptyNoDataSvgPath: '/path/to/no-data.svg',
- emptyNoDataSmallSvgPath: '/path/to/no-data-small.svg',
- emptyUnableToConnectSvgPath: '/path/to/unable-to-connect.svg',
- currentEnvironmentName: 'production',
- customMetricsAvailable: false,
- customMetricsPath: '',
- validateQueryPath: '',
-};
-
-function setupComponentStore(component) {
- // Load 2 panel groups
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
- metricsDashboardPayload,
- );
-
- // Load 3 panels to the dashboard, one with an empty result
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedEmptyResult,
- );
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayload,
- );
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
- mockedQueryResultPayloadCoresTotal,
- );
-
- component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
-}
describe('Dashboard', () => {
let DashboardComponent;
@@ -109,7 +56,7 @@ describe('Dashboard', () => {
store,
});
- setupComponentStore(component);
+ setupStoreWithData(component.$store);
return Vue.nextTick().then(() => {
[promPanel] = component.$el.querySelectorAll('.prometheus-panel');
diff --git a/spec/javascripts/monitoring/fixture_data.js b/spec/javascripts/monitoring/fixture_data.js
new file mode 100644
index 00000000000..1375c27cdde
--- /dev/null
+++ b/spec/javascripts/monitoring/fixture_data.js
@@ -0,0 +1 @@
+export * from '../../frontend/monitoring/fixture_data';
diff --git a/spec/javascripts/monitoring/store_utils.js b/spec/javascripts/monitoring/store_utils.js
new file mode 100644
index 00000000000..1222716c829
--- /dev/null
+++ b/spec/javascripts/monitoring/store_utils.js
@@ -0,0 +1 @@
+export * from '../../frontend/monitoring/store_utils';
diff --git a/spec/javascripts/sidebar/sidebar_assignees_spec.js b/spec/javascripts/sidebar/sidebar_assignees_spec.js
deleted file mode 100644
index 23b8dc69925..00000000000
--- a/spec/javascripts/sidebar/sidebar_assignees_spec.js
+++ /dev/null
@@ -1,64 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import SidebarAssignees from '~/sidebar/components/assignees/sidebar_assignees.vue';
-import SidebarMediator from '~/sidebar/sidebar_mediator';
-import SidebarService from '~/sidebar/services/sidebar_service';
-import SidebarStore from '~/sidebar/stores/sidebar_store';
-import Mock from './mock_data';
-
-describe('sidebar assignees', () => {
- let vm;
- let mediator;
- let sidebarAssigneesEl;
- preloadFixtures('issues/open-issue.html');
-
- beforeEach(() => {
- loadFixtures('issues/open-issue.html');
-
- mediator = new SidebarMediator(Mock.mediator);
- spyOn(mediator, 'saveAssignees').and.callThrough();
- spyOn(mediator, 'assignYourself').and.callThrough();
-
- const SidebarAssigneeComponent = Vue.extend(SidebarAssignees);
- sidebarAssigneesEl = document.querySelector('#js-vue-sidebar-assignees');
- vm = mountComponent(
- SidebarAssigneeComponent,
- {
- mediator,
- field: sidebarAssigneesEl.dataset.field,
- },
- sidebarAssigneesEl,
- );
- });
-
- afterEach(() => {
- SidebarService.singleton = null;
- SidebarStore.singleton = null;
- SidebarMediator.singleton = null;
- });
-
- it('calls the mediator when saves the assignees', () => {
- vm.saveAssignees();
-
- expect(mediator.saveAssignees).toHaveBeenCalled();
- });
-
- it('calls the mediator when "assignSelf" method is called', () => {
- vm.assignSelf();
-
- expect(mediator.assignYourself).toHaveBeenCalled();
- expect(mediator.store.assignees.length).toEqual(1);
- });
-
- it('hides assignees until fetched', done => {
- const currentAssignee = sidebarAssigneesEl.querySelector('.value');
-
- expect(currentAssignee).toBe(null);
-
- vm.store.isFetching.assignees = false;
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.value')).toBeVisible();
- done();
- });
- });
-});
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 31e13122b95..34f89d9cdae 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -17,8 +17,6 @@ describe Ci::Bridge do
{ trigger: { project: 'my/project', branch: 'master' } }
end
- it { is_expected.to include_module(Ci::PipelineDelegator) }
-
it 'has many sourced pipelines' do
expect(bridge).to have_many(:sourced_pipelines)
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 673b9e5f076..ff9f26faad3 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -37,8 +37,6 @@ describe Ci::Build do
it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
- it { is_expected.to include_module(Ci::PipelineDelegator) }
-
describe 'associations' do
it 'has a bidirectional relationship with projects' do
expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:builds)
@@ -1818,64 +1816,65 @@ describe Ci::Build do
end
describe '#merge_request' do
- def create_mr(build, pipeline, factory: :merge_request, created_at: Time.now)
- create(factory, source_project: pipeline.project,
- target_project: pipeline.project,
- source_branch: build.ref,
- created_at: created_at)
- end
+ subject { pipeline.builds.take.merge_request }
- context 'when a MR has a reference to the pipeline' do
- before do
- @merge_request = create_mr(build, pipeline, factory: :merge_request)
+ context 'on a branch pipeline' do
+ let!(:pipeline) { create(:ci_pipeline, :with_job, project: project, ref: 'fix') }
- commits = [double(id: pipeline.sha)]
- allow(@merge_request).to receive(:commits).and_return(commits)
- allow(MergeRequest).to receive_message_chain(:includes, :where, :reorder).and_return([@merge_request])
+ context 'with no merge request' do
+ it { is_expected.to be_nil }
end
- it 'returns the single associated MR' do
- expect(build.merge_request.id).to eq(@merge_request.id)
- end
- end
+ context 'with an open merge request from the same ref name' do
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: 'fix') }
- context 'when there is not a MR referencing the pipeline' do
- it 'returns nil' do
- expect(build.merge_request).to be_nil
- end
- end
+ # If no diff exists, the pipeline commit was not part of the merge
+ # request and may have simply incidentally used the same ref name.
+ context 'without a merge request diff containing the pipeline commit' do
+ it { is_expected.to be_nil }
+ end
- context 'when more than one MR have a reference to the pipeline' do
- before do
- @merge_request = create_mr(build, pipeline, factory: :merge_request)
- @merge_request.close!
- @merge_request2 = create_mr(build, pipeline, factory: :merge_request)
+ # If the merge request was truly opened from the branch that the
+ # pipeline ran on, that head sha will be present in a diff.
+ context 'with a merge request diff containing the pipeline commit' do
+ let!(:mr_diff) { create(:merge_request_diff, merge_request: merge_request) }
+ let!(:mr_diff_commit) { create(:merge_request_diff_commit, sha: build.sha, merge_request_diff: mr_diff) }
- commits = [double(id: pipeline.sha)]
- allow(@merge_request).to receive(:commits).and_return(commits)
- allow(@merge_request2).to receive(:commits).and_return(commits)
- allow(MergeRequest).to receive_message_chain(:includes, :where, :reorder).and_return([@merge_request, @merge_request2])
+ it { is_expected.to eq(merge_request) }
+ end
end
- it 'returns the first MR' do
- expect(build.merge_request.id).to eq(@merge_request.id)
+ context 'with multiple open merge requests' do
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: 'fix') }
+ let!(:mr_diff) { create(:merge_request_diff, merge_request: merge_request) }
+ let!(:mr_diff_commit) { create(:merge_request_diff_commit, sha: build.sha, merge_request_diff: mr_diff) }
+
+ let!(:new_merge_request) { create(:merge_request, source_project: project, source_branch: 'fix', target_branch: 'staging') }
+ let!(:new_mr_diff) { create(:merge_request_diff, merge_request: new_merge_request) }
+ let!(:new_mr_diff_commit) { create(:merge_request_diff_commit, sha: build.sha, merge_request_diff: new_mr_diff) }
+
+ it 'returns the first merge request' do
+ expect(subject).to eq(merge_request)
+ end
end
end
- context 'when a Build is created after the MR' do
- before do
- @merge_request = create_mr(build, pipeline, factory: :merge_request_with_diffs)
- pipeline2 = create(:ci_pipeline, project: project)
- @build2 = create(:ci_build, pipeline: pipeline2)
+ context 'on a detached merged request pipeline' do
+ let(:pipeline) { create(:ci_pipeline, :detached_merge_request_pipeline, :with_job) }
- allow(@merge_request).to receive(:commit_shas)
- .and_return([pipeline.sha, pipeline2.sha])
- allow(MergeRequest).to receive_message_chain(:includes, :where, :reorder).and_return([@merge_request])
- end
+ it { is_expected.to eq(pipeline.merge_request) }
+ end
- it 'returns the current MR' do
- expect(@build2.merge_request.id).to eq(@merge_request.id)
- end
+ context 'on a legacy detached merged request pipeline' do
+ let(:pipeline) { create(:ci_pipeline, :legacy_detached_merge_request_pipeline, :with_job) }
+
+ it { is_expected.to eq(pipeline.merge_request) }
+ end
+
+ context 'on a pipeline for merged results' do
+ let(:pipeline) { create(:ci_pipeline, :merged_result_pipeline, :with_job) }
+
+ it { is_expected.to eq(pipeline.merge_request) }
end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index e8ef7b29681..4490371bde5 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -6,6 +6,18 @@ describe Ci::Processable do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:detached_merge_request_pipeline) do
+ create(:ci_pipeline, :detached_merge_request_pipeline, :with_job, project: project)
+ end
+
+ let_it_be(:legacy_detached_merge_request_pipeline) do
+ create(:ci_pipeline, :legacy_detached_merge_request_pipeline, :with_job, project: project)
+ end
+
+ let_it_be(:merged_result_pipeline) do
+ create(:ci_pipeline, :merged_result_pipeline, :with_job, project: project)
+ end
+
describe '#aggregated_needs_names' do
let(:with_aggregated_needs) { pipeline.processables.select_with_aggregated_needs(project) }
@@ -155,4 +167,70 @@ describe Ci::Processable do
end
end
end
+
+ describe '#merge_request?' do
+ subject { pipeline.processables.first.merge_request? }
+
+ context 'in a detached merge request pipeline' do
+ let(:pipeline) { detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request?) }
+ end
+
+    context 'in a legacy detached merge request pipeline' do
+ let(:pipeline) { legacy_detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request?) }
+ end
+
+ context 'in a pipeline for merged results' do
+ let(:pipeline) { merged_result_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request?) }
+ end
+ end
+
+ describe '#merge_request_ref?' do
+ subject { pipeline.processables.first.merge_request_ref? }
+
+ context 'in a detached merge request pipeline' do
+ let(:pipeline) { detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request_ref?) }
+ end
+
+    context 'in a legacy detached merge request pipeline' do
+ let(:pipeline) { legacy_detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request_ref?) }
+ end
+
+ context 'in a pipeline for merged results' do
+ let(:pipeline) { merged_result_pipeline }
+
+ it { is_expected.to eq(pipeline.merge_request_ref?) }
+ end
+ end
+
+ describe '#legacy_detached_merge_request_pipeline?' do
+ subject { pipeline.processables.first.legacy_detached_merge_request_pipeline? }
+
+ context 'in a detached merge request pipeline' do
+ let(:pipeline) { detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+ end
+
+    context 'in a legacy detached merge request pipeline' do
+ let(:pipeline) { legacy_detached_merge_request_pipeline }
+
+ it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+ end
+
+ context 'in a pipeline for merged results' do
+ let(:pipeline) { merged_result_pipeline }
+
+ it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+ end
+ end
end