author     GitLab Bot <gitlab-bot@gitlab.com>  2020-04-07 18:09:19 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-04-07 18:09:19 +0000
commit     3290d46655f07d7ae3dca788d6df9f326972ffd8 (patch)
tree       0d24713e1592cdd3583257f14a52d46a22539ed1
parent     c6b3ec3f56fa32a0e0ed3de0d0878d25f1adaddf (diff)
download   gitlab-ce-3290d46655f07d7ae3dca788d6df9f326972ffd8.tar.gz
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  .gitlab/ci/global.gitlab-ci.yml | 4
-rw-r--r--  .gitlab/ci/review.gitlab-ci.yml | 11
-rw-r--r--  app/assets/javascripts/monitoring/stores/actions.js | 186
-rw-r--r--  app/assets/javascripts/monitoring/stores/mutation_types.js | 11
-rw-r--r--  app/assets/javascripts/monitoring/stores/mutations.js | 6
-rw-r--r--  app/assets/javascripts/notes/components/sort_discussion.vue | 13
-rw-r--r--  app/assets/javascripts/snippets/components/snippet_blob_view.vue | 15
-rw-r--r--  app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql | 4
-rw-r--r--  app/assets/javascripts/vue_shared/components/local_storage_sync.vue | 39
-rw-r--r--  app/assets/stylesheets/framework/dropdowns.scss | 8
-rw-r--r--  app/models/clusters/cluster.rb | 1
-rw-r--r--  app/models/environment.rb | 1
-rw-r--r--  app/models/metrics/dashboard/annotation.rb | 33
-rw-r--r--  app/policies/group_policy.rb | 4
-rw-r--r--  app/policies/metrics/dashboard/annotation_policy.rb | 9
-rw-r--r--  app/policies/project_policy.rb | 4
-rw-r--r--  app/services/metrics/dashboard/annotations/create_service.rb | 80
-rw-r--r--  app/services/metrics/dashboard/annotations/delete_service.rb | 43
-rw-r--r--  changelogs/unreleased/204730-button-integration.yml | 5
-rw-r--r--  changelogs/unreleased/mwaw-211329-add-annotation-model-and-relation.yml | 5
-rw-r--r--  db/migrate/20200319124127_create_metrics_dashboard_annotations.rb | 24
-rw-r--r--  db/structure.sql | 36
-rw-r--r--  doc/administration/instance_limits.md | 8
-rw-r--r--  doc/development/i18n/externalization.md | 4
-rw-r--r--  doc/development/i18n/index.md | 4
-rw-r--r--  doc/development/i18n/merging_translations.md | 22
-rw-r--r--  doc/development/i18n/proofreader.md | 92
-rw-r--r--  doc/development/i18n/translation.md | 10
-rw-r--r--  doc/policy/maintenance.md | 26
-rw-r--r--  doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_10.png | bin 0 -> 98355 bytes
-rw-r--r--  doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_8.png | bin 66215 -> 0 bytes
-rw-r--r--  doc/user/compliance/compliance_dashboard/index.md | 3
-rw-r--r--  locale/gitlab.pot | 36
-rw-r--r--  scripts/review_apps/base-config.yaml | 3
-rwxr-xr-x  scripts/review_apps/review-apps.sh | 85
-rw-r--r--  spec/factories/metrics/dashboard/annotations.rb | 15
-rw-r--r--  spec/features/issues/user_sorts_issue_comments_spec.rb | 45
-rw-r--r--  spec/frontend/monitoring/components/charts/time_series_spec.js | 2
-rw-r--r--  spec/frontend/monitoring/components/dashboard_spec.js | 2
-rw-r--r--  spec/frontend/monitoring/init_utils.js | 2
-rw-r--r--  spec/frontend/monitoring/store/actions_spec.js | 212
-rw-r--r--  spec/frontend/monitoring/store/getters_spec.js | 30
-rw-r--r--  spec/frontend/monitoring/store/mutations_spec.js | 16
-rw-r--r--  spec/frontend/notes/components/sort_discussion_spec.js | 19
-rw-r--r--  spec/frontend/vue_shared/components/local_storage_sync_spec.js | 128
-rw-r--r--  spec/javascripts/monitoring/components/dashboard_resize_spec.js | 2
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 1
-rw-r--r--  spec/models/environment_spec.rb | 1
-rw-r--r--  spec/models/metrics/dashboard/annotation_spec.rb | 53
-rw-r--r--  spec/policies/metrics/dashboard/annotation_policy_spec.rb | 73
-rw-r--r--  spec/policies/project_policy_spec.rb | 3
-rw-r--r--  spec/services/metrics/dashboard/annotations/create_service_spec.rb | 160
-rw-r--r--  spec/services/metrics/dashboard/annotations/delete_service_spec.rb | 93
-rw-r--r--  spec/support/shared_contexts/policies/group_policy_shared_context.rb | 4
54 files changed, 1346 insertions, 350 deletions
diff --git a/.gitlab/ci/global.gitlab-ci.yml b/.gitlab/ci/global.gitlab-ci.yml
index b557624d345..e5467df7374 100644
--- a/.gitlab/ci/global.gitlab-ci.yml
+++ b/.gitlab/ci/global.gitlab-ci.yml
@@ -30,7 +30,7 @@
policy: pull
.use-pg9:
- image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.5-golang-1.14-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.21-postgresql-9.6-graphicsmagick-1.3.34"
+ image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.5-golang-1.14-git-2.26-lfs-2.9-chrome-73.0-node-12.x-yarn-1.21-postgresql-9.6-graphicsmagick-1.3.34"
services:
- name: postgres:9.6.17
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
@@ -63,7 +63,7 @@
key: "debian-stretch-ruby-2.6.5-pg11-node-12.x"
.use-pg9-ee:
- image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.5-golang-1.14-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.21-postgresql-9.6-graphicsmagick-1.3.34"
+ image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.5-golang-1.14-git-2.26-lfs-2.9-chrome-73.0-node-12.x-yarn-1.21-postgresql-9.6-graphicsmagick-1.3.34"
services:
- name: postgres:9.6.17
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
diff --git a/.gitlab/ci/review.gitlab-ci.yml b/.gitlab/ci/review.gitlab-ci.yml
index 0e93c1d3bee..8148b044eb4 100644
--- a/.gitlab/ci/review.gitlab-ci.yml
+++ b/.gitlab/ci/review.gitlab-ci.yml
@@ -81,7 +81,7 @@ review-build-cng:
.review-workflow-base:
extends:
- .default-retry
- image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-charts-build-base
+ image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-helm3-kubectl1.14
variables:
HOST_SUFFIX: "${CI_ENVIRONMENT_SLUG}"
DOMAIN: "-${CI_ENVIRONMENT_SLUG}.${REVIEW_APPS_DOMAIN}"
@@ -113,7 +113,6 @@ review-deploy:
script:
- check_kube_domain
- ensure_namespace
- - install_tiller
- install_external_dns
- download_chart
- date
@@ -149,6 +148,7 @@ review-stop-failed-deployment:
stage: prepare
script:
- delete_failed_release
+ - delete_helm2_release
review-stop:
extends:
@@ -210,8 +210,11 @@ review-qa-all:
review-performance:
extends:
- - .review-docker
+ - .default-retry
- .review:rules:mr-and-schedule-auto-if-frontend-manual-otherwise
+ image:
+ name: sitespeedio/sitespeed.io:6.3.1
+ entrypoint: [""]
stage: qa
# This is needed so that manual jobs with needs don't block the pipeline.
# See https://gitlab.com/gitlab-org/gitlab/-/issues/199979.
@@ -224,7 +227,7 @@ review-performance:
- wget -O ./gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/master/index.js
- mkdir -p sitespeed-results
script:
- - docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:6.3.1 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results "${CI_ENVIRONMENT_URL}"
+ - /start.sh --plugins.add ./gitlab-exporter --outputFolder sitespeed-results "${CI_ENVIRONMENT_URL}"
after_script:
- mv sitespeed-results/data/performance.json performance.json
artifacts:
diff --git a/app/assets/javascripts/monitoring/stores/actions.js b/app/assets/javascripts/monitoring/stores/actions.js
index 2e4987b7349..acc09fa6305 100644
--- a/app/assets/javascripts/monitoring/stores/actions.js
+++ b/app/assets/javascripts/monitoring/stores/actions.js
@@ -12,6 +12,20 @@ import { s__, sprintf } from '../../locale';
import { PROMETHEUS_TIMEOUT } from '../constants';
+function prometheusMetricQueryParams(timeRange) {
+ const { start, end } = convertToFixedRange(timeRange);
+
+ const timeDiff = (new Date(end) - new Date(start)) / 1000;
+ const minStep = 60;
+ const queryDataPoints = 600;
+
+ return {
+ start_time: start,
+ end_time: end,
+ step: Math.max(minStep, Math.ceil(timeDiff / queryDataPoints)),
+ };
+}
+
function backOffRequest(makeRequestCallback) {
return backOff((next, stop) => {
makeRequestCallback()
@@ -26,6 +40,20 @@ function backOffRequest(makeRequestCallback) {
}, PROMETHEUS_TIMEOUT);
}
+function getPrometheusMetricResult(prometheusEndpoint, params) {
+ return backOffRequest(() => axios.get(prometheusEndpoint, { params }))
+ .then(res => res.data)
+ .then(response => {
+ if (response.status === 'error') {
+ throw new Error(response.error);
+ }
+
+ return response.data.result;
+ });
+}
+
+// Setup
+
export const setGettingStartedEmptyState = ({ commit }) => {
commit(types.SET_GETTING_STARTED_EMPTY_STATE);
};
@@ -47,56 +75,26 @@ export const setShowErrorBanner = ({ commit }, enabled) => {
commit(types.SET_SHOW_ERROR_BANNER, enabled);
};
-export const requestMetricsDashboard = ({ commit }) => {
- commit(types.REQUEST_METRICS_DATA);
-};
-export const receiveMetricsDashboardSuccess = ({ commit, dispatch }, { response, params }) => {
- const { all_dashboards, dashboard, metrics_data } = response;
-
- commit(types.SET_ALL_DASHBOARDS, all_dashboards);
- commit(types.RECEIVE_METRICS_DATA_SUCCESS, dashboard);
- commit(types.SET_ENDPOINTS, convertObjectPropsToCamelCase(metrics_data));
-
- return dispatch('fetchPrometheusMetrics', params);
-};
-export const receiveMetricsDashboardFailure = ({ commit }, error) => {
- commit(types.RECEIVE_METRICS_DATA_FAILURE, error);
-};
-
-export const receiveDeploymentsDataSuccess = ({ commit }, data) =>
- commit(types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS, data);
-export const receiveDeploymentsDataFailure = ({ commit }) =>
- commit(types.RECEIVE_DEPLOYMENTS_DATA_FAILURE);
-export const requestEnvironmentsData = ({ commit }) => commit(types.REQUEST_ENVIRONMENTS_DATA);
-export const receiveEnvironmentsDataSuccess = ({ commit }, data) =>
- commit(types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS, data);
-export const receiveEnvironmentsDataFailure = ({ commit }) =>
- commit(types.RECEIVE_ENVIRONMENTS_DATA_FAILURE);
+// All Data
export const fetchData = ({ dispatch }) => {
- dispatch('fetchDashboard');
- dispatch('fetchDeploymentsData');
dispatch('fetchEnvironmentsData');
+ dispatch('fetchDashboard');
};
+// Metrics dashboard
+
export const fetchDashboard = ({ state, commit, dispatch }) => {
dispatch('requestMetricsDashboard');
const params = {};
-
- if (state.timeRange) {
- const { start, end } = convertToFixedRange(state.timeRange);
- params.start_time = start;
- params.end_time = end;
- }
-
if (state.currentDashboard) {
params.dashboard = state.currentDashboard;
}
return backOffRequest(() => axios.get(state.dashboardEndpoint, { params }))
.then(resp => resp.data)
- .then(response => dispatch('receiveMetricsDashboardSuccess', { response, params }))
+ .then(response => dispatch('receiveMetricsDashboardSuccess', { response }))
.catch(error => {
Sentry.captureException(error);
@@ -120,61 +118,43 @@ export const fetchDashboard = ({ state, commit, dispatch }) => {
});
};
-function fetchPrometheusResult(prometheusEndpoint, params) {
- return backOffRequest(() => axios.get(prometheusEndpoint, { params }))
- .then(res => res.data)
- .then(response => {
- if (response.status === 'error') {
- throw new Error(response.error);
- }
-
- return response.data.result;
- });
-}
-
-/**
- * Returns list of metrics in data.result
- * {"status":"success", "data":{"resultType":"matrix","result":[]}}
- *
- * @param {metric} metric
- */
-export const fetchPrometheusMetric = ({ commit }, { metric, params }) => {
- const { start_time, end_time } = params;
- const timeDiff = (new Date(end_time) - new Date(start_time)) / 1000;
+export const requestMetricsDashboard = ({ commit }) => {
+ commit(types.REQUEST_METRICS_DASHBOARD);
+};
+export const receiveMetricsDashboardSuccess = ({ commit, dispatch }, { response }) => {
+ const { all_dashboards, dashboard, metrics_data } = response;
- const minStep = 60;
- const queryDataPoints = 600;
- const step = metric.step ? metric.step : Math.max(minStep, Math.ceil(timeDiff / queryDataPoints));
+ commit(types.SET_ALL_DASHBOARDS, all_dashboards);
+ commit(types.RECEIVE_METRICS_DASHBOARD_SUCCESS, dashboard);
+ commit(types.SET_ENDPOINTS, convertObjectPropsToCamelCase(metrics_data));
- const queryParams = {
- start_time,
- end_time,
- step,
- };
+ return dispatch('fetchPrometheusMetrics');
+};
+export const receiveMetricsDashboardFailure = ({ commit }, error) => {
+ commit(types.RECEIVE_METRICS_DASHBOARD_FAILURE, error);
+};
- commit(types.REQUEST_METRIC_RESULT, { metricId: metric.metricId });
+// Metrics
- return fetchPrometheusResult(metric.prometheusEndpointPath, queryParams)
- .then(result => {
- commit(types.RECEIVE_METRIC_RESULT_SUCCESS, { metricId: metric.metricId, result });
- })
- .catch(error => {
- Sentry.captureException(error);
+/**
+ * Loads timeseries data: Prometheus data points and deployment data from the project
+ * @param {Object} Vuex store
+ */
+export const fetchPrometheusMetrics = ({ state, dispatch, getters }) => {
+ dispatch('fetchDeploymentsData');
- commit(types.RECEIVE_METRIC_RESULT_FAILURE, { metricId: metric.metricId, error });
- // Continue to throw error so the dashboard can notify using createFlash
- throw error;
- });
-};
+ if (!state.timeRange) {
+ createFlash(s__(`Metrics|Invalid time range, please verify.`), 'warning');
+ return Promise.reject();
+ }
-export const fetchPrometheusMetrics = ({ state, commit, dispatch, getters }, params) => {
- commit(types.REQUEST_METRICS_DATA);
+ const defaultQueryParams = prometheusMetricQueryParams(state.timeRange);
const promises = [];
state.dashboard.panelGroups.forEach(group => {
group.panels.forEach(panel => {
panel.metrics.forEach(metric => {
- promises.push(dispatch('fetchPrometheusMetric', { metric, params }));
+ promises.push(dispatch('fetchPrometheusMetric', { metric, defaultQueryParams }));
});
});
});
@@ -192,6 +172,35 @@ export const fetchPrometheusMetrics = ({ state, commit, dispatch, getters }, par
});
};
+/**
+ * Returns list of metrics in data.result
+ * {"status":"success", "data":{"resultType":"matrix","result":[]}}
+ *
+ * @param {metric} metric
+ */
+export const fetchPrometheusMetric = ({ commit }, { metric, defaultQueryParams }) => {
+ const queryParams = { ...defaultQueryParams };
+ if (metric.step) {
+ queryParams.step = metric.step;
+ }
+
+ commit(types.REQUEST_METRIC_RESULT, { metricId: metric.metricId });
+
+ return getPrometheusMetricResult(metric.prometheusEndpointPath, queryParams)
+ .then(result => {
+ commit(types.RECEIVE_METRIC_RESULT_SUCCESS, { metricId: metric.metricId, result });
+ })
+ .catch(error => {
+ Sentry.captureException(error);
+
+ commit(types.RECEIVE_METRIC_RESULT_FAILURE, { metricId: metric.metricId, error });
+ // Continue to throw error so the dashboard can notify using createFlash
+ throw error;
+ });
+};
+
+// Deployments
+
export const fetchDeploymentsData = ({ state, dispatch }) => {
if (!state.deploymentsEndpoint) {
return Promise.resolve([]);
@@ -212,6 +221,14 @@ export const fetchDeploymentsData = ({ state, dispatch }) => {
createFlash(s__('Metrics|There was an error getting deployment information.'));
});
};
+export const receiveDeploymentsDataSuccess = ({ commit }, data) => {
+ commit(types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS, data);
+};
+export const receiveDeploymentsDataFailure = ({ commit }) => {
+ commit(types.RECEIVE_DEPLOYMENTS_DATA_FAILURE);
+};
+
+// Environments
export const fetchEnvironmentsData = ({ state, dispatch }) => {
dispatch('requestEnvironmentsData');
@@ -241,6 +258,17 @@ export const fetchEnvironmentsData = ({ state, dispatch }) => {
createFlash(s__('Metrics|There was an error getting environments information.'));
});
};
+export const requestEnvironmentsData = ({ commit }) => {
+ commit(types.REQUEST_ENVIRONMENTS_DATA);
+};
+export const receiveEnvironmentsDataSuccess = ({ commit }, data) => {
+ commit(types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS, data);
+};
+export const receiveEnvironmentsDataFailure = ({ commit }) => {
+ commit(types.RECEIVE_ENVIRONMENTS_DATA_FAILURE);
+};
+
+// Dashboard manipulation
/**
* Set a new array of metrics to a panel group
diff --git a/app/assets/javascripts/monitoring/stores/mutation_types.js b/app/assets/javascripts/monitoring/stores/mutation_types.js
index 09eb7dc1673..9a3489d53d7 100644
--- a/app/assets/javascripts/monitoring/stores/mutation_types.js
+++ b/app/assets/javascripts/monitoring/stores/mutation_types.js
@@ -1,19 +1,24 @@
-export const REQUEST_METRICS_DATA = 'REQUEST_METRICS_DATA';
-export const RECEIVE_METRICS_DATA_SUCCESS = 'RECEIVE_METRICS_DATA_SUCCESS';
-export const RECEIVE_METRICS_DATA_FAILURE = 'RECEIVE_METRICS_DATA_FAILURE';
+// Dashboard "skeleton", groups, panels and metrics
+export const REQUEST_METRICS_DASHBOARD = 'REQUEST_METRICS_DASHBOARD';
+export const RECEIVE_METRICS_DASHBOARD_SUCCESS = 'RECEIVE_METRICS_DASHBOARD_SUCCESS';
+export const RECEIVE_METRICS_DASHBOARD_FAILURE = 'RECEIVE_METRICS_DASHBOARD_FAILURE';
+// Git project deployments
export const REQUEST_DEPLOYMENTS_DATA = 'REQUEST_DEPLOYMENTS_DATA';
export const RECEIVE_DEPLOYMENTS_DATA_SUCCESS = 'RECEIVE_DEPLOYMENTS_DATA_SUCCESS';
export const RECEIVE_DEPLOYMENTS_DATA_FAILURE = 'RECEIVE_DEPLOYMENTS_DATA_FAILURE';
+// Environments
export const REQUEST_ENVIRONMENTS_DATA = 'REQUEST_ENVIRONMENTS_DATA';
export const RECEIVE_ENVIRONMENTS_DATA_SUCCESS = 'RECEIVE_ENVIRONMENTS_DATA_SUCCESS';
export const RECEIVE_ENVIRONMENTS_DATA_FAILURE = 'RECEIVE_ENVIRONMENTS_DATA_FAILURE';
+// Metric data points
export const REQUEST_METRIC_RESULT = 'REQUEST_METRIC_RESULT';
export const RECEIVE_METRIC_RESULT_SUCCESS = 'RECEIVE_METRIC_RESULT_SUCCESS';
export const RECEIVE_METRIC_RESULT_FAILURE = 'RECEIVE_METRIC_RESULT_FAILURE';
+// Parameters and other information
export const SET_TIME_RANGE = 'SET_TIME_RANGE';
export const SET_ALL_DASHBOARDS = 'SET_ALL_DASHBOARDS';
export const SET_ENDPOINTS = 'SET_ENDPOINTS';
diff --git a/app/assets/javascripts/monitoring/stores/mutations.js b/app/assets/javascripts/monitoring/stores/mutations.js
index 2e10d189087..0a7bb47d533 100644
--- a/app/assets/javascripts/monitoring/stores/mutations.js
+++ b/app/assets/javascripts/monitoring/stores/mutations.js
@@ -74,18 +74,18 @@ export default {
/**
* Dashboard panels structure and global state
*/
- [types.REQUEST_METRICS_DATA](state) {
+ [types.REQUEST_METRICS_DASHBOARD](state) {
state.emptyState = 'loading';
state.showEmptyState = true;
},
- [types.RECEIVE_METRICS_DATA_SUCCESS](state, dashboard) {
+ [types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, dashboard) {
state.dashboard = mapToDashboardViewModel(dashboard);
if (!state.dashboard.panelGroups.length) {
state.emptyState = 'noData';
}
},
- [types.RECEIVE_METRICS_DATA_FAILURE](state, error) {
+ [types.RECEIVE_METRICS_DASHBOARD_FAILURE](state, error) {
state.emptyState = error ? 'unableToConnect' : 'noData';
state.showEmptyState = true;
},
diff --git a/app/assets/javascripts/notes/components/sort_discussion.vue b/app/assets/javascripts/notes/components/sort_discussion.vue
index 3f82ddde3ef..4a7543819eb 100644
--- a/app/assets/javascripts/notes/components/sort_discussion.vue
+++ b/app/assets/javascripts/notes/components/sort_discussion.vue
@@ -1,7 +1,9 @@
<script>
import { GlIcon } from '@gitlab/ui';
import { mapActions, mapGetters } from 'vuex';
import { __ } from '~/locale';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import Tracking from '~/tracking';
import { ASC, DESC } from '../constants';
@@ -14,16 +16,20 @@ export default {
SORT_OPTIONS,
components: {
GlIcon,
+ LocalStorageSync,
},
mixins: [Tracking.mixin()],
computed: {
- ...mapGetters(['sortDirection']),
+ ...mapGetters(['sortDirection', 'noteableType']),
selectedOption() {
return SORT_OPTIONS.find(({ key }) => this.sortDirection === key);
},
dropdownText() {
return this.selectedOption.text;
},
+ storageKey() {
+ return `sort_direction_${this.noteableType.toLowerCase()}`;
+ },
},
methods: {
...mapActions(['setDiscussionSortDirection']),
@@ -44,6 +50,11 @@ export default {
<template>
<div class="mr-2 d-inline-block align-bottom full-width-mobile">
+ <local-storage-sync
+ :value="sortDirection"
+ :storage-key="storageKey"
+ @input="setDiscussionSortDirection"
+ />
<button class="btn btn-sm js-dropdown-text" data-toggle="dropdown" aria-expanded="false">
{{ dropdownText }}
<gl-icon name="chevron-down" />
diff --git a/app/assets/javascripts/snippets/components/snippet_blob_view.vue b/app/assets/javascripts/snippets/components/snippet_blob_view.vue
index 4703a940e08..3e3dcab70c0 100644
--- a/app/assets/javascripts/snippets/components/snippet_blob_view.vue
+++ b/app/assets/javascripts/snippets/components/snippet_blob_view.vue
@@ -4,6 +4,7 @@ import { SNIPPET_VISIBILITY_PUBLIC } from '../constants';
import BlobHeader from '~/blob/components/blob_header.vue';
import BlobContent from '~/blob/components/blob_content.vue';
import { GlLoadingIcon } from '@gitlab/ui';
+import CloneDropdownButton from '~/vue_shared/components/clone_dropdown.vue';
import GetSnippetBlobQuery from '../queries/snippet.blob.query.graphql';
import GetBlobContent from '../queries/snippet.blob.content.query.graphql';
@@ -16,6 +17,7 @@ export default {
BlobHeader,
BlobContent,
GlLoadingIcon,
+ CloneDropdownButton,
},
apollo: {
blob: {
@@ -72,6 +74,9 @@ export default {
const { richViewer, simpleViewer } = this.blob;
return this.activeViewerType === RICH_BLOB_VIEWER ? richViewer : simpleViewer;
},
+ canBeCloned() {
+ return this.snippet.sshUrlToRepo || this.snippet.httpUrlToRepo;
+ },
},
methods: {
switchViewer(newViewer, respectHash = false) {
@@ -90,7 +95,15 @@ export default {
class="prepend-top-20 append-bottom-20"
/>
<article v-else class="file-holder snippet-file-content">
- <blob-header :blob="blob" :active-viewer-type="viewer.type" @viewer-changed="switchViewer" />
+ <blob-header :blob="blob" :active-viewer-type="viewer.type" @viewer-changed="switchViewer">
+ <template #actions>
+ <clone-dropdown-button
+ v-if="canBeCloned"
+ :ssh-link="snippet.sshUrlToRepo"
+ :http-link="snippet.httpUrlToRepo"
+ />
+ </template>
+ </blob-header>
<blob-content :loading="isContentLoading" :content="blobContent" :active-viewer="viewer" />
</article>
</div>
diff --git a/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql b/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql
index e0cc6cc2dda..22aab7c7795 100644
--- a/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql
+++ b/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql
@@ -7,8 +7,10 @@ fragment SnippetBase on Snippet {
updatedAt
visibilityLevel
webUrl
+ httpUrlToRepo
+ sshUrlToRepo
userPermissions {
adminSnippet
updateSnippet
}
-}
\ No newline at end of file
+}
diff --git a/app/assets/javascripts/vue_shared/components/local_storage_sync.vue b/app/assets/javascripts/vue_shared/components/local_storage_sync.vue
new file mode 100644
index 00000000000..b5d6b872547
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/local_storage_sync.vue
@@ -0,0 +1,39 @@
+<script>
+export default {
+ props: {
+ storageKey: {
+ type: String,
+ required: true,
+ },
+ value: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ },
+ watch: {
+ value(newVal) {
+ this.saveValue(newVal);
+ },
+ },
+ mounted() {
+ // On mount, trigger update if we actually have a localStorageValue
+ const value = this.getValue();
+
+ if (value && this.value !== value) {
+ this.$emit('input', value);
+ }
+ },
+ methods: {
+ getValue() {
+ return localStorage.getItem(this.storageKey);
+ },
+ saveValue(val) {
+ localStorage.setItem(this.storageKey, val);
+ },
+ },
+ render() {
+ return this.$slots.default;
+ },
+};
+</script>
diff --git a/app/assets/stylesheets/framework/dropdowns.scss b/app/assets/stylesheets/framework/dropdowns.scss
index a56505ee6e2..b6edadb05a9 100644
--- a/app/assets/stylesheets/framework/dropdowns.scss
+++ b/app/assets/stylesheets/framework/dropdowns.scss
@@ -1,6 +1,14 @@
.dropdown {
position: relative;
+ // Once the new design (https://gitlab.com/gitlab-org/gitlab-foss/-/issues/63499/designs)
+ // for Snippets is introduced and Clone button is relocated, we won't
+ // need this style.
+ // Issue for the refactoring: https://gitlab.com/gitlab-org/gitlab/-/issues/213327
+ &.gl-new-dropdown button.dropdown-toggle {
+ @include gl-display-inline-flex;
+ }
+
.btn-link {
&:hover {
cursor: pointer;
diff --git a/app/models/clusters/cluster.rb b/app/models/clusters/cluster.rb
index 78efe2b4337..42771eaa82a 100644
--- a/app/models/clusters/cluster.rb
+++ b/app/models/clusters/cluster.rb
@@ -59,6 +59,7 @@ module Clusters
has_one_cluster_application :elastic_stack
has_many :kubernetes_namespaces
+ has_many :metrics_dashboard_annotations, class_name: 'Metrics::Dashboard::Annotation', inverse_of: :cluster
accepts_nested_attributes_for :provider_gcp, update_only: true
accepts_nested_attributes_for :provider_aws, update_only: true
diff --git a/app/models/environment.rb b/app/models/environment.rb
index fecf13f349e..23c2296688d 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -18,6 +18,7 @@ class Environment < ApplicationRecord
has_many :successful_deployments, -> { success }, class_name: 'Deployment'
has_many :active_deployments, -> { active }, class_name: 'Deployment'
has_many :prometheus_alerts, inverse_of: :environment
+ has_many :metrics_dashboard_annotations, class_name: 'Metrics::Dashboard::Annotation', inverse_of: :environment
has_many :self_managed_prometheus_alert_events, inverse_of: :environment
has_one :last_deployment, -> { success.order('deployments.id DESC') }, class_name: 'Deployment'
diff --git a/app/models/metrics/dashboard/annotation.rb b/app/models/metrics/dashboard/annotation.rb
new file mode 100644
index 00000000000..2f1b6527742
--- /dev/null
+++ b/app/models/metrics/dashboard/annotation.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Metrics
+ module Dashboard
+ class Annotation < ApplicationRecord
+ self.table_name = 'metrics_dashboard_annotations'
+
+ belongs_to :environment, inverse_of: :metrics_dashboard_annotations
+ belongs_to :cluster, class_name: 'Clusters::Cluster', inverse_of: :metrics_dashboard_annotations
+
+ validates :starting_at, presence: true
+ validates :description, presence: true, length: { maximum: 255 }
+ validates :dashboard_path, presence: true, length: { maximum: 255 }
+ validates :panel_xid, length: { maximum: 255 }
+ validate :single_ownership
+ validate :orphaned_annotation
+
+ private
+
+ def single_ownership
+ return if cluster.nil? ^ environment.nil?
+
+ errors.add(:base, s_("Metrics::Dashboard::Annotation|Annotation can't belong to both a cluster and an environment at the same time"))
+ end
+
+ def orphaned_annotation
+ return if cluster.present? || environment.present?
+
+ errors.add(:base, s_("Metrics::Dashboard::Annotation|Annotation must belong to a cluster or an environment"))
+ end
+ end
+ end
+end
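
The two custom validations above enforce that an annotation belongs to exactly one owner. A minimal console-style sketch of that behaviour (the `environment` and `cluster` records, and the dashboard path, are assumed for illustration):

```ruby
# Assumed: `environment` and `cluster` are persisted records.
annotation = Metrics::Dashboard::Annotation.new(
  environment: environment,
  dashboard_path: 'config/prometheus/common_metrics.yml', # illustrative path
  description: 'Deployed new cache layer',
  starting_at: Time.current
)
annotation.valid? # => true: exactly one owner is set

annotation.cluster = cluster
annotation.valid? # => false: single_ownership rejects having both owners

annotation.environment = nil
annotation.cluster = nil
annotation.valid? # => false: orphaned_annotation (and single_ownership) reject having no owner
```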
diff --git a/app/policies/group_policy.rb b/app/policies/group_policy.rb
index abd63753908..5e252c8e564 100644
--- a/app/policies/group_policy.rb
+++ b/app/policies/group_policy.rb
@@ -75,6 +75,9 @@ class GroupPolicy < BasePolicy
rule { developer }.policy do
enable :admin_milestone
enable :read_package
+ enable :create_metrics_dashboard_annotation
+ enable :delete_metrics_dashboard_annotation
+ enable :update_metrics_dashboard_annotation
end
rule { reporter }.policy do
@@ -82,6 +85,7 @@ class GroupPolicy < BasePolicy
enable :admin_label
enable :admin_list
enable :admin_issue
+ enable :read_metrics_dashboard_annotation
end
rule { maintainer }.policy do
diff --git a/app/policies/metrics/dashboard/annotation_policy.rb b/app/policies/metrics/dashboard/annotation_policy.rb
new file mode 100644
index 00000000000..25b78e104c4
--- /dev/null
+++ b/app/policies/metrics/dashboard/annotation_policy.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+module Metrics
+ module Dashboard
+ class AnnotationPolicy < BasePolicy
+ delegate { @subject.cluster }
+ delegate { @subject.environment }
+ end
+ end
+end
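
Since the policy only delegates, permission checks on an annotation resolve through the owning cluster's or environment's policy down to the project or group, where this commit grants the `*_metrics_dashboard_annotation` abilities. A hedged sketch, assuming `user` and `annotation` already exist:

```ruby
# Delegation means the annotation itself carries no rules; the answer comes from
# the owning environment's or cluster's policy (and ultimately the project or group).
Ability.allowed?(user, :delete_metrics_dashboard_annotation, annotation)
# => true for a developer of the owning project, false for e.g. a guest
```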
diff --git a/app/policies/project_policy.rb b/app/policies/project_policy.rb
index e694963eac0..0f5e4ac378e 100644
--- a/app/policies/project_policy.rb
+++ b/app/policies/project_policy.rb
@@ -224,6 +224,7 @@ class ProjectPolicy < BasePolicy
enable :read_sentry_issue
enable :update_sentry_issue
enable :read_prometheus
+ enable :read_metrics_dashboard_annotation
end
# We define `:public_user_access` separately because there are cases in gitlab-ee
@@ -276,6 +277,9 @@ class ProjectPolicy < BasePolicy
enable :update_deployment
enable :create_release
enable :update_release
+ enable :create_metrics_dashboard_annotation
+ enable :delete_metrics_dashboard_annotation
+ enable :update_metrics_dashboard_annotation
end
rule { can?(:developer_access) & user_confirmed? }.policy do
diff --git a/app/services/metrics/dashboard/annotations/create_service.rb b/app/services/metrics/dashboard/annotations/create_service.rb
new file mode 100644
index 00000000000..c04f4c56b51
--- /dev/null
+++ b/app/services/metrics/dashboard/annotations/create_service.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+# Create Metrics::Dashboard::Annotation entry based on matched dashboard_path, environment, cluster
+module Metrics
+ module Dashboard
+ module Annotations
+ class CreateService < ::BaseService
+ include Stepable
+
+ steps :authorize_environment_access,
+ :authorize_cluster_access,
+ :parse_dashboard_path,
+ :create
+
+ def initialize(user, params)
+ @user, @params = user, params
+ end
+
+ def execute
+ execute_steps
+ end
+
+ private
+
+ attr_reader :user, :params
+
+ def authorize_environment_access(options)
+ if environment.nil? || Ability.allowed?(user, :create_metrics_dashboard_annotation, project)
+ options[:environment] = environment
+ success(options)
+ else
+ error(s_('Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected environment'))
+ end
+ end
+
+ def authorize_cluster_access(options)
+ if cluster.nil? || Ability.allowed?(user, :create_metrics_dashboard_annotation, cluster)
+ options[:cluster] = cluster
+ success(options)
+ else
+ error(s_('Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected cluster'))
+ end
+ end
+
+ def parse_dashboard_path(options)
+ dashboard_path = params[:dashboard_path]
+
+ Gitlab::Metrics::Dashboard::Finder.find_raw(project, dashboard_path: dashboard_path)
+ options[:dashboard_path] = dashboard_path
+
+ success(options)
+ rescue Gitlab::Template::Finders::RepoTemplateFinder::FileNotFoundError
+ error(s_('Metrics::Dashboard::Annotation|Dashboard with requested path can not be found'))
+ end
+
+ def create(options)
+ annotation = Annotation.new(options.slice(:environment, :cluster, :dashboard_path).merge(params.slice(:description, :starting_at, :ending_at)))
+
+ if annotation.save
+ success(annotation: annotation)
+ else
+ error(annotation.errors)
+ end
+ end
+
+ def environment
+ params[:environment]
+ end
+
+ def cluster
+ params[:cluster]
+ end
+
+ def project
+ (environment || cluster)&.project
+ end
+ end
+ end
+ end
+end
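
A short usage sketch for the create service (the caller, `current_user`, and `environment` are assumptions; the result follows the `status`/`message` hash convention these service objects use):

```ruby
result = Metrics::Dashboard::Annotations::CreateService.new(
  current_user,
  environment: environment,                               # or cluster: cluster, but not both
  dashboard_path: 'config/prometheus/common_metrics.yml', # must be resolvable by the dashboard finder
  description: 'Scaled the fleet to 10 nodes',
  starting_at: Time.current
).execute

if result[:status] == :success
  result[:annotation] # the persisted Metrics::Dashboard::Annotation
else
  result[:message]    # authorization, dashboard lookup, or validation errors
end
```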
diff --git a/app/services/metrics/dashboard/annotations/delete_service.rb b/app/services/metrics/dashboard/annotations/delete_service.rb
new file mode 100644
index 00000000000..c6a6c4f5fbf
--- /dev/null
+++ b/app/services/metrics/dashboard/annotations/delete_service.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+# Delete Metrics::Dashboard::Annotation entry
+module Metrics
+ module Dashboard
+ module Annotations
+ class DeleteService < ::BaseService
+ include Stepable
+
+ steps :authorize_action,
+ :delete
+
+ def initialize(user, annotation)
+ @user, @annotation = user, annotation
+ end
+
+ def execute
+ execute_steps
+ end
+
+ private
+
+ attr_reader :user, :annotation
+
+ def authorize_action(_options)
+ if Ability.allowed?(user, :delete_metrics_dashboard_annotation, annotation)
+ success
+ else
+ error(s_('Metrics::Dashboard::Annotation|You are not authorized to delete this annotation'))
+ end
+ end
+
+ def delete(_options)
+ if annotation.destroy
+ success
+ else
+ error(s_('Metrics::Dashboard::Annotation|Annotation has not been deleted'))
+ end
+ end
+ end
+ end
+ end
+end
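
And the matching sketch for deletion (again assuming `current_user` and a persisted `annotation`):

```ruby
result = Metrics::Dashboard::Annotations::DeleteService.new(current_user, annotation).execute

result[:status]  # => :success when the user is authorized and the record is destroyed
result[:message] # => the authorization or deletion error otherwise
```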
diff --git a/changelogs/unreleased/204730-button-integration.yml b/changelogs/unreleased/204730-button-integration.yml
new file mode 100644
index 00000000000..e39c3c3b2a1
--- /dev/null
+++ b/changelogs/unreleased/204730-button-integration.yml
@@ -0,0 +1,5 @@
+---
+title: Added the clone button for Snippet view
+merge_request: 28840
+author:
+type: added
diff --git a/changelogs/unreleased/mwaw-211329-add-annotation-model-and-relation.yml b/changelogs/unreleased/mwaw-211329-add-annotation-model-and-relation.yml
new file mode 100644
index 00000000000..88a5b006fcc
--- /dev/null
+++ b/changelogs/unreleased/mwaw-211329-add-annotation-model-and-relation.yml
@@ -0,0 +1,5 @@
+---
+title: Add metrics dashboard annotation model, relation, policy, create and delete services. To provide interface for create and delete operations.
+merge_request: 27583
+author:
+type: added
diff --git a/db/migrate/20200319124127_create_metrics_dashboard_annotations.rb b/db/migrate/20200319124127_create_metrics_dashboard_annotations.rb
new file mode 100644
index 00000000000..4c57e38518a
--- /dev/null
+++ b/db/migrate/20200319124127_create_metrics_dashboard_annotations.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+# See https://docs.gitlab.com/ee/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class CreateMetricsDashboardAnnotations < ActiveRecord::Migration[6.0]
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ def change
+ create_table :metrics_dashboard_annotations do |t|
+ t.datetime_with_timezone :starting_at, null: false
+ t.datetime_with_timezone :ending_at
+ t.references :environment, index: false, foreign_key: { on_delete: :cascade }, null: true
+ t.references :cluster, index: false, foreign_key: { on_delete: :cascade }, null: true
+ t.string :dashboard_path, null: false, limit: 255
+ t.string :panel_xid, limit: 255
+ t.text :description, null: false, limit: 255
+
+ t.index %i(environment_id dashboard_path starting_at ending_at), where: 'environment_id IS NOT NULL', name: "index_metrics_dashboard_annotations_on_environment_id_and_3_col"
+ t.index %i(cluster_id dashboard_path starting_at ending_at), where: 'cluster_id IS NOT NULL', name: "index_metrics_dashboard_annotations_on_cluster_id_and_3_columns"
+ end
+ end
+end
diff --git a/db/structure.sql b/db/structure.sql
index f7c2fadef18..9226cdcbc73 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -3872,6 +3872,26 @@ CREATE SEQUENCE public.merge_trains_id_seq
ALTER SEQUENCE public.merge_trains_id_seq OWNED BY public.merge_trains.id;
+CREATE TABLE public.metrics_dashboard_annotations (
+ id bigint NOT NULL,
+ starting_at timestamp with time zone NOT NULL,
+ ending_at timestamp with time zone,
+ environment_id bigint,
+ cluster_id bigint,
+ dashboard_path character varying(255) NOT NULL,
+ panel_xid character varying(255),
+ description text NOT NULL
+);
+
+CREATE SEQUENCE public.metrics_dashboard_annotations_id_seq
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+ALTER SEQUENCE public.metrics_dashboard_annotations_id_seq OWNED BY public.metrics_dashboard_annotations.id;
+
CREATE TABLE public.milestone_releases (
milestone_id bigint NOT NULL,
release_id bigint NOT NULL
@@ -7196,6 +7216,8 @@ ALTER TABLE ONLY public.merge_requests_closing_issues ALTER COLUMN id SET DEFAUL
ALTER TABLE ONLY public.merge_trains ALTER COLUMN id SET DEFAULT nextval('public.merge_trains_id_seq'::regclass);
+ALTER TABLE ONLY public.metrics_dashboard_annotations ALTER COLUMN id SET DEFAULT nextval('public.metrics_dashboard_annotations_id_seq'::regclass);
+
ALTER TABLE ONLY public.milestones ALTER COLUMN id SET DEFAULT nextval('public.milestones_id_seq'::regclass);
ALTER TABLE ONLY public.namespace_statistics ALTER COLUMN id SET DEFAULT nextval('public.namespace_statistics_id_seq'::regclass);
@@ -7974,6 +7996,9 @@ ALTER TABLE ONLY public.merge_requests
ALTER TABLE ONLY public.merge_trains
ADD CONSTRAINT merge_trains_pkey PRIMARY KEY (id);
+ALTER TABLE ONLY public.metrics_dashboard_annotations
+ ADD CONSTRAINT metrics_dashboard_annotations_pkey PRIMARY KEY (id);
+
ALTER TABLE ONLY public.milestones
ADD CONSTRAINT milestones_pkey PRIMARY KEY (id);
@@ -9459,6 +9484,10 @@ CREATE INDEX index_merge_trains_on_pipeline_id ON public.merge_trains USING btre
CREATE INDEX index_merge_trains_on_user_id ON public.merge_trains USING btree (user_id);
+CREATE INDEX index_metrics_dashboard_annotations_on_cluster_id_and_3_columns ON public.metrics_dashboard_annotations USING btree (cluster_id, dashboard_path, starting_at, ending_at) WHERE (cluster_id IS NOT NULL);
+
+CREATE INDEX index_metrics_dashboard_annotations_on_environment_id_and_3_col ON public.metrics_dashboard_annotations USING btree (environment_id, dashboard_path, starting_at, ending_at) WHERE (environment_id IS NOT NULL);
+
CREATE INDEX index_milestone_releases_on_release_id ON public.milestone_releases USING btree (release_id);
CREATE INDEX index_milestones_on_description_trigram ON public.milestones USING gin (description public.gin_trgm_ops);
@@ -11063,6 +11092,9 @@ ALTER TABLE ONLY public.suggestions
ALTER TABLE ONLY public.requirements
ADD CONSTRAINT fk_rails_33fed8aa4e FOREIGN KEY (author_id) REFERENCES public.users(id) ON DELETE SET NULL;
+ALTER TABLE ONLY public.metrics_dashboard_annotations
+ ADD CONSTRAINT fk_rails_345ab51043 FOREIGN KEY (cluster_id) REFERENCES public.clusters(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY public.wiki_page_slugs
ADD CONSTRAINT fk_rails_358b46be14 FOREIGN KEY (wiki_page_meta_id) REFERENCES public.wiki_page_meta(id) ON DELETE CASCADE;
@@ -11582,6 +11614,9 @@ ALTER TABLE ONLY public.clusters
ALTER TABLE ONLY public.analytics_cycle_analytics_group_stages
ADD CONSTRAINT fk_rails_ae5da3409b FOREIGN KEY (group_id) REFERENCES public.namespaces(id) ON DELETE CASCADE;
+ALTER TABLE ONLY public.metrics_dashboard_annotations
+ ADD CONSTRAINT fk_rails_aeb11a7643 FOREIGN KEY (environment_id) REFERENCES public.environments(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY public.pool_repositories
ADD CONSTRAINT fk_rails_af3f8c5d62 FOREIGN KEY (shard_id) REFERENCES public.shards(id) ON DELETE RESTRICT;
@@ -12911,6 +12946,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200318175008
20200319071702
20200319123041
+20200319124127
20200319203901
20200320112455
20200320123839
diff --git a/doc/administration/instance_limits.md b/doc/administration/instance_limits.md
index 22f2d6ca0b9..cf4cd9f4345 100644
--- a/doc/administration/instance_limits.md
+++ b/doc/administration/instance_limits.md
@@ -176,6 +176,14 @@ To set this limit on a self-managed installation, run the following in the
Plan.default.limits.update!(ci_pipeline_schedules: 100)
```
+## Instance monitoring and metrics
+
+### Prometheus Alert JSON payloads
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/14929) in GitLab 12.6.
+
+Prometheus alert payloads sent to the `notify.json` endpoint are limited to 1 MB in size.
+
## Environment data on Deploy Boards
[Deploy Boards](../user/project/deploy_boards.md) load information from Kubernetes about
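
As a rough illustration of the limit documented above, a client pushing alerts could guard the payload size before sending it; this is only a sketch, with the 1 MB figure taken from the doc change and everything else assumed:

```ruby
require 'json'

MAX_ALERT_PAYLOAD_BYTES = 1_000_000 # roughly the documented 1 MB limit

payload = { alerts: alerts }.to_json # `alerts` is an assumed array of alert hashes
if payload.bytesize > MAX_ALERT_PAYLOAD_BYTES
  raise ArgumentError, "alert payload is #{payload.bytesize} bytes, above the 1 MB limit"
end
```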
diff --git a/doc/development/i18n/externalization.md b/doc/development/i18n/externalization.md
index a550d977916..91ca6120db9 100644
--- a/doc/development/i18n/externalization.md
+++ b/doc/development/i18n/externalization.md
@@ -455,11 +455,11 @@ bin/rake gettext:regenerate
This command will update `locale/gitlab.pot` file with the newly externalized
strings and remove any strings that aren't used anymore. You should check this
file in. Once the changes are on master, they will be picked up by
-[Crowdin](https://translate.gitlab.com) and be presented for
+[CrowdIn](https://translate.gitlab.com) and be presented for
translation.
We don't need to check in any changes to the `locale/[language]/gitlab.po` files.
-They are updated automatically when [translations from Crowdin are merged](merging_translations.md).
+They are updated automatically when [translations from CrowdIn are merged](merging_translations.md).
If there are merge conflicts in the `gitlab.pot` file, you can delete the file
and regenerate it using the same command.
diff --git a/doc/development/i18n/index.md b/doc/development/i18n/index.md
index 7f59d30f8f9..929eded3f8e 100644
--- a/doc/development/i18n/index.md
+++ b/doc/development/i18n/index.md
@@ -30,7 +30,7 @@ See [Externalization for GitLab](externalization.md).
### Translate strings
The translation process is managed at <https://translate.gitlab.com>
-using [Crowdin](https://crowdin.com/).
+using [CrowdIn](https://crowdin.com/).
You will need to create an account before you can submit translations.
Once you are signed in, select the language you wish to contribute translations to.
@@ -51,4 +51,4 @@ able to proofread and instructions on becoming a proofreader yourself.
Translations are typically included in the next major or minor release.
-See [Merging translations from Crowdin](merging_translations.md).
+See [Merging translations from CrowdIn](merging_translations.md).
diff --git a/doc/development/i18n/merging_translations.md b/doc/development/i18n/merging_translations.md
index 15c7f42ec9d..5d9dbd23efa 100644
--- a/doc/development/i18n/merging_translations.md
+++ b/doc/development/i18n/merging_translations.md
@@ -1,33 +1,33 @@
-# Merging translations from Crowdin
+# Merging translations from CrowdIn
-Crowdin automatically syncs the `gitlab.pot` file with the Crowdin service, presenting
+CrowdIn automatically syncs the `gitlab.pot` file with the CrowdIn service, presenting
newly added externalized strings to the community of translators.
-[GitLab Crowdin Bot](https://gitlab.com/gitlab-crowdin-bot) also creates merge requests
+[GitLab CrowdIn Bot](https://gitlab.com/gitlab-crowdin-bot) also creates merge requests
to take newly approved translation submissions and merge them into the `locale/<language>/gitlab.po`
files. Check the [merge requests created by `gitlab-crowdin-bot`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests?scope=all&utf8=%E2%9C%93&state=opened&author_username=gitlab-crowdin-bot)
to see new and merged merge requests.
## Validation
-By default Crowdin commits translations with `[skip ci]` in the commit
+By default CrowdIn commits translations with `[skip ci]` in the commit
message. This is done to avoid a bunch of pipelines being run. Before
merging translations, make sure to trigger a pipeline to validate
-translations, we have static analysis validating things Crowdin
+translations, we have static analysis validating things CrowdIn
doesn't do. Create a new pipeline at `https://gitlab.com/gitlab-org/gitlab/pipelines/new`
(need Developer access permissions) for the `master-i18n` branch.
If there are validation errors, the easiest solution is to disapprove
-the offending string in Crowdin, leaving a comment with what is
+the offending string in CrowdIn, leaving a comment with what is
required to fix the offense. There is an
[issue](https://gitlab.com/gitlab-org/gitlab/issues/23256)
suggesting to automate this process. Disapproving will exclude the
invalid translation, the merge request will be updated within a few
minutes.
-It might be handy to pause the integration on the Crowdin side for a
+It might be handy to pause the integration on the CrowdIn side for a
little while so translations don't keep coming. This can be done by
-clicking `Pause sync` on the [Crowdin integration settings
+clicking `Pause sync` on the [CrowdIn integration settings
page](https://translate.gitlab.com/project/gitlab-ee/settings#integration).
When all failures are resolved, the translations need to be double
@@ -37,16 +37,16 @@ checked once more as discussed in [confidential issue](../../user/project/issues
When all translations are found good and pipelines pass the
translations can be merged into the master branch. When merging the translations,
-make sure to check the **Remove source branch** checkbox, so Crowdin recreates the
+make sure to check the **Remove source branch** checkbox, so CrowdIn recreates the
`master-i18n` from master after the new translation was merged.
We are discussing [automating this entire process](https://gitlab.com/gitlab-org/gitlab/issues/19896).
## Recreate the merge request
-Crowdin creates a new merge request as soon as the old one is closed
+CrowdIn creates a new merge request as soon as the old one is closed
or merged. But it won't recreate the `master-i18n` branch every
-time. To force Crowdin to recreate the branch, close any [open merge
+time. To force CrowdIn to recreate the branch, close any [open merge
request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests?scope=all&utf8=%E2%9C%93&state=opened&author_username=gitlab-crowdin-bot)
and delete the
[`master-18n`](https://gitlab.com/gitlab-org/gitlab/-/branches/all?utf8=✓&search=master-i18n).
diff --git a/doc/development/i18n/proofreader.md b/doc/development/i18n/proofreader.md
index 0e631a41de8..743529b9bbc 100644
--- a/doc/development/i18n/proofreader.md
+++ b/doc/development/i18n/proofreader.md
@@ -8,85 +8,85 @@ are very appreciative of the work done by translators and proofreaders!
- Albanian
- Proofreaders needed.
- Amharic
- - Tsegaselassie Tadesse - [GitLab](https://gitlab.com/tsega), [Crowdin](https://crowdin.com/profile/tsegaselassi/activity)
+ - Tsegaselassie Tadesse - [GitLab](https://gitlab.com/tsega), [CrowdIn](https://crowdin.com/profile/tsegaselassi/activity)
- Arabic
- Proofreaders needed.
- Bulgarian
- - Lyubomir Vasilev - [Crowdin](https://crowdin.com/profile/lyubomirv)
+ - Lyubomir Vasilev - [CrowdIn](https://crowdin.com/profile/lyubomirv)
- Catalan
- - David Planella - [GitLab](https://gitlab.com/dplanella), [Crowdin](https://crowdin.com/profile/dplanella)
+ - David Planella - [GitLab](https://gitlab.com/dplanella), [CrowdIn](https://crowdin.com/profile/dplanella)
- Chinese Simplified 简体中文
- - Huang Tao - [GitLab](https://gitlab.com/htve), [Crowdin](https://crowdin.com/profile/htve)
- - Victor Wu - [GitLab](https://gitlab.com/victorwuky), [Crowdin](https://crowdin.com/profile/victorwu)
- - Xiaogang Wen - [GitLab](https://gitlab.com/xiaogang_gitlab), [Crowdin](https://crowdin.com/profile/xiaogang_gitlab)
+ - Huang Tao - [GitLab](https://gitlab.com/htve), [CrowdIn](https://crowdin.com/profile/htve)
+ - Victor Wu - [GitLab](https://gitlab.com/victorwuky), [CrowdIn](https://crowdin.com/profile/victorwu)
+ - Xiaogang Wen - [GitLab](https://gitlab.com/xiaogang_gitlab), [CrowdIn](https://crowdin.com/profile/xiaogang_gitlab)
- Chinese Traditional 繁體中文
- - Weizhe Ding - [GitLab](https://gitlab.com/d.weizhe), [Crowdin](https://crowdin.com/profile/d.weizhe)
- - Yi-Jyun Pan - [GitLab](https://gitlab.com/pan93412), [Crowdin](https://crowdin.com/profile/pan93412)
- - Victor Wu - [GitLab](https://gitlab.com/victorwuky), [Crowdin](https://crowdin.com/profile/victorwu)
+ - Weizhe Ding - [GitLab](https://gitlab.com/d.weizhe), [CrowdIn](https://crowdin.com/profile/d.weizhe)
+ - Yi-Jyun Pan - [GitLab](https://gitlab.com/pan93412), [CrowdIn](https://crowdin.com/profile/pan93412)
+ - Victor Wu - [GitLab](https://gitlab.com/victorwuky), [CrowdIn](https://crowdin.com/profile/victorwu)
- Chinese Traditional, Hong Kong 繁體中文 (香港)
- - Victor Wu - [GitLab](https://gitlab.com/victorwuky), [Crowdin](https://crowdin.com/profile/victorwu)
- - Ivan Ip - [GitLab](https://gitlab.com/lifehome), [Crowdin](https://crowdin.com/profile/lifehome)
+ - Victor Wu - [GitLab](https://gitlab.com/victorwuky), [CrowdIn](https://crowdin.com/profile/victorwu)
+ - Ivan Ip - [GitLab](https://gitlab.com/lifehome), [CrowdIn](https://crowdin.com/profile/lifehome)
- Czech
- - Jan Urbanec - [GitLab](https://gitlab.com/TatranskyMedved), [Crowdin](https://crowdin.com/profile/Tatranskymedved)
+ - Jan Urbanec - [GitLab](https://gitlab.com/TatranskyMedved), [CrowdIn](https://crowdin.com/profile/Tatranskymedved)
- Danish
- - Saederup92 - [GitLab](https://gitlab.com/Saederup92), [Crowdin](https://crowdin.com/profile/Saederup92)
+ - Saederup92 - [GitLab](https://gitlab.com/Saederup92), [CrowdIn](https://crowdin.com/profile/Saederup92)
- Dutch
- - Emily Hendle - [GitLab](https://gitlab.com/pundachan), [Crowdin](https://crowdin.com/profile/pandachan)
+ - Emily Hendle - [GitLab](https://gitlab.com/pundachan), [CrowdIn](https://crowdin.com/profile/pandachan)
- Esperanto
-- Lyubomir Vasilev - [Crowdin](https://crowdin.com/profile/lyubomirv)
+- Lyubomir Vasilev - [CrowdIn](https://crowdin.com/profile/lyubomirv)
- Estonian
- Proofreaders needed.
- Filipino
- Proofreaders needed.
- French
- - Davy Defaud - [GitLab](https://gitlab.com/DevDef), [Crowdin](https://crowdin.com/profile/DevDef)
+ - Davy Defaud - [GitLab](https://gitlab.com/DevDef), [CrowdIn](https://crowdin.com/profile/DevDef)
- Galician
- - Antón Méixome - [Crowdin](https://crowdin.com/profile/meixome)
- - Pedro Garcia - [GitLab](https://gitlab.com/pedgarrod), [Crowdin](https://crowdin.com/profile/breaking_pitt)
+ - Antón Méixome - [CrowdIn](https://crowdin.com/profile/meixome)
+ - Pedro Garcia - [GitLab](https://gitlab.com/pedgarrod), [CrowdIn](https://crowdin.com/profile/breaking_pitt)
- German
- - Michael Hahnle - [GitLab](https://gitlab.com/mhah), [Crowdin](https://crowdin.com/profile/mhah)
- - Katrin Leinweber - [GitLab](https://gitlab.com/katrinleinweber/), [Crowdin](https://crowdin.com/profile/katrinleinweber)
+ - Michael Hahnle - [GitLab](https://gitlab.com/mhah), [CrowdIn](https://crowdin.com/profile/mhah)
+ - Katrin Leinweber - [GitLab](https://gitlab.com/katrinleinweber/), [CrowdIn](https://crowdin.com/profile/katrinleinweber)
- Greek
- Proofreaders needed.
- Hebrew
- - Yaron Shahrabani - [GitLab](https://gitlab.com/yarons), [Crowdin](https://crowdin.com/profile/YaronSh)
+ - Yaron Shahrabani - [GitLab](https://gitlab.com/yarons), [CrowdIn](https://crowdin.com/profile/YaronSh)
- Hungarian
- Proofreaders needed.
- Indonesian
- - Adi Ferdian - [GitLab](https://gitlab.com/adiferd), [Crowdin](https://crowdin.com/profile/adiferd)
- - Ahmad Naufal Mukhtar - [GitLab](https://gitlab.com/anaufalm), [Crowdin](https://crowdin.com/profile/anaufalm)
+ - Adi Ferdian - [GitLab](https://gitlab.com/adiferd), [CrowdIn](https://crowdin.com/profile/adiferd)
+ - Ahmad Naufal Mukhtar - [GitLab](https://gitlab.com/anaufalm), [CrowdIn](https://crowdin.com/profile/anaufalm)
- Italian
- - Massimiliano Cuttini - [GitLab](https://gitlab.com/maxcuttins), [Crowdin](https://crowdin.com/profile/maxcuttins)
- - Paolo Falomo - [GitLab](https://gitlab.com/paolofalomo), [Crowdin](https://crowdin.com/profile/paolo.falomo)
+ - Massimiliano Cuttini - [GitLab](https://gitlab.com/maxcuttins), [CrowdIn](https://crowdin.com/profile/maxcuttins)
+ - Paolo Falomo - [GitLab](https://gitlab.com/paolofalomo), [CrowdIn](https://crowdin.com/profile/paolo.falomo)
- Japanese
- - Hiroyuki Sato - [GitLab](https://gitlab.com/hiroponz), [Crowdin](https://crowdin.com/profile/hiroponz)
- - Tomo Dote - [GitLab](https://gitlab.com/fu7mu4), [Crowdin](https://crowdin.com/profile/fu7mu4)
- - Hiromi Nozawa - [GitLab](https://gitlab.com/hir0mi), [Crowdin](https://crowdin.com/profile/hir0mi)
+ - Hiroyuki Sato - [GitLab](https://gitlab.com/hiroponz), [CrowdIn](https://crowdin.com/profile/hiroponz)
+ - Tomo Dote - [GitLab](https://gitlab.com/fu7mu4), [CrowdIn](https://crowdin.com/profile/fu7mu4)
+ - Hiromi Nozawa - [GitLab](https://gitlab.com/hir0mi), [CrowdIn](https://crowdin.com/profile/hir0mi)
- Korean
- - Chang-Ho Cha - [GitLab](https://gitlab.com/changho-cha), [Crowdin](https://crowdin.com/profile/zzazang)
- - Ji Hun Oh - [GitLab](https://gitlab.com/Baw-Appie), [Crowdin](https://crowdin.com/profile/BawAppie)
- - Jeongwhan Choi - [GitLab](https://gitlab.com/jeongwhanchoi), [Crowdin](https://crowdin.com/profile/jeongwhanchoi)
+ - Chang-Ho Cha - [GitLab](https://gitlab.com/changho-cha), [CrowdIn](https://crowdin.com/profile/zzazang)
+ - Ji Hun Oh - [GitLab](https://gitlab.com/Baw-Appie), [CrowdIn](https://crowdin.com/profile/BawAppie)
+ - Jeongwhan Choi - [GitLab](https://gitlab.com/jeongwhanchoi), [CrowdIn](https://crowdin.com/profile/jeongwhanchoi)
- Mongolian
- Proofreaders needed.
- Norwegian Bokmal
- Proofreaders needed.
- Polish
- - Filip Mech - [GitLab](https://gitlab.com/mehenz), [Crowdin](https://crowdin.com/profile/mehenz)
- - Maksymilian Roman - [GitLab](https://gitlab.com/villaincandle), [Crowdin](https://crowdin.com/profile/villaincandle)
+ - Filip Mech - [GitLab](https://gitlab.com/mehenz), [CrowdIn](https://crowdin.com/profile/mehenz)
+ - Maksymilian Roman - [GitLab](https://gitlab.com/villaincandle), [CrowdIn](https://crowdin.com/profile/villaincandle)
- Portuguese
- Proofreaders needed.
- - Diogo Trindade - [GitLab](https://gitlab.com/luisdiogo2071317), [Crowdin](https://crowdin.com/profile/ldiogotrindade)
+ - Diogo Trindade - [GitLab](https://gitlab.com/luisdiogo2071317), [CrowdIn](https://crowdin.com/profile/ldiogotrindade)
- Portuguese, Brazilian
- - Paulo George Gomes Bezerra - [GitLab](https://gitlab.com/paulobezerra), [Crowdin](https://crowdin.com/profile/paulogomes.rep)
- - André Gama - [GitLab](https://gitlab.com/andregamma), [Crowdin](https://crowdin.com/profile/ToeOficial)
+ - Paulo George Gomes Bezerra - [GitLab](https://gitlab.com/paulobezerra), [CrowdIn](https://crowdin.com/profile/paulogomes.rep)
+ - André Gama - [GitLab](https://gitlab.com/andregamma), [CrowdIn](https://crowdin.com/profile/ToeOficial)
- Romanian
- Proofreaders needed.
- Russian
- - Nikita Grylov - [GitLab](https://gitlab.com/nixel2007), [Crowdin](https://crowdin.com/profile/nixel2007)
- - Alexy Lustin - [GitLab](https://gitlab.com/allustin), [Crowdin](https://crowdin.com/profile/lustin)
- - Mark Minakou - [GitLab](https://gitlab.com/sandzhaj), [Crowdin](https://crowdin.com/profile/sandzhaj)
- - NickVolynkin - [Crowdin](https://crowdin.com/profile/NickVolynkin)
- - Andrey Komarov - [GitLab](https://gitlab.com/elkamarado), [Crowdin](https://crowdin.com/profile/kamarado)
+ - Nikita Grylov - [GitLab](https://gitlab.com/nixel2007), [CrowdIn](https://crowdin.com/profile/nixel2007)
+ - Alexy Lustin - [GitLab](https://gitlab.com/allustin), [CrowdIn](https://crowdin.com/profile/lustin)
+ - Mark Minakou - [GitLab](https://gitlab.com/sandzhaj), [CrowdIn](https://crowdin.com/profile/sandzhaj)
+ - NickVolynkin - [CrowdIn](https://crowdin.com/profile/NickVolynkin)
+ - Andrey Komarov - [GitLab](https://gitlab.com/elkamarado), [CrowdIn](https://crowdin.com/profile/kamarado)
- Serbian (Cyrillic)
- Proofreaders needed.
- Serbian (Latin)
@@ -94,18 +94,18 @@ are very appreciative of the work done by translators and proofreaders!
- Slovak
- Proofreaders needed.
- Spanish
- - Pedro Garcia - [GitLab](https://gitlab.com/pedgarrod), [Crowdin](https://crowdin.com/profile/breaking_pitt)
+ - Pedro Garcia - [GitLab](https://gitlab.com/pedgarrod), [CrowdIn](https://crowdin.com/profile/breaking_pitt)
- Turkish
- - Ali Demirtaş - [GitLab](https://gitlab.com/alidemirtas), [Crowdin](https://crowdin.com/profile/alidemirtas)
+ - Ali Demirtaş - [GitLab](https://gitlab.com/alidemirtas), [CrowdIn](https://crowdin.com/profile/alidemirtas)
- Ukrainian
- - Volodymyr Sobotovych - [GitLab](https://gitlab.com/wheleph), [Crowdin](https://crowdin.com/profile/wheleph)
- - Andrew Vityuk - [GitLab](https://gitlab.com/3_1_3_u), [Crowdin](https://crowdin.com/profile/andruwa13)
+ - Volodymyr Sobotovych - [GitLab](https://gitlab.com/wheleph), [CrowdIn](https://crowdin.com/profile/wheleph)
+ - Andrew Vityuk - [GitLab](https://gitlab.com/3_1_3_u), [CrowdIn](https://crowdin.com/profile/andruwa13)
- Welsh
- Proofreaders needed.
## Become a proofreader
-> **Note:** Before requesting Proofreader permissions in Crowdin please make
+> **Note:** Before requesting Proofreader permissions in CrowdIn please make
> sure that you have a history of contributing translations to the GitLab
> project.
diff --git a/doc/development/i18n/translation.md b/doc/development/i18n/translation.md
index 683dae6ce3d..e1c02c2c9c2 100644
--- a/doc/development/i18n/translation.md
+++ b/doc/development/i18n/translation.md
@@ -1,15 +1,15 @@
# Translating GitLab
-For managing the translation process we use [Crowdin](https://crowdin.com).
+For managing the translation process we use [CrowdIn](https://crowdin.com).
-## Using Crowdin
+## Using CrowdIn
-The first step is to get familiar with Crowdin.
+The first step is to get familiar with CrowdIn.
### Sign In
To contribute translations at <https://translate.gitlab.com>
-you must create a Crowdin account.
+you must create a CrowdIn account.
You may create a new account or use any of their supported sign in services.
### Language Selections
@@ -24,7 +24,7 @@ GitLab is being translated into many languages.
The online translation editor is the easiest way to contribute translations.
-![Crowdin Editor](img/crowdin-editor.png)
+![CrowdIn Editor](img/crowdin-editor.png)
1. Strings for translation are listed in the left panel
1. Translations are entered into the central panel.
diff --git a/doc/policy/maintenance.md b/doc/policy/maintenance.md
index 4c27bc5c4fd..35cea84c129 100644
--- a/doc/policy/maintenance.md
+++ b/doc/policy/maintenance.md
@@ -156,6 +156,15 @@ To ensure these are successful:
For example: `11.11.x` -> `12.0.x` -> `12.8.x`
+### Upgrades from old versions
+
+- `8.11.x` and earlier: you might have to upgrade to `8.12.0` specifically before you can
+ upgrade to `8.17.7`. This was [reported in an issue](https://gitlab.com/gitlab-org/gitlab/-/issues/207259).
+- [CI changes prior to version 8.0](https://docs.gitlab.com/omnibus/update/README.html#updating-gitlab-ci-from-prior-540-to-version-714-via-omnibus-gitlab),
+  when GitLab CI was merged into GitLab.
+- Version-specific changes in
+ [the Omnibus documentation](https://docs.gitlab.com/omnibus/update/README.html#version-specific-changes).
+
### Example upgrade paths
Please see the table below for some examples:
@@ -165,7 +174,7 @@ Please see the table below for some examples:
| 9.4.5 | 8.13.4 | `8.13.4` -> `8.17.7` -> `9.4.5` | `8.17.7` is the last version in version `8` |
| 10.1.4 | 8.13.4 | `8.13.4 -> 8.17.7 -> 9.5.10 -> 10.1.4` | `8.17.7` is the last version in version `8`, `9.5.10` is the last version in version `9` |
| 11.3.4 | 8.13.4 | `8.13.4` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> `11.3.4` | `8.17.7` is the last version in version `8`, `9.5.10` is the last version in version `9`, `10.8.7` is the last version in version `10` |
-| 12.5.8 | 11.3.4 | `11.3.4` -> `11.11.8` -> `12.0.12` -> `12.5.8` | `11.11.8` is the last version in version `11`. `12.0.x` [is a required step](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23211#note_272842444). |
+| 12.5.10 | 11.3.4 | `11.3.4` -> `11.11.8` -> `12.0.12` -> `12.5.10` | `11.11.8` is the last version in version `11`. `12.0.x` [is a required step](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23211#note_272842444). |
| 12.8.5 | 9.2.6 | `9.2.6` -> `9.5.10` -> `10.8.7` -> `11.11.8` -> `12.0.12` -> `12.8.5` | Four intermediate versions are required: the final 9.5, 10.8, 11.11 releases, plus 12.0. |
NOTE: **Note:**
@@ -173,6 +182,21 @@ Instructions for installing a specific version of GitLab or downloading the pack
## More information
+Check [our release posts](https://about.gitlab.com/releases/categories/releases/).
+
+Each month, we publish either a major or minor release of GitLab. At the end
+of those release posts there are three sections to look for: deprecations, important notes,
+and upgrade barometer. These will draw your attention to:
+
+- Steps you need to perform as part of an upgrade.
+  For example, [8.12](https://about.gitlab.com/releases/2016/09/22/gitlab-8-12-released/#upgrade-barometer)
+ required the Elasticsearch index to be recreated. Any older version of GitLab upgrading to 8.12 or higher
+ would require this.
+- Changes to the versions of software we support, such as
+ [ceasing support for IE11 in GitLab 13](https://about.gitlab.com/releases/2020/03/22/gitlab-12-9-released/#ending-support-for-internet-explorer-11).
+
+You should check all the major and minor versions you're passing over.
+
More information about the release procedures can be found in our
[release documentation](https://gitlab.com/gitlab-org/release/docs). You may also want to read our
[Responsible Disclosure Policy](https://about.gitlab.com/security/disclosure/).
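To make the upgrade-path guidance above concrete, here is a minimal sketch of walking the `11.3.4` -> `12.5.10` path from the table on an Omnibus installation. It assumes a Debian/Ubuntu host, the `gitlab-ee=<version>-ee.0` package naming, and the documented background-migration check; treat the exact pins and commands as illustrative, not prescriptive.

```shell
# Illustrative only: upgrade one required stop at a time (11.11.8 -> 12.0.12 -> 12.5.10).
sudo apt-get update

for stop in 11.11.8-ee.0 12.0.12-ee.0 12.5.10-ee.0; do
  sudo apt-get install -y "gitlab-ee=${stop}"

  # Wait until background migrations have drained before moving to the next stop
  # (this should print 0 before you continue):
  sudo gitlab-rails runner 'puts Gitlab::BackgroundMigration.remaining'
done
```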
diff --git a/doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_10.png b/doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_10.png
new file mode 100644
index 00000000000..466552f746e
--- /dev/null
+++ b/doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_10.png
Binary files differ
diff --git a/doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_8.png b/doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_8.png
deleted file mode 100644
index 5fc54927de7..00000000000
--- a/doc/user/compliance/compliance_dashboard/img/compliance_dashboard_v12_8.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/compliance/compliance_dashboard/index.md b/doc/user/compliance/compliance_dashboard/index.md
index afe3ce185e6..25feb6e56bc 100644
--- a/doc/user/compliance/compliance_dashboard/index.md
+++ b/doc/user/compliance/compliance_dashboard/index.md
@@ -14,7 +14,7 @@ for merging into production.
To access the Compliance Dashboard for a group, navigate to **{shield}** **Security & Compliance > Compliance** on the group's menu.
-![Compliance Dashboard](img/compliance_dashboard_v12_8.png)
+![Compliance Dashboard](img/compliance_dashboard_v12_10.png)
## Use cases
@@ -24,6 +24,7 @@ You can use the dashboard to:
- Get an overview of the latest Merge Request for each project.
- See if Merge Requests were approved and by whom.
+- See the latest [CI Pipeline](../../../ci/pipelines/index.md) result for each Merge Request.
## Permissions
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 7349ca34ae5..4889e7ce112 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -5937,6 +5937,9 @@ msgstr ""
msgid "Create project label"
msgstr ""
+msgid "Create requirement"
+msgstr ""
+
msgid "Create wildcard: %{searchTerm}"
msgstr ""
@@ -6852,6 +6855,9 @@ msgstr ""
msgid "Describe the goal of the changes and what reviewers should be aware of."
msgstr ""
+msgid "Describe the requirement here"
+msgstr ""
+
msgid "Description"
msgstr ""
@@ -12698,6 +12704,27 @@ msgstr ""
msgid "Metrics for environment"
msgstr ""
+msgid "Metrics::Dashboard::Annotation|Annotation can't belong to both a cluster and an environment at the same time"
+msgstr ""
+
+msgid "Metrics::Dashboard::Annotation|Annotation has not been deleted"
+msgstr ""
+
+msgid "Metrics::Dashboard::Annotation|Annotation must belong to a cluster or an environment"
+msgstr ""
+
+msgid "Metrics::Dashboard::Annotation|Dashboard with requested path can not be found"
+msgstr ""
+
+msgid "Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected cluster"
+msgstr ""
+
+msgid "Metrics::Dashboard::Annotation|You are not authorized to create annotation for selected environment"
+msgstr ""
+
+msgid "Metrics::Dashboard::Annotation|You are not authorized to delete this annotation"
+msgstr ""
+
msgid "Metrics|Add metric"
msgstr ""
@@ -16988,6 +17015,9 @@ msgstr ""
msgid "Require users to prove ownership of custom domains"
msgstr ""
+msgid "Requirement"
+msgstr ""
+
msgid "Requirements"
msgstr ""
@@ -18647,6 +18677,9 @@ msgstr ""
msgid "Something went wrong while closing the %{issuable}. Please try again later"
msgstr ""
+msgid "Something went wrong while creating a requirement."
+msgstr ""
+
msgid "Something went wrong while deleting description changes. Please try again."
msgstr ""
@@ -18722,6 +18755,9 @@ msgstr ""
msgid "Something went wrong while stopping this environment. Please try again."
msgstr ""
+msgid "Something went wrong while updating a requirement."
+msgstr ""
+
msgid "Something went wrong while updating your list settings"
msgstr ""
diff --git a/scripts/review_apps/base-config.yaml b/scripts/review_apps/base-config.yaml
index 9b9fcdb85c4..a34bb23a5e6 100644
--- a/scripts/review_apps/base-config.yaml
+++ b/scripts/review_apps/base-config.yaml
@@ -146,8 +146,7 @@ prometheus:
install: false
redis:
metrics:
- resources:
- enabled: false
+ enabled: false
resources:
requests:
cpu: 100m
diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh
index ca69fa6c02b..915b4f5050b 100755
--- a/scripts/review_apps/review-apps.sh
+++ b/scripts/review_apps/review-apps.sh
@@ -7,7 +7,7 @@ function deploy_exists() {
echoinfo "Checking if ${release} exists in the ${namespace} namespace..." true
- helm status --tiller-namespace "${namespace}" "${release}" >/dev/null 2>&1
+ helm status --namespace "${namespace}" "${release}" >/dev/null 2>&1
deploy_exists=$?
echoinfo "Deployment status for ${release} is ${deploy_exists}"
@@ -20,15 +20,15 @@ function previous_deploy_failed() {
echoinfo "Checking for previous deployment of ${release}" true
- helm status --tiller-namespace "${namespace}" "${release}" >/dev/null 2>&1
+ helm status --namespace "${namespace}" "${release}" >/dev/null 2>&1
local status=$?
# if `status` is `0`, deployment exists, has a status
if [ $status -eq 0 ]; then
echoinfo "Previous deployment found, checking status..."
- deployment_status=$(helm status --tiller-namespace "${namespace}" "${release}" | grep ^STATUS | cut -d' ' -f2)
+ deployment_status=$(helm status --namespace "${namespace}" "${release}" | grep ^STATUS | cut -d' ' -f2)
echoinfo "Previous deployment state: ${deployment_status}"
- if [[ "$deployment_status" == "FAILED" || "$deployment_status" == "PENDING_UPGRADE" || "$deployment_status" == "PENDING_INSTALL" ]]; then
+ if [[ "$deployment_status" == "failed" || "$deployment_status" == "pending-upgrade" || "$deployment_status" == "pending-install" ]]; then
status=0;
else
status=1;
@@ -58,7 +58,7 @@ function helm_delete_release() {
echoinfo "Deleting Helm release '${release}'..." true
- helm delete --tiller-namespace "${namespace}" --purge "${release}"
+ helm uninstall --namespace "${namespace}" "${release}"
}
function kubectl_cleanup_release() {
@@ -95,6 +95,36 @@ function delete_failed_release() {
fi
}
+function helm2_deploy_exists() {
+ local namespace="${1}"
+ local release="${2}"
+ local deploy_exists
+
+ echoinfo "Checking if Helm 2 ${release} exists in the ${namespace} namespace..." true
+
+ kubectl get cm -l OWNER=TILLER -n ${namespace} | grep ${release} 2>&1
+ deploy_exists=$?
+
+ echoinfo "Helm 2 release for ${release} is ${deploy_exists}"
+ return $deploy_exists
+}
+
+function delete_helm2_release() {
+ local namespace="${KUBE_NAMESPACE}"
+ local release="${CI_ENVIRONMENT_SLUG}"
+
+ if [ -z "${release}" ]; then
+ echoerr "No release given, aborting the delete!"
+ return
+ fi
+
+ if ! helm2_deploy_exists "${namespace}" "${release}"; then
+ echoinfo "No Review App with ${release} is currently deployed by Helm 2."
+ else
+ echoinfo "Cleaning up ${release} installed by Helm 2"
+ kubectl_cleanup_release "${namespace}" "${release}"
+ fi
+}
function get_pod() {
local namespace="${KUBE_NAMESPACE}"
@@ -148,54 +178,22 @@ function ensure_namespace() {
kubectl describe namespace "${namespace}" || kubectl create namespace "${namespace}"
}
-function install_tiller() {
- local namespace="${KUBE_NAMESPACE}"
-
- echoinfo "Checking deployment/tiller-deploy status in the ${namespace} namespace..." true
-
- echoinfo "Initiating the Helm client..."
- helm init --client-only
-
- # Set toleration for Tiller to be installed on a specific node pool
- helm init \
- --tiller-namespace "${namespace}" \
- --wait \
- --upgrade \
- --force-upgrade \
- --node-selectors "app=helm" \
- --replicas 3 \
- --override "spec.template.spec.tolerations[0].key"="dedicated" \
- --override "spec.template.spec.tolerations[0].operator"="Equal" \
- --override "spec.template.spec.tolerations[0].value"="helm" \
- --override "spec.template.spec.tolerations[0].effect"="NoSchedule"
-
- kubectl rollout status --namespace "${namespace}" --watch "deployment/tiller-deploy"
-
- if ! helm version --tiller-namespace "${namespace}" --debug; then
- echo "Failed to init Tiller."
- return 1
- fi
-}
-
function install_external_dns() {
local namespace="${KUBE_NAMESPACE}"
- local release="dns-gitlab-review-app"
+ local release="dns-gitlab-review-app-helm3"
local domain
domain=$(echo "${REVIEW_APPS_DOMAIN}" | awk -F. '{printf "%s.%s", $(NF-1), $NF}')
echoinfo "Installing external DNS for domain ${domain}..." true
if ! deploy_exists "${namespace}" "${release}" || previous_deploy_failed "${namespace}" "${release}" ; then
echoinfo "Installing external-dns Helm chart"
- helm repo update --tiller-namespace "${namespace}"
+ helm repo add bitnami https://charts.bitnami.com/bitnami
+ helm repo update
# Default requested: CPU => 0, memory => 0
- # Chart > 2.6.1 has a problem with AWS so we're pinning it for now.
- # See https://gitlab.com/gitlab-org/gitlab/issues/37269 and https://github.com/kubernetes-sigs/external-dns/issues/1262
- helm install stable/external-dns \
- --tiller-namespace "${namespace}" \
+ helm install "${release}" bitnami/external-dns \
--namespace "${namespace}" \
- --version '2.6.1' \
- --name "${release}" \
+ --version '2.13.3' \
--set provider="aws" \
--set aws.credentials.secretKey="${REVIEW_APPS_AWS_SECRET_KEY}" \
--set aws.credentials.accessKey="${REVIEW_APPS_AWS_ACCESS_KEY}" \
@@ -289,11 +287,10 @@ function deploy() {
HELM_CMD=$(cat << EOF
helm upgrade \
- --tiller-namespace="${namespace}" \
--namespace="${namespace}" \
--install \
--wait \
- --timeout 900 \
+ --timeout 900s \
--set ci.branch="${CI_COMMIT_REF_NAME}" \
--set ci.commit.sha="${CI_COMMIT_SHORT_SHA}" \
--set ci.job.url="${CI_JOB_URL}" \
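Taken together, the `review-apps.sh` changes above are a Helm 2 to Helm 3 migration: Tiller and `install_tiller` go away, `--tiller-namespace` becomes `--namespace`, `helm delete --purge` becomes `helm uninstall`, release status strings turn lowercase, and timeouts need a unit suffix. A minimal sketch of the mapping, with `NS`, `RELEASE`, and `CHART` as hypothetical placeholders:

```shell
#!/usr/bin/env bash
# Sketch of the Helm 2 -> Helm 3 command mapping used in the script above.
NS="review-apps-ce" RELEASE="review-example" CHART="gitlab/gitlab"   # placeholders, not values from this commit

# Helm 2: helm status --tiller-namespace "$NS" "$RELEASE"
helm status --namespace "$NS" "$RELEASE"

# Helm 2: helm delete --tiller-namespace "$NS" --purge "$RELEASE"
helm uninstall --namespace "$NS" "$RELEASE"

# Helm 2 accepted --timeout 900 (seconds implied); Helm 3 requires a unit suffix:
helm upgrade --install --wait --timeout 900s --namespace "$NS" "$RELEASE" "$CHART"

# Helm 3 has no Tiller, so leftover Helm 2 releases are detected through Tiller's ConfigMaps:
kubectl get cm -l OWNER=TILLER -n "$NS" | grep "$RELEASE"

# Note: Helm 3 status strings are lowercase ("failed", "pending-upgrade", "pending-install"),
# which is why the comparison in previous_deploy_failed() changed.
```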
diff --git a/spec/factories/metrics/dashboard/annotations.rb b/spec/factories/metrics/dashboard/annotations.rb
new file mode 100644
index 00000000000..2e5c373918e
--- /dev/null
+++ b/spec/factories/metrics/dashboard/annotations.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :metrics_dashboard_annotation, class: '::Metrics::Dashboard::Annotation' do
+    description { "Dashboard annotation description" }
+    dashboard_path { "custom_dashboard.yml" }
+ starting_at { Time.current }
+ environment
+
+ trait :with_cluster do
+ cluster
+ environment { nil }
+ end
+ end
+end
diff --git a/spec/features/issues/user_sorts_issue_comments_spec.rb b/spec/features/issues/user_sorts_issue_comments_spec.rb
new file mode 100644
index 00000000000..e1c0acc32f1
--- /dev/null
+++ b/spec/features/issues/user_sorts_issue_comments_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Comment sort direction' do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:comment_1) { create(:note_on_issue, noteable: issue, project: project, note: 'written first') }
+ let_it_be(:comment_2) { create(:note_on_issue, noteable: issue, project: project, note: 'written second') }
+
+ context 'on issue page', :js do
+ before do
+ visit project_issue_path(project, issue)
+ end
+
+ it 'saves sort order' do
+ # open dropdown, and select 'Newest first'
+ page.within('.issuable-details') do
+ click_button('Oldest first')
+ click_button('Newest first')
+ end
+
+ expect(first_comment).to have_content(comment_2.note)
+ expect(last_comment).to have_content(comment_1.note)
+
+ visit project_issue_path(project, issue)
+ wait_for_requests
+
+ expect(first_comment).to have_content(comment_2.note)
+ expect(last_comment).to have_content(comment_1.note)
+ end
+ end
+
+ def all_comments
+ all('.timeline > .note.timeline-entry')
+ end
+
+ def first_comment
+ all_comments.first
+ end
+
+ def last_comment
+ all_comments.last
+ end
+end
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 84b74ef659e..f2478a583dc 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -65,7 +65,7 @@ describe('Time series component', () => {
store = createStore();
store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
metricsDashboardPayload,
);
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index f2c3b199481..f0b510a01f4 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -202,7 +202,7 @@ describe('Dashboard', () => {
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
metricsDashboardPayload,
);
wrapper.vm.$store.commit(
diff --git a/spec/frontend/monitoring/init_utils.js b/spec/frontend/monitoring/init_utils.js
index 36c654ba7b3..55b6199fdfc 100644
--- a/spec/frontend/monitoring/init_utils.js
+++ b/spec/frontend/monitoring/init_utils.js
@@ -32,7 +32,7 @@ export const propsData = {
export const setupComponentStore = wrapper => {
wrapper.vm.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
metricsDashboardPayload,
);
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index 9f0b4d16fc1..7c559aed2c5 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -5,13 +5,13 @@ import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import * as commonUtils from '~/lib/utils/common_utils';
import createFlash from '~/flash';
+import { defaultTimeRange } from '~/vue_shared/constants';
import store from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
import {
fetchDashboard,
receiveMetricsDashboardSuccess,
- receiveMetricsDashboardFailure,
fetchDeploymentsData,
fetchEnvironmentsData,
fetchPrometheusMetrics,
@@ -77,42 +77,40 @@ describe('Monitoring store actions', () => {
});
describe('fetchDeploymentsData', () => {
- it('commits RECEIVE_DEPLOYMENTS_DATA_SUCCESS on error', done => {
- const dispatch = jest.fn();
+ it('dispatches receiveDeploymentsDataSuccess on success', () => {
const { state } = store;
state.deploymentsEndpoint = '/success';
mock.onGet(state.deploymentsEndpoint).reply(200, {
deployments: deploymentData,
});
- fetchDeploymentsData({
+
+ return testAction(
+ fetchDeploymentsData,
+ null,
state,
- dispatch,
- })
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataSuccess', deploymentData);
- done();
- })
- .catch(done.fail);
+ [],
+ [{ type: 'receiveDeploymentsDataSuccess', payload: deploymentData }],
+ );
});
- it('commits RECEIVE_DEPLOYMENTS_DATA_FAILURE on error', done => {
- const dispatch = jest.fn();
+ it('dispatches receiveDeploymentsDataFailure on error', () => {
const { state } = store;
state.deploymentsEndpoint = '/error';
mock.onGet(state.deploymentsEndpoint).reply(500);
- fetchDeploymentsData({
+
+ return testAction(
+ fetchDeploymentsData,
+ null,
state,
- dispatch,
- })
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataFailure');
- done();
- })
- .catch(done.fail);
+ [],
+ [{ type: 'receiveDeploymentsDataFailure' }],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ },
+ );
});
});
describe('fetchEnvironmentsData', () => {
- const dispatch = jest.fn();
const { state } = store;
state.projectPath = 'gitlab-org/gitlab-test';
@@ -164,15 +162,19 @@ describe('Monitoring store actions', () => {
state.environmentsSearchTerm = searchTerm;
mockMutate.mockReturnValue(Promise.resolve());
- return fetchEnvironmentsData({
+ return testAction(
+ fetchEnvironmentsData,
+ null,
state,
- dispatch,
- }).then(() => {
- expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
- });
+ [],
+ [{ type: 'requestEnvironmentsData' }, { type: 'receiveEnvironmentsDataFailure' }],
+ () => {
+ expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
+ },
+ );
});
- it('commits RECEIVE_ENVIRONMENTS_DATA_SUCCESS on success', () => {
+ it('dispatches receiveEnvironmentsDataSuccess on success', () => {
jest.spyOn(gqClient, 'mutate').mockReturnValue(
Promise.resolve({
data: {
@@ -185,26 +187,31 @@ describe('Monitoring store actions', () => {
}),
);
- return fetchEnvironmentsData({
+ return testAction(
+ fetchEnvironmentsData,
+ null,
state,
- dispatch,
- }).then(() => {
- expect(dispatch).toHaveBeenCalledWith(
- 'receiveEnvironmentsDataSuccess',
- parseEnvironmentsResponse(environmentData, state.projectPath),
- );
- });
+ [],
+ [
+ { type: 'requestEnvironmentsData' },
+ {
+ type: 'receiveEnvironmentsDataSuccess',
+ payload: parseEnvironmentsResponse(environmentData, state.projectPath),
+ },
+ ],
+ );
});
- it('commits RECEIVE_ENVIRONMENTS_DATA_FAILURE on error', () => {
+ it('dispatches receiveEnvironmentsDataFailure on error', () => {
jest.spyOn(gqClient, 'mutate').mockReturnValue(Promise.reject());
- return fetchEnvironmentsData({
+ return testAction(
+ fetchEnvironmentsData,
+ null,
state,
- dispatch,
- }).then(() => {
- expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataFailure');
- });
+ [],
+ [{ type: 'requestEnvironmentsData' }, { type: 'receiveEnvironmentsDataFailure' }],
+ );
});
});
@@ -266,27 +273,24 @@ describe('Monitoring store actions', () => {
state = storeState();
state.dashboardEndpoint = '/dashboard';
});
- it('on success, dispatches receive and success actions', done => {
- const params = {};
+
+ it('on success, dispatches receive and success actions', () => {
document.body.dataset.page = 'projects:environments:metrics';
mock.onGet(state.dashboardEndpoint).reply(200, response);
- fetchDashboard(
- {
- state,
- commit,
- dispatch,
- },
- params,
- )
- .then(() => {
- expect(dispatch).toHaveBeenCalledWith('requestMetricsDashboard');
- expect(dispatch).toHaveBeenCalledWith('receiveMetricsDashboardSuccess', {
- response,
- params,
- });
- done();
- })
- .catch(done.fail);
+
+ return testAction(
+ fetchDashboard,
+ null,
+ state,
+ [],
+ [
+ { type: 'requestMetricsDashboard' },
+ {
+ type: 'receiveMetricsDashboardSuccess',
+ payload: { response },
+ },
+ ],
+ );
});
describe('on failure', () => {
@@ -299,7 +303,7 @@ describe('Monitoring store actions', () => {
};
});
- it('dispatches a failure action', done => {
+ it('dispatches a failure', done => {
result()
.then(() => {
expect(commit).toHaveBeenCalledWith(
@@ -351,31 +355,22 @@ describe('Monitoring store actions', () => {
let commit;
let dispatch;
let state;
+
beforeEach(() => {
commit = jest.fn();
dispatch = jest.fn();
state = storeState();
});
- it('stores groups ', () => {
- const params = {};
+
+ it('stores groups', () => {
const response = metricsDashboardResponse;
- receiveMetricsDashboardSuccess(
- {
- state,
- commit,
- dispatch,
- },
- {
- response,
- params,
- },
- );
+ receiveMetricsDashboardSuccess({ state, commit, dispatch }, { response });
expect(commit).toHaveBeenCalledWith(
- types.RECEIVE_METRICS_DATA_SUCCESS,
+ types.RECEIVE_METRICS_DASHBOARD_SUCCESS,
metricsDashboardResponse.dashboard,
);
- expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics', params);
+ expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics');
});
it('sets the dashboards loaded from the repository', () => {
const params = {};
@@ -395,29 +390,7 @@ describe('Monitoring store actions', () => {
expect(commit).toHaveBeenCalledWith(types.SET_ALL_DASHBOARDS, dashboardGitResponse);
});
});
- describe('receiveMetricsDashboardFailure', () => {
- let commit;
- beforeEach(() => {
- commit = jest.fn();
- });
- it('commits failure action', () => {
- receiveMetricsDashboardFailure({
- commit,
- });
- expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, undefined);
- });
- it('commits failure action with error', () => {
- receiveMetricsDashboardFailure(
- {
- commit,
- },
- 'uh-oh',
- );
- expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, 'uh-oh');
- });
- });
describe('fetchPrometheusMetrics', () => {
- const params = {};
let commit;
let dispatch;
let state;
@@ -427,13 +400,15 @@ describe('Monitoring store actions', () => {
commit = jest.fn();
dispatch = jest.fn();
state = storeState();
+
+ state.timeRange = defaultTimeRange;
});
it('commits empty state when state.groups is empty', done => {
const getters = {
metricsWithData: () => [],
};
- fetchPrometheusMetrics({ state, commit, dispatch, getters }, params)
+ fetchPrometheusMetrics({ state, commit, dispatch, getters })
.then(() => {
expect(Tracking.event).toHaveBeenCalledWith(
document.body.dataset.page,
@@ -444,7 +419,9 @@ describe('Monitoring store actions', () => {
value: 0,
},
);
- expect(dispatch).not.toHaveBeenCalled();
+ expect(dispatch).toHaveBeenCalledTimes(1);
+ expect(dispatch).toHaveBeenCalledWith('fetchDeploymentsData');
+
expect(createFlash).not.toHaveBeenCalled();
done();
})
@@ -460,11 +437,15 @@ describe('Monitoring store actions', () => {
metricsWithData: () => [metric.id],
};
- fetchPrometheusMetrics({ state, commit, dispatch, getters }, params)
+ fetchPrometheusMetrics({ state, commit, dispatch, getters })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
metric,
- params,
+ defaultQueryParams: {
+ start_time: expect.any(String),
+ end_time: expect.any(String),
+ step: expect.any(Number),
+ },
});
expect(Tracking.event).toHaveBeenCalledWith(
@@ -487,16 +468,22 @@ describe('Monitoring store actions', () => {
state.dashboard.panelGroups = metricsDashboardViewModel.panelGroups;
const metric = state.dashboard.panelGroups[0].panels[0].metrics[0];
+ dispatch.mockResolvedValueOnce(); // fetchDeploymentsData
// Mock having one out of four metrics failing
dispatch.mockRejectedValueOnce(new Error('Error fetching this metric'));
dispatch.mockResolvedValue();
- fetchPrometheusMetrics({ state, commit, dispatch }, params)
+ fetchPrometheusMetrics({ state, commit, dispatch })
.then(() => {
- expect(dispatch).toHaveBeenCalledTimes(9); // one per metric
+ expect(dispatch).toHaveBeenCalledTimes(10); // one per metric plus 1 for deployments
+ expect(dispatch).toHaveBeenCalledWith('fetchDeploymentsData');
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
metric,
- params,
+ defaultQueryParams: {
+ start_time: expect.any(String),
+ end_time: expect.any(String),
+ step: expect.any(Number),
+ },
});
expect(createFlash).toHaveBeenCalledTimes(1);
@@ -508,9 +495,10 @@ describe('Monitoring store actions', () => {
});
});
describe('fetchPrometheusMetric', () => {
- const params = {
+ const defaultQueryParams = {
start_time: '2019-08-06T12:40:02.184Z',
end_time: '2019-08-06T20:40:02.184Z',
+ step: 60,
};
let metric;
let state;
@@ -532,7 +520,7 @@ describe('Monitoring store actions', () => {
testAction(
fetchPrometheusMetric,
- { metric, params },
+ { metric, defaultQueryParams },
state,
[
{
@@ -569,7 +557,7 @@ describe('Monitoring store actions', () => {
testAction(
fetchPrometheusMetric,
- { metric, params },
+ { metric, defaultQueryParams },
state,
[
{
@@ -611,7 +599,7 @@ describe('Monitoring store actions', () => {
testAction(
fetchPrometheusMetric,
- { metric, params },
+ { metric, defaultQueryParams },
state,
[
{
@@ -646,7 +634,7 @@ describe('Monitoring store actions', () => {
testAction(
fetchPrometheusMetric,
- { metric, params },
+ { metric, defaultQueryParams },
state,
[
{
@@ -682,7 +670,7 @@ describe('Monitoring store actions', () => {
testAction(
fetchPrometheusMetric,
- { metric, params },
+ { metric, defaultQueryParams },
state,
[
{
diff --git a/spec/frontend/monitoring/store/getters_spec.js b/spec/frontend/monitoring/store/getters_spec.js
index bc62ada1034..40341d32cf5 100644
--- a/spec/frontend/monitoring/store/getters_spec.js
+++ b/spec/frontend/monitoring/store/getters_spec.js
@@ -51,7 +51,7 @@ describe('Monitoring store Getters', () => {
setupState({
dashboard: { panelGroups: [] },
});
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
groups = state.dashboard.panelGroups;
});
@@ -60,21 +60,21 @@ describe('Monitoring store Getters', () => {
});
it('on an empty metric with no result, returns NO_DATA', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
expect(getMetricStates()).toEqual([metricStates.NO_DATA]);
});
it('on a metric with a result, returns OK', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
expect(getMetricStates()).toEqual([metricStates.OK]);
});
it('on a metric with an error, returns an error', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
metricId: groups[0].panels[0].metrics[0].metricId,
});
@@ -83,7 +83,7 @@ describe('Monitoring store Getters', () => {
});
it('on multiple metrics with results, returns OK', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
@@ -94,7 +94,7 @@ describe('Monitoring store Getters', () => {
expect(getMetricStates(state.dashboard.panelGroups[2].key)).toEqual([]);
});
it('on multiple metrics errors', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
metricId: groups[0].panels[0].metrics[0].metricId,
@@ -113,7 +113,7 @@ describe('Monitoring store Getters', () => {
});
it('on multiple metrics with errors', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
      // A success in 1 group
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
@@ -175,27 +175,27 @@ describe('Monitoring store Getters', () => {
});
it('no loaded metric returns empty', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
expect(metricsWithData()).toEqual([]);
});
it('an empty metric, returns empty', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
expect(metricsWithData()).toEqual([]);
});
it('a metric with results, it returns a metric', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
expect(metricsWithData()).toEqual([mockedQueryResultFixture.metricId]);
});
it('multiple metrics with results, it return multiple metrics', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
@@ -206,7 +206,7 @@ describe('Monitoring store Getters', () => {
});
it('multiple metrics with results, it returns metrics filtered by group', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
@@ -291,7 +291,7 @@ describe('Monitoring store Getters', () => {
});
it('return no metrics when dashboard is not persisted', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, mockData);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, mockData);
metricsSavedToDb = getters.metricsSavedToDb(state);
expect(metricsSavedToDb).toEqual([]);
@@ -304,7 +304,7 @@ describe('Monitoring store Getters', () => {
metric.metric_id = id;
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, mockData);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, mockData);
metricsSavedToDb = getters.metricsSavedToDb(state);
expect(metricsSavedToDb).toEqual([`${id}_${metric.id}`]);
@@ -321,7 +321,7 @@ describe('Monitoring store Getters', () => {
metric1.metric_id = id1;
metric2.metric_id = id2;
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, mockData);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, mockData);
metricsSavedToDb = getters.metricsSavedToDb(state);
expect(metricsSavedToDb).toEqual([`${id1}_${metric1.id}`, `${id2}_${metric2.id}`]);
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 6f1a81782f3..21a27a443af 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -20,7 +20,7 @@ describe('Monitoring mutations', () => {
stateCopy = state();
});
- describe('RECEIVE_METRICS_DATA_SUCCESS', () => {
+ describe('RECEIVE_METRICS_DASHBOARD_SUCCESS', () => {
let payload;
const getGroups = () => stateCopy.dashboard.panelGroups;
@@ -29,7 +29,7 @@ describe('Monitoring mutations', () => {
payload = metricsDashboardPayload;
});
it('adds a key to the group', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](stateCopy, payload);
const groups = getGroups();
expect(groups[0].key).toBe('system-metrics-kubernetes-0');
@@ -37,7 +37,7 @@ describe('Monitoring mutations', () => {
expect(groups[2].key).toBe('response-metrics-nginx-ingress-2');
});
it('normalizes values', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](stateCopy, payload);
const expectedLabel = 'Pod average (MB)';
const { label, queryRange } = getGroups()[0].panels[2].metrics[0];
@@ -45,7 +45,7 @@ describe('Monitoring mutations', () => {
expect(queryRange.length).toBeGreaterThan(0);
});
it('contains six groups, with panels with a metric each', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](stateCopy, payload);
const groups = getGroups();
@@ -61,7 +61,7 @@ describe('Monitoring mutations', () => {
expect(groups[1].panels[0].metrics).toHaveLength(1);
});
it('assigns metrics a metric id', () => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](stateCopy, payload);
const groups = getGroups();
@@ -195,7 +195,7 @@ describe('Monitoring mutations', () => {
describe('REQUEST_METRIC_RESULT', () => {
beforeEach(() => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboard);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](stateCopy, dashboard);
});
it('stores a loading state on a metric', () => {
expect(stateCopy.showEmptyState).toBe(true);
@@ -218,7 +218,7 @@ describe('Monitoring mutations', () => {
describe('RECEIVE_METRIC_RESULT_SUCCESS', () => {
beforeEach(() => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboard);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](stateCopy, dashboard);
});
it('clears empty state', () => {
expect(stateCopy.showEmptyState).toBe(true);
@@ -251,7 +251,7 @@ describe('Monitoring mutations', () => {
describe('RECEIVE_METRIC_RESULT_FAILURE', () => {
beforeEach(() => {
- mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboard);
+ mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](stateCopy, dashboard);
});
it('maintains the loading state when a metric fails', () => {
expect(stateCopy.showEmptyState).toBe(true);
diff --git a/spec/frontend/notes/components/sort_discussion_spec.js b/spec/frontend/notes/components/sort_discussion_spec.js
index 724c77eee3d..575f1057db2 100644
--- a/spec/frontend/notes/components/sort_discussion_spec.js
+++ b/spec/frontend/notes/components/sort_discussion_spec.js
@@ -1,6 +1,7 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import SortDiscussion from '~/notes/components/sort_discussion.vue';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import createStore from '~/notes/stores';
import { ASC, DESC } from '~/notes/constants';
import Tracking from '~/tracking';
@@ -21,6 +22,8 @@ describe('Sort Discussion component', () => {
});
};
+ const findLocalStorageSync = () => wrapper.find(LocalStorageSync);
+
beforeEach(() => {
store = createStore();
jest.spyOn(Tracking, 'event');
@@ -31,6 +34,22 @@ describe('Sort Discussion component', () => {
wrapper = null;
});
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('has local storage sync', () => {
+ expect(findLocalStorageSync().exists()).toBe(true);
+ });
+
+ it('calls setDiscussionSortDirection when update is emitted', () => {
+ findLocalStorageSync().vm.$emit('input', ASC);
+
+ expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', ASC);
+ });
+ });
+
describe('when asc', () => {
describe('when the dropdown is clicked', () => {
it('calls the right actions', () => {
diff --git a/spec/frontend/vue_shared/components/local_storage_sync_spec.js b/spec/frontend/vue_shared/components/local_storage_sync_spec.js
new file mode 100644
index 00000000000..5470171a21e
--- /dev/null
+++ b/spec/frontend/vue_shared/components/local_storage_sync_spec.js
@@ -0,0 +1,128 @@
+import { shallowMount } from '@vue/test-utils';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
+
+describe('Local Storage Sync', () => {
+ let wrapper;
+
+ const createComponent = ({ props = {}, slots = {} } = {}) => {
+ wrapper = shallowMount(LocalStorageSync, {
+ propsData: props,
+ slots,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ localStorage.clear();
+ });
+
+ it('is a renderless component', () => {
+ const html = '<div class="test-slot"></div>';
+ createComponent({
+ props: {
+ storageKey: 'key',
+ },
+ slots: {
+ default: html,
+ },
+ });
+
+ expect(wrapper.html()).toBe(html);
+ });
+
+ describe('localStorage empty', () => {
+ const storageKey = 'issue_list_order';
+
+ it('does not emit input event', () => {
+ createComponent({
+ props: {
+ storageKey,
+ value: 'ascending',
+ },
+ });
+
+ expect(wrapper.emitted('input')).toBeFalsy();
+ });
+
+ it('saves updated value to localStorage', () => {
+ createComponent({
+ props: {
+ storageKey,
+ value: 'ascending',
+ },
+ });
+
+ const newValue = 'descending';
+ wrapper.setProps({
+ value: newValue,
+ });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(localStorage.getItem(storageKey)).toBe(newValue);
+ });
+ });
+
+ it('does not save default value', () => {
+ const value = 'ascending';
+
+ createComponent({
+ props: {
+ storageKey,
+ value,
+ },
+ });
+
+ expect(localStorage.getItem(storageKey)).toBe(null);
+ });
+ });
+
+ describe('localStorage has saved value', () => {
+ const storageKey = 'issue_list_order_by';
+ const savedValue = 'last_updated';
+
+ beforeEach(() => {
+ localStorage.setItem(storageKey, savedValue);
+ });
+
+ it('emits input event with saved value', () => {
+ createComponent({
+ props: {
+ storageKey,
+ value: 'ascending',
+ },
+ });
+
+ expect(wrapper.emitted('input')[0][0]).toBe(savedValue);
+ });
+
+ it('does not overwrite localStorage with prop value', () => {
+ createComponent({
+ props: {
+ storageKey,
+ value: 'created',
+ },
+ });
+
+ expect(localStorage.getItem(storageKey)).toBe(savedValue);
+ });
+
+ it('updating the value updates localStorage', () => {
+ createComponent({
+ props: {
+ storageKey,
+ value: 'created',
+ },
+ });
+
+ const newValue = 'last_updated';
+ wrapper.setProps({
+ value: newValue,
+ });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(localStorage.getItem(storageKey)).toBe(newValue);
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/components/dashboard_resize_spec.js b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
index 6a35069ccff..6455346e890 100644
--- a/spec/javascripts/monitoring/components/dashboard_resize_spec.js
+++ b/spec/javascripts/monitoring/components/dashboard_resize_spec.js
@@ -39,7 +39,7 @@ const propsData = {
function setupComponentStore(component) {
// Load 2 panel groups
component.$store.commit(
- `monitoringDashboard/${types.RECEIVE_METRICS_DATA_SUCCESS}`,
+ `monitoringDashboard/${types.RECEIVE_METRICS_DASHBOARD_SUCCESS}`,
metricsDashboardPayload,
);
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index f6c19ccc9d3..8685838fdde 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -27,6 +27,7 @@ describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to have_many(:kubernetes_namespaces) }
it { is_expected.to have_one(:cluster_project) }
it { is_expected.to have_many(:deployment_clusters) }
+ it { is_expected.to have_many(:metrics_dashboard_annotations) }
it { is_expected.to delegate_method(:status).to(:provider) }
it { is_expected.to delegate_method(:status_reason).to(:provider) }
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 896203d8669..d0305d878e3 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -17,6 +17,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
it { is_expected.to belong_to(:project).required }
it { is_expected.to have_many(:deployments) }
+ it { is_expected.to have_many(:metrics_dashboard_annotations) }
it { is_expected.to delegate_method(:stop_action).to(:last_deployment) }
it { is_expected.to delegate_method(:manual_actions).to(:last_deployment) }
diff --git a/spec/models/metrics/dashboard/annotation_spec.rb b/spec/models/metrics/dashboard/annotation_spec.rb
new file mode 100644
index 00000000000..ed3bef37a7c
--- /dev/null
+++ b/spec/models/metrics/dashboard/annotation_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboard::Annotation do
+ describe 'associations' do
+ it { is_expected.to belong_to(:environment).inverse_of(:metrics_dashboard_annotations) }
+ it { is_expected.to belong_to(:cluster).class_name('Clusters::Cluster').inverse_of(:metrics_dashboard_annotations) }
+ end
+
+ describe 'validation' do
+ it { is_expected.to validate_presence_of(:description) }
+ it { is_expected.to validate_presence_of(:dashboard_path) }
+ it { is_expected.to validate_presence_of(:starting_at) }
+ it { is_expected.to validate_length_of(:dashboard_path).is_at_most(255) }
+ it { is_expected.to validate_length_of(:panel_xid).is_at_most(255) }
+ it { is_expected.to validate_length_of(:description).is_at_most(255) }
+
+ context 'orphaned annotation' do
+ subject { build(:metrics_dashboard_annotation, environment: nil) }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error about both missing relations' do
+ subject.valid?
+
+ expect(subject.errors.full_messages).to include(/Annotation must belong to a cluster or an environment/)
+ end
+ end
+
+ context 'environments annotation' do
+ subject { build(:metrics_dashboard_annotation) }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'clusters annotation' do
+ subject { build(:metrics_dashboard_annotation, :with_cluster) }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'annotation with shared ownership' do
+      subject { build(:metrics_dashboard_annotation, :with_cluster, environment: build(:environment)) }
+
+      it 'reports error about shared ownership' do
+ subject.valid?
+
+ expect(subject.errors.full_messages).to include(/Annotation can't belong to both a cluster and an environment at the same time/)
+ end
+ end
+ end
+end
diff --git a/spec/policies/metrics/dashboard/annotation_policy_spec.rb b/spec/policies/metrics/dashboard/annotation_policy_spec.rb
new file mode 100644
index 00000000000..4dc5f4cd0b4
--- /dev/null
+++ b/spec/policies/metrics/dashboard/annotation_policy_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboard::AnnotationPolicy, :models do
+ shared_examples 'metrics dashboard annotation policy' do
+ context 'when guest' do
+ before do
+ project.add_guest(user)
+ end
+
+ it { expect(policy).to be_disallowed :read_metrics_dashboard_annotation }
+ it { expect(policy).to be_disallowed :create_metrics_dashboard_annotation }
+ it { expect(policy).to be_disallowed :update_metrics_dashboard_annotation }
+ it { expect(policy).to be_disallowed :delete_metrics_dashboard_annotation }
+ end
+
+ context 'when reporter' do
+ before do
+ project.add_reporter(user)
+ end
+
+ it { expect(policy).to be_allowed :read_metrics_dashboard_annotation }
+ it { expect(policy).to be_disallowed :create_metrics_dashboard_annotation }
+ it { expect(policy).to be_disallowed :update_metrics_dashboard_annotation }
+ it { expect(policy).to be_disallowed :delete_metrics_dashboard_annotation }
+ end
+
+ context 'when developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { expect(policy).to be_allowed :read_metrics_dashboard_annotation }
+ it { expect(policy).to be_allowed :create_metrics_dashboard_annotation }
+ it { expect(policy).to be_allowed :update_metrics_dashboard_annotation }
+ it { expect(policy).to be_allowed :delete_metrics_dashboard_annotation }
+ end
+
+ context 'when maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it { expect(policy).to be_allowed :read_metrics_dashboard_annotation }
+ it { expect(policy).to be_allowed :create_metrics_dashboard_annotation }
+ it { expect(policy).to be_allowed :update_metrics_dashboard_annotation }
+ it { expect(policy).to be_allowed :delete_metrics_dashboard_annotation }
+ end
+ end
+
+ describe 'rules' do
+ context 'environments annotation' do
+ let(:annotation) { create(:metrics_dashboard_annotation, environment: environment) }
+ let(:environment) { create(:environment) }
+ let!(:project) { environment.project }
+ let(:user) { create(:user) }
+ let(:policy) { described_class.new(user, annotation) }
+
+ it_behaves_like 'metrics dashboard annotation policy'
+ end
+
+ context 'cluster annotation' do
+ let(:annotation) { create(:metrics_dashboard_annotation, environment: nil, cluster: cluster) }
+ let(:cluster) { create(:cluster, :project) }
+ let(:project) { cluster.project }
+ let(:user) { create(:user) }
+ let(:policy) { described_class.new(user, annotation) }
+
+ it_behaves_like 'metrics dashboard annotation policy'
+ end
+ end
+end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index cce84c4f357..d098369e124 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -28,7 +28,7 @@ describe ProjectPolicy do
download_code fork_project create_snippet update_issue
admin_issue admin_label admin_list read_commit_status read_build
read_container_image read_pipeline read_environment read_deployment
- read_merge_request download_wiki_code read_sentry_issue
+ read_merge_request download_wiki_code read_sentry_issue read_metrics_dashboard_annotation
]
end
@@ -43,6 +43,7 @@ describe ProjectPolicy do
update_pipeline create_merge_request_from create_wiki push_code
resolve_note create_container_image update_container_image destroy_container_image
create_environment update_environment create_deployment update_deployment create_release update_release
+ create_metrics_dashboard_annotation delete_metrics_dashboard_annotation update_metrics_dashboard_annotation
]
end
diff --git a/spec/services/metrics/dashboard/annotations/create_service_spec.rb b/spec/services/metrics/dashboard/annotations/create_service_spec.rb
new file mode 100644
index 00000000000..7dabca3c860
--- /dev/null
+++ b/spec/services/metrics/dashboard/annotations/create_service_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboard::Annotations::CreateService do
+ let_it_be(:user) { create(:user) }
+ let(:description) { 'test annotation' }
+ let(:dashboard_path) { 'config/prometheus/common_metrics.yml' }
+ let(:starting_at) { 15.minutes.ago }
+ let(:ending_at) { nil }
+ let(:service_instance) { described_class.new(user, annotation_params) }
+ let(:annotation_params) do
+ {
+ environment: environment,
+ cluster: cluster,
+ description: description,
+ dashboard_path: dashboard_path,
+ starting_at: starting_at,
+ ending_at: ending_at
+ }
+ end
+
+ shared_examples 'executed annotation creation' do
+ it 'returns success response', :aggregate_failures do
+ annotation = instance_double(::Metrics::Dashboard::Annotation)
+ allow(::Metrics::Dashboard::Annotation).to receive(:new).and_return(annotation)
+ allow(annotation).to receive(:save).and_return(true)
+
+ response = service_instance.execute
+
+ expect(response[:status]).to be :success
+ expect(response[:annotation]).to be annotation
+ end
+
+ it 'creates annotation', :aggregate_failures do
+ annotation = instance_double(::Metrics::Dashboard::Annotation)
+
+ expect(::Metrics::Dashboard::Annotation)
+ .to receive(:new).with(annotation_params).and_return(annotation)
+ expect(annotation).to receive(:save).and_return(true)
+
+ service_instance.execute
+ end
+ end
+
+ shared_examples 'prevented annotation creation' do |message|
+ it 'returns error response', :aggregate_failures do
+ response = service_instance.execute
+
+ expect(response[:status]).to be :error
+ expect(response[:message]).to eql message
+ end
+
+ it 'does not change db state' do
+ expect(::Metrics::Dashboard::Annotation).not_to receive(:new)
+
+ service_instance.execute
+ end
+ end
+
+ shared_examples 'annotation creation failure' do
+ it 'returns error response', :aggregate_failures do
+ annotation = instance_double(::Metrics::Dashboard::Annotation)
+
+ expect(annotation).to receive(:errors).and_return('Model validation error')
+ expect(::Metrics::Dashboard::Annotation)
+ .to receive(:new).with(annotation_params).and_return(annotation)
+ expect(annotation).to receive(:save).and_return(false)
+
+ response = service_instance.execute
+
+ expect(response[:status]).to be :error
+ expect(response[:message]).to eql 'Model validation error'
+ end
+ end
+
+ describe '.execute' do
+ context 'with environment' do
+ let(:environment) { create(:environment) }
+ let(:cluster) { nil }
+
+ context 'with anonymous user' do
+ it_behaves_like 'prevented annotation creation', 'You are not authorized to create annotation for selected environment'
+ end
+
+ context 'with maintainer user' do
+ before do
+ environment.project.add_maintainer(user)
+ end
+
+ it_behaves_like 'executed annotation creation'
+ end
+ end
+
+ context 'with cluster' do
+ let(:environment) { nil }
+
+ context 'with anonymous user' do
+ let(:cluster) { create(:cluster, :project) }
+
+ it_behaves_like 'prevented annotation creation', 'You are not authorized to create annotation for selected cluster'
+ end
+
+ context 'with maintainer user' do
+ let(:cluster) { create(:cluster, :project) }
+
+ before do
+ cluster.project.add_maintainer(user)
+ end
+
+ it_behaves_like 'executed annotation creation'
+ end
+
+ context 'with owner user' do
+ let(:cluster) { create(:cluster, :group) }
+
+ before do
+ cluster.group.add_owner(user)
+ end
+
+ it_behaves_like 'executed annotation creation'
+ end
+ end
+
+    context 'when neither cluster nor environment is supplied' do
+ let(:environment) { nil }
+ let(:cluster) { nil }
+
+ it_behaves_like 'annotation creation failure'
+ end
+
+ context 'missing dashboard_path' do
+ let(:cluster) { create(:cluster, :project) }
+ let(:environment) { nil }
+ let(:dashboard_path) { nil }
+
+ context 'with maintainer user' do
+ before do
+ cluster.project.add_maintainer(user)
+ end
+
+ it_behaves_like 'annotation creation failure'
+ end
+ end
+
+ context 'incorrect dashboard_path' do
+ let(:cluster) { create(:cluster, :project) }
+ let(:environment) { nil }
+ let(:dashboard_path) { 'something_incorrect.yml' }
+
+ context 'with maintainer user' do
+ before do
+ cluster.project.add_maintainer(user)
+ end
+
+ it_behaves_like 'prevented annotation creation', 'Dashboard with requested path can not be found'
+ end
+ end
+ end
+end
diff --git a/spec/services/metrics/dashboard/annotations/delete_service_spec.rb b/spec/services/metrics/dashboard/annotations/delete_service_spec.rb
new file mode 100644
index 00000000000..95825db6902
--- /dev/null
+++ b/spec/services/metrics/dashboard/annotations/delete_service_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboard::Annotations::DeleteService do
+ let(:user) { create(:user) }
+ let(:service_instance) { described_class.new(user, annotation) }
+
+ shared_examples 'executed annotation deletion' do
+ it 'returns success response', :aggregate_failures do
+ expect(annotation).to receive(:destroy).and_return(true)
+
+ response = service_instance.execute
+
+ expect(response[:status]).to be :success
+ end
+ end
+
+ shared_examples 'prevented annotation deletion' do |message|
+ it 'returns error response', :aggregate_failures do
+ response = service_instance.execute
+
+ expect(response[:status]).to be :error
+ expect(response[:message]).to eql message
+ end
+
+ it 'does not change db state' do
+ expect(annotation).not_to receive(:destroy)
+
+ service_instance.execute
+ end
+ end
+
+ describe '.execute' do
+ context 'with specific environment' do
+ let(:annotation) { create(:metrics_dashboard_annotation, environment: environment) }
+ let(:environment) { create(:environment) }
+
+ context 'with anonymous user' do
+ it_behaves_like 'prevented annotation deletion', 'You are not authorized to delete this annotation'
+ end
+
+ context 'with maintainer user' do
+ before do
+ environment.project.add_maintainer(user)
+ end
+
+ it_behaves_like 'executed annotation deletion'
+
+ context 'annotation failed to delete' do
+ it 'returns error response', :aggregate_failures do
+ allow(annotation).to receive(:destroy).and_return(false)
+
+ response = service_instance.execute
+
+ expect(response[:status]).to be :error
+ expect(response[:message]).to eql 'Annotation has not been deleted'
+ end
+ end
+ end
+ end
+
+ context 'with specific cluster' do
+ let(:annotation) { create(:metrics_dashboard_annotation, cluster: cluster, environment: nil) }
+
+ context 'with anonymous user' do
+ let(:cluster) { create(:cluster, :project) }
+
+ it_behaves_like 'prevented annotation deletion', 'You are not authorized to delete this annotation'
+ end
+
+ context 'with maintainer user' do
+ let(:cluster) { create(:cluster, :project) }
+
+ before do
+ cluster.project.add_maintainer(user)
+ end
+
+ it_behaves_like 'executed annotation deletion'
+ end
+
+ context 'with owner user' do
+ let(:cluster) { create(:cluster, :group) }
+
+ before do
+ cluster.group.add_owner(user)
+ end
+
+ it_behaves_like 'executed annotation deletion'
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 3a306f80b3c..4f81a71f586 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -18,8 +18,8 @@ RSpec.shared_context 'GroupPolicy context' do
]
end
let(:read_group_permissions) { %i[read_label read_list read_milestone read_board] }
- let(:reporter_permissions) { %i[admin_label read_container_image] }
- let(:developer_permissions) { [:admin_milestone] }
+ let(:reporter_permissions) { %i[admin_label read_container_image read_metrics_dashboard_annotation] }
+ let(:developer_permissions) { %i[admin_milestone create_metrics_dashboard_annotation delete_metrics_dashboard_annotation update_metrics_dashboard_annotation] }
let(:maintainer_permissions) do
%i[
create_projects