author		GitLab Bot <gitlab-bot@gitlab.com>	2020-02-07 12:09:13 +0000
committer	GitLab Bot <gitlab-bot@gitlab.com>	2020-02-07 12:09:13 +0000
commit		211a8c3361ccf4eb92f36edbdcf15c98fcdcc8b7 (patch)
tree		0ad37172721a39b0d57240bb1b4e70f200a0d93e
parent		456a7247f9e88fc2518b69a1a00e905c6db6d775 (diff)
download	gitlab-ce-211a8c3361ccf4eb92f36edbdcf15c98fcdcc8b7.tar.gz
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  .gitlab/ci/rails.gitlab-ci.yml  14
-rw-r--r--  app/assets/javascripts/monitoring/components/charts/time_series.vue  51
-rw-r--r--  app/assets/javascripts/monitoring/components/dashboard.vue  16
-rw-r--r--  app/assets/javascripts/monitoring/components/embed.vue  15
-rw-r--r--  app/assets/javascripts/monitoring/components/panel_type.vue  33
-rw-r--r--  app/assets/javascripts/monitoring/stores/actions.js  22
-rw-r--r--  app/assets/javascripts/monitoring/stores/mutation_types.js  2
-rw-r--r--  app/assets/javascripts/monitoring/stores/mutations.js  4
-rw-r--r--  app/assets/javascripts/monitoring/stores/state.js  15
-rw-r--r--  app/assets/javascripts/monitoring/utils.js  5
-rw-r--r--  app/assets/stylesheets/framework/typography.scss  2
-rw-r--r--  app/controllers/admin/services_controller.rb  12
-rw-r--r--  app/controllers/concerns/lfs_request.rb  4
-rw-r--r--  app/controllers/repositories/lfs_storage_controller.rb  11
-rw-r--r--  app/models/concerns/sortable.rb  14
-rw-r--r--  app/models/project.rb  55
-rw-r--r--  app/models/project_services/issue_tracker_service.rb  2
-rw-r--r--  app/models/project_services/prometheus_service.rb  2
-rw-r--r--  app/models/service.rb  20
-rw-r--r--  app/presenters/project_presenter.rb  2
-rw-r--r--  app/services/merge_requests/create_service.rb  5
-rw-r--r--  app/services/merge_requests/link_lfs_objects_service.rb  31
-rw-r--r--  app/services/merge_requests/refresh_service.rb  29
-rw-r--r--  app/services/projects/create_service.rb  8
-rw-r--r--  app/services/projects/fork_service.rb  12
-rw-r--r--  app/services/projects/lfs_pointers/lfs_download_service.rb  6
-rw-r--r--  app/services/projects/move_lfs_objects_projects_service.rb  2
-rw-r--r--  app/services/projects/propagate_instance_level_service.rb (renamed from app/services/projects/propagate_service_template.rb)  28
-rw-r--r--  app/services/projects/unlink_fork_service.rb  4
-rw-r--r--  app/views/projects/services/mattermost_slash_commands/_help.html.haml  4
-rw-r--r--  app/views/projects/services/slack_slash_commands/_help.html.haml  2
-rw-r--r--  app/workers/all_queues.yml  2
-rw-r--r--  app/workers/propagate_instance_level_service_worker.rb  26
-rw-r--r--  app/workers/propagate_service_template_worker.rb  26
-rw-r--r--  app/workers/repository_fork_worker.rb  26
-rw-r--r--  changelogs/unreleased/20042-create-lfs-objects-project.yml  5
-rw-r--r--  changelogs/unreleased/202101-make-project-buttons-fit-in-1-row.yml  5
-rw-r--r--  changelogs/unreleased/24108-fix-sast-multiline.yml  5
-rw-r--r--  changelogs/unreleased/27142-divider-in-readme-is-overlaying-with-image.yml  5
-rw-r--r--  changelogs/unreleased/37335-conan-name-validation.yml  5
-rw-r--r--  changelogs/unreleased/ab-projects-without-feature.yml  5
-rw-r--r--  changelogs/unreleased/rename_services_template_to_instance.yml  5
-rw-r--r--  config/sidekiq_queues.yml  2
-rw-r--r--  db/migrate/20200123040535_add_multi_column_index_on_lfs_objects_projects.rb  17
-rw-r--r--  db/migrate/20200123045415_remove_project_id_index_on_lfs_objects_projects.rb  17
-rw-r--r--  db/migrate/20200123092602_rename_services_template_to_instance.rb  17
-rw-r--r--  db/migrate/20200129234037_replace_conan_metadata_index.rb  25
-rw-r--r--  db/post_migrate/20191021101942_remove_empty_github_service_templates.rb  4
-rw-r--r--  db/post_migrate/20200123101859_cleanup_rename_services_template_to_instance.rb  17
-rw-r--r--  db/post_migrate/20200127111840_fix_projects_without_project_feature.rb  22
-rw-r--r--  db/post_migrate/20200206111847_migrate_propagate_service_template_sidekiq_queue.rb  15
-rw-r--r--  db/schema.rb  10
-rw-r--r--  doc/api/projects.md  5
-rw-r--r--  doc/development/integrations/secure.md.md  465
-rw-r--r--  doc/user/application_security/dependency_scanning/index.md  2
-rw-r--r--  lib/api/services.rb  2
-rw-r--r--  lib/gitlab/background_migration/fix_projects_without_project_feature.rb  72
-rw-r--r--  lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml  7
-rw-r--r--  lib/gitlab/graphql/connections/keyset/conditions/base_condition.rb  25
-rw-r--r--  lib/gitlab/graphql/connections/keyset/order_info.rb  21
-rw-r--r--  lib/gitlab/graphql/connections/keyset/query_builder.rb  11
-rw-r--r--  lib/gitlab/import_export/import_export.yml  2
-rw-r--r--  lib/gitlab/usage_data.rb  2
-rw-r--r--  locale/gitlab.pot  6
-rw-r--r--  qa/qa/specs/features/browser_ui/1_manage/login/log_into_mattermost_via_gitlab_spec.rb  2
-rw-r--r--  spec/controllers/admin/services_controller_spec.rb  12
-rw-r--r--  spec/controllers/concerns/lfs_request_spec.rb  18
-rw-r--r--  spec/controllers/dashboard/projects_controller_spec.rb  33
-rw-r--r--  spec/controllers/projects/services_controller_spec.rb  6
-rw-r--r--  spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb  6
-rw-r--r--  spec/fixtures/trace/sample_trace  2
-rw-r--r--  spec/frontend/monitoring/components/charts/time_series_spec.js  59
-rw-r--r--  spec/frontend/monitoring/components/dashboard_spec.js  40
-rw-r--r--  spec/frontend/monitoring/components/dashboard_url_time_spec.js  59
-rw-r--r--  spec/frontend/monitoring/embed/embed_spec.js  21
-rw-r--r--  spec/frontend/monitoring/init_utils.js  1
-rw-r--r--  spec/frontend/monitoring/panel_type_spec.js  146
-rw-r--r--  spec/frontend/monitoring/store/mutations_spec.js  10
-rw-r--r--  spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb  75
-rw-r--r--  spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb  44
-rw-r--r--  spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb  39
-rw-r--r--  spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb  14
-rw-r--r--  spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb  30
-rw-r--r--  spec/lib/gitlab/import_export/project_tree_restorer_spec.rb  4
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml  2
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb  2
-rw-r--r--  spec/migrations/fix_projects_without_project_feature_spec.rb  42
-rw-r--r--  spec/migrations/migrate_propagate_service_template_sidekiq_queue_spec.rb  29
-rw-r--r--  spec/models/concerns/sortable_spec.rb  15
-rw-r--r--  spec/models/project_spec.rb  71
-rw-r--r--  spec/models/service_spec.rb  52
-rw-r--r--  spec/presenters/project_presenter_spec.rb  4
-rw-r--r--  spec/requests/api/merge_requests_spec.rb  2
-rw-r--r--  spec/requests/lfs_http_spec.rb  4
-rw-r--r--  spec/services/merge_requests/create_service_spec.rb  8
-rw-r--r--  spec/services/merge_requests/link_lfs_objects_service_spec.rb  103
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb  8
-rw-r--r--  spec/services/projects/create_service_spec.rb  14
-rw-r--r--  spec/services/projects/fork_service_spec.rb  8
-rw-r--r--  spec/services/projects/lfs_pointers/lfs_download_service_spec.rb  3
-rw-r--r--  spec/services/projects/propagate_instance_level_service_spec.rb (renamed from spec/services/projects/propagate_service_template_spec.rb)  36
-rw-r--r--  spec/workers/propagate_instance_level_service_worker_spec.rb  31
-rw-r--r--  spec/workers/propagate_service_template_worker_spec.rb  31
-rw-r--r--  spec/workers/repository_fork_worker_spec.rb  24
104 files changed, 1974 insertions, 449 deletions
diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml
index 3fe8411ccad..03685d1f6a8 100644
--- a/.gitlab/ci/rails.gitlab-ci.yml
+++ b/.gitlab/ci/rails.gitlab-ci.yml
@@ -102,13 +102,13 @@ rspec migration pg9:
extends:
- .rspec-base-pg9
- .rspec-base-migration
- parallel: 4
+ parallel: 5
rspec migration pg9-foss:
extends:
- .rspec-base-pg9-foss
- .rspec-base-migration
- parallel: 4
+ parallel: 5
rspec unit pg9:
extends: .rspec-base-pg9
@@ -120,11 +120,11 @@ rspec unit pg9-foss:
rspec integration pg9:
extends: .rspec-base-pg9
- parallel: 6
+ parallel: 8
rspec integration pg9-foss:
extends: .rspec-base-pg9-foss
- parallel: 6
+ parallel: 8
rspec system pg9:
extends: .rspec-base-pg9
@@ -140,7 +140,7 @@ rspec unit pg10:
rspec integration pg10:
extends: .rspec-base-pg10
- parallel: 6
+ parallel: 8
rspec system pg10:
extends: .rspec-base-pg10
@@ -170,11 +170,11 @@ rspec-ee unit pg9:
rspec-ee integration pg9:
extends: .rspec-ee-base-pg9
- parallel: 3
+ parallel: 4
rspec-ee system pg9:
extends: .rspec-ee-base-pg9
- parallel: 5
+ parallel: 6
rspec-ee migration pg10:
extends:
diff --git a/app/assets/javascripts/monitoring/components/charts/time_series.vue b/app/assets/javascripts/monitoring/components/charts/time_series.vue
index e58d9f23201..8b2c5e44bb5 100644
--- a/app/assets/javascripts/monitoring/components/charts/time_series.vue
+++ b/app/assets/javascripts/monitoring/components/charts/time_series.vue
@@ -1,5 +1,5 @@
<script>
-import { omit } from 'lodash';
+import { omit, throttle } from 'lodash';
import { GlLink, GlButton, GlTooltip, GlResizeObserverDirective } from '@gitlab/ui';
import { GlAreaChart, GlLineChart, GlChartSeriesLabel } from '@gitlab/ui/dist/charts';
import dateFormat from 'dateformat';
@@ -18,6 +18,13 @@ import {
import { makeDataSeries } from '~/helpers/monitor_helper';
import { graphDataValidatorForValues } from '../../utils';
+const THROTTLED_DATAZOOM_WAIT = 1000; // milliseconds
+const timestampToISODate = timestamp => new Date(timestamp).toISOString();
+
+const events = {
+ datazoom: 'datazoom',
+};
+
export default {
components: {
GlAreaChart,
@@ -98,6 +105,7 @@ export default {
height: chartHeight,
svgs: {},
primaryColor: null,
+ throttledDatazoom: null,
};
},
computed: {
@@ -245,6 +253,11 @@ export default {
this.setSvg('rocket');
this.setSvg('scroll-handle');
},
+ destroyed() {
+ if (this.throttledDatazoom) {
+ this.throttledDatazoom.cancel();
+ }
+ },
methods: {
formatLegendLabel(query) {
return `${query.label}`;
@@ -287,8 +300,39 @@ export default {
console.error('SVG could not be rendered correctly: ', e);
});
},
- onChartUpdated(chart) {
- [this.primaryColor] = chart.getOption().color;
+ onChartUpdated(eChart) {
+ [this.primaryColor] = eChart.getOption().color;
+ },
+
+ onChartCreated(eChart) {
+ // Emit a datazoom event that corresponds to the eChart
+ // `datazoom` event.
+
+ if (this.throttledDatazoom) {
+ // Chart can be created multiple times in this component's
+ // lifetime, remove previous handlers every time
+ // chart is created.
+ this.throttledDatazoom.cancel();
+ }
+
+ // Emitting is throttled to avoid flurries of calls when
+ // the user changes or scrolls the zoom bar.
+ this.throttledDatazoom = throttle(
+ () => {
+ const { startValue, endValue } = eChart.getOption().dataZoom[0];
+ this.$emit(events.datazoom, {
+ start: timestampToISODate(startValue),
+ end: timestampToISODate(endValue),
+ });
+ },
+ THROTTLED_DATAZOOM_WAIT,
+ {
+ leading: false,
+ },
+ );
+
+ eChart.off('datazoom');
+ eChart.on('datazoom', this.throttledDatazoom);
},
onResize() {
if (!this.$refs.chart) return;
@@ -331,6 +375,7 @@ export default {
:height="height"
:average-text="legendAverageText"
:max-text="legendMaxText"
+ @created="onChartCreated"
@updated="onChartUpdated"
>
<template v-if="tooltip.isDeployment">
diff --git a/app/assets/javascripts/monitoring/components/dashboard.vue b/app/assets/javascripts/monitoring/components/dashboard.vue
index 391cd6dd15e..79f32b357fc 100644
--- a/app/assets/javascripts/monitoring/components/dashboard.vue
+++ b/app/assets/javascripts/monitoring/components/dashboard.vue
@@ -21,7 +21,6 @@ import createFlash from '~/flash';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { mergeUrlParams, redirectTo } from '~/lib/utils/url_utility';
import invalidUrl from '~/lib/utils/invalid_url';
-import { convertToFixedRange } from '~/lib/utils/datetime_range';
import Icon from '~/vue_shared/components/icon.vue';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
@@ -102,6 +101,11 @@ export default {
type: String,
required: true,
},
+ logsPath: {
+ type: String,
+ required: false,
+ default: invalidUrl,
+ },
defaultBranch: {
type: String,
required: true,
@@ -247,22 +251,20 @@ export default {
dashboardsEndpoint: this.dashboardsEndpoint,
currentDashboard: this.currentDashboard,
projectPath: this.projectPath,
+ logsPath: this.logsPath,
});
},
mounted() {
if (!this.hasMetrics) {
this.setGettingStartedEmptyState();
} else {
- const { start, end } = convertToFixedRange(this.selectedTimeRange);
-
- this.fetchData({
- start,
- end,
- });
+ this.setTimeRange(this.selectedTimeRange);
+ this.fetchData();
}
},
methods: {
...mapActions('monitoringDashboard', [
+ 'setTimeRange',
'fetchData',
'setGettingStartedEmptyState',
'setEndpoints',
diff --git a/app/assets/javascripts/monitoring/components/embed.vue b/app/assets/javascripts/monitoring/components/embed.vue
index e55de1c0105..49188a7af8f 100644
--- a/app/assets/javascripts/monitoring/components/embed.vue
+++ b/app/assets/javascripts/monitoring/components/embed.vue
@@ -19,14 +19,8 @@ export default {
},
data() {
const timeRange = timeRangeFromUrl(this.dashboardUrl) || defaultTimeRange;
- const { start, end } = convertToFixedRange(timeRange);
- const params = {
- start,
- end,
- };
-
return {
- params,
+ timeRange: convertToFixedRange(timeRange),
elWidth: 0,
};
},
@@ -49,7 +43,9 @@ export default {
},
mounted() {
this.setInitialState();
- this.fetchMetricsData(this.params);
+ this.setTimeRange(this.timeRange);
+ this.fetchDashboard();
+
sidebarMutationObserver = new MutationObserver(this.onSidebarMutation);
sidebarMutationObserver.observe(document.querySelector('.layout-page'), {
attributes: true,
@@ -64,7 +60,8 @@ export default {
},
methods: {
...mapActions('monitoringDashboard', [
- 'fetchMetricsData',
+ 'setTimeRange',
+ 'fetchDashboard',
'setEndpoints',
'setFeatureFlags',
'setShowErrorBanner',
diff --git a/app/assets/javascripts/monitoring/components/panel_type.vue b/app/assets/javascripts/monitoring/components/panel_type.vue
index 6751f3d31e8..0258a62b390 100644
--- a/app/assets/javascripts/monitoring/components/panel_type.vue
+++ b/app/assets/javascripts/monitoring/components/panel_type.vue
@@ -1,6 +1,7 @@
<script>
import { mapState } from 'vuex';
import { pickBy } from 'lodash';
+import invalidUrl from '~/lib/utils/invalid_url';
import {
GlDropdown,
GlDropdownItem,
@@ -18,7 +19,7 @@ import MonitorColumnChart from './charts/column.vue';
import MonitorStackedColumnChart from './charts/stacked_column.vue';
import MonitorEmptyChart from './charts/empty_chart.vue';
import TrackEventDirective from '~/vue_shared/directives/track_event';
-import { downloadCSVOptions, generateLinkToChartOptions } from '../utils';
+import { timeRangeToUrl, downloadCSVOptions, generateLinkToChartOptions } from '../utils';
export default {
components: {
@@ -58,8 +59,13 @@ export default {
default: 'panel-type-chart',
},
},
+ data() {
+ return {
+ zoomedTimeRange: null,
+ };
+ },
computed: {
- ...mapState('monitoringDashboard', ['deploymentData', 'projectPath']),
+ ...mapState('monitoringDashboard', ['deploymentData', 'projectPath', 'logsPath', 'timeRange']),
alertWidgetAvailable() {
return IS_EE && this.prometheusAlertsAvailable && this.alertsEndpoint && this.graphData;
},
@@ -70,6 +76,14 @@ export default {
this.graphData.metrics[0].result.length > 0
);
},
+ logsPathWithTimeRange() {
+ const timeRange = this.zoomedTimeRange || this.timeRange;
+
+ if (this.logsPath && this.logsPath !== invalidUrl && timeRange) {
+ return timeRangeToUrl(timeRange, this.logsPath);
+ }
+ return null;
+ },
csvText() {
const chartData = this.graphData.metrics[0].result[0].values;
const yLabel = this.graphData.y_label;
@@ -107,6 +121,10 @@ export default {
},
downloadCSVOptions,
generateLinkToChartOptions,
+
+ onDatazoom({ start, end }) {
+ this.zoomedTimeRange = { start, end };
+ },
},
};
</script>
@@ -130,11 +148,13 @@ export default {
<component
:is="monitorChartComponent"
v-else-if="graphDataHasMetrics"
+ ref="timeChart"
:graph-data="graphData"
:deployment-data="deploymentData"
:project-path="projectPath"
:thresholds="getGraphAlertValues(graphData.metrics)"
:group-id="groupId"
+ @datazoom="onDatazoom"
>
<div class="d-flex align-items-center">
<alert-widget
@@ -157,6 +177,15 @@ export default {
<template slot="button-content">
<icon name="ellipsis_v" class="text-secondary" />
</template>
+
+ <gl-dropdown-item
+ v-if="logsPathWithTimeRange"
+ ref="viewLogsLink"
+ :href="logsPathWithTimeRange"
+ >
+ {{ s__('Metrics|View logs') }}
+ </gl-dropdown-item>
+
<gl-dropdown-item
v-track-event="downloadCSVOptions(graphData.title)"
:href="downloadCsv"
diff --git a/app/assets/javascripts/monitoring/stores/actions.js b/app/assets/javascripts/monitoring/stores/actions.js
index 29000475bd4..3a052200ab9 100644
--- a/app/assets/javascripts/monitoring/stores/actions.js
+++ b/app/assets/javascripts/monitoring/stores/actions.js
@@ -1,6 +1,7 @@
import * as types from './mutation_types';
import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash';
+import { convertToFixedRange } from '~/lib/utils/datetime_range';
import { gqClient, parseEnvironmentsResponse, removeLeadingSlash } from './utils';
import trackDashboardLoad from '../monitoring_tracking_helper';
import getEnvironments from '../queries/getEnvironments.query.graphql';
@@ -32,6 +33,10 @@ export const setEndpoints = ({ commit }, endpoints) => {
commit(types.SET_ENDPOINTS, endpoints);
};
+export const setTimeRange = ({ commit }, timeRange) => {
+ commit(types.SET_TIME_RANGE, timeRange);
+};
+
export const filterEnvironments = ({ commit, dispatch }, searchTerm) => {
commit(types.SET_ENVIRONMENTS_FILTER, searchTerm);
dispatch('fetchEnvironmentsData');
@@ -63,19 +68,24 @@ export const receiveEnvironmentsDataSuccess = ({ commit }, data) =>
export const receiveEnvironmentsDataFailure = ({ commit }) =>
commit(types.RECEIVE_ENVIRONMENTS_DATA_FAILURE);
-export const fetchData = ({ dispatch }, params) => {
- dispatch('fetchMetricsData', params);
+export const fetchData = ({ dispatch }) => {
+ dispatch('fetchDashboard');
dispatch('fetchDeploymentsData');
dispatch('fetchEnvironmentsData');
};
-export const fetchMetricsData = ({ dispatch }, params) => dispatch('fetchDashboard', params);
-
-export const fetchDashboard = ({ state, dispatch }, params) => {
+export const fetchDashboard = ({ state, dispatch }) => {
dispatch('requestMetricsDashboard');
+ const params = {};
+
+ if (state.timeRange) {
+ const { start, end } = convertToFixedRange(state.timeRange);
+ params.start = start;
+ params.end = end;
+ }
+
if (state.currentDashboard) {
- // eslint-disable-next-line no-param-reassign
params.dashboard = state.currentDashboard;
}
diff --git a/app/assets/javascripts/monitoring/stores/mutation_types.js b/app/assets/javascripts/monitoring/stores/mutation_types.js
index bdfaf42b35c..8873142accc 100644
--- a/app/assets/javascripts/monitoring/stores/mutation_types.js
+++ b/app/assets/javascripts/monitoring/stores/mutation_types.js
@@ -14,7 +14,7 @@ export const REQUEST_METRIC_RESULT = 'REQUEST_METRIC_RESULT';
export const RECEIVE_METRIC_RESULT_SUCCESS = 'RECEIVE_METRIC_RESULT_SUCCESS';
export const RECEIVE_METRIC_RESULT_FAILURE = 'RECEIVE_METRIC_RESULT_FAILURE';
-export const SET_TIME_WINDOW = 'SET_TIME_WINDOW';
+export const SET_TIME_RANGE = 'SET_TIME_RANGE';
export const SET_ALL_DASHBOARDS = 'SET_ALL_DASHBOARDS';
export const SET_ENDPOINTS = 'SET_ENDPOINTS';
export const SET_GETTING_STARTED_EMPTY_STATE = 'SET_GETTING_STARTED_EMPTY_STATE';
diff --git a/app/assets/javascripts/monitoring/stores/mutations.js b/app/assets/javascripts/monitoring/stores/mutations.js
index 2a86a6a26d8..5f559290ff7 100644
--- a/app/assets/javascripts/monitoring/stores/mutations.js
+++ b/app/assets/javascripts/monitoring/stores/mutations.js
@@ -182,6 +182,10 @@ export default {
state.dashboardsEndpoint = endpoints.dashboardsEndpoint;
state.currentDashboard = endpoints.currentDashboard;
state.projectPath = endpoints.projectPath;
+ state.logsPath = endpoints.logsPath || state.logsPath;
+ },
+ [types.SET_TIME_RANGE](state, timeRange) {
+ state.timeRange = timeRange;
},
[types.SET_GETTING_STARTED_EMPTY_STATE](state) {
state.emptyState = 'gettingStarted';
diff --git a/app/assets/javascripts/monitoring/stores/state.js b/app/assets/javascripts/monitoring/stores/state.js
index 9d3227e8aae..a2050f8e893 100644
--- a/app/assets/javascripts/monitoring/stores/state.js
+++ b/app/assets/javascripts/monitoring/stores/state.js
@@ -1,22 +1,31 @@
import invalidUrl from '~/lib/utils/invalid_url';
export default () => ({
+ // API endpoints
metricsEndpoint: null,
deploymentsEndpoint: null,
dashboardEndpoint: invalidUrl,
+
+ // Dashboard request parameters
+ timeRange: null,
+ currentDashboard: null,
+
+ // Dashboard data
emptyState: 'gettingStarted',
showEmptyState: true,
showErrorBanner: true,
-
dashboard: {
panel_groups: [],
},
+ allDashboards: [],
+ // Other project data
deploymentData: [],
environments: [],
environmentsSearchTerm: '',
environmentsLoading: false,
- allDashboards: [],
- currentDashboard: null,
+
+ // GitLab paths to other pages
projectPath: null,
+ logsPath: invalidUrl,
});
diff --git a/app/assets/javascripts/monitoring/utils.js b/app/assets/javascripts/monitoring/utils.js
index 915812596c6..b2fa44835e6 100644
--- a/app/assets/javascripts/monitoring/utils.js
+++ b/app/assets/javascripts/monitoring/utils.js
@@ -103,8 +103,9 @@ export const graphDataValidatorForAnomalyValues = graphData => {
/**
* Returns a time range from the current URL params
*
- * @returns {Object} The time range defined by the
- * current URL, reading from `window.location.search`
+ * @returns {Object|null} The time range defined by the
+ * current URL, reading from search query or `window.location.search`.
+ * Returns `null` if no parameters form a time range.
*/
export const timeRangeFromUrl = (search = window.location.search) => {
const params = queryToObject(search);
diff --git a/app/assets/stylesheets/framework/typography.scss b/app/assets/stylesheets/framework/typography.scss
index a1bfa03a5ac..6cd69fe75ce 100644
--- a/app/assets/stylesheets/framework/typography.scss
+++ b/app/assets/stylesheets/framework/typography.scss
@@ -102,6 +102,7 @@
padding-bottom: 0.3em;
border-bottom: 1px solid $white-dark;
color: $gl-text-color;
+ overflow: hidden;
&:first-child {
margin-top: 0;
@@ -115,6 +116,7 @@
padding-bottom: 0.3em;
border-bottom: 1px solid $white-dark;
color: $gl-text-color;
+ overflow: hidden;
}
h3 {
diff --git a/app/controllers/admin/services_controller.rb b/app/controllers/admin/services_controller.rb
index e31e0e09978..9cea25cc7fe 100644
--- a/app/controllers/admin/services_controller.rb
+++ b/app/controllers/admin/services_controller.rb
@@ -7,7 +7,7 @@ class Admin::ServicesController < Admin::ApplicationController
before_action :service, only: [:edit, :update]
def index
- @services = services_templates
+ @services = instance_level_services
end
def edit
@@ -19,7 +19,7 @@ class Admin::ServicesController < Admin::ApplicationController
def update
if service.update(service_params[:service])
- PropagateServiceTemplateWorker.perform_async(service.id) if service.active?
+ PropagateInstanceLevelServiceWorker.perform_async(service.id) if service.active?
redirect_to admin_application_settings_services_path,
notice: 'Application settings saved successfully'
@@ -31,17 +31,17 @@ class Admin::ServicesController < Admin::ApplicationController
private
# rubocop: disable CodeReuse/ActiveRecord
- def services_templates
+ def instance_level_services
Service.available_services_names.map do |service_name|
- service_template = "#{service_name}_service".camelize.constantize
- service_template.where(template: true).first_or_create
+ service = "#{service_name}_service".camelize.constantize
+ service.where(instance: true).first_or_create
end
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def service
- @service ||= Service.where(id: params[:id], template: true).first
+ @service ||= Service.where(id: params[:id], instance: true).first
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/controllers/concerns/lfs_request.rb b/app/controllers/concerns/lfs_request.rb
index 61072eec535..3152d959ae4 100644
--- a/app/controllers/concerns/lfs_request.rb
+++ b/app/controllers/concerns/lfs_request.rb
@@ -112,10 +112,6 @@ module LfsRequest
has_authentication_ability?(:build_download_code) && can?(user, :build_download_code, project)
end
- def storage_project
- @storage_project ||= project.lfs_storage_project
- end
-
def objects
@objects ||= (params[:objects] || []).to_a
end
diff --git a/app/controllers/repositories/lfs_storage_controller.rb b/app/controllers/repositories/lfs_storage_controller.rb
index 58f496e16d3..ec5ca5bbeec 100644
--- a/app/controllers/repositories/lfs_storage_controller.rb
+++ b/app/controllers/repositories/lfs_storage_controller.rb
@@ -80,12 +80,13 @@ module Repositories
LfsObject.create!(oid: oid, size: size, file: uploaded_file)
end
- # rubocop: disable CodeReuse/ActiveRecord
def link_to_project!(object)
- if object && !object.projects.exists?(storage_project.id)
- object.lfs_objects_projects.create!(project: storage_project)
- end
+ return unless object
+
+ LfsObjectsProject.safe_find_or_create_by!(
+ project: project,
+ lfs_object: object
+ )
end
- # rubocop: enable CodeReuse/ActiveRecord
end
end
diff --git a/app/models/concerns/sortable.rb b/app/models/concerns/sortable.rb
index c4af1b1fab2..4fe2a0e1827 100644
--- a/app/models/concerns/sortable.rb
+++ b/app/models/concerns/sortable.rb
@@ -8,13 +8,13 @@ module Sortable
extend ActiveSupport::Concern
included do
- scope :with_order_id_desc, -> { order(id: :desc) }
- scope :order_id_desc, -> { reorder(id: :desc) }
- scope :order_id_asc, -> { reorder(id: :asc) }
- scope :order_created_desc, -> { reorder(created_at: :desc) }
- scope :order_created_asc, -> { reorder(created_at: :asc) }
- scope :order_updated_desc, -> { reorder(updated_at: :desc) }
- scope :order_updated_asc, -> { reorder(updated_at: :asc) }
+ scope :with_order_id_desc, -> { order(self.arel_table['id'].desc) }
+ scope :order_id_desc, -> { reorder(self.arel_table['id'].desc) }
+ scope :order_id_asc, -> { reorder(self.arel_table['id'].asc) }
+ scope :order_created_desc, -> { reorder(self.arel_table['created_at'].desc) }
+ scope :order_created_asc, -> { reorder(self.arel_table['created_at'].asc) }
+ scope :order_updated_desc, -> { reorder(self.arel_table['updated_at'].desc) }
+ scope :order_updated_asc, -> { reorder(self.arel_table['updated_at'].asc) }
scope :order_name_asc, -> { reorder(Arel::Nodes::Ascending.new(arel_table[:name].lower)) }
scope :order_name_desc, -> { reorder(Arel::Nodes::Descending.new(arel_table[:name].lower)) }
end
diff --git a/app/models/project.rb b/app/models/project.rb
index 816d964519d..717075161aa 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -358,6 +358,8 @@ class Project < ApplicationRecord
project_path: true,
length: { maximum: 255 }
+ validates :project_feature, presence: true
+
validates :namespace, presence: true
validates :name, uniqueness: { scope: :namespace_id }
validates :import_url, public_url: { schemes: ->(project) { project.persisted? ? VALID_MIRROR_PROTOCOLS : VALID_IMPORT_PROTOCOLS },
@@ -395,11 +397,11 @@ class Project < ApplicationRecord
# last_activity_at is throttled every minute, but last_repository_updated_at is updated with every push
scope :sorted_by_activity, -> { reorder(Arel.sql("GREATEST(COALESCE(last_activity_at, '1970-01-01'), COALESCE(last_repository_updated_at, '1970-01-01')) DESC")) }
- scope :sorted_by_stars_desc, -> { reorder(star_count: :desc) }
- scope :sorted_by_stars_asc, -> { reorder(star_count: :asc) }
+ scope :sorted_by_stars_desc, -> { reorder(self.arel_table['star_count'].desc) }
+ scope :sorted_by_stars_asc, -> { reorder(self.arel_table['star_count'].asc) }
scope :sorted_by_name_asc_limited, ->(limit) { reorder(name: :asc).limit(limit) }
# Sometimes queries (e.g. using CTEs) require explicit disambiguation with table name
- scope :projects_order_id_desc, -> { reorder("#{table_name}.id DESC") }
+ scope :projects_order_id_desc, -> { reorder(self.arel_table['id'].desc) }
scope :in_namespace, ->(namespace_ids) { where(namespace_id: namespace_ids) }
scope :personal, ->(user) { where(namespace_id: user.namespace_id) }
@@ -595,9 +597,9 @@ class Project < ApplicationRecord
# pass a string to avoid AR adding the table name
reorder('project_statistics.storage_size DESC, projects.id DESC')
when 'latest_activity_desc'
- reorder(last_activity_at: :desc)
+ reorder(self.arel_table['last_activity_at'].desc)
when 'latest_activity_asc'
- reorder(last_activity_at: :asc)
+ reorder(self.arel_table['last_activity_at'].asc)
when 'stars_desc'
sorted_by_stars_desc
when 'stars_asc'
@@ -1219,13 +1221,13 @@ class Project < ApplicationRecord
service = find_service(services, name)
return service if service
- # We should check if template for the service exists
- template = find_service(services_templates, name)
+ # We should check if an instance-level service exists
+ instance_level_service = find_service(instance_level_services, name)
- if template
- Service.build_from_template(id, template)
+ if instance_level_service
+ Service.build_from_instance(id, instance_level_service)
else
- # If no template, we should create an instance. Ex `build_gitlab_ci_service`
+ # If no instance-level service exists, we should create a new service. Ex `build_gitlab_ci_service`
public_send("build_#{name}_service") # rubocop:disable GitlabSecurity/PublicSend
end
end
@@ -1357,6 +1359,10 @@ class Project < ApplicationRecord
forked_from_project || fork_network&.root_project
end
+ # TODO: Remove this method once all LfsObjectsProject records are backfilled
+ # for forks.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/issues/122002 for more info.
def lfs_storage_project
@lfs_storage_project ||= begin
result = self
@@ -1369,14 +1375,27 @@ class Project < ApplicationRecord
end
end
- # This will return all `lfs_objects` that are accessible to the project.
- # So this might be `self.lfs_objects` if the project is not part of a fork
- # network, or it is the base of the fork network.
+ # This will return all `lfs_objects` that are accessible to the project and
+ # the fork source. This is needed since older forks won't have access to some
+ # LFS objects directly and have to get it from the fork source.
+ #
+ # TODO: Remove this method once all LfsObjectsProject records are backfilled
+ # for forks. At that point, projects can look at their own `lfs_objects`.
#
- # TODO: refactor this to get the correct lfs objects when implementing
- # https://gitlab.com/gitlab-org/gitlab-foss/issues/39769
+ # See https://gitlab.com/gitlab-org/gitlab/issues/122002 for more info.
def all_lfs_objects
- lfs_storage_project.lfs_objects
+ LfsObject
+ .distinct
+ .joins(:lfs_objects_projects)
+ .where(lfs_objects_projects: { project_id: [self, lfs_storage_project] })
+ end
+
+ # TODO: Call `#lfs_objects` instead once all LfsObjectsProject records are
+ # backfilled. At that point, projects can look at their own `lfs_objects`.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/issues/122002 for more info.
+ def lfs_objects_oids
+ all_lfs_objects.pluck(:oid)
end
def personal?
@@ -2438,8 +2457,8 @@ class Project < ApplicationRecord
end
end
- def services_templates
- @services_templates ||= Service.where(template: true)
+ def instance_level_services
+ @instance_level_services ||= Service.where(instance: true)
end
def ensure_pages_metadatum
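A small usage sketch of the new LFS helpers (hypothetical fork; the SQL in the comment is only indicative):

  # For an old fork whose LfsObjectsProject rows are not backfilled yet,
  # all_lfs_objects joins lfs_objects_projects for both the fork itself and
  # its lfs_storage_project (the fork source), so no objects are missed.
  oids = project.lfs_objects_oids
  # roughly: SELECT DISTINCT "lfs_objects"."oid" FROM "lfs_objects"
  #          INNER JOIN "lfs_objects_projects" ...
  #          WHERE "lfs_objects_projects"."project_id" IN (fork_id, source_id)
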
diff --git a/app/models/project_services/issue_tracker_service.rb b/app/models/project_services/issue_tracker_service.rb
index 9e1393196ff..2ccf8e094e6 100644
--- a/app/models/project_services/issue_tracker_service.rb
+++ b/app/models/project_services/issue_tracker_service.rb
@@ -164,7 +164,7 @@ class IssueTrackerService < Service
end
def one_issue_tracker
- return if template?
+ return if instance?
return if project.blank?
if project.services.external_issue_trackers.where.not(id: id).any?
diff --git a/app/models/project_services/prometheus_service.rb b/app/models/project_services/prometheus_service.rb
index 00b06ae2595..72dabe2578a 100644
--- a/app/models/project_services/prometheus_service.rb
+++ b/app/models/project_services/prometheus_service.rb
@@ -85,7 +85,7 @@ class PrometheusService < MonitoringService
end
def prometheus_available?
- return false if template?
+ return false if instance?
return false unless project
project.all_clusters.enabled.any? { |cluster| cluster.application_prometheus_available? }
diff --git a/app/models/service.rb b/app/models/service.rb
index 95b7c6927cf..6d0e375e757 100644
--- a/app/models/service.rb
+++ b/app/models/service.rb
@@ -32,7 +32,7 @@ class Service < ApplicationRecord
belongs_to :project, inverse_of: :services
has_one :service_hook
- validates :project_id, presence: true, unless: proc { |service| service.template? }
+ validates :project_id, presence: true, unless: proc { |service| service.instance? }
validates :type, presence: true
scope :visible, -> { where.not(type: 'GitlabIssueTrackerService') }
@@ -70,8 +70,8 @@ class Service < ApplicationRecord
true
end
- def template?
- template
+ def instance?
+ instance
end
def category
@@ -299,15 +299,15 @@ class Service < ApplicationRecord
service_names.sort_by(&:downcase)
end
- def self.build_from_template(project_id, template)
- service = template.dup
+ def self.build_from_instance(project_id, instance_level_service)
+ service = instance_level_service.dup
- if template.supports_data_fields?
- data_fields = template.data_fields.dup
+ if instance_level_service.supports_data_fields?
+ data_fields = instance_level_service.data_fields.dup
data_fields.service = service
end
- service.template = false
+ service.instance = false
service.project_id = project_id
service.active = false if service.active? && !service.valid?
service
@@ -321,10 +321,6 @@ class Service < ApplicationRecord
nil
end
- def self.find_by_template
- find_by(template: true)
- end
-
# override if needed
def supports_data_fields?
false
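A minimal sketch of the renamed builder in use (the concrete service class and project are assumptions for illustration):

  instance_level = JiraService.find_by(instance: true)
  service = Service.build_from_instance(project.id, instance_level)
  # The copy keeps the instance-level settings but gets `instance = false`,
  # the target project_id, and `active` forced to false if the copy is invalid.
  service.save!
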
diff --git a/app/presenters/project_presenter.rb b/app/presenters/project_presenter.rb
index 42c707908e6..3af6be26843 100644
--- a/app/presenters/project_presenter.rb
+++ b/app/presenters/project_presenter.rb
@@ -302,7 +302,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
cluster_link = clusters.count == 1 ? project_cluster_path(project, clusters.first) : project_clusters_path(project)
AnchorData.new(false,
- _('Kubernetes configured'),
+ _('Kubernetes'),
cluster_link,
'default')
end
diff --git a/app/services/merge_requests/create_service.rb b/app/services/merge_requests/create_service.rb
index 9a37a0330fc..4a05d1fd7ef 100644
--- a/app/services/merge_requests/create_service.rb
+++ b/app/services/merge_requests/create_service.rb
@@ -27,6 +27,7 @@ module MergeRequests
create_pipeline_for(issuable, current_user)
issuable.update_head_pipeline
Gitlab::UsageDataCounters::MergeRequestCounter.count(:create)
+ link_lfs_objects(issuable)
super
end
@@ -64,6 +65,10 @@ module MergeRequests
raise Gitlab::Access::AccessDeniedError
end
end
+
+ def link_lfs_objects(issuable)
+ LinkLfsObjectsService.new(issuable.target_project).execute(issuable)
+ end
end
end
diff --git a/app/services/merge_requests/link_lfs_objects_service.rb b/app/services/merge_requests/link_lfs_objects_service.rb
new file mode 100644
index 00000000000..191da594095
--- /dev/null
+++ b/app/services/merge_requests/link_lfs_objects_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module MergeRequests
+ class LinkLfsObjectsService < ::BaseService
+ def execute(merge_request, oldrev: merge_request.diff_base_sha, newrev: merge_request.diff_head_sha)
+ return if merge_request.source_project == project
+ return if no_changes?(oldrev, newrev)
+
+ new_lfs_oids = lfs_oids(merge_request.source_project.repository, oldrev, newrev)
+
+ return if new_lfs_oids.empty?
+
+ Projects::LfsPointers::LfsLinkService
+ .new(project)
+ .execute(new_lfs_oids)
+ end
+
+ private
+
+ def no_changes?(oldrev, newrev)
+ oldrev == newrev
+ end
+
+ def lfs_oids(source_repository, oldrev, newrev)
+ Gitlab::Git::LfsChanges
+ .new(source_repository, newrev)
+ .new_pointers(not_in: [oldrev])
+ .map(&:lfs_oid)
+ end
+ end
+end
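Two invocation sketches matching the callers added elsewhere in this commit (`merge_request` and `push` are illustrative):

  # On merge request creation, the default revision range is the MR's own
  # diff_base_sha..diff_head_sha:
  MergeRequests::LinkLfsObjectsService
    .new(merge_request.target_project)
    .execute(merge_request)

  # On a push to a fork's source branch, RefreshService passes the pushed
  # range explicitly:
  MergeRequests::LinkLfsObjectsService
    .new(merge_request.target_project)
    .execute(merge_request, oldrev: push.oldrev, newrev: push.newrev)
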
diff --git a/app/services/merge_requests/refresh_service.rb b/app/services/merge_requests/refresh_service.rb
index 396ddec6383..c6e1651fa26 100644
--- a/app/services/merge_requests/refresh_service.rb
+++ b/app/services/merge_requests/refresh_service.rb
@@ -21,6 +21,7 @@ module MergeRequests
# empty diff during a manual merge
close_upon_missing_source_branch_ref
post_merge_manually_merged
+ link_forks_lfs_objects
reload_merge_requests
outdate_suggestions
refresh_pipelines_on_merge_requests
@@ -91,17 +92,25 @@ module MergeRequests
end
# rubocop: enable CodeReuse/ActiveRecord
+ # Link LFS objects that exist in forks but do not exist in the merge
+ # request's target project
+ def link_forks_lfs_objects
+ return unless @push.branch_updated?
+
+ merge_requests_for_forks.find_each do |mr|
+ LinkLfsObjectsService
+ .new(mr.target_project)
+ .execute(mr, oldrev: @push.oldrev, newrev: @push.newrev)
+ end
+ end
+
# Refresh merge request diff if we push to source or target branch of merge request
# Note: we should update merge requests from forks too
- # rubocop: disable CodeReuse/ActiveRecord
def reload_merge_requests
merge_requests = @project.merge_requests.opened
.by_source_or_target_branch(@push.branch_name).to_a
- # Fork merge requests
- merge_requests += MergeRequest.opened
- .where(source_branch: @push.branch_name, source_project: @project)
- .where.not(target_project: @project).to_a
+ merge_requests += merge_requests_for_forks.to_a
filter_merge_requests(merge_requests).each do |merge_request|
if branch_and_project_match?(merge_request) || @push.force_push?
@@ -117,7 +126,6 @@ module MergeRequests
# @source_merge_requests diffs (for MergeRequest#commit_shas for instance).
merge_requests_for_source_branch(reload: true)
end
- # rubocop: enable CodeReuse/ActiveRecord
def push_commit_ids
@push_commit_ids ||= @commits.map(&:id)
@@ -282,6 +290,15 @@ module MergeRequests
@source_merge_requests = nil if reload
@source_merge_requests ||= merge_requests_for(@push.branch_name)
end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def merge_requests_for_forks
+ @merge_requests_for_forks ||=
+ MergeRequest.opened
+ .where(source_branch: @push.branch_name, source_project: @project)
+ .where.not(target_project: @project)
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
end
end
diff --git a/app/services/projects/create_service.rb b/app/services/projects/create_service.rb
index ef06545b27d..f3666f100a3 100644
--- a/app/services/projects/create_service.rb
+++ b/app/services/projects/create_service.rb
@@ -134,7 +134,7 @@ module Projects
if @project.save
unless @project.gitlab_project_import?
- create_services_from_active_templates(@project)
+ create_services_from_active_instance_level_services(@project)
@project.create_labels
end
@@ -160,9 +160,9 @@ module Projects
end
# rubocop: disable CodeReuse/ActiveRecord
- def create_services_from_active_templates(project)
- Service.where(template: true, active: true).each do |template|
- service = Service.build_from_template(project.id, template)
+ def create_services_from_active_instance_level_services(project)
+ Service.where(instance: true, active: true).each do |template|
+ service = Service.build_from_instance(project.id, template)
service.save!
end
end
diff --git a/app/services/projects/fork_service.rb b/app/services/projects/fork_service.rb
index e66a0ed181a..fcfea567885 100644
--- a/app/services/projects/fork_service.rb
+++ b/app/services/projects/fork_service.rb
@@ -26,17 +26,7 @@ module Projects
build_fork_network_member(fork_to_project)
- if link_fork_network(fork_to_project)
- # A forked project stores its LFS objects in the `forked_from_project`.
- # So the LFS objects become inaccessible, and therefore delete them from
- # the database so they'll get cleaned up.
- #
- # TODO: refactor this to get the correct lfs objects when implementing
- # https://gitlab.com/gitlab-org/gitlab-foss/issues/39769
- fork_to_project.lfs_objects_projects.delete_all
-
- fork_to_project
- end
+ fork_to_project if link_fork_network(fork_to_project)
end
def fork_new_project
diff --git a/app/services/projects/lfs_pointers/lfs_download_service.rb b/app/services/projects/lfs_pointers/lfs_download_service.rb
index a009f479d5d..bd70012c76c 100644
--- a/app/services/projects/lfs_pointers/lfs_download_service.rb
+++ b/app/services/projects/lfs_pointers/lfs_download_service.rb
@@ -39,9 +39,9 @@ module Projects
def download_lfs_file!
with_tmp_file do |tmp_file|
download_and_save_file!(tmp_file)
- project.all_lfs_objects << LfsObject.new(oid: lfs_oid,
- size: lfs_size,
- file: tmp_file)
+ project.lfs_objects << LfsObject.new(oid: lfs_oid,
+ size: lfs_size,
+ file: tmp_file)
success
end
diff --git a/app/services/projects/move_lfs_objects_projects_service.rb b/app/services/projects/move_lfs_objects_projects_service.rb
index 10e19014db4..8cc420d7ba7 100644
--- a/app/services/projects/move_lfs_objects_projects_service.rb
+++ b/app/services/projects/move_lfs_objects_projects_service.rb
@@ -16,7 +16,7 @@ module Projects
private
def move_lfs_objects_projects
- non_existent_lfs_objects_projects.update_all(project_id: @project.lfs_storage_project.id)
+ non_existent_lfs_objects_projects.update_all(project_id: @project.id)
end
def remove_remaining_lfs_objects_project
diff --git a/app/services/projects/propagate_service_template.rb b/app/services/projects/propagate_instance_level_service.rb
index 6013b00b8c6..dc75977ba0f 100644
--- a/app/services/projects/propagate_service_template.rb
+++ b/app/services/projects/propagate_instance_level_service.rb
@@ -1,38 +1,38 @@
# frozen_string_literal: true
module Projects
- class PropagateServiceTemplate
+ class PropagateInstanceLevelService
BATCH_SIZE = 100
def self.propagate(*args)
new(*args).propagate
end
- def initialize(template)
- @template = template
+ def initialize(instance_level_service)
+ @instance_level_service = instance_level_service
end
def propagate
- return unless @template.active?
+ return unless @instance_level_service.active?
- Rails.logger.info("Propagating services for template #{@template.id}") # rubocop:disable Gitlab/RailsLogger
+ Rails.logger.info("Propagating services for instance_level_service #{@instance_level_service.id}") # rubocop:disable Gitlab/RailsLogger
- propagate_projects_with_template
+ propagate_projects_with_instance_level_service
end
private
- def propagate_projects_with_template
+ def propagate_projects_with_instance_level_service
loop do
batch = Project.uncached { project_ids_batch }
- bulk_create_from_template(batch) unless batch.empty?
+ bulk_create_from_instance_level_service(batch) unless batch.empty?
break if batch.size < BATCH_SIZE
end
end
- def bulk_create_from_template(batch)
+ def bulk_create_from_instance_level_service(batch)
service_list = batch.map do |project_id|
service_hash.values << project_id
end
@@ -52,7 +52,7 @@ module Projects
SELECT true
FROM services
WHERE services.project_id = projects.id
- AND services.type = '#{@template.type}'
+ AND services.type = '#{@instance_level_service.type}'
)
AND projects.pending_delete = false
AND projects.archived = false
@@ -73,9 +73,9 @@ module Projects
def service_hash
@service_hash ||=
begin
- template_hash = @template.as_json(methods: :type).except('id', 'template', 'project_id')
+ instance_hash = @instance_level_service.as_json(methods: :type).except('id', 'instance', 'project_id')
- template_hash.each_with_object({}) do |(key, value), service_hash|
+ instance_hash.each_with_object({}) do |(key, value), service_hash|
value = value.is_a?(Hash) ? value.to_json : value
service_hash[ActiveRecord::Base.connection.quote_column_name(key)] =
@@ -97,11 +97,11 @@ module Projects
# rubocop: enable CodeReuse/ActiveRecord
def active_external_issue_tracker?
- @template.issue_tracker? && !@template.default
+ @instance_level_service.issue_tracker? && !@instance_level_service.default
end
def active_external_wiki?
- @template.type == 'ExternalWikiService'
+ @instance_level_service.type == 'ExternalWikiService'
end
end
end
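A condensed sketch of how the renamed class is driven (mirroring the new worker below; the lookup is illustrative):

  service = Service.find_by(id: instance_level_service_id)
  # Does nothing unless the instance-level service is active; otherwise every
  # project without a service of this type gets one, created in batches of 100.
  Projects::PropagateInstanceLevelService.propagate(service)
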
diff --git a/app/services/projects/unlink_fork_service.rb b/app/services/projects/unlink_fork_service.rb
index e7e0141099e..b3cf27373cd 100644
--- a/app/services/projects/unlink_fork_service.rb
+++ b/app/services/projects/unlink_fork_service.rb
@@ -52,6 +52,10 @@ module Projects
Projects::ForksCountService.new(project).refresh_cache
end
+ # TODO: Remove this method once all LfsObjectsProject records are backfilled
+ # for forks.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/issues/122002 for more info.
def save_lfs_objects
return unless @project.forked?
diff --git a/app/views/projects/services/mattermost_slash_commands/_help.html.haml b/app/views/projects/services/mattermost_slash_commands/_help.html.haml
index cc005dd69b7..7f14128a0cb 100644
--- a/app/views/projects/services/mattermost_slash_commands/_help.html.haml
+++ b/app/views/projects/services/mattermost_slash_commands/_help.html.haml
@@ -10,8 +10,8 @@
%p.inline
= s_("MattermostService|See list of available commands in Mattermost after setting up this service, by entering")
%kbd.inline /&lt;trigger&gt; help
- - unless enabled || @service.template?
+ - unless enabled || @service.instance?
= render 'projects/services/mattermost_slash_commands/detailed_help', subject: @service
-- if enabled && !@service.template?
+- if enabled && !@service.instance?
= render 'projects/services/mattermost_slash_commands/installation_info', subject: @service
diff --git a/app/views/projects/services/slack_slash_commands/_help.html.haml b/app/views/projects/services/slack_slash_commands/_help.html.haml
index 7f6717e298c..447f7f074a8 100644
--- a/app/views/projects/services/slack_slash_commands/_help.html.haml
+++ b/app/views/projects/services/slack_slash_commands/_help.html.haml
@@ -11,7 +11,7 @@
%p.inline
= s_("SlackService|See list of available commands in Slack after setting up this service, by entering")
%kbd.inline /&lt;command&gt; help
- - unless @service.template?
+ - unless @service.instance?
%p= _("To set up this service:")
%ul.list-unstyled.indent-list
%li
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index 35852742b0d..b344e1e36b8 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -951,7 +951,7 @@
:latency_sensitive:
:resource_boundary: :unknown
:weight: 1
-- :name: propagate_service_template
+- :name: propagate_instance_level_service
:feature_category: :source_code_management
:has_external_dependencies:
:latency_sensitive:
diff --git a/app/workers/propagate_instance_level_service_worker.rb b/app/workers/propagate_instance_level_service_worker.rb
new file mode 100644
index 00000000000..64ea61cabfa
--- /dev/null
+++ b/app/workers/propagate_instance_level_service_worker.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+# Worker for propagating an instance-level service to all projects.
+class PropagateInstanceLevelServiceWorker
+ include ApplicationWorker
+
+ feature_category :source_code_management
+
+ LEASE_TIMEOUT = 4.hours.to_i
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def perform(instance_level_service_id)
+ return unless try_obtain_lease_for(instance_level_service_id)
+
+ Projects::PropagateInstanceLevelService.propagate(Service.find_by(id: instance_level_service_id))
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ private
+
+ def try_obtain_lease_for(instance_level_service_id)
+ Gitlab::ExclusiveLease
+ .new("propagate_instance_level_service_worker:#{instance_level_service_id}", timeout: LEASE_TIMEOUT)
+ .try_obtain
+ end
+end
diff --git a/app/workers/propagate_service_template_worker.rb b/app/workers/propagate_service_template_worker.rb
deleted file mode 100644
index 73a2b453207..00000000000
--- a/app/workers/propagate_service_template_worker.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-# Worker for updating any project specific caches.
-class PropagateServiceTemplateWorker
- include ApplicationWorker
-
- feature_category :source_code_management
-
- LEASE_TIMEOUT = 4.hours.to_i
-
- # rubocop: disable CodeReuse/ActiveRecord
- def perform(template_id)
- return unless try_obtain_lease_for(template_id)
-
- Projects::PropagateServiceTemplate.propagate(Service.find_by(id: template_id))
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- private
-
- def try_obtain_lease_for(template_id)
- Gitlab::ExclusiveLease
- .new("propagate_service_template_worker:#{template_id}", timeout: LEASE_TIMEOUT)
- .try_obtain
- end
-end
diff --git a/app/workers/repository_fork_worker.rb b/app/workers/repository_fork_worker.rb
index 0adf745c7ac..ba141f808a7 100644
--- a/app/workers/repository_fork_worker.rb
+++ b/app/workers/repository_fork_worker.rb
@@ -29,7 +29,15 @@ class RepositoryForkWorker
result = gitlab_shell.fork_repository(source_project, target_project)
- raise "Unable to fork project #{target_project.id} for repository #{source_project.disk_path} -> #{target_project.disk_path}" unless result
+ if result
+ link_lfs_objects(source_project, target_project)
+ else
+ raise_fork_failure(
+ source_project,
+ target_project,
+ 'Failed to create fork repository'
+ )
+ end
target_project.after_import
end
@@ -40,4 +48,20 @@ class RepositoryForkWorker
Rails.logger.info("Project #{project.full_path} was in inconsistent state (#{project.import_status}) while forking.") # rubocop:disable Gitlab/RailsLogger
false
end
+
+ def link_lfs_objects(source_project, target_project)
+ Projects::LfsPointers::LfsLinkService
+ .new(target_project)
+ .execute(source_project.lfs_objects_oids)
+ rescue Projects::LfsPointers::LfsLinkService::TooManyOidsError
+ raise_fork_failure(
+ source_project,
+ target_project,
+ 'Source project has too many LFS objects'
+ )
+ end
+
+ def raise_fork_failure(source_project, target_project, reason)
+ raise "Unable to fork project #{target_project.id} for repository #{source_project.disk_path} -> #{target_project.disk_path}: #{reason}"
+ end
end
diff --git a/changelogs/unreleased/20042-create-lfs-objects-project.yml b/changelogs/unreleased/20042-create-lfs-objects-project.yml
new file mode 100644
index 00000000000..0f942e11670
--- /dev/null
+++ b/changelogs/unreleased/20042-create-lfs-objects-project.yml
@@ -0,0 +1,5 @@
+---
+title: Create LfsObjectsProject record for forks as well
+merge_request: 22418
+author:
+type: fixed
diff --git a/changelogs/unreleased/202101-make-project-buttons-fit-in-1-row.yml b/changelogs/unreleased/202101-make-project-buttons-fit-in-1-row.yml
new file mode 100644
index 00000000000..d3c24b4ecd8
--- /dev/null
+++ b/changelogs/unreleased/202101-make-project-buttons-fit-in-1-row.yml
@@ -0,0 +1,5 @@
+---
+title: Rename 'Kubernetes configured' button
+merge_request: 24487
+author:
+type: changed
diff --git a/changelogs/unreleased/24108-fix-sast-multiline.yml b/changelogs/unreleased/24108-fix-sast-multiline.yml
new file mode 100644
index 00000000000..bb564e80447
--- /dev/null
+++ b/changelogs/unreleased/24108-fix-sast-multiline.yml
@@ -0,0 +1,5 @@
+---
+title: Fix multiline issue when loading env vars from DinD in SAST
+merge_request: 24108
+author:
+type: fixed
diff --git a/changelogs/unreleased/27142-divider-in-readme-is-overlaying-with-image.yml b/changelogs/unreleased/27142-divider-in-readme-is-overlaying-with-image.yml
new file mode 100644
index 00000000000..dd246a35640
--- /dev/null
+++ b/changelogs/unreleased/27142-divider-in-readme-is-overlaying-with-image.yml
@@ -0,0 +1,5 @@
+---
+title: Do not draw heading borders over floated images in markdown
+merge_request:
+author: Gwen_
+type: fixed
diff --git a/changelogs/unreleased/37335-conan-name-validation.yml b/changelogs/unreleased/37335-conan-name-validation.yml
new file mode 100644
index 00000000000..be841928403
--- /dev/null
+++ b/changelogs/unreleased/37335-conan-name-validation.yml
@@ -0,0 +1,5 @@
+---
+title: Conan packages are validated based on full recipe instead of name/version alone
+merge_request: 23467
+author:
+type: changed
diff --git a/changelogs/unreleased/ab-projects-without-feature.yml b/changelogs/unreleased/ab-projects-without-feature.yml
new file mode 100644
index 00000000000..f9baf6d36bb
--- /dev/null
+++ b/changelogs/unreleased/ab-projects-without-feature.yml
@@ -0,0 +1,5 @@
+---
+title: Ensure all Project records have a corresponding ProjectFeature record
+merge_request: 23766
+author:
+type: fixed
diff --git a/changelogs/unreleased/rename_services_template_to_instance.yml b/changelogs/unreleased/rename_services_template_to_instance.yml
new file mode 100644
index 00000000000..999981fed40
--- /dev/null
+++ b/changelogs/unreleased/rename_services_template_to_instance.yml
@@ -0,0 +1,5 @@
+---
+title: 'Service model: Rename template attribute to instance'
+merge_request: 23595
+author:
+type: other
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index 1cb19d18a0d..ede061f0359 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -194,7 +194,7 @@
- 1
- - project_update_repository_storage
- 1
-- - propagate_service_template
+- - propagate_instance_level_service
- 1
- - reactive_caching
- 1
diff --git a/db/migrate/20200123040535_add_multi_column_index_on_lfs_objects_projects.rb b/db/migrate/20200123040535_add_multi_column_index_on_lfs_objects_projects.rb
new file mode 100644
index 00000000000..36ccfa4743e
--- /dev/null
+++ b/db/migrate/20200123040535_add_multi_column_index_on_lfs_objects_projects.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddMultiColumnIndexOnLfsObjectsProjects < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :lfs_objects_projects, [:project_id, :lfs_object_id]
+ end
+
+ def down
+ remove_concurrent_index :lfs_objects_projects, [:project_id, :lfs_object_id]
+ end
+end
diff --git a/db/migrate/20200123045415_remove_project_id_index_on_lfs_objects_projects.rb b/db/migrate/20200123045415_remove_project_id_index_on_lfs_objects_projects.rb
new file mode 100644
index 00000000000..540383e8808
--- /dev/null
+++ b/db/migrate/20200123045415_remove_project_id_index_on_lfs_objects_projects.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class RemoveProjectIdIndexOnLfsObjectsProjects < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ remove_concurrent_index :lfs_objects_projects, :project_id
+ end
+
+ def down
+ add_concurrent_index :lfs_objects_projects, :project_id
+ end
+end
diff --git a/db/migrate/20200123092602_rename_services_template_to_instance.rb b/db/migrate/20200123092602_rename_services_template_to_instance.rb
new file mode 100644
index 00000000000..42964dfe348
--- /dev/null
+++ b/db/migrate/20200123092602_rename_services_template_to_instance.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class RenameServicesTemplateToInstance < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ rename_column_concurrently :services, :template, :instance
+ end
+
+ def down
+ undo_rename_column_concurrently :services, :template, :instance
+ end
+end
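The column rename follows GitLab's usual zero-downtime pattern: this regular migration plus the post-deployment cleanup migration further down (20200123101859). A condensed sketch of the pair; the trigger behaviour described in the comments is a summary of the migration helpers, not code from this commit:

  # db/migrate: adds `instance`, copies existing values, and installs triggers
  # that keep `template` and `instance` in sync while both columns exist.
  rename_column_concurrently :services, :template, :instance

  # db/post_migrate: drops the trigger and the old `template` column once the
  # application reads only `instance`.
  cleanup_concurrent_column_rename :services, :template, :instance
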
diff --git a/db/migrate/20200129234037_replace_conan_metadata_index.rb b/db/migrate/20200129234037_replace_conan_metadata_index.rb
new file mode 100644
index 00000000000..4f55a2974d8
--- /dev/null
+++ b/db/migrate/20200129234037_replace_conan_metadata_index.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class ReplaceConanMetadataIndex < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ OLD_INDEX = 'index_packages_conan_metadata_on_package_id'
+ NEW_INDEX = 'index_packages_conan_metadata_on_package_id_username_channel'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :packages_conan_metadata,
+ [:package_id, :package_username, :package_channel],
+ unique: true, name: NEW_INDEX
+
+ remove_concurrent_index_by_name :packages_conan_metadata, OLD_INDEX
+ end
+
+ def down
+ add_concurrent_index :packages_conan_metadata, :package_id, name: OLD_INDEX
+
+ remove_concurrent_index_by_name :packages_conan_metadata, NEW_INDEX
+ end
+end
diff --git a/db/post_migrate/20191021101942_remove_empty_github_service_templates.rb b/db/post_migrate/20191021101942_remove_empty_github_service_templates.rb
index 64abe93b3e8..2f9db9e2cf6 100644
--- a/db/post_migrate/20191021101942_remove_empty_github_service_templates.rb
+++ b/db/post_migrate/20191021101942_remove_empty_github_service_templates.rb
@@ -23,6 +23,10 @@ class RemoveEmptyGithubServiceTemplates < ActiveRecord::Migration[5.2]
private
def relationship
+ # The column `template` was renamed to `instance`. Column information needs
+ # to be reset to avoid cache problems after migrating down.
+ RemoveEmptyGithubServiceTemplates::Service.reset_column_information
+
RemoveEmptyGithubServiceTemplates::Service.where(template: true, type: 'GithubService')
end
end
diff --git a/db/post_migrate/20200123101859_cleanup_rename_services_template_to_instance.rb b/db/post_migrate/20200123101859_cleanup_rename_services_template_to_instance.rb
new file mode 100644
index 00000000000..904cad616d7
--- /dev/null
+++ b/db/post_migrate/20200123101859_cleanup_rename_services_template_to_instance.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class CleanupRenameServicesTemplateToInstance < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ cleanup_concurrent_column_rename :services, :template, :instance
+ end
+
+ def down
+ undo_cleanup_concurrent_column_rename :services, :template, :instance
+ end
+end
diff --git a/db/post_migrate/20200127111840_fix_projects_without_project_feature.rb b/db/post_migrate/20200127111840_fix_projects_without_project_feature.rb
new file mode 100644
index 00000000000..66e892444d1
--- /dev/null
+++ b/db/post_migrate/20200127111840_fix_projects_without_project_feature.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class FixProjectsWithoutProjectFeature < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ BATCH_SIZE = 50_000
+ MIGRATION = 'FixProjectsWithoutProjectFeature'
+
+ disable_ddl_transaction!
+
+ class Project < ActiveRecord::Base
+ include EachBatch
+ end
+
+ def up
+ queue_background_migration_jobs_by_range_at_intervals(Project, MIGRATION, 2.minutes, batch_size: BATCH_SIZE)
+ end
+
+ def down
+ # no-op
+ end
+end
diff --git a/db/post_migrate/20200206111847_migrate_propagate_service_template_sidekiq_queue.rb b/db/post_migrate/20200206111847_migrate_propagate_service_template_sidekiq_queue.rb
new file mode 100644
index 00000000000..30f6c2038d9
--- /dev/null
+++ b/db/post_migrate/20200206111847_migrate_propagate_service_template_sidekiq_queue.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class MigratePropagateServiceTemplateSidekiqQueue < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ sidekiq_queue_migrate 'propagate_service_template', to: 'propagate_instance_level_service'
+ end
+
+ def down
+ sidekiq_queue_migrate 'propagate_instance_level_service', to: 'propagate_service_template'
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 35422461741..45fa6ca279c 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 2020_02_06_091544) do
+ActiveRecord::Schema.define(version: 2020_02_06_111847) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
@@ -2347,7 +2347,7 @@ ActiveRecord::Schema.define(version: 2020_02_06_091544) do
t.datetime "updated_at"
t.integer "repository_type", limit: 2
t.index ["lfs_object_id"], name: "index_lfs_objects_projects_on_lfs_object_id"
- t.index ["project_id"], name: "index_lfs_objects_projects_on_project_id"
+ t.index ["project_id", "lfs_object_id"], name: "index_lfs_objects_projects_on_project_id_and_lfs_object_id"
end
create_table "licenses", id: :serial, force: :cascade do |t|
@@ -2945,7 +2945,7 @@ ActiveRecord::Schema.define(version: 2020_02_06_091544) do
t.datetime_with_timezone "updated_at", null: false
t.string "package_username", limit: 255, null: false
t.string "package_channel", limit: 255, null: false
- t.index ["package_id"], name: "index_packages_conan_metadata_on_package_id", unique: true
+ t.index ["package_id", "package_username", "package_channel"], name: "index_packages_conan_metadata_on_package_id_username_channel", unique: true
end
create_table "packages_dependencies", force: :cascade do |t|
@@ -3827,7 +3827,6 @@ ActiveRecord::Schema.define(version: 2020_02_06_091544) do
t.datetime "updated_at"
t.boolean "active", default: false, null: false
t.text "properties"
- t.boolean "template", default: false
t.boolean "push_events", default: true
t.boolean "issues_events", default: true
t.boolean "merge_requests_events", default: true
@@ -3844,9 +3843,10 @@ ActiveRecord::Schema.define(version: 2020_02_06_091544) do
t.boolean "deployment_events", default: false, null: false
t.string "description", limit: 500
t.boolean "comment_on_event_enabled", default: true, null: false
+ t.boolean "instance", default: false
+ t.index ["instance"], name: "index_services_on_instance"
t.index ["project_id"], name: "index_services_on_project_id"
t.index ["project_id"], name: "tmp_index_on_project_id_partial_with_prometheus_services", where: "((type)::text = 'PrometheusService'::text)"
- t.index ["template"], name: "index_services_on_template"
t.index ["type"], name: "index_services_on_type"
end
diff --git a/doc/api/projects.md b/doc/api/projects.md
index dd80e3ce8f3..a0243be1907 100644
--- a/doc/api/projects.md
+++ b/doc/api/projects.md
@@ -2000,11 +2000,6 @@ Allows modification of the forked relationship between existing projects. Availa
### Create a forked from/to relation between existing projects
-CAUTION: **Warning:**
-This will destroy the LFS objects stored in the fork.
-So to retain the LFS objects, make sure you've pulled them **before** creating the fork relation,
-and push them again **after** creating the fork relation.
-
```
POST /projects/:id/fork/:forked_from_id
```
diff --git a/doc/development/integrations/secure.md.md b/doc/development/integrations/secure.md.md
new file mode 100644
index 00000000000..b9b37a7e298
--- /dev/null
+++ b/doc/development/integrations/secure.md.md
@@ -0,0 +1,465 @@
+# Security scanner integration
+
+Integrating a security scanner into GitLab consists of providing end users
+with a [CI job definition](../../ci/yaml/README.md#introduction)
+they can add to their CI configuration files, to scan their GitLab projects.
+The scanning job is usually based on a [Docker image](https://docs.docker.com/)
+that contains the scanner and all its dependencies in a self-contained environment.
+This page documents requirements and guidelines for writing CI jobs implementing a security scanner,
+as well as requirements and guidelines for the Docker image itself.
+
+## Job definition
+
+### Name
+
+For consistency, scanning jobs should be named after the scanner, in lower case.
+The job name is suffixed with the type of scanning:
+`_dependency_scanning`, `_container_scanning`, `_dast`, and `_sast`.
+For instance, the dependency scanning job based on the "MySec" scanner would be named `mysec_dependency_scanning`.
+
+### Image
+
+The [`image`](../../ci/yaml/README.md#image) keyword is used to specify
+the [Docker image](../../ci/docker/using_docker_images.md#what-is-an-image)
+containing the security scanner.
+
+### Script
+
+The [`script`](../../ci/yaml/README.md#script) keyword
+is used to specify the command that the job runs.
+Because the `script` cannot be left empty, it must be set to the command that performs the scan.
+It is not possible to rely on the predefined `ENTRYPOINT` and `CMD` of the Docker image
+to perform the scan automatically, without passing any command.
+
+The [`before_script`](../../ci/yaml/README.md#before_script-and-after_script)
+should not be used in the job definition because users may rely on this to prepare their projects before performing the scan.
+For instance, it is common practice to use `before_script` to install system libraries
+a particular project needs before performing SAST or Dependency Scanning.
+
+Similarly, [`after_script`](../../ci/yaml/README.md#before_script-and-after_script)
+should not be used in the job definition, because it may be overridden by users.
+
+### Stage
+
+For consistency, scanning jobs should belong to the `test` stage when possible.
+The [`stage`](../../ci/yaml/README.md#stage) keyword can be omitted because `test` is the default value.
+
+### Fail-safe
+
+To be aligned with the [GitLab Security paradigm](https://about.gitlab.com/direction/secure/#security-paradigm),
+scanning jobs should not block the pipeline when they fail,
+so the [`allow_failure`](../../ci/yaml/README.md#allow_failure) parameter should be set to `true`.
+
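+To put the previous sections together, here is a minimal sketch of a scanning job
+that runs the scanner and never blocks the pipeline. The image path and the
+`/analyzer run` command are illustrative assumptions, not part of any existing scanner:
+
+```yaml
+mysec_sast:
+  image: registry.gitlab.com/secure/mysec
+  script:
+    # Hypothetical command; replace it with the command that actually
+    # performs the scan and writes the report file.
+    - /analyzer run
+  allow_failure: true
+```
+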
+### Artifacts
+
+Scanning jobs must declare a report that corresponds to the type of scanning they perform,
+using the [`artifacts:reports`](../../ci/yaml/README.md#artifactsreports) keyword.
+Valid reports are: `dependency_scanning`, `container_scanning`, `dast`, and `sast`.
+
+For example, here is the definition of a SAST job that generates a file named `gl-sast-report.json`,
+and uploads it as a SAST report:
+
+```yaml
+mysec_sast:
+ image: registry.gitlab.com/secure/mysec
+ artifacts:
+ reports:
+ sast: gl-sast-report.json
+```
+
+`gl-sast-report.json` is an example file path. See [the Output file section](#output-file) for more details.
+It is processed as a SAST report because it is declared as such in the job definition.
+
+### Rules
+
+Scanning jobs should be skipped unless the corresponding feature is listed
+in the `GITLAB_FEATURES` variable (comma-separated list of values).
+So Dependency Scanning, Container Scanning, SAST, and DAST should be skipped
+unless `GITLAB_FEATURES` contains `dependency_scanning`, `container_scanning`, `sast`, and `dast`, respectively.
+See [GitLab CI/CD predefined variables](../../ci/variables/predefined_variables.md).
+
+Also, scanning jobs should be skipped when the corresponding variable suffixed with `_DISABLED` is present.
+See `DEPENDENCY_SCANNING_DISABLED`, `CONTAINER_SCANNING_DISABLED`, `SAST_DISABLED`, and `DAST_DISABLED`
+in [Auto DevOps documentation](../../topics/autodevops/index.md#disable-jobs).
+
+Finally, SAST and Dependency Scanning job definitions should use
+`CI_PROJECT_REPOSITORY_LANGUAGES` (comma-separated list of values)
+in order to skip the job when the language or technology is not supported.
+Language detection currently relies on the [`linguist`](https://github.com/github/linguist) Ruby gem.
+See [GitLab CI/CD predefined variables](../../ci/variables/predefined_variables.md#variables-reference).
+
+For instance, here is how to skip the Dependency Scanning job `mysec_dependency_scanning`
+unless the project repository contains Java source code,
+and the `dependency_scanning` feature is enabled:
+
+```yaml
+mysec_dependency_scanning:
+ except:
+ variables:
+ - $DEPENDENCY_SCANNING_DISABLED
+ only:
+ variables:
+ - $GITLAB_FEATURES =~ /\bdependency_scanning\b/ &&
+ $CI_PROJECT_REPOSITORY_LANGUAGES =~ /\bjava\b/
+```
+
+The [`only/except`](../../ci/yaml/README.md#onlyexcept-basic) keywords
+as well as the new [`rules`](../../ci/yaml/README.md#rules) keyword
+make it possible to trigger the job depending on the branch, or when particular files change.
+Such rules should be defined by users based on their needs,
+and should not be predefined in the job definition of the scanner.
+
+## Docker image
+
+The Docker image is a self-contained environment that combines
+the scanner with all the libraries and tools it depends on.
+
+### Image size
+
+Depending on the CI infrastructure,
+the CI may have to fetch the Docker image every time the job runs.
+To make the scanning job run fast, and to avoid wasting bandwidth,
+it is important to make Docker images as small as possible,
+ideally smaller than 50 MB.
+
+If the scanner requires a fully functional Linux environment,
+it is recommended to use a [Debian](https://www.debian.org/intro/about) "slim" distribution or [Alpine Linux](https://www.alpinelinux.org/).
+If possible, it is recommended to build the image from scratch, using the `FROM scratch` instruction,
+and to compile the scanner with all the libraries it needs.
+[Multi-stage builds](https://docs.docker.com/develop/develop-images/multistage-build/)
+might also help with keeping the image small.
+
+### Image tag
+
+As documented in the [Docker Official Images](https://github.com/docker-library/official-images#tags-and-aliases) project,
+it is strongly encouraged that version number tags be given aliases that allow users to easily refer to the "most recent" release of a particular series.
+See also [Docker Tagging: Best practices for tagging and versioning docker images](https://docs.microsoft.com/en-us/archive/blogs/stevelasker/docker-tagging-best-practices-for-tagging-and-versioning-docker-images).
+
+## Command line
+
+A scanner is a command line tool that takes environment variables as inputs,
+and generates a file that is uploaded as a report (based on the job definition).
+It also generates text output on the standard output and standard error streams, and exits with a status code.
+
+### Variables
+
+All CI variables are passed to the scanner as environment variables.
+The scanned project is described by the [predefined CI variables](../../ci/variables/README.md).
+
+#### SAST, Dependency Scanning
+
+SAST and Dependency Scanning scanners must scan the files in the project directory, given by the `CI_PROJECT_DIR` variable.
+
+#### Container Scanning
+
+In order to be consistent with the official Container Scanning for GitLab,
+scanners must scan the Docker image whose name and tag are given by
+`CI_APPLICATION_REPOSITORY` and `CI_APPLICATION_TAG`, respectively.
+
+If not provided, `CI_APPLICATION_REPOSITORY` should default to
+`$CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG`, which is a combination of predefined CI variables.
+`CI_APPLICATION_TAG` should default to `CI_COMMIT_SHA`.
+
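+For instance, a user could point the scanning job at a specific image by overriding
+these variables in the job definition. A minimal sketch, assuming hypothetical
+image name and tag values:
+
+```yaml
+mysec_container_scanning:
+  variables:
+    # Hypothetical overrides; when unset, the scanner should fall back to
+    # $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG and $CI_COMMIT_SHA.
+    CI_APPLICATION_REPOSITORY: $CI_REGISTRY_IMAGE/my-app
+    CI_APPLICATION_TAG: v1.2.3
+```
+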
+The scanner should sign in to the Docker registry
+using the variables `DOCKER_USER` and `DOCKER_PASSWORD`.
+If these are not defined, then the scanner should use
+`CI_REGISTRY_USER` and `CI_REGISTRY_PASSWORD` as default values.
+
+#### Configuration files
+
+While scanners may use `CI_PROJECT_DIR` to load specific configuration files,
+it is recommended to expose configuration as environment variables, not files.
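+
+For instance, a scanner-specific option could be exposed as a CI variable in the
+job definition. A minimal sketch, where `MYSEC_EXCLUDED_PATHS` is a hypothetical
+setting of the fictional "MySec" scanner:
+
+```yaml
+mysec_sast:
+  variables:
+    # Hypothetical scanner option passed as an environment variable
+    # instead of a configuration file.
+    MYSEC_EXCLUDED_PATHS: "spec,test"
+```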
+
+### Output file
+
+Like any artifact uploaded to the GitLab CI,
+the Secure report generated by the scanner must be written in the project directory,
+given by the `CI_PROJECT_DIR` environment variable.
+
+It is recommended to name the output file after the type of scanning, and to use `gl-` as a prefix.
+Since all Secure reports are JSON files, it is recommended to use `.json` as a file extension.
+For instance, a suggested file name for a Dependency Scanning report is `gl-dependency-scanning.json`.
+
+The [`artifacts:reports`](../../ci/yaml/README.md#artifactsreports) keyword
+of the job definition must be consistent with the file path where the Security report is written.
+For instance, if a Dependency Scanning analyzer writes its report to the CI project directory,
+and if this report file name is `depscan.json`,
+then `artifacts:reports:dependency_scanning` must be set to `depscan.json`.
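+
+As an illustration, here is a sketch of such a Dependency Scanning job; the job name,
+image, and `/analyzer run` command are hypothetical:
+
+```yaml
+mysec_dependency_scanning:
+  image: registry.gitlab.com/secure/mysec
+  script:
+    # Hypothetical command that writes the report to $CI_PROJECT_DIR/depscan.json
+    - /analyzer run --output "$CI_PROJECT_DIR/depscan.json"
+  allow_failure: true
+  artifacts:
+    reports:
+      dependency_scanning: depscan.json
+```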
+
+### Exit code
+
+Following the POSIX exit code standard, the scanner exits with 0 for success and any number from 1 to 255 for anything else.
+Success also includes the case when vulnerabilities are found.
+
+### Logging
+
+The scanner should log error messages and warnings so that users can easily investigate
+misconfiguration and integration issues by looking at the log of the CI scanning job.
+
+Scanners may use [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors)
+to colorize the messages they write to the Unix standard output and standard error streams.
+We recommend using red to report errors, yellow for warnings, and green for notices.
+Also, we recommend prefixing error messages with `[ERRO]`, warnings with `[WARN]`, and notices with `[INFO]`.
+
+## Report
+
+The report is a JSON document that combines vulnerabilities with possible remediations.
+
+This documentation gives an overview of the report JSON format,
+as well as recommendations and examples to help integrators set its fields.
+The format is extensively described in the documentation of
+[SAST](../../user/application_security/sast/index.md#reports-json-format),
+[Dependency Scanning](../../user/application_security/dependency_scanning/index.md#reports-json-format),
+and [Container Scanning](../../user/application_security/container_scanning/index.md#reports-json-format).
+
+The DAST variant of the report JSON format is not documented at the moment.
+
+### Version
+
+The documentation of
+[SAST](../../user/application_security/sast/index.md#reports-json-format),
+[Dependency Scanning](../../user/application_security/dependency_scanning/index.md#reports-json-format),
+and [Container Scanning](../../user/application_security/container_scanning/index.md#reports-json-format)
+describes the Secure report format version.
+
+### Vulnerabilities
+
+The `vulnerabilities` field of the report is an array of vulnerability objects.
+
+#### Category
+
+The value of the `category` field matches the report type:
+`dependency_scanning`, `container_scanning`, `sast`, and `dast`.
+
+#### Scanner
+
+The `scanner` field is an object that embeds a human-readable `name` and a technical `id`.
+The `id` should not collide with the `id` of any scanner provided by another integrator.
+
+#### Name, message, and description
+
+The `name` and `message` fields contain a short description of the vulnerability,
+whereas the `description` field provides more details.
+
+The `name` is context-free and contains no information on where the vulnerability has been found,
+whereas the `message` may repeat the location.
+
+For instance, a `message` for a vulnerability
+reported by Dependency Scanning gives information on the vulnerable dependency,
+which is redundant with the `location` field of the vulnerability.
+The `name` field is preferred but the `message` field is used
+when the context/location cannot be removed from the title of the vulnerability.
+
+To illustrate, here is an example vulnerability object reported by a Dependency Scanning scanner,
+and where the `message` repeats the `location` field:
+
+```json
+{
+ "location": {
+ "dependency": {
+ "package": {
+ "name": "debug"
+ }
+ }
+ },
+ "name": "Regular Expression Denial of Service",
+ "message": "Regular Expression Denial of Service in debug",
+ "description": "The debug module is vulnerable to regular expression denial of service
+ when untrusted user input is passed into the `o` formatter.
+ It takes around 50k characters to block for 2 seconds making this a low severity issue."
+}
+```
+
+The `description` might explain how the vulnerability works or give context about the exploit.
+It should not repeat the other fields of the vulnerability object.
+In particular, the `description` should not repeat the `location` (what is affected)
+or the `solution` (how to mitigate the risk).
+
+There is a proposal to remove either the `name` or the `message` field, to avoid ambiguity.
+See [issue #36779](https://gitlab.com/gitlab-org/gitlab/issues/36779).
+
+#### Solution
+
+The `solution` field may contain instructions users should follow to fix the vulnerability or to mitigate the risk.
+It is intended for users whereas the `remediations` objects are processed automatically by GitLab.
+
+#### Identifiers
+
+The `identifiers` array describes the vulnerability flaw that has been detected.
+An identifier object has a `type` and a `value`;
+these technical fields are used to tell if two identifiers are the same.
+It also has a `name` and a `url`;
+these fields are used to display the identifier in the user interface.
+
+It is recommended to reuse the identifiers the GitLab scanners already define:
+
+| Identifier | Type | Example value |
+|------------|------|---------------|
+| [CVE](https://cve.mitre.org/cve/) | `cve` | CVE-2019-10086 |
+| [CWE](https://cwe.mitre.org/data/index.html) | `cwe` | CWE-1026 |
+| [OSVDB](https://cve.mitre.org/data/refs/refmap/source-OSVDB.html) | `osvdb` | OSVDB-113928 |
+| [USN](https://usn.ubuntu.com/) | `usn` | USN-4234-1 |
+| [WASC](http://projects.webappsec.org/Threat-Classification-Reference-Grid) | `wasc` | WASC-19 |
+| [RHSA](https://access.redhat.com/errata) | `rhsa` | RHSA-2020:0111 |
+| [ELSA](https://linux.oracle.com/security/) | `elsa` | ELSA-2020-0085 |
+
+The generic identifiers listed above are defined in the [common library](https://gitlab.com/gitlab-org/security-products/analyzers/common);
+this library is shared by the analyzers maintained by GitLab,
+and this is where you can [contribute](https://gitlab.com/gitlab-org/security-products/analyzers/common/blob/master/issue/identifier.go) new generic identifiers.
+Analyzers may also produce vendor-specific or product-specific identifiers;
+these do not belong to the [common library](https://gitlab.com/gitlab-org/security-products/analyzers/common).
+
+The first item of the `identifiers` array is called the primary identifier.
+The primary identifier is particularly important, because it is used to
+[track vulnerabilities](#tracking-merging-vulnerabilities)
+as new commits are pushed to the repository.
+
+Identifiers are used to [merge duplicate vulnerabilities](#tracking-merging-vulnerabilities)
+reported for the same commit, except for `CWE` and `WASC`.
+
+### Location
+
+The `location` indicates where the vulnerability has been detected.
+The format of the location depends on the type of scanning.
+
+Internally GitLab extracts some attributes of the `location` to generate the **location fingerprint**,
+which is used to [track vulnerabilities](#tracking-merging-vulnerabilities)
+as new commits are pushed to the repository.
+The attributes used to generate the location fingerprint also depend on the type of scanning.
+
+#### Dependency Scanning
+
+The `location` of a Dependency Scanning vulnerability is composed of a `dependency` and a `file`.
+The `dependency` object describes the affected `package` and the dependency `version`.
+`package` embeds the `name` of the affected library/module.
+`file` is the path of the dependency file that declares the affected dependency.
+
+For instance, here is the `location` object for a vulnerability affecting
+version `4.0.11` of npm package [`handlebars`](https://www.npmjs.com/package/handlebars):
+
+```json
+{
+ "file": "client/package.json",
+ "dependency": {
+ "package": {
+ "name": "handlebars"
+ },
+ "version": "4.0.11"
+ }
+}
+```
+
+This affected dependency is listed in `client/package.json`,
+a dependency file processed by npm or yarn.
+
+The location fingerprint of a Dependency Scanning vulnerability
+combines the `file` and the package `name`,
+so these attributes are mandatory.
+All other attributes are optional.
+
+#### Container Scanning
+
+Similar to Dependency Scanning,
+the `location` of a Container Scanning vulnerability has a `dependency` and a `file`.
+It also has an `operating_system` field.
+
+For instance, here is the `location` object for a vulnerability affecting
+version `2.50.3-2+deb9u1` of Debian package `glib2.0`:
+
+```json
+{
+ "dependency": {
+ "package": {
+ "name": "glib2.0"
+ },
+ "version": "2.50.3-2+deb9u1"
+ },
+ "operating_system": "debian:9",
+ "image": "registry.gitlab.com/example/app:latest"
+}
+```
+
+The affected package is found when scanning the Docker image `registry.gitlab.com/example/app:latest`.
+The Docker image is based on `debian:9` (Debian Stretch).
+
+The location fingerprint of a Container Scanning vulnerability
+combines the `operating_system` and the package `name`,
+so these attributes are mandatory.
+The `image` is also mandatory.
+All other attributes are optional.
+
+#### SAST
+
+The `location` of a SAST vulnerability must have a `file` and a `start_line` field,
+giving the path of the affected file, and the affected line number, respectively.
+It may also have an `end_line`, a `class`, and a `method`.
+
+For instance, here is the `location` object for a security flaw found
+at line `41` of `src/main/java/com/gitlab/example/App.java`,
+in the `generateSecretToken` method of the `com.gitlab.security_products.tests.App` Java class:
+
+```json
+{
+ "file": "src/main/java/com/gitlab/example/App.java",
+ "start_line": 41,
+ "end_line": 41,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "generateSecretToken1"
+}
+```
+
+The location fingerprint of a SAST vulnerability
+combines `file`, `start_line`, and `end_line`,
+so these attributes are mandatory.
+All other attributes are optional.
+
+### Tracking, merging vulnerabilities
+
+Users may give feedback on a vulnerability:
+
+- They may dismiss a vulnerability if it does not apply to their projects.
+- They may create an issue for a vulnerability if there is a possible threat.
+
+GitLab tracks vulnerabilities so that user feedback is not lost
+when new Git commits are pushed to the repository.
+Vulnerabilities are tracked using a combination of three attributes:
+
+- [Report type](#category)
+- [Location fingerprint](#location)
+- [Primary identifier](#identifiers)
+
+Right now, GitLab cannot track a vulnerability if its location changes
+as new Git commits are pushed, and this results in user feedback being lost.
+For instance, user feedback on a SAST vulnerability is lost
+if the affected file is renamed or the affected line moves down.
+This is addressed in [issue #7586](https://gitlab.com/gitlab-org/gitlab/issues/7586).
+
+In some cases, the multiple scans executed in the same CI pipeline result in duplicates
+that are automatically merged using the vulnerability location and identifiers.
+Two vulnerabilities are considered to be the same if they share the same [location fingerprint](#location)
+and at least one [identifier](#identifiers). Two identifiers are the same if they share the same `type` and `id`.
+CWE and WASC identifiers are not considered because they describe categories of vulnerability flaws,
+rather than specific security flaws.
+
+#### Severity and confidence
+
+The `severity` field describes how much the vulnerability impacts the software,
+whereas the `confidence` field describes how reliable the assessment of the vulnerability is.
+The severity is used to sort the vulnerabilities in the security dashboard.
+
+The severity ranges from `Info` to `Critical`, but it can also be `Unknown`.
+Valid values are: `Unknown`, `Info`, `Low`, `Medium`, `High`, or `Critical`.
+
+The confidence ranges from `Low` to `Confirmed`, but it can also be `Unknown`,
+`Experimental` or even `Ignore` if the vulnerability is to be ignored.
+Valid values are: `Ignore`, `Unknown`, `Experimental`, `Low`, `Medium`, `High`, or `Confirmed`.
+
+### Remediations
+
+The `remediations` field of the report is an array of remediation objects.
+Each remediation describes a patch that can be applied to automatically fix
+a set of vulnerabilities.
+
+Currently, remediations rely on a deprecated field named `cve` to reference vulnerabilities,
+so it is recommended not to use them until a new format has been defined.
+See [issue #36777](https://gitlab.com/gitlab-org/gitlab/issues/36777).
diff --git a/doc/user/application_security/dependency_scanning/index.md b/doc/user/application_security/dependency_scanning/index.md
index 24cf7906848..03bd5cb276d 100644
--- a/doc/user/application_security/dependency_scanning/index.md
+++ b/doc/user/application_security/dependency_scanning/index.md
@@ -151,6 +151,8 @@ using environment variables.
| `PIP_REQUIREMENTS_FILE` | Pip requirements file to be scanned. |
| `MAVEN_CLI_OPTS` | List of command line arguments that will be passed to `maven` by the analyzer. The default is `"-DskipTests --batch-mode"`. See an example for [using private repos](#using-private-maven-repos). |
| `BUNDLER_AUDIT_UPDATE_DISABLED` | Disable automatic updates for the `bundler-audit` analyzer (default: `"false"`). Useful if you're running Dependency Scanning in an offline, air-gapped environment.|
+| `BUNDLER_AUDIT_ADVISORY_DB_URL` | URL of the advisory database used by bundler-audit (default: `https://github.com/rubysec/ruby-advisory-db`). |
+| `BUNDLER_AUDIT_ADVISORY_DB_REF_NAME` | Git ref for the advisory database specified by `BUNDLER_AUDIT_ADVISORY_DB_URL` (default: `master`). |
### Using private Maven repos
diff --git a/lib/api/services.rb b/lib/api/services.rb
index a3b5d2cc4b7..e6b48274989 100644
--- a/lib/api/services.rb
+++ b/lib/api/services.rb
@@ -132,7 +132,7 @@ module API
helpers do
# rubocop: disable CodeReuse/ActiveRecord
def slash_command_service(project, service_slug, params)
- project.services.active.where(template: false).find do |service|
+ project.services.active.where(instance: false).find do |service|
service.try(:token) == params[:token] && service.to_param == service_slug.underscore
end
end
diff --git a/lib/gitlab/background_migration/fix_projects_without_project_feature.rb b/lib/gitlab/background_migration/fix_projects_without_project_feature.rb
new file mode 100644
index 00000000000..68665db522e
--- /dev/null
+++ b/lib/gitlab/background_migration/fix_projects_without_project_feature.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # This migration creates missing project_features records
+ # for the projects within the given range of ids
+ class FixProjectsWithoutProjectFeature
+ def perform(from_id, to_id)
+ if (number_of_created_records = create_missing!(from_id, to_id)) > 0
+ log(number_of_created_records, from_id, to_id)
+ end
+ end
+
+ private
+
+ def create_missing!(from_id, to_id)
+ result = ActiveRecord::Base.connection.select_one(sql(from_id, to_id))
+ return 0 unless result
+
+ result['number_of_created_records']
+ end
+
+ def sql(from_id, to_id)
+ <<~SQL
+ WITH created_records AS (
+ INSERT INTO project_features (
+ project_id,
+ merge_requests_access_level,
+ issues_access_level,
+ wiki_access_level,
+ snippets_access_level,
+ builds_access_level,
+ repository_access_level,
+ forking_access_level,
+ pages_access_level,
+ created_at,
+ updated_at
+ )
+ SELECT projects.id,
+ 20, 20, 20, 20, 20, 20, 20,
+ #{pages_access_level},
+ TIMEZONE('UTC', NOW()), TIMEZONE('UTC', NOW())
+ FROM projects
+ WHERE projects.id BETWEEN #{Integer(from_id)} AND #{Integer(to_id)}
+ AND NOT EXISTS (
+ SELECT 1 FROM project_features
+ WHERE project_features.project_id = projects.id
+ )
+ ON CONFLICT (project_id) DO NOTHING
+ RETURNING *
+ )
+ SELECT COUNT(*) as number_of_created_records
+ FROM created_records
+ SQL
+ end
+
+ def pages_access_level
+ if ::Gitlab::Pages.access_control_is_forced?
+ "10"
+ else
+ "GREATEST(projects.visibility_level, 10)"
+ end
+ end
+
+ def log(count, from_id, to_id)
+ logger = Gitlab::BackgroundMigration::Logger.build
+
+ logger.info(message: "FixProjectsWithoutProjectFeature: created missing project_features for #{count} projects in id=#{from_id}...#{to_id}")
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
index 864e3eb569d..51a1f4e549b 100644
--- a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
@@ -37,11 +37,8 @@ sast:
fi
fi
- |
- printenv | grep -E '^(DOCKER_|CI|GITLAB_|FF_|HOME|PWD|OLDPWD|PATH|SHLVL|HOSTNAME)' | cut -d'=' -f1 | \
- (while IFS='\\n' read -r VAR; do unset -v "$VAR"; done; /bin/printenv > .env)
- - |
- docker run \
- --env-file .env \
+ ENVS=`printenv | grep -vE '^(DOCKER_|CI|GITLAB_|FF_|HOME|PWD|OLDPWD|PATH|SHLVL|HOSTNAME)' | sed -n '/^[^\t]/s/=.*//p' | sed '/^$/d' | sed 's/^/-e /g' | tr '\n' ' '`
+ docker run $ENVS \
--volume "$PWD:/code" \
--volume /var/run/docker.sock:/var/run/docker.sock \
"registry.gitlab.com/gitlab-org/security-products/sast:$SAST_VERSION" /app/bin/run /code
diff --git a/lib/gitlab/graphql/connections/keyset/conditions/base_condition.rb b/lib/gitlab/graphql/connections/keyset/conditions/base_condition.rb
index 8c0d71aff6d..26c9d77a8df 100644
--- a/lib/gitlab/graphql/connections/keyset/conditions/base_condition.rb
+++ b/lib/gitlab/graphql/connections/keyset/conditions/base_condition.rb
@@ -6,6 +6,12 @@ module Gitlab
module Keyset
module Conditions
class BaseCondition
+ # @param [Arel::Table] arel_table for the relation being ordered
+ # @param [Array<OrderInfo>] order_list of extracted orderings
+ # @param [Array] values from the decoded cursor
+ # @param [Array<String>] operators determining sort comparison
+ # @param [Symbol] before_or_after indicates whether we want
+ # items :before the cursor or :after the cursor
def initialize(arel_table, order_list, values, operators, before_or_after)
@arel_table, @order_list, @values, @operators, @before_or_after = arel_table, order_list, values, operators, before_or_after
@@ -20,18 +26,25 @@ module Gitlab
attr_reader :arel_table, :order_list, :values, :operators, :before_or_after
- def table_condition(attribute, value, operator)
+ def table_condition(order_info, value, operator)
+ if order_info.named_function
+ target = order_info.named_function
+ value = value&.downcase if target&.name&.downcase == 'lower'
+ else
+ target = arel_table[order_info.attribute_name]
+ end
+
case operator
when '>'
- arel_table[attribute].gt(value)
+ target.gt(value)
when '<'
- arel_table[attribute].lt(value)
+ target.lt(value)
when '='
- arel_table[attribute].eq(value)
+ target.eq(value)
when 'is_null'
- arel_table[attribute].eq(nil)
+ target.eq(nil)
when 'is_not_null'
- arel_table[attribute].not_eq(nil)
+ target.not_eq(nil)
end
end
end
diff --git a/lib/gitlab/graphql/connections/keyset/order_info.rb b/lib/gitlab/graphql/connections/keyset/order_info.rb
index e112154cfbc..7f61bf937b4 100644
--- a/lib/gitlab/graphql/connections/keyset/order_info.rb
+++ b/lib/gitlab/graphql/connections/keyset/order_info.rb
@@ -5,10 +5,10 @@ module Gitlab
module Connections
module Keyset
class OrderInfo
- attr_reader :attribute_name, :sort_direction
+ attr_reader :attribute_name, :sort_direction, :named_function
def initialize(order_value)
- @attribute_name, @sort_direction =
+ @attribute_name, @sort_direction, @named_function =
if order_value.is_a?(String)
extract_nulls_last_order(order_value)
else
@@ -69,11 +69,24 @@ module Gitlab
def extract_nulls_last_order(order_value)
tokens = order_value.downcase.split
- [tokens.first, (tokens[1] == 'asc' ? :asc : :desc)]
+ [tokens.first, (tokens[1] == 'asc' ? :asc : :desc), nil]
end
def extract_attribute_values(order_value)
- [order_value.expr.name, order_value.direction]
+ named = nil
+ name = if ordering_by_lower?(order_value)
+ named = order_value.expr
+ named.expressions[0].name.to_s
+ else
+ order_value.expr.name
+ end
+
+ [name, order_value.direction, named]
+ end
+
+ # determine if ordering using LOWER, eg. "ORDER BY LOWER(boards.name)"
+ def ordering_by_lower?(order_value)
+ order_value.expr.is_a?(Arel::Nodes::NamedFunction) && order_value.expr&.name&.downcase == 'lower'
end
end
end
diff --git a/lib/gitlab/graphql/connections/keyset/query_builder.rb b/lib/gitlab/graphql/connections/keyset/query_builder.rb
index e93c25d85fc..fe85898f638 100644
--- a/lib/gitlab/graphql/connections/keyset/query_builder.rb
+++ b/lib/gitlab/graphql/connections/keyset/query_builder.rb
@@ -40,17 +40,16 @@ module Gitlab
# "issues"."id" > 500
#
def conditions
- attr_names = order_list.map { |field| field.attribute_name }
- attr_values = attr_names.map { |name| decoded_cursor[name] }
+ attr_values = order_list.map { |field| decoded_cursor[field.attribute_name] }
- if attr_names.count == 1 && attr_values.first.nil?
+ if order_list.count == 1 && attr_values.first.nil?
raise Gitlab::Graphql::Errors::ArgumentError.new('Before/after cursor invalid: `nil` was provided as only sortable value')
end
- if attr_names.count == 1 || attr_values.first.present?
- Keyset::Conditions::NotNullCondition.new(arel_table, attr_names, attr_values, operators, before_or_after).build
+ if order_list.count == 1 || attr_values.first.present?
+ Keyset::Conditions::NotNullCondition.new(arel_table, order_list, attr_values, operators, before_or_after).build
else
- Keyset::Conditions::NullCondition.new(arel_table, attr_names, attr_values, operators, before_or_after).build
+ Keyset::Conditions::NullCondition.new(arel_table, order_list, attr_values, operators, before_or_after).build
end
end
diff --git a/lib/gitlab/import_export/import_export.yml b/lib/gitlab/import_export/import_export.yml
index afa575241a1..9acc2098823 100644
--- a/lib/gitlab/import_export/import_export.yml
+++ b/lib/gitlab/import_export/import_export.yml
@@ -257,7 +257,7 @@ excluded_attributes:
- :token
- :token_encrypted
services:
- - :template
+ - :instance
error_tracking_setting:
- :encrypted_token
- :encrypted_token_iv
diff --git a/lib/gitlab/usage_data.rb b/lib/gitlab/usage_data.rb
index f10eb82e03e..75fa3f4c718 100644
--- a/lib/gitlab/usage_data.rb
+++ b/lib/gitlab/usage_data.rb
@@ -179,7 +179,7 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def services_usage
- service_counts = count(Service.active.where(template: false).where.not(type: 'JiraService').group(:type), fallback: Hash.new(-1))
+ service_counts = count(Service.active.where(instance: false).where.not(type: 'JiraService').group(:type), fallback: Hash.new(-1))
results = Service.available_services_names.each_with_object({}) do |service_name, response|
response["projects_#{service_name}_active".to_sym] = service_counts["#{service_name}_service".camelize] || 0
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index e0c59c8555b..2d6837ba8c8 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -10903,9 +10903,6 @@ msgstr ""
msgid "Kubernetes cluster was successfully updated."
msgstr ""
-msgid "Kubernetes configured"
-msgstr ""
-
msgid "Kubernetes deployment not found"
msgstr ""
@@ -12121,6 +12118,9 @@ msgstr ""
msgid "Metrics|Validating query"
msgstr ""
+msgid "Metrics|View logs"
+msgstr ""
+
msgid "Metrics|Y-axis label"
msgstr ""
diff --git a/qa/qa/specs/features/browser_ui/1_manage/login/log_into_mattermost_via_gitlab_spec.rb b/qa/qa/specs/features/browser_ui/1_manage/login/log_into_mattermost_via_gitlab_spec.rb
index ebc4cf8555f..a680cfa96bd 100644
--- a/qa/qa/specs/features/browser_ui/1_manage/login/log_into_mattermost_via_gitlab_spec.rb
+++ b/qa/qa/specs/features/browser_ui/1_manage/login/log_into_mattermost_via_gitlab_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module QA
- context 'Manage', :orchestrated, :mattermost, quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/202069' do
+ context 'Manage', :orchestrated, :mattermost do
describe 'Mattermost login' do
it 'user logs into Mattermost using GitLab OAuth' do
Flow::Login.sign_in
diff --git a/spec/controllers/admin/services_controller_spec.rb b/spec/controllers/admin/services_controller_spec.rb
index 44233776865..6f59a5ac016 100644
--- a/spec/controllers/admin/services_controller_spec.rb
+++ b/spec/controllers/admin/services_controller_spec.rb
@@ -15,11 +15,11 @@ describe Admin::ServicesController do
Service.available_services_names.each do |service_name|
context "#{service_name}" do
let!(:service) do
- service_template = "#{service_name}_service".camelize.constantize
- service_template.where(template: true).first_or_create
+ service_instance = "#{service_name}_service".camelize.constantize
+ service_instance.where(instance: true).first_or_create
end
- it 'successfully displays the template' do
+ it 'successfully displays the service' do
get :edit, params: { id: service.id }
expect(response).to have_gitlab_http_status(:ok)
@@ -34,7 +34,7 @@ describe Admin::ServicesController do
RedmineService.create(
project: project,
active: false,
- template: true,
+ instance: true,
properties: {
project_url: 'http://abc',
issues_url: 'http://abc',
@@ -44,7 +44,7 @@ describe Admin::ServicesController do
end
it 'calls the propagation worker when service is active' do
- expect(PropagateServiceTemplateWorker).to receive(:perform_async).with(service.id)
+ expect(PropagateInstanceLevelServiceWorker).to receive(:perform_async).with(service.id)
put :update, params: { id: service.id, service: { active: true } }
@@ -52,7 +52,7 @@ describe Admin::ServicesController do
end
it 'does not call the propagation worker when service is not active' do
- expect(PropagateServiceTemplateWorker).not_to receive(:perform_async)
+ expect(PropagateInstanceLevelServiceWorker).not_to receive(:perform_async)
put :update, params: { id: service.id, service: { properties: {} } }
diff --git a/spec/controllers/concerns/lfs_request_spec.rb b/spec/controllers/concerns/lfs_request_spec.rb
index 79257e9a7f6..67c81156ca6 100644
--- a/spec/controllers/concerns/lfs_request_spec.rb
+++ b/spec/controllers/concerns/lfs_request_spec.rb
@@ -10,8 +10,6 @@ describe LfsRequest do
include LfsRequest
def show
- storage_project
-
head :ok
end
@@ -38,22 +36,6 @@ describe LfsRequest do
stub_lfs_setting(enabled: true)
end
- describe '#storage_project' do
- it 'assigns the project as storage project' do
- get :show, params: { id: project.id }
-
- expect(assigns(:storage_project)).to eq(project)
- end
-
- it 'assigns the source of a forked project' do
- forked_project = fork_project(project)
-
- get :show, params: { id: forked_project.id }
-
- expect(assigns(:storage_project)).to eq(project)
- end
- end
-
context 'user is authenticated without access to lfs' do
before do
allow(controller).to receive(:authenticate_user)
diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb
index d013093c376..a13b56deb23 100644
--- a/spec/controllers/dashboard/projects_controller_spec.rb
+++ b/spec/controllers/dashboard/projects_controller_spec.rb
@@ -11,7 +11,14 @@ describe Dashboard::ProjectsController do
end
context 'user logged in' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+
+ before_all do
+ project.add_developer(user)
+ project2.add_developer(user)
+ end
before do
sign_in(user)
@@ -28,12 +35,7 @@ describe Dashboard::ProjectsController do
end
it 'orders the projects by last activity by default' do
- project = create(:project)
- project.add_developer(user)
project.update!(last_repository_updated_at: 3.days.ago, last_activity_at: 3.days.ago)
-
- project2 = create(:project)
- project2.add_developer(user)
project2.update!(last_repository_updated_at: 10.days.ago, last_activity_at: 10.days.ago)
get :index
@@ -42,12 +44,27 @@ describe Dashboard::ProjectsController do
end
context 'project sorting' do
- let(:project) { create(:project) }
-
it_behaves_like 'set sort order from user preference' do
let(:sorting_param) { 'created_asc' }
end
end
+
+ context 'with search and sort parameters' do
+ render_views
+
+ shared_examples 'search and sort parameters' do |sort|
+ it 'returns a single project with no ambiguous column errors' do
+ get :index, params: { name: project2.name, sort: sort }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:projects)).to eq([project2])
+ end
+ end
+
+ %w[latest_activity_desc latest_activity_asc stars_desc stars_asc created_desc].each do |sort|
+ it_behaves_like 'search and sort parameters', sort
+ end
+ end
end
end
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index 0c074714bf3..b76d350ebbc 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -154,12 +154,12 @@ describe Projects::ServicesController do
end
end
- context 'when activating Jira service from a template' do
+ context 'when activating Jira service from instance level service' do
let(:service) do
- create(:jira_service, project: project, template: true)
+ create(:jira_service, project: project, instance: true)
end
- it 'activate Jira service from template' do
+ it 'activate Jira service from instance level service' do
expect(flash[:notice]).to eq 'Jira activated.'
end
end
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index 41c3c6b5770..8e20facda15 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -33,7 +33,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
expect(page).not_to have_link('Enable Auto DevOps')
expect(page).not_to have_link('Auto DevOps enabled')
expect(page).not_to have_link('Add Kubernetes cluster')
- expect(page).not_to have_link('Kubernetes configured')
+ expect(page).not_to have_link('Kubernetes')
end
end
end
@@ -100,7 +100,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
it 'no Kubernetes cluster button if can not manage clusters' do
page.within('.project-buttons') do
expect(page).not_to have_link('Add Kubernetes cluster')
- expect(page).not_to have_link('Kubernetes configured')
+ expect(page).not_to have_link('Kubernetes')
end
end
end
@@ -308,7 +308,7 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
visit project_path(project)
page.within('.project-buttons') do
- expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster))
+ expect(page).to have_link('Kubernetes', href: project_cluster_path(project, cluster))
end
end
end
diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace
index d774d154496..1aba1e76d0b 100644
--- a/spec/fixtures/trace/sample_trace
+++ b/spec/fixtures/trace/sample_trace
@@ -2736,7 +2736,7 @@ Service
when repository is empty
test runs execute
Template
- .build_from_template
+ .build_from_instance
when template is invalid
sets service template to inactive when template is invalid
for pushover service
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index d4f197a708f..0a7e3dca183 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -18,6 +18,15 @@ import * as iconUtils from '~/lib/utils/icon_utils';
const mockWidgets = 'mockWidgets';
const mockSvgPathContent = 'mockSvgPathContent';
+
+jest.mock('lodash/throttle', () =>
+ // this throttle mock executes immediately
+ jest.fn(func => {
+ // eslint-disable-next-line no-param-reassign
+ func.cancel = jest.fn();
+ return func;
+ }),
+);
jest.mock('~/lib/utils/icon_utils', () => ({
getSvgIconPathContent: jest.fn().mockImplementation(() => Promise.resolve(mockSvgPathContent)),
}));
@@ -94,6 +103,56 @@ describe('Time series component', () => {
});
});
+ describe('events', () => {
+ describe('datazoom', () => {
+ let eChartMock;
+ let startValue;
+ let endValue;
+
+ const findChart = () => timeSeriesChart.find({ ref: 'chart' });
+
+ beforeEach(done => {
+ eChartMock = {
+ handlers: {},
+ getOption: () => ({
+ dataZoom: [
+ {
+ startValue,
+ endValue,
+ },
+ ],
+ }),
+ off: jest.fn(eChartEvent => {
+ delete eChartMock.handlers[eChartEvent];
+ }),
+ on: jest.fn((eChartEvent, fn) => {
+ eChartMock.handlers[eChartEvent] = fn;
+ }),
+ };
+
+ timeSeriesChart = makeTimeSeriesChart(mockGraphData);
+ timeSeriesChart.vm.$nextTick(() => {
+ findChart().vm.$emit('created', eChartMock);
+ done();
+ });
+ });
+
+ it('handles datazoom event from chart', () => {
+ startValue = 1577836800000; // 2020-01-01T00:00:00.000Z
+ endValue = 1577840400000; // 2020-01-01T01:00:00.000Z
+ eChartMock.handlers.datazoom();
+
+ expect(timeSeriesChart.emitted('datazoom')).toHaveLength(1);
+ expect(timeSeriesChart.emitted('datazoom')[0]).toEqual([
+ {
+ start: new Date(startValue).toISOString(),
+ end: new Date(endValue).toISOString(),
+ },
+ ]);
+ });
+ });
+ });
+
describe('methods', () => {
describe('formatTooltipText', () => {
let mockDate;
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 31266b4f6d4..70b2c9cf527 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -73,12 +73,20 @@ describe('Dashboard', () => {
describe('no metrics are available yet', () => {
beforeEach(() => {
+ jest.spyOn(store, 'dispatch');
createShallowWrapper();
});
it('shows the environment selector', () => {
expect(findEnvironmentsDropdown().exists()).toBe(true);
});
+
+ it('sets endpoints: logs path', () => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setEndpoints',
+ expect.objectContaining({ logsPath: propsData.logsPath }),
+ );
+ });
});
describe('no data found', () => {
@@ -94,6 +102,21 @@ describe('Dashboard', () => {
});
describe('request information to the server', () => {
+ it('calls to set time range and fetch data', () => {
+ jest.spyOn(store, 'dispatch');
+
+ createShallowWrapper({ hasMetrics: true }, { methods: {} });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expect.any(Object),
+ );
+
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
+ });
+ });
+
it('shows up a loading state', done => {
createShallowWrapper({ hasMetrics: true }, { methods: {} });
@@ -126,7 +149,7 @@ describe('Dashboard', () => {
.catch(done.fail);
});
- it('fetches the metrics data with proper time window', done => {
+ it('fetches the metrics data with proper time window', () => {
jest.spyOn(store, 'dispatch');
createMountedWrapper({ hasMetrics: true }, { stubs: ['graph-group', 'panel-type'] });
@@ -136,14 +159,9 @@ describe('Dashboard', () => {
environmentData,
);
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(store.dispatch).toHaveBeenCalled();
-
- done();
- })
- .catch(done.fail);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalled();
+ });
});
});
@@ -263,10 +281,6 @@ describe('Dashboard', () => {
return wrapper.vm.$nextTick();
});
- afterEach(() => {
- wrapper.destroy();
- });
-
it('renders a search input', () => {
expect(wrapper.find({ ref: 'monitorEnvironmentsDropdownSearch' }).exists()).toBe(true);
});
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index 33fbfac486f..161c64dd74b 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -7,6 +7,7 @@ import { mockProjectDir } from '../mock_data';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
+import { defaultTimeRange } from '~/monitoring/constants';
import { propsData } from '../init_utils';
jest.mock('~/flash');
@@ -17,16 +18,11 @@ describe('dashboard invalid url parameters', () => {
let wrapper;
let mock;
- const fetchDataMock = jest.fn();
-
const createMountedWrapper = (props = { hasMetrics: true }, options = {}) => {
wrapper = mount(Dashboard, {
propsData: { ...propsData, ...props },
store,
stubs: ['graph-group', 'panel-type'],
- methods: {
- fetchData: fetchDataMock,
- },
...options,
});
};
@@ -35,6 +31,8 @@ describe('dashboard invalid url parameters', () => {
beforeEach(() => {
store = createStore();
+ jest.spyOn(store, 'dispatch');
+
mock = new MockAdapter(axios);
});
@@ -43,7 +41,6 @@ describe('dashboard invalid url parameters', () => {
wrapper.destroy();
}
mock.restore();
- fetchDataMock.mockReset();
queryToObject.mockReset();
});
@@ -53,15 +50,13 @@ describe('dashboard invalid url parameters', () => {
createMountedWrapper();
return wrapper.vm.$nextTick().then(() => {
- expect(findDateTimePicker().props('value')).toMatchObject({
- duration: { seconds: 28800 },
- });
+ expect(findDateTimePicker().props('value')).toEqual(defaultTimeRange);
- expect(fetchDataMock).toHaveBeenCalledTimes(1);
- expect(fetchDataMock).toHaveBeenCalledWith({
- start: expect.any(String),
- end: expect.any(String),
- });
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expect.any(Object),
+ );
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
});
});
@@ -78,8 +73,8 @@ describe('dashboard invalid url parameters', () => {
return wrapper.vm.$nextTick().then(() => {
expect(findDateTimePicker().props('value')).toEqual(params);
- expect(fetchDataMock).toHaveBeenCalledTimes(1);
- expect(fetchDataMock).toHaveBeenCalledWith(params);
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setTimeRange', params);
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
});
});
@@ -91,15 +86,17 @@ describe('dashboard invalid url parameters', () => {
createMountedWrapper();
return wrapper.vm.$nextTick().then(() => {
- expect(findDateTimePicker().props('value')).toMatchObject({
+ const expectedTimeRange = {
duration: { seconds: 60 * 2 },
- });
+ };
- expect(fetchDataMock).toHaveBeenCalledTimes(1);
- expect(fetchDataMock).toHaveBeenCalledWith({
- start: expect.any(String),
- end: expect.any(String),
- });
+ expect(findDateTimePicker().props('value')).toMatchObject(expectedTimeRange);
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expectedTimeRange,
+ );
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
});
});
@@ -114,15 +111,13 @@ describe('dashboard invalid url parameters', () => {
return wrapper.vm.$nextTick().then(() => {
expect(createFlash).toHaveBeenCalled();
- expect(findDateTimePicker().props('value')).toMatchObject({
- duration: { seconds: 28800 },
- });
+ expect(findDateTimePicker().props('value')).toEqual(defaultTimeRange);
- expect(fetchDataMock).toHaveBeenCalledTimes(1);
- expect(fetchDataMock).toHaveBeenCalledWith({
- start: expect.any(String),
- end: expect.any(String),
- });
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ defaultTimeRange,
+ );
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
});
});
@@ -137,7 +132,7 @@ describe('dashboard invalid url parameters', () => {
duration: { seconds: 120 },
});
- // redirect to plus + new parameters
+ // redirect to the new URL with the new parameters
expect(mergeUrlParams).toHaveBeenCalledWith({ duration_seconds: '120' }, toUrl);
expect(redirectTo).toHaveBeenCalledTimes(1);
});
diff --git a/spec/frontend/monitoring/embed/embed_spec.js b/spec/frontend/monitoring/embed/embed_spec.js
index 831ab1ed157..3bb70a02bd9 100644
--- a/spec/frontend/monitoring/embed/embed_spec.js
+++ b/spec/frontend/monitoring/embed/embed_spec.js
@@ -26,10 +26,11 @@ describe('Embed', () => {
beforeEach(() => {
actions = {
- setFeatureFlags: () => {},
- setShowErrorBanner: () => {},
- setEndpoints: () => {},
- fetchMetricsData: () => {},
+ setFeatureFlags: jest.fn(),
+ setShowErrorBanner: jest.fn(),
+ setEndpoints: jest.fn(),
+ setTimeRange: jest.fn(),
+ fetchDashboard: jest.fn(),
};
metricsWithDataGetter = jest.fn();
@@ -76,6 +77,18 @@ describe('Embed', () => {
mountComponent();
});
+ it('calls actions to fetch data', () => {
+ const expectedTimeRangePayload = expect.objectContaining({
+ start: expect.any(String),
+ end: expect.any(String),
+ });
+
+ expect(actions.setTimeRange).toHaveBeenCalledTimes(1);
+ expect(actions.setTimeRange.mock.calls[0][1]).toEqual(expectedTimeRangePayload);
+
+ expect(actions.fetchDashboard).toHaveBeenCalled();
+ });
+
it('shows a chart when metrics are present', () => {
expect(wrapper.find('.metrics-embed').exists()).toBe(true);
expect(wrapper.find(PanelType).exists()).toBe(true);
diff --git a/spec/frontend/monitoring/init_utils.js b/spec/frontend/monitoring/init_utils.js
index 30c64a8d885..36c654ba7b3 100644
--- a/spec/frontend/monitoring/init_utils.js
+++ b/spec/frontend/monitoring/init_utils.js
@@ -15,6 +15,7 @@ export const propsData = {
clustersPath: '/path/to/clusters',
tagsPath: '/path/to/tags',
projectPath: '/path/to/project',
+ logsPath: '/path/to/logs',
defaultBranch: 'master',
metricsEndpoint: mockApiEndpoint,
deploymentsEndpoint: null,
diff --git a/spec/frontend/monitoring/panel_type_spec.js b/spec/frontend/monitoring/panel_type_spec.js
index e51b69ef14d..730d67f79d8 100644
--- a/spec/frontend/monitoring/panel_type_spec.js
+++ b/spec/frontend/monitoring/panel_type_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { setTestTimeout } from 'helpers/timeout';
+import invalidUrl from '~/lib/utils/invalid_url';
import axios from '~/lib/utils/axios_utils';
import PanelType from '~/monitoring/components/panel_type.vue';
import EmptyChart from '~/monitoring/components/charts/empty_chart.vue';
@@ -16,20 +17,25 @@ global.URL.createObjectURL = jest.fn();
describe('Panel Type component', () => {
let axiosMock;
let store;
- let panelType;
- const dashboardWidth = 100;
+ let state;
+ let wrapper;
const exampleText = 'example_text';
- const createWrapper = props =>
- shallowMount(PanelType, {
+ const createWrapper = props => {
+ wrapper = shallowMount(PanelType, {
propsData: {
...props,
},
store,
});
+ };
beforeEach(() => {
setTestTimeout(1000);
+
+ store = createStore();
+ state = store.state.monitoringDashboard;
+
axiosMock = new AxiosMockAdapter(axios);
});
@@ -44,19 +50,18 @@ describe('Panel Type component', () => {
graphDataNoResult.metrics[0].result = [];
beforeEach(() => {
- panelType = createWrapper({
- dashboardWidth,
+ createWrapper({
graphData: graphDataNoResult,
});
});
afterEach(() => {
- panelType.destroy();
+ wrapper.destroy();
});
describe('Empty Chart component', () => {
beforeEach(() => {
- glEmptyChart = panelType.find(EmptyChart);
+ glEmptyChart = wrapper.find(EmptyChart);
});
it('is a Vue instance', () => {
@@ -66,51 +71,126 @@ describe('Panel Type component', () => {
it('it receives a graph title', () => {
const props = glEmptyChart.props();
- expect(props.graphTitle).toBe(panelType.vm.graphData.title);
+ expect(props.graphTitle).toBe(wrapper.vm.graphData.title);
});
});
});
describe('when graph data is available', () => {
beforeEach(() => {
- store = createStore();
- panelType = createWrapper({
- dashboardWidth,
+ createWrapper({
graphData: graphDataPrometheusQueryRange,
});
});
afterEach(() => {
- panelType.destroy();
+ wrapper.destroy();
});
it('sets no clipboard copy link on dropdown by default', () => {
- const link = () => panelType.find('.js-chart-link');
+ const link = () => wrapper.find('.js-chart-link');
expect(link().exists()).toBe(false);
});
describe('Time Series Chart panel type', () => {
it('is rendered', () => {
- expect(panelType.find(TimeSeriesChart).isVueInstance()).toBe(true);
- expect(panelType.find(TimeSeriesChart).exists()).toBe(true);
+ expect(wrapper.find(TimeSeriesChart).isVueInstance()).toBe(true);
+ expect(wrapper.find(TimeSeriesChart).exists()).toBe(true);
});
it('includes a default group id', () => {
- expect(panelType.vm.groupId).toBe('panel-type-chart');
+ expect(wrapper.vm.groupId).toBe('panel-type-chart');
});
});
describe('Anomaly Chart panel type', () => {
- beforeEach(done => {
- panelType.setProps({
+ beforeEach(() => {
+ wrapper.setProps({
graphData: anomalyMockGraphData,
});
- panelType.vm.$nextTick(done);
+ return wrapper.vm.$nextTick();
});
it('is rendered with an anomaly chart', () => {
- expect(panelType.find(AnomalyChart).isVueInstance()).toBe(true);
- expect(panelType.find(AnomalyChart).exists()).toBe(true);
+ expect(wrapper.find(AnomalyChart).isVueInstance()).toBe(true);
+ expect(wrapper.find(AnomalyChart).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('View Logs dropdown item', () => {
+ const mockLogsPath = '/path/to/logs';
+ const mockTimeRange = { duration: { seconds: 120 } };
+
+ const findTimeChart = () => wrapper.find({ ref: 'timeChart' });
+ const findViewLogsLink = () => wrapper.find({ ref: 'viewLogsLink' });
+
+ beforeEach(() => {
+ createWrapper({
+ graphData: graphDataPrometheusQueryRange,
+ });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('is not present by default', () =>
+ wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ }));
+
+ it('is not present if a time range is not set', () => {
+ state.logsPath = mockLogsPath;
+ state.timeRange = null;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ });
+ });
+
+ it('is not present if the logs path is the default', () => {
+ state.logsPath = invalidUrl;
+ state.timeRange = mockTimeRange;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ });
+ });
+
+ it('is not present if the logs path is not set', () => {
+ state.logsPath = null;
+ state.timeRange = mockTimeRange;
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findViewLogsLink().exists()).toBe(false);
+ });
+ });
+
+ it('is present when logs path and a time range are present', () => {
+ state.logsPath = mockLogsPath;
+ state.timeRange = mockTimeRange;
+
+ return wrapper.vm.$nextTick(() => {
+ const href = `${mockLogsPath}?duration_seconds=${mockTimeRange.duration.seconds}`;
+ expect(findViewLogsLink().attributes('href')).toMatch(href);
+ });
+ });
+
+ it('is overridden when a datazoom event is received', () => {
+ state.logsPath = mockLogsPath;
+ state.timeRange = mockTimeRange;
+
+ const zoomedTimeRange = {
+ start: '2020-01-01T00:00:00.000Z',
+ end: '2020-01-01T01:00:00.000Z',
+ };
+
+ findTimeChart().vm.$emit('datazoom', zoomedTimeRange);
+
+ return wrapper.vm.$nextTick(() => {
+ const start = encodeURIComponent(zoomedTimeRange.start);
+ const end = encodeURIComponent(zoomedTimeRange.end);
+ expect(findViewLogsLink().attributes('href')).toMatch(
+ `${mockLogsPath}?start=${start}&end=${end}`,
+ );
});
});
});
@@ -119,20 +199,18 @@ describe('Panel Type component', () => {
const clipboardText = 'A value to copy.';
beforeEach(() => {
- store = createStore();
- panelType = createWrapper({
+ createWrapper({
clipboardText,
- dashboardWidth,
graphData: graphDataPrometheusQueryRange,
});
});
afterEach(() => {
- panelType.destroy();
+ wrapper.destroy();
});
it('sets clipboard text on the dropdown', () => {
- const link = () => panelType.find('.js-chart-link');
+ const link = () => wrapper.find('.js-chart-link');
expect(link().exists()).toBe(true);
expect(link().element.dataset.clipboardText).toBe(clipboardText);
@@ -140,22 +218,20 @@ describe('Panel Type component', () => {
});
describe('when downloading metrics data as CSV', () => {
- beforeEach(done => {
+ beforeEach(() => {
graphDataPrometheusQueryRange.y_label = 'metric';
- store = createStore();
- panelType = shallowMount(PanelType, {
+ wrapper = shallowMount(PanelType, {
propsData: {
clipboardText: exampleText,
- dashboardWidth,
graphData: graphDataPrometheusQueryRange,
},
store,
});
- panelType.vm.$nextTick(done);
+ return wrapper.vm.$nextTick();
});
afterEach(() => {
- panelType.destroy();
+ wrapper.destroy();
});
describe('csvText', () => {
@@ -165,7 +241,7 @@ describe('Panel Type component', () => {
const firstRow = `${data[0][0]},${data[0][1]}`;
const secondRow = `${data[1][0]},${data[1][1]}`;
- expect(panelType.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
+ expect(wrapper.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
});
});
@@ -174,7 +250,7 @@ describe('Panel Type component', () => {
expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(expect.any(Blob));
expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(
expect.objectContaining({
- size: panelType.vm.csvText.length,
+ size: wrapper.vm.csvText.length,
type: 'text/plain',
}),
);
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 7f8ced8cf43..b0ac42e0e5f 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -90,6 +90,16 @@ describe('Monitoring mutations', () => {
expect(stateCopy.dashboardEndpoint).toEqual('dashboard.json');
expect(stateCopy.projectPath).toEqual('/gitlab-org/gitlab-foss');
});
+
+ it('does not remove the default value of logsPath', () => {
+ const defaultLogsPath = stateCopy.logsPath;
+
+ mutations[types.SET_ENDPOINTS](stateCopy, {
+ dashboardEndpoint: 'dashboard.json',
+ });
+
+ expect(stateCopy.logsPath).toBe(defaultLogsPath);
+ });
});
describe('Individual panel/metric results', () => {
const metricId = '12_system_metrics_kubernetes_container_memory_total';
diff --git a/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb b/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb
new file mode 100644
index 00000000000..0dca542cb9f
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::FixProjectsWithoutProjectFeature, :migration, schema: 2020_01_27_111840 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_features) { table(:project_features) }
+
+ let(:namespace) { namespaces.create(name: 'foo', path: 'foo') }
+
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:private_project_without_feature) { projects.create!(namespace_id: namespace.id, visibility_level: 0) }
+ let(:public_project_without_feature) { projects.create!(namespace_id: namespace.id, visibility_level: 20) }
+ let!(:projects_without_feature) { [private_project_without_feature, public_project_without_feature] }
+
+ before do
+ project_features.create({ project_id: project.id, pages_access_level: 20 })
+ end
+
+ subject { described_class.new.perform(Project.minimum(:id), Project.maximum(:id)) }
+
+ def project_feature_records
+ project_features.order(:project_id).pluck(:project_id)
+ end
+
+ def features(project)
+ project_features.find_by(project_id: project.id)&.attributes
+ end
+
+ it 'creates a ProjectFeature for projects without it' do
+ expect { subject }.to change { project_feature_records }.from([project.id]).to([project.id, *projects_without_feature.map(&:id)])
+ end
+
+ it 'creates ProjectFeature records with default values for a public project' do
+ subject
+
+ expect(features(public_project_without_feature)).to include(
+ {
+ "merge_requests_access_level" => 20,
+ "issues_access_level" => 20,
+ "wiki_access_level" => 20,
+ "snippets_access_level" => 20,
+ "builds_access_level" => 20,
+ "repository_access_level" => 20,
+ "pages_access_level" => 20,
+ "forking_access_level" => 20
+ }
+ )
+ end
+
+ it 'creates ProjectFeature records with default values for a private project' do
+ subject
+
+ expect(features(private_project_without_feature)).to include("pages_access_level" => 10)
+ end
+
+ context 'when access control to pages is forced' do
+ before do
+ allow(::Gitlab::Pages).to receive(:access_control_is_forced?).and_return(true)
+ end
+
+ it 'creates ProjectFeature records with default values for a public project' do
+ subject
+
+ expect(features(public_project_without_feature)).to include("pages_access_level" => 10)
+ end
+ end
+
+ it 'sets created_at/updated_at timestamps' do
+ subject
+
+ expect(project_features.where('created_at IS NULL OR updated_at IS NULL')).to be_empty
+ end
+end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
index 5e215be4dfb..26fc5344871 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/conditions/not_null_condition_spec.rb
@@ -10,7 +10,7 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
context 'when there is only one ordering field' do
let(:arel_table) { Issue.arel_table }
- let(:order_list) { ['id'] }
+ let(:order_list) { [double(named_function: nil, attribute_name: 'id')] }
let(:values) { [500] }
let(:operators) { ['>'] }
@@ -25,7 +25,7 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
context 'when ordering by a column attribute' do
let(:arel_table) { Issue.arel_table }
- let(:order_list) { %w(relative_position id) }
+ let(:order_list) { [double(named_function: nil, attribute_name: 'relative_position'), double(named_function: nil, attribute_name: 'id')] }
let(:values) { [1500, 500] }
shared_examples ':after condition' do
@@ -71,5 +71,45 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NotNullCondition do
it_behaves_like ':after condition'
end
end
+
+ context 'when ordering by LOWER' do
+ let(:arel_table) { Project.arel_table }
+ let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) }
+ let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) }
+ let(:values) { ['Test', 500] }
+
+ context 'when :after' do
+ it 'generates :after sql' do
+ expected_sql = <<~SQL
+ (LOWER("projects"."name") > 'test')
+ OR (
+ LOWER("projects"."name") = 'test'
+ AND
+ "projects"."id" > 500
+ )
+ OR (LOWER("projects"."name") IS NULL)
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
+ (LOWER("projects"."name") > 'test')
+ OR (
+ LOWER("projects"."name") = 'test'
+ AND
+ "projects"."id" > 500
+ )
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
index 1049890a079..be0a21b2438 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/conditions/null_condition_spec.rb
@@ -11,7 +11,7 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NullCondition do
context 'when ordering by a column attribute' do
let(:arel_table) { Issue.arel_table }
- let(:order_list) { %w(relative_position id) }
+ let(:order_list) { [double(named_function: nil, attribute_name: 'relative_position'), double(named_function: nil, attribute_name: 'id')] }
shared_examples ':after condition' do
it 'generates sql' do
@@ -54,5 +54,42 @@ describe Gitlab::Graphql::Connections::Keyset::Conditions::NullCondition do
it_behaves_like ':after condition'
end
end
+
+ context 'when ordering by LOWER' do
+ let(:arel_table) { Project.arel_table }
+ let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) }
+ let(:order_list) { Gitlab::Graphql::Connections::Keyset::OrderInfo.build_order_list(relation) }
+
+ context 'when :after' do
+ it 'generates sql' do
+ expected_sql = <<~SQL
+ (
+ LOWER("projects"."name") IS NULL
+ AND
+ "projects"."id" > 500
+ )
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates :before sql' do
+ expected_sql = <<~SQL
+ (
+ LOWER("projects"."name") IS NULL
+ AND
+ "projects"."id" > 500
+ )
+ OR (LOWER("projects"."name") IS NOT NULL)
+ SQL
+
+ expect(condition.build.squish).to eq expected_sql.squish
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
index 17ddcaefeeb..eb823fc0122 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/order_info_spec.rb
@@ -37,6 +37,20 @@ describe Gitlab::Graphql::Connections::Keyset::OrderInfo do
expect(order_list.count).to eq 1
end
end
+
+ context 'when order contains LOWER' do
+ let(:relation) { Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(:id) }
+
+ it 'does not ignore the SQL order' do
+ expect(order_list.count).to eq 2
+ expect(order_list.first.attribute_name).to eq 'name'
+ expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::NamedFunction)
+ expect(order_list.first.named_function.to_sql).to eq 'LOWER("projects"."name")'
+ expect(order_list.first.operator_for(:after)).to eq '>'
+ expect(order_list.last.attribute_name).to eq 'id'
+ expect(order_list.last.operator_for(:after)).to eq '>'
+ end
+ end
end
describe '#validate_ordering' do
diff --git a/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
index 7ebf5da264d..b46ce4bf023 100644
--- a/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
+++ b/spec/lib/gitlab/graphql/connections/keyset/query_builder_spec.rb
@@ -101,5 +101,35 @@ describe Gitlab::Graphql::Connections::Keyset::QueryBuilder do
end
end
end
+
+ context 'when sorting using LOWER' do
+ let(:relation) { Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(:id) }
+ let(:arel_table) { Project.arel_table }
+ let(:decoded_cursor) { { 'name' => 'Test', 'id' => 100 } }
+
+ context 'when no values are nil' do
+ context 'when :after' do
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '(LOWER("projects"."name") > \'test\')'
+ expect(conditions).to include '"projects"."id" > 100'
+ expect(conditions).to include 'OR (LOWER("projects"."name") IS NULL)'
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates the correct condition' do
+ conditions = builder.conditions
+
+ expect(conditions).to include '(LOWER("projects"."name") < \'test\')'
+ expect(conditions).to include '"projects"."id" < 100'
+ expect(conditions).to include 'LOWER("projects"."name") = \'test\''
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
index c899217d164..f4d3c9e613e 100644
--- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb
@@ -652,10 +652,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
setup_import_export_config('light')
end
- it 'does not import any templated services' do
+ it 'does not import any instance-level services' do
expect(restored_project_json).to eq(true)
- expect(project.services.where(template: true).count).to eq(0)
+ expect(project.services.where(instance: true).count).to eq(0)
end
it 'imports labels' do
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index ceed750253d..86bf5710635 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -452,7 +452,7 @@ Service:
- updated_at
- active
- properties
-- template
+- instance
- push_events
- issues_events
- commit_events
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 9a49d334f52..8e9a816ba6a 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -18,7 +18,7 @@ describe Gitlab::UsageData do
create(:service, project: projects[1], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'MattermostService', active: false)
- create(:service, project: projects[2], type: 'MattermostService', active: true, template: true)
+ create(:service, project: projects[2], type: 'MattermostService', active: true, instance: true)
create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
diff --git a/spec/migrations/fix_projects_without_project_feature_spec.rb b/spec/migrations/fix_projects_without_project_feature_spec.rb
new file mode 100644
index 00000000000..6e0345da078
--- /dev/null
+++ b/spec/migrations/fix_projects_without_project_feature_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200127111840_fix_projects_without_project_feature.rb')
+
+describe FixProjectsWithoutProjectFeature, :migration do
+ let(:namespace) { table(:namespaces).create(name: 'gitlab', path: 'gitlab-org') }
+
+ let!(:projects) do
+ [
+ table(:projects).create(namespace_id: namespace.id, name: 'foo 1'),
+ table(:projects).create(namespace_id: namespace.id, name: 'foo 2'),
+ table(:projects).create(namespace_id: namespace.id, name: 'foo 3')
+ ]
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ around do |example|
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ example.call
+ end
+ end
+ end
+
+ it 'schedules jobs for ranges of projects' do
+ migrate!
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, projects[0].id, projects[1].id)
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, projects[2].id, projects[2].id)
+ end
+
+ it 'schedules jobs according to the configured batch size' do
+ expect { migrate! }.to change { BackgroundMigrationWorker.jobs.size }.by(2)
+ end
+end
diff --git a/spec/migrations/migrate_propagate_service_template_sidekiq_queue_spec.rb b/spec/migrations/migrate_propagate_service_template_sidekiq_queue_spec.rb
new file mode 100644
index 00000000000..2fffe638117
--- /dev/null
+++ b/spec/migrations/migrate_propagate_service_template_sidekiq_queue_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200206111847_migrate_propagate_service_template_sidekiq_queue.rb')
+
+describe MigratePropagateServiceTemplateSidekiqQueue, :sidekiq, :redis do
+ include Gitlab::Database::MigrationHelpers
+ include StubWorker
+
+ context 'when there are jobs in the queue' do
+ it 'correctly migrates queue when migrating up' do
+ Sidekiq::Testing.disable! do
+ stub_worker(queue: 'propagate_service_template').perform_async('Something', [1])
+ stub_worker(queue: 'propagate_instance_level_service').perform_async('Something', [1])
+
+ described_class.new.up
+
+ expect(sidekiq_queue_length('propagate_service_template')).to eq 0
+ expect(sidekiq_queue_length('propagate_instance_level_service')).to eq 2
+ end
+ end
+ end
+
+ context 'when there are no jobs in the queues' do
+ it 'does not raise an error when migrating up' do
+ expect { described_class.new.up }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/models/concerns/sortable_spec.rb b/spec/models/concerns/sortable_spec.rb
index 184f7986a6f..18ac4d19938 100644
--- a/spec/models/concerns/sortable_spec.rb
+++ b/spec/models/concerns/sortable_spec.rb
@@ -4,17 +4,18 @@ require 'spec_helper'
describe Sortable do
describe '.order_by' do
+ let(:arel_table) { Group.arel_table }
let(:relation) { Group.all }
describe 'ordering by id' do
it 'ascending' do
- expect(relation).to receive(:reorder).with(id: :asc)
+ expect(relation).to receive(:reorder).with(arel_table['id'].asc)
relation.order_by('id_asc')
end
it 'descending' do
- expect(relation).to receive(:reorder).with(id: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['id'].desc)
relation.order_by('id_desc')
end
@@ -22,19 +23,19 @@ describe Sortable do
describe 'ordering by created day' do
it 'ascending' do
- expect(relation).to receive(:reorder).with(created_at: :asc)
+ expect(relation).to receive(:reorder).with(arel_table['created_at'].asc)
relation.order_by('created_asc')
end
it 'descending' do
- expect(relation).to receive(:reorder).with(created_at: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['created_at'].desc)
relation.order_by('created_desc')
end
it 'order by "date"' do
- expect(relation).to receive(:reorder).with(created_at: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['created_at'].desc)
relation.order_by('created_date')
end
@@ -66,13 +67,13 @@ describe Sortable do
describe 'ordering by Updated Time' do
it 'ascending' do
- expect(relation).to receive(:reorder).with(updated_at: :asc)
+ expect(relation).to receive(:reorder).with(arel_table['updated_at'].asc)
relation.order_by('updated_asc')
end
it 'descending' do
- expect(relation).to receive(:reorder).with(updated_at: :desc)
+ expect(relation).to receive(:reorder).with(arel_table['updated_at'].desc)
relation.order_by('updated_desc')
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index ae4db1c2158..924cc7169ea 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -279,6 +279,12 @@ describe Project do
end
end
+ it 'validates presence of project_feature' do
+ project = build(:project, project_feature: nil)
+
+ expect(project).not_to be_valid
+ end
+
describe 'import_url' do
it 'does not allow an invalid URI as import_url' do
project = build(:project, import_url: 'invalid://')
@@ -2694,16 +2700,44 @@ describe Project do
describe '#all_lfs_objects' do
let(:lfs_object) { create(:lfs_object) }
- before do
- project.lfs_objects << lfs_object
+ context 'when LFS object is only associated with the source' do
+ before do
+ project.lfs_objects << lfs_object
+ end
+
+ it 'returns the lfs object for a project' do
+ expect(project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
+
+ it 'returns the lfs object for a fork' do
+ expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
end
- it 'returns the lfs object for a project' do
- expect(project.all_lfs_objects).to contain_exactly(lfs_object)
+ context 'when LFS object is only associated with the fork' do
+ before do
+ forked_project.lfs_objects << lfs_object
+ end
+
+ it 'returns nothing' do
+ expect(project.all_lfs_objects).to be_empty
+ end
+
+ it 'returns the lfs object for a fork' do
+ expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
end
- it 'returns the lfs object for a fork' do
- expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ context 'when LFS object is associated with both source and fork' do
+ before do
+ project.lfs_objects << lfs_object
+ forked_project.lfs_objects << lfs_object
+ end
+
+ it 'returns the lfs object for the source and fork' do
+ expect(project.all_lfs_objects).to contain_exactly(lfs_object)
+ expect(forked_project.all_lfs_objects).to contain_exactly(lfs_object)
+ end
end
end
end
@@ -5519,6 +5553,31 @@ describe Project do
end
end
+ describe '#lfs_objects_oids' do
+ let(:project) { create(:project) }
+ let(:lfs_object) { create(:lfs_object) }
+ let(:another_lfs_object) { create(:lfs_object) }
+
+ subject { project.lfs_objects_oids }
+
+ context 'when project has associated LFS objects' do
+ before do
+ create(:lfs_objects_project, lfs_object: lfs_object, project: project)
+ create(:lfs_objects_project, lfs_object: another_lfs_object, project: project)
+ end
+
+ it 'returns OIDs of LFS objects' do
+ expect(subject).to match_array([lfs_object.oid, another_lfs_object.oid])
+ end
+ end
+
+ context 'when project has no associated LFS objects' do
+ it 'returns an empty array' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
def rugged_config
rugged_repo(project.repository).config
end
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index f58bcbebd67..df2ed4911ec 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -97,23 +97,23 @@ describe Service do
end
end
- describe "Template" do
+ describe "Instance" do
let(:project) { create(:project) }
- describe '.build_from_template' do
- context 'when template is invalid' do
- it 'sets service template to inactive when template is invalid' do
- template = build(:prometheus_service, template: true, active: true, properties: {})
- template.save(validate: false)
+ describe '.build_from_instance' do
+ context 'when instance level integration is invalid' do
+ it 'sets instance level integration to inactive when instance is invalid' do
+ instance = build(:prometheus_service, instance: true, active: true, properties: {})
+ instance.save(validate: false)
- service = described_class.build_from_template(project.id, template)
+ service = described_class.build_from_instance(project.id, instance)
expect(service).to be_valid
expect(service.active).to be false
end
end
- describe 'build issue tracker from a template' do
+ describe 'build issue tracker from an instance level integration' do
let(:title) { 'custom title' }
let(:description) { 'custom description' }
let(:url) { 'http://jira.example.com' }
@@ -127,9 +127,9 @@ describe Service do
}
end
- shared_examples 'service creation from a template' do
+ shared_examples 'integration creation from instance level' do
it 'creates a correct service' do
- service = described_class.build_from_template(project.id, template)
+ service = described_class.build_from_instance(project.id, instance_level_integration)
expect(service).to be_active
expect(service.title).to eq(title)
@@ -144,38 +144,38 @@ describe Service do
# this will be removed as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
context 'when data are stored in properties' do
let(:properties) { data_params.merge(title: title, description: description) }
- let!(:template) do
- create(:jira_service, :without_properties_callback, template: true, properties: properties.merge(additional: 'something'))
+ let!(:instance_level_integration) do
+ create(:jira_service, :without_properties_callback, instance: true, properties: properties.merge(additional: 'something'))
end
- it_behaves_like 'service creation from a template'
+ it_behaves_like 'integration creation from instance level'
end
context 'when data are stored in separated fields' do
- let(:template) do
- create(:jira_service, data_params.merge(properties: {}, title: title, description: description, template: true))
+ let(:instance_level_integration) do
+ create(:jira_service, data_params.merge(properties: {}, title: title, description: description, instance: true))
end
- it_behaves_like 'service creation from a template'
+ it_behaves_like 'integration creation from instance level'
end
context 'when data are stored in both properties and separated fields' do
let(:properties) { data_params.merge(title: title, description: description) }
- let(:template) do
- create(:jira_service, :without_properties_callback, active: true, template: true, properties: properties).tap do |service|
+ let(:instance_level_integration) do
+ create(:jira_service, :without_properties_callback, active: true, instance: true, properties: properties).tap do |service|
create(:jira_tracker_data, data_params.merge(service: service))
end
end
- it_behaves_like 'service creation from a template'
+ it_behaves_like 'integration creation from instance level'
end
end
end
describe "for pushover service" do
- let!(:service_template) do
+ let!(:instance_level_integration) do
PushoverService.create(
- template: true,
+ instance: true,
properties: {
device: 'MyDevice',
sound: 'mic',
@@ -188,7 +188,7 @@ describe Service do
it "has all fields prefilled" do
service = project.find_or_initialize_service('pushover')
- expect(service.template).to eq(false)
+ expect(service.instance).to eq(false)
expect(service.device).to eq('MyDevice')
expect(service.sound).to eq('mic')
expect(service.priority).to eq(4)
@@ -391,14 +391,6 @@ describe Service do
end
end
- describe '.find_by_template' do
- let!(:service) { create(:service, template: true) }
-
- it 'returns service template' do
- expect(described_class.find_by_template).to eq(service)
- end
- end
-
describe '#api_field_names' do
let(:fake_service) do
Class.new(Service) do
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 26fa3803651..af191172d33 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -467,7 +467,7 @@ describe ProjectPresenter do
expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(
is_link: false,
- label: a_string_including('Kubernetes configured'),
+ label: a_string_including('Kubernetes'),
link: presenter.project_cluster_path(project, cluster)
)
end
@@ -480,7 +480,7 @@ describe ProjectPresenter do
expect(presenter.kubernetes_cluster_anchor_data).to have_attributes(
is_link: false,
- label: a_string_including('Kubernetes configured'),
+ label: a_string_including('Kubernetes'),
link: presenter.project_clusters_path(project)
)
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 245a8aa4905..427a361295c 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1492,7 +1492,7 @@ describe API::MergeRequests do
end
end
- context 'forked projects' do
+ context 'forked projects', :sidekiq_might_not_need_inline do
let!(:user2) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let!(:forked_project) { fork_project(project, user2, repository: true) }
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 0e02c0f001b..4e21c08ad5c 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -1193,8 +1193,8 @@ describe 'Git LFS API and storage' do
it_behaves_like 'LFS http 200 response'
- it 'LFS object is linked to the source project' do
- expect(lfs_object.projects.pluck(:id)).to include(upstream_project.id)
+ it 'LFS object is linked to the forked project' do
+ expect(lfs_object.projects.pluck(:id)).to include(project.id)
end
end
end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 3db1471bf3c..8490127058c 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -483,6 +483,14 @@ describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
expect(merge_request).to be_persisted
end
+ it 'calls MergeRequests::LinkLfsObjectsService#execute', :sidekiq_might_not_need_inline do
+ expect_next_instance_of(MergeRequests::LinkLfsObjectsService) do |service|
+ expect(service).to receive(:execute).with(instance_of(MergeRequest))
+ end
+
+ described_class.new(project, user, opts).execute
+ end
+
it 'does not create the merge request when the target project is archived' do
target_project.update!(archived: true)
diff --git a/spec/services/merge_requests/link_lfs_objects_service_spec.rb b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
new file mode 100644
index 00000000000..f07cf13e4f2
--- /dev/null
+++ b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequests::LinkLfsObjectsService, :sidekiq_inline do
+ include ProjectForksHelper
+ include RepoHelpers
+
+ let(:target_project) { create(:project, :public, :repository) }
+
+ let(:merge_request) do
+ create(
+ :merge_request,
+ target_project: target_project,
+ target_branch: 'lfs',
+ source_project: source_project,
+ source_branch: 'link-lfs-objects'
+ )
+ end
+
+ subject { described_class.new(target_project) }
+
+ shared_examples_for 'linking LFS objects' do
+ context 'when source project is the same as target project' do
+ let(:source_project) { target_project }
+
+ it 'does not call Projects::LfsPointers::LfsLinkService#execute' do
+ expect(Projects::LfsPointers::LfsLinkService).not_to receive(:new)
+
+ execute
+ end
+ end
+
+ context 'when source project is different from target project' do
+ let(:user) { create(:user) }
+ let(:source_project) { fork_project(target_project, user, namespace: user.namespace, repository: true) }
+
+ before do
+ create_branch(source_project, 'link-lfs-objects', 'lfs')
+ end
+
+ context 'and there are changes' do
+ before do
+ allow(source_project).to receive(:lfs_enabled?).and_return(true)
+ end
+
+ context 'and there are LFS objects added' do
+ before do
+ create_file_in_repo(source_project, 'link-lfs-objects', 'link-lfs-objects', 'one.lfs', 'One')
+ create_file_in_repo(source_project, 'link-lfs-objects', 'link-lfs-objects', 'two.lfs', 'Two')
+ end
+
+ it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of LFS objects in merge request' do
+ expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
+ expect(service).to receive(:execute).with(%w[
+ 8b12507783d5becacbf2ebe5b01a60024d8728a8f86dcc818bce699e8b3320bc
+ 94a72c074cfe574742c9e99e863322f73feff82981d065ff65a0308f44f19f62
+ ])
+ end
+
+ execute
+ end
+ end
+
+ context 'but there are no LFS objects added' do
+ before do
+ create_file_in_repo(source_project, 'link-lfs-objects', 'link-lfs-objects', 'one.txt', 'One')
+ end
+
+ it 'does not call Projects::LfsPointers::LfsLinkService#execute' do
+ expect(Projects::LfsPointers::LfsLinkService).not_to receive(:new)
+
+ execute
+ end
+ end
+ end
+
+ context 'and there are no changes' do
+ it 'does not call Projects::LfsPointers::LfsLinkService#execute' do
+ expect(Projects::LfsPointers::LfsLinkService).not_to receive(:new)
+
+ execute
+ end
+ end
+ end
+ end
+
+ context 'when no oldrev and newrev passed' do
+ let(:execute) { subject.execute(merge_request) }
+
+ it_behaves_like 'linking LFS objects'
+ end
+
+ context 'when oldrev and newrev are passed' do
+ let(:execute) { subject.execute(merge_request, oldrev: merge_request.diff_base_sha, newrev: merge_request.diff_head_sha) }
+
+ it_behaves_like 'linking LFS objects'
+ end
+
+ def create_branch(project, new_name, branch_name)
+ ::Branches::CreateService.new(project, user).execute(new_name, branch_name)
+ end
+end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 1ba216e8ff1..b67779a912d 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -384,6 +384,14 @@ describe MergeRequests::RefreshService do
end
context 'open fork merge request' do
+ it 'calls MergeRequests::LinkLfsObjectsService#execute' do
+ expect_next_instance_of(MergeRequests::LinkLfsObjectsService) do |svc|
+ expect(svc).to receive(:execute).with(@fork_merge_request, oldrev: @oldrev, newrev: @newrev)
+ end
+
+ refresh
+ end
+
it 'executes hooks with update action' do
refresh
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index bce3f72a287..24781ac86be 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -15,7 +15,7 @@ describe Projects::CreateService, '#execute' do
}
end
- it 'creates labels on Project creation if there are templates' do
+ it 'creates labels on Project creation if there are label templates' do
Label.create(title: "bug", template: true)
project = create_project(user, opts)
@@ -90,7 +90,7 @@ describe Projects::CreateService, '#execute' do
end
it 'sets invalid service as inactive' do
- create(:service, type: 'JiraService', project: nil, template: true, active: true)
+ create(:service, type: 'JiraService', project: nil, instance: true, active: true)
project = create_project(user, opts)
service = project.services.first
@@ -336,22 +336,22 @@ describe Projects::CreateService, '#execute' do
end
end
- context 'when there is an active service template' do
+ context 'when there is an active instance level service' do
before do
- create(:service, project: nil, template: true, active: true)
+ create(:service, project: nil, instance: true, active: true)
end
- it 'creates a service from this template' do
+ it 'creates a service from the instance level service' do
project = create_project(user, opts)
expect(project.services.count).to eq 1
end
end
- context 'when a bad service template is created' do
+ context 'when a bad instance level service is created' do
it 'sets service to be inactive' do
opts[:import_url] = 'http://www.gitlab.com/gitlab-org/gitlab-foss'
- create(:service, type: 'DroneCiService', project: nil, template: true, active: true)
+ create(:service, type: 'DroneCiService', project: nil, instance: true, active: true)
project = create_project(user, opts)
service = project.services.first
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index e7b904fcd60..e14f1abf018 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -375,14 +375,6 @@ describe Projects::ForkService do
expect(fork_from_project.forks_count).to eq(1)
end
- it 'leaves no LFS objects dangling' do
- create(:lfs_objects_project, project: fork_to_project)
-
- expect { subject.execute(fork_to_project) }
- .to change { fork_to_project.lfs_objects_projects.count }
- .to(0)
- end
-
context 'if the fork is not allowed' do
let(:fork_from_project) { create(:project, :private) }
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index 970e82e7107..21a139cdf3c 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -48,10 +48,11 @@ describe Projects::LfsPointers::LfsDownloadService do
end
shared_examples 'lfs object is created' do
- it do
+ it 'creates and associates the LFS object with the project' do
expect(subject).to receive(:download_and_save_file!).and_call_original
expect { subject.execute }.to change { LfsObject.count }.by(1)
+ expect(LfsObject.first.projects).to include(project)
end
it 'returns success result' do
diff --git a/spec/services/projects/propagate_service_template_spec.rb b/spec/services/projects/propagate_instance_level_service_spec.rb
index 2c3effec617..a842842a010 100644
--- a/spec/services/projects/propagate_service_template_spec.rb
+++ b/spec/services/projects/propagate_instance_level_service_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-describe Projects::PropagateServiceTemplate do
+describe Projects::PropagateInstanceLevelService do
describe '.propagate' do
- let!(:service_template) do
+ let!(:instance_level_integration) do
PushoverService.create(
- template: true,
+ instance: true,
active: true,
properties: {
device: 'MyDevice',
@@ -22,14 +22,14 @@ describe Projects::PropagateServiceTemplate do
it 'creates services for projects' do
expect(project.pushover_service).to be_nil
- described_class.propagate(service_template)
+ described_class.propagate(instance_level_integration)
expect(project.reload.pushover_service).to be_present
end
it 'creates services for a project that has another service' do
BambooService.create(
- template: true,
+ instance: true,
active: true,
project: project,
properties: {
@@ -42,14 +42,14 @@ describe Projects::PropagateServiceTemplate do
expect(project.pushover_service).to be_nil
- described_class.propagate(service_template)
+ described_class.propagate(instance_level_integration)
expect(project.reload.pushover_service).to be_present
end
it 'does not create the service if it exists already' do
other_service = BambooService.create(
- template: true,
+ instance: true,
active: true,
properties: {
bamboo_url: 'http://gitlab.com',
@@ -59,17 +59,17 @@ describe Projects::PropagateServiceTemplate do
}
)
- Service.build_from_template(project.id, service_template).save!
- Service.build_from_template(project.id, other_service).save!
+ Service.build_from_instance(project.id, instance_level_integration).save!
+ Service.build_from_instance(project.id, other_service).save!
- expect { described_class.propagate(service_template) }
+ expect { described_class.propagate(instance_level_integration) }
.not_to change { Service.count }
end
- it 'creates the service containing the template attributes' do
- described_class.propagate(service_template)
+ it 'creates the service containing the instance attributes' do
+ described_class.propagate(instance_level_integration)
- expect(project.pushover_service.properties).to eq(service_template.properties)
+ expect(project.pushover_service.properties).to eq(instance_level_integration.properties)
end
describe 'bulk update', :use_sql_query_cache do
@@ -80,7 +80,7 @@ describe Projects::PropagateServiceTemplate do
project_total.times { create(:project) }
- described_class.propagate(service_template)
+ described_class.propagate(instance_level_integration)
end
it 'creates services for all projects' do
@@ -90,18 +90,18 @@ describe Projects::PropagateServiceTemplate do
describe 'external tracker' do
it 'updates the project external tracker' do
- service_template.update!(category: 'issue_tracker', default: false)
+ instance_level_integration.update!(category: 'issue_tracker', default: false)
- expect { described_class.propagate(service_template) }
+ expect { described_class.propagate(instance_level_integration) }
.to change { project.reload.has_external_issue_tracker }.to(true)
end
end
describe 'external wiki' do
it 'updates the project external tracker' do
- service_template.update!(type: 'ExternalWikiService')
+ instance_level_integration.update!(type: 'ExternalWikiService')
- expect { described_class.propagate(service_template) }
+ expect { described_class.propagate(instance_level_integration) }
.to change { project.reload.has_external_wiki }.to(true)
end
end
diff --git a/spec/workers/propagate_instance_level_service_worker_spec.rb b/spec/workers/propagate_instance_level_service_worker_spec.rb
new file mode 100644
index 00000000000..6552b198181
--- /dev/null
+++ b/spec/workers/propagate_instance_level_service_worker_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PropagateInstanceLevelServiceWorker do
+ include ExclusiveLeaseHelpers
+
+ describe '#perform' do
+ it 'calls the propagate service with the instance level service' do
+ instance_level_service = PushoverService.create(
+ instance: true,
+ active: true,
+ properties: {
+ device: 'MyDevice',
+ sound: 'mic',
+ priority: 4,
+ user_key: 'asdf',
+ api_key: '123456789'
+ })
+
+ stub_exclusive_lease("propagate_instance_level_service_worker:#{instance_level_service.id}",
+ timeout: PropagateInstanceLevelServiceWorker::LEASE_TIMEOUT)
+
+ expect(Projects::PropagateInstanceLevelService)
+ .to receive(:propagate)
+ .with(instance_level_service)
+
+ subject.perform(instance_level_service.id)
+ end
+ end
+end
diff --git a/spec/workers/propagate_service_template_worker_spec.rb b/spec/workers/propagate_service_template_worker_spec.rb
deleted file mode 100644
index fb4ced77832..00000000000
--- a/spec/workers/propagate_service_template_worker_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe PropagateServiceTemplateWorker do
- include ExclusiveLeaseHelpers
-
- describe '#perform' do
- it 'calls the propagate service with the template' do
- template = PushoverService.create(
- template: true,
- active: true,
- properties: {
- device: 'MyDevice',
- sound: 'mic',
- priority: 4,
- user_key: 'asdf',
- api_key: '123456789'
- })
-
- stub_exclusive_lease("propagate_service_template_worker:#{template.id}",
- timeout: PropagateServiceTemplateWorker::LEASE_TIMEOUT)
-
- expect(Projects::PropagateServiceTemplate)
- .to receive(:propagate)
- .with(template)
-
- subject.perform(template.id)
- end
- end
-end
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index 26fd67adfaa..01104049404 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -72,13 +72,33 @@ describe RepositoryForkWorker do
perform!
end
- it "handles bad fork" do
- error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}"
+ it 'handles bad fork' do
+ error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Failed to create fork repository"
expect_fork_repository.and_return(false)
expect { perform! }.to raise_error(StandardError, error_message)
end
+
+ it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of source project LFS objects' do
+ expect_fork_repository.and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
+ expect(service).to receive(:execute).with(project.lfs_objects_oids)
+ end
+
+ perform!
+ end
+
+ it "handles LFS objects link failure" do
+ error_message = "Unable to fork project #{forked_project.id} for repository #{project.disk_path} -> #{forked_project.disk_path}: Source project has too many LFS objects"
+
+ expect_fork_repository.and_return(true)
+ expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
+ expect(service).to receive(:execute).and_raise(Projects::LfsPointers::LfsLinkService::TooManyOidsError)
+ end
+
+ expect { perform! }.to raise_error(StandardError, error_message)
+ end
end
context 'only project ID passed' do