| author | GitLab Bot <gitlab-bot@gitlab.com> | 2019-11-28 15:06:57 +0000 |
| --- | --- | --- |
| committer | GitLab Bot <gitlab-bot@gitlab.com> | 2019-11-28 15:06:57 +0000 |
| commit | 7cdd70dcec27402e89e65451b4b1feb75b5eb267 (patch) | |
| tree | 1691c8e1afd469fa426ecf5bc127de8df16d4855 | |
| parent | 79348faced5e7e62103ad27f6a6594dfdca463e2 (diff) | |
| download | gitlab-ce-7cdd70dcec27402e89e65451b4b1feb75b5eb267.tar.gz | |
Add latest changes from gitlab-org/gitlab@master
84 files changed, 1418 insertions, 686 deletions
diff --git a/.gitlab/ci/frontend.gitlab-ci.yml b/.gitlab/ci/frontend.gitlab-ci.yml index 0b72461a9fd..c015fdd027e 100644 --- a/.gitlab/ci/frontend.gitlab-ci.yml +++ b/.gitlab/ci/frontend.gitlab-ci.yml @@ -13,7 +13,7 @@ - .default-before_script - .assets-compile-cache - .only:changes-code-backstage-qa - image: registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.3-git-2.22-chrome-73.0-node-12.x-yarn-1.16-graphicsmagick-1.3.33-docker-18.06.1 + image: registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.3-git-2.22-chrome-73.0-node-12.x-yarn-1.16-graphicsmagick-1.3.33-docker-19.03.1 stage: test dependencies: ["setup-test-env"] needs: ["setup-test-env"] @@ -312,8 +312,7 @@ gem 'gettext', '~> 3.2.2', require: false, group: :development gem 'batch-loader', '~> 1.4.0' # Perf bar -# https://gitlab.com/gitlab-org/gitlab/issues/13996 -gem 'gitlab-peek', '~> 0.0.1', require: 'peek' +gem 'peek', '~> 1.1' # Snowplow events tracking gem 'snowplow-tracker', '~> 0.6.1' @@ -381,7 +380,7 @@ group :development, :test do gem 'knapsack', '~> 1.17' - gem 'stackprof', '~> 0.2.10', require: false + gem 'stackprof', '~> 0.2.13', require: false gem 'simple_po_parser', '~> 1.1.2', require: false diff --git a/Gemfile.lock b/Gemfile.lock index 0f9b78cd8e9..2610ac83b1d 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -373,8 +373,6 @@ GEM gitlab-license (1.0.0) gitlab-markup (1.7.0) gitlab-net-dns (0.9.1) - gitlab-peek (0.0.1) - railties (>= 4.0.0) gitlab-sidekiq-fetcher (0.5.2) sidekiq (~> 5) gitlab-styles (2.8.0) @@ -724,6 +722,8 @@ GEM parser (2.6.3.0) ast (~> 2.4.0) parslet (1.8.2) + peek (1.1.0) + railties (>= 4.0.0) pg (1.1.4) po_to_json (1.0.1) json (>= 1.6.0) @@ -1003,7 +1003,7 @@ GEM sprockets (>= 3.0.0) sqlite3 (1.3.13) sshkey (2.0.0) - stackprof (0.2.10) + stackprof (0.2.13) state_machines (0.5.0) state_machines-activemodel (0.7.1) activemodel (>= 4.1) @@ -1201,7 +1201,6 @@ DEPENDENCIES gitlab-license (~> 1.0) gitlab-markup (~> 1.7.0) gitlab-net-dns (~> 0.9.1) - gitlab-peek (~> 0.0.1) gitlab-sidekiq-fetcher (= 0.5.2) gitlab-styles (~> 2.7) gitlab_chronic_duration (~> 0.10.6.2) @@ -1276,6 +1275,7 @@ DEPENDENCIES omniauth_crowd (~> 2.2.0) omniauth_openid_connect (~> 0.3.3) org-ruby (~> 0.9.12) + peek (~> 1.1) pg (~> 1.1) premailer-rails (~> 1.10.3) prometheus-client-mmap (~> 0.9.10) @@ -1340,7 +1340,7 @@ DEPENDENCIES spring-commands-rspec (~> 1.0.4) sprockets (~> 3.7.0) sshkey (~> 2.0) - stackprof (~> 0.2.10) + stackprof (~> 0.2.13) state_machines-activerecord (~> 0.6.0) sys-filesystem (~> 1.1.6) test-prof (~> 0.10.0) diff --git a/app/assets/javascripts/monitoring/components/charts/anomaly.vue b/app/assets/javascripts/monitoring/components/charts/anomaly.vue index b8767f48c5f..1df7ca37a98 100644 --- a/app/assets/javascripts/monitoring/components/charts/anomaly.vue +++ b/app/assets/javascripts/monitoring/components/charts/anomaly.vue @@ -29,7 +29,7 @@ const AREA_COLOR_RGBA = `rgba(${hexToRgb(AREA_COLOR).join(',')},${AREA_OPACITY}) * time series chart, the boundary band shows the normal * range of values the metric should take. * - * This component accepts 3 queries, which contain the + * This component accepts 3 metrics, which contain the * "metric", "upper" limit and "lower" limit. * * The upper and lower series are "stacked areas" visually @@ -62,10 +62,10 @@ export default { }, computed: { series() { - return this.graphData.queries.map(query => { - const values = query.result[0] ? 
query.result[0].values : []; + return this.graphData.metrics.map(metric => { + const values = metric.result && metric.result[0] ? metric.result[0].values : []; return { - label: query.label, + label: metric.label, // NaN values may disrupt avg., max. & min. calculations in the legend, filter them out data: values.filter(([, value]) => !Number.isNaN(value)), }; @@ -83,7 +83,7 @@ export default { return min < 0 ? -min : 0; }, metricData() { - const originalMetricQuery = this.graphData.queries[0]; + const originalMetricQuery = this.graphData.metrics[0]; const metricQuery = { ...originalMetricQuery }; metricQuery.result[0].values = metricQuery.result[0].values.map(([x, y]) => [ @@ -93,7 +93,7 @@ export default { return { ...this.graphData, type: 'line-chart', - queries: [metricQuery], + metrics: [metricQuery], }; }, metricSeriesConfig() { diff --git a/app/assets/javascripts/monitoring/components/charts/column.vue b/app/assets/javascripts/monitoring/components/charts/column.vue index ee6aaeb7dde..eb407ad1d7f 100644 --- a/app/assets/javascripts/monitoring/components/charts/column.vue +++ b/app/assets/javascripts/monitoring/components/charts/column.vue @@ -32,8 +32,8 @@ export default { }, computed: { chartData() { - const queryData = this.graphData.queries.reduce((acc, query) => { - const series = makeDataSeries(query.result, { + const queryData = this.graphData.metrics.reduce((acc, query) => { + const series = makeDataSeries(query.result || [], { name: this.formatLegendLabel(query), }); @@ -45,13 +45,13 @@ export default { }; }, xAxisTitle() { - return this.graphData.queries[0].result[0].x_label !== undefined - ? this.graphData.queries[0].result[0].x_label + return this.graphData.metrics[0].result[0].x_label !== undefined + ? this.graphData.metrics[0].result[0].x_label : ''; }, yAxisTitle() { - return this.graphData.queries[0].result[0].y_label !== undefined - ? this.graphData.queries[0].result[0].y_label + return this.graphData.metrics[0].result[0].y_label !== undefined + ? 
this.graphData.metrics[0].result[0].y_label : ''; }, xAxisType() { diff --git a/app/assets/javascripts/monitoring/components/charts/heatmap.vue b/app/assets/javascripts/monitoring/components/charts/heatmap.vue index b8158247e49..6ab5aaeba1d 100644 --- a/app/assets/javascripts/monitoring/components/charts/heatmap.vue +++ b/app/assets/javascripts/monitoring/components/charts/heatmap.vue @@ -24,7 +24,7 @@ export default { }, computed: { chartData() { - return this.queries.result.reduce( + return this.metrics.result.reduce( (acc, result, i) => [...acc, ...result.values.map((value, j) => [i, j, value[1]])], [], ); @@ -36,7 +36,7 @@ export default { return this.graphData.y_label || ''; }, xAxisLabels() { - return this.queries.result.map(res => Object.values(res.metric)[0]); + return this.metrics.result.map(res => Object.values(res.metric)[0]); }, yAxisLabels() { return this.result.values.map(val => { @@ -46,10 +46,10 @@ export default { }); }, result() { - return this.queries.result[0]; + return this.metrics.result[0]; }, - queries() { - return this.graphData.queries[0]; + metrics() { + return this.graphData.metrics[0]; }, }, }; diff --git a/app/assets/javascripts/monitoring/components/charts/single_stat.vue b/app/assets/javascripts/monitoring/components/charts/single_stat.vue index 076682820e6..e75ddb05808 100644 --- a/app/assets/javascripts/monitoring/components/charts/single_stat.vue +++ b/app/assets/javascripts/monitoring/components/charts/single_stat.vue @@ -17,7 +17,7 @@ export default { }, computed: { queryInfo() { - return this.graphData.queries[0]; + return this.graphData.metrics[0]; }, engineeringNotation() { return `${roundOffFloat(this.queryInfo.result[0].value[1], 1)}${this.queryInfo.unit}`; diff --git a/app/assets/javascripts/monitoring/components/charts/time_series.vue b/app/assets/javascripts/monitoring/components/charts/time_series.vue index 067a727fe16..ed01d0ee553 100644 --- a/app/assets/javascripts/monitoring/components/charts/time_series.vue +++ b/app/assets/javascripts/monitoring/components/charts/time_series.vue @@ -105,7 +105,7 @@ export default { // Transforms & supplements query data to render appropriate labels & styles // Input: [{ queryAttributes1 }, { queryAttributes2 }] // Output: [{ seriesAttributes1 }, { seriesAttributes2 }] - return this.graphData.queries.reduce((acc, query) => { + return this.graphData.metrics.reduce((acc, query) => { const { appearance } = query; const lineType = appearance && appearance.line && appearance.line.type @@ -121,7 +121,7 @@ export default { ? 
appearance.area.opacity : undefined, }; - const series = makeDataSeries(query.result, { + const series = makeDataSeries(query.result || [], { name: this.formatLegendLabel(query), lineStyle: { type: lineType, diff --git a/app/assets/javascripts/monitoring/components/dashboard.vue b/app/assets/javascripts/monitoring/components/dashboard.vue index 91aa5cf0e7b..823ea9e1641 100644 --- a/app/assets/javascripts/monitoring/components/dashboard.vue +++ b/app/assets/javascripts/monitoring/components/dashboard.vue @@ -250,14 +250,9 @@ export default { 'setEndpoints', 'setPanelGroupMetrics', ]), - chartsWithData(charts) { - return charts.filter(chart => - chart.metrics.some(metric => this.metricsWithData.includes(metric.metric_id)), - ); - }, - updateMetrics(key, metrics) { + updateMetrics(key, panels) { this.setPanelGroupMetrics({ - metrics, + panels, key, }); }, @@ -292,8 +287,13 @@ export default { submitCustomMetricsForm() { this.$refs.customMetricsForm.submit(); }, + chartsWithData(panels) { + return panels.filter(panel => + panel.metrics.some(metric => this.metricsWithData.includes(metric.metric_id)), + ); + }, groupHasData(group) { - return this.chartsWithData(group.metrics).length > 0; + return this.chartsWithData(group.panels).length > 0; }, onDateTimePickerApply(timeWindowUrlParams) { return redirectTo(mergeUrlParams(timeWindowUrlParams, window.location.href)); @@ -453,14 +453,14 @@ export default { :collapse-group="groupHasData(groupData)" > <vue-draggable - :value="groupData.metrics" + :value="groupData.panels" group="metrics-dashboard" :component-data="{ attrs: { class: 'row mx-0 w-100' } }" :disabled="!isRearrangingPanels" @input="updateMetrics(groupData.key, $event)" > <div - v-for="(graphData, graphIndex) in groupData.metrics" + v-for="(graphData, graphIndex) in groupData.panels" :key="`panel-type-${graphIndex}`" class="col-12 col-lg-6 px-2 mb-2 draggable" :class="{ 'draggable-enabled': isRearrangingPanels }" @@ -469,7 +469,7 @@ export default { <div v-if="isRearrangingPanels" class="draggable-remove js-draggable-remove p-2 w-100 position-absolute d-flex justify-content-end" - @click="removeGraph(groupData.metrics, graphIndex)" + @click="removeGraph(groupData.panels, graphIndex)" > <a class="mx-2 p-2 draggable-remove-link" :aria-label="__('Remove')" ><icon name="close" diff --git a/app/assets/javascripts/monitoring/components/embed.vue b/app/assets/javascripts/monitoring/components/embed.vue index f75839c7c6b..581b2093d44 100644 --- a/app/assets/javascripts/monitoring/components/embed.vue +++ b/app/assets/javascripts/monitoring/components/embed.vue @@ -37,11 +37,14 @@ export default { computed: { ...mapState('monitoringDashboard', ['dashboard', 'metricsWithData']), charts() { + if (!this.dashboard || !this.dashboard.panel_groups) { + return []; + } const groupWithMetrics = this.dashboard.panel_groups.find(group => - group.metrics.find(chart => this.chartHasData(chart)), - ) || { metrics: [] }; + group.panels.find(chart => this.chartHasData(chart)), + ) || { panels: [] }; - return groupWithMetrics.metrics.filter(chart => this.chartHasData(chart)); + return groupWithMetrics.panels.filter(chart => this.chartHasData(chart)); }, isSingleChart() { return this.charts.length === 1; diff --git a/app/assets/javascripts/monitoring/components/panel_type.vue b/app/assets/javascripts/monitoring/components/panel_type.vue index effcf334cbc..fab1dd0f981 100644 --- a/app/assets/javascripts/monitoring/components/panel_type.vue +++ b/app/assets/javascripts/monitoring/components/panel_type.vue @@ -54,10 
+54,14 @@ export default { return IS_EE && this.prometheusAlertsAvailable && this.alertsEndpoint && this.graphData; }, graphDataHasMetrics() { - return this.graphData.queries[0].result.length > 0; + return ( + this.graphData.metrics && + this.graphData.metrics[0].result && + this.graphData.metrics[0].result.length > 0 + ); }, csvText() { - const chartData = this.graphData.queries[0].result[0].values; + const chartData = this.graphData.metrics[0].result[0].values; const yLabel = this.graphData.y_label; const header = `timestamp,${yLabel}\r\n`; // eslint-disable-line @gitlab/i18n/no-non-i18n-strings return chartData.reduce((csv, data) => { @@ -112,7 +116,7 @@ export default { :graph-data="graphData" :deployment-data="deploymentData" :project-path="projectPath" - :thresholds="getGraphAlertValues(graphData.queries)" + :thresholds="getGraphAlertValues(graphData.metrics)" group-id="panel-type-chart" > <div class="d-flex align-items-center"> @@ -120,8 +124,8 @@ export default { v-if="alertWidgetAvailable && graphData" :modal-id="`alert-modal-${index}`" :alerts-endpoint="alertsEndpoint" - :relevant-queries="graphData.queries" - :alerts-to-manage="getGraphAlerts(graphData.queries)" + :relevant-queries="graphData.metrics" + :alerts-to-manage="getGraphAlerts(graphData.metrics)" @setAlerts="setAlerts" /> <gl-dropdown diff --git a/app/assets/javascripts/monitoring/stores/mutations.js b/app/assets/javascripts/monitoring/stores/mutations.js index 696af5aed75..bfa76aa7cea 100644 --- a/app/assets/javascripts/monitoring/stores/mutations.js +++ b/app/assets/javascripts/monitoring/stores/mutations.js @@ -1,9 +1,13 @@ import Vue from 'vue'; import { slugify } from '~/lib/utils/text_utility'; import * as types from './mutation_types'; -import { normalizeMetrics, normalizeMetric, normalizeQueryResult } from './utils'; +import { normalizeMetric, normalizeQueryResult } from './utils'; -const normalizePanel = panel => panel.metrics.map(normalizeMetric); +const normalizePanelMetrics = (metrics, defaultLabel) => + metrics.map(metric => ({ + ...normalizeMetric(metric), + label: metric.label || defaultLabel, + })); export default { [types.REQUEST_METRICS_DATA](state) { @@ -13,28 +17,18 @@ export default { [types.RECEIVE_METRICS_DATA_SUCCESS](state, groupData) { state.dashboard.panel_groups = groupData.map((group, i) => { const key = `${slugify(group.group || 'default')}-${i}`; - let { metrics = [], panels = [] } = group; + let { panels = [] } = group; // each panel has metric information that needs to be normalized - panels = panels.map(panel => ({ ...panel, - metrics: normalizePanel(panel), - })); - - // for backwards compatibility, and to limit Vue template changes: - // for each group alias panels to metrics - // for each panel alias metrics to queries - metrics = panels.map(panel => ({ - ...panel, - queries: panel.metrics, + metrics: normalizePanelMetrics(panel.metrics, panel.y_label), })); return { ...group, panels, key, - metrics: normalizeMetrics(metrics), }; }); @@ -58,6 +52,7 @@ export default { [types.RECEIVE_ENVIRONMENTS_DATA_FAILURE](state) { state.environments = []; }, + [types.SET_QUERY_RESULT](state, { metricId, result }) { if (!metricId || !result || result.length === 0) { return; @@ -65,14 +60,17 @@ export default { state.showEmptyState = false; + /** + * Search the dashboard state for a matching id + */ state.dashboard.panel_groups.forEach(group => { - group.metrics.forEach(metric => { - metric.queries.forEach(query => { - if (query.metric_id === metricId) { + group.panels.forEach(panel => { + 
panel.metrics.forEach(metric => { + if (metric.metric_id === metricId) { state.metricsWithData.push(metricId); // ensure dates/numbers are correctly formatted for charts const normalizedResults = result.map(normalizeQueryResult); - Vue.set(query, 'result', Object.freeze(normalizedResults)); + Vue.set(metric, 'result', Object.freeze(normalizedResults)); } }); }); @@ -101,6 +99,6 @@ export default { }, [types.SET_PANEL_GROUP_METRICS](state, payload) { const panelGroup = state.dashboard.panel_groups.find(pg => payload.key === pg.key); - panelGroup.metrics = payload.metrics; + panelGroup.panels = payload.panels; }, }; diff --git a/app/assets/javascripts/monitoring/stores/utils.js b/app/assets/javascripts/monitoring/stores/utils.js index 8a396b15a31..3300d2032d0 100644 --- a/app/assets/javascripts/monitoring/stores/utils.js +++ b/app/assets/javascripts/monitoring/stores/utils.js @@ -1,83 +1,21 @@ import _ from 'underscore'; -function checkQueryEmptyData(query) { - return { - ...query, - result: query.result.filter(timeSeries => { - const newTimeSeries = timeSeries; - const hasValue = series => - !Number.isNaN(series[1]) && (series[1] !== null || series[1] !== undefined); - const hasNonNullValue = timeSeries.values.find(hasValue); - - newTimeSeries.values = hasNonNullValue ? newTimeSeries.values : []; - - return newTimeSeries.values.length > 0; - }), - }; -} - -function removeTimeSeriesNoData(queries) { - return queries.reduce((series, query) => series.concat(checkQueryEmptyData(query)), []); -} - -// Metrics and queries are currently stored 1:1, so `queries` is an array of length one. -// We want to group queries onto a single chart by title & y-axis label. -// This function will no longer be required when metrics:queries are 1:many, -// though there is no consequence if the function stays in use. -// @param metrics [Array<Object>] -// Ex) [ -// { id: 1, title: 'title', y_label: 'MB', queries: [{ ...query1Attrs }] }, -// { id: 2, title: 'title', y_label: 'MB', queries: [{ ...query2Attrs }] }, -// { id: 3, title: 'new title', y_label: 'MB', queries: [{ ...query3Attrs }] } -// ] -// @return [Array<Object>] -// Ex) [ -// { title: 'title', y_label: 'MB', queries: [{ metricId: 1, ...query1Attrs }, -// { metricId: 2, ...query2Attrs }] }, -// { title: 'new title', y_label: 'MB', queries: [{ metricId: 3, ...query3Attrs }]} -// ] -export function groupQueriesByChartInfo(metrics) { - const metricsByChart = metrics.reduce((accumulator, metric) => { - const { queries, ...chart } = metric; - - const chartKey = `${chart.title}|${chart.y_label}`; - accumulator[chartKey] = accumulator[chartKey] || { ...chart, queries: [] }; - - queries.forEach(queryAttrs => { - let metricId; - - if (chart.id) { - metricId = chart.id.toString(); - } else if (queryAttrs.metric_id) { - metricId = queryAttrs.metric_id.toString(); - } else { - metricId = null; - } - - accumulator[chartKey].queries.push({ metricId, ...queryAttrs }); - }); - - return accumulator; - }, {}); - - return Object.values(metricsByChart); -} - export const uniqMetricsId = metric => `${metric.metric_id}_${metric.id}`; /** - * Not to confuse with normalizeMetrics (plural) * Metrics loaded from project-defined dashboards do not have a metric_id. * This method creates a unique ID combining metric_id and id, if either is present. 
* This is hopefully a temporary solution until BE processes metrics before passing to fE * @param {Object} metric - metric * @returns {Object} - normalized metric with a uniqueID */ + export const normalizeMetric = (metric = {}) => _.omit( { ...metric, metric_id: uniqMetricsId(metric), + metricId: uniqMetricsId(metric), }, 'id', ); @@ -93,6 +31,11 @@ export const normalizeQueryResult = timeSeries => { Number(value), ]), }; + // Check result for empty data + normalizedResult.values = normalizedResult.values.filter(series => { + const hasValue = d => !Number.isNaN(d[1]) && (d[1] !== null || d[1] !== undefined); + return series.find(hasValue); + }); } else if (timeSeries.value) { normalizedResult = { ...timeSeries, @@ -102,21 +45,3 @@ export const normalizeQueryResult = timeSeries => { return normalizedResult; }; - -export const normalizeMetrics = metrics => { - const groupedMetrics = groupQueriesByChartInfo(metrics); - - return groupedMetrics.map(metric => { - const queries = metric.queries.map(query => ({ - ...query, - // custom metrics do not require a label, so we should ensure this attribute is defined - label: query.label || metric.y_label, - result: (query.result || []).map(normalizeQueryResult), - })); - - return { - ...metric, - queries: removeTimeSeriesNoData(queries), - }; - }); -}; diff --git a/app/assets/javascripts/monitoring/utils.js b/app/assets/javascripts/monitoring/utils.js index 2ae1647011d..336f81aeeff 100644 --- a/app/assets/javascripts/monitoring/utils.js +++ b/app/assets/javascripts/monitoring/utils.js @@ -72,10 +72,9 @@ export const ISODateToString = date => dateformat(date, dateFormats.dateTimePick */ export const graphDataValidatorForValues = (isValues, graphData) => { const responseValueKeyName = isValues ? 'value' : 'values'; - return ( - Array.isArray(graphData.queries) && - graphData.queries.filter(query => { + Array.isArray(graphData.metrics) && + graphData.metrics.filter(query => { if (Array.isArray(query.result)) { return ( query.result.filter(res => Array.isArray(res[responseValueKeyName])).length === @@ -83,7 +82,7 @@ export const graphDataValidatorForValues = (isValues, graphData) => { ); } return false; - }).length === graphData.queries.length + }).length === graphData.metrics.filter(query => query.result).length ); }; @@ -131,7 +130,7 @@ export const downloadCSVOptions = title => { }; /** - * This function validates the graph data contains exactly 3 queries plus + * This function validates the graph data contains exactly 3 metrics plus * value validations from graphDataValidatorForValues. * @param {Object} isValues * @param {Object} graphData the graph data response from a prometheus request @@ -140,8 +139,8 @@ export const downloadCSVOptions = title => { export const graphDataValidatorForAnomalyValues = graphData => { const anomalySeriesCount = 3; // metric, upper, lower return ( - graphData.queries && - graphData.queries.length === anomalySeriesCount && + graphData.metrics && + graphData.metrics.length === anomalySeriesCount && graphDataValidatorForValues(false, graphData) ); }; diff --git a/app/finders/snippets_finder.rb b/app/finders/snippets_finder.rb index bd6b6190fb5..5819f279eaa 100644 --- a/app/finders/snippets_finder.rb +++ b/app/finders/snippets_finder.rb @@ -40,15 +40,14 @@ # Any other value will be ignored. 
class SnippetsFinder < UnionFinder include FinderMethods + include Gitlab::Utils::StrongMemoize - attr_accessor :current_user, :project, :author, :scope, :explore + attr_accessor :current_user, :params + delegate :explore, :only_personal, :only_project, :scope, to: :params def initialize(current_user = nil, params = {}) @current_user = current_user - @project = params[:project] - @author = params[:author] - @scope = params[:scope].to_s - @explore = params[:explore] + @params = OpenStruct.new(params) if project && author raise( @@ -60,8 +59,15 @@ class SnippetsFinder < UnionFinder end def execute - base = init_collection - base.with_optional_visibility(visibility_from_scope).fresh + # The snippet query can be expensive, therefore if the + # author or project params have been passed and they don't + # exist, it's better to return + return Snippet.none if author.nil? && params[:author].present? + return Snippet.none if project.nil? && params[:project].present? + + items = init_collection + items = by_ids(items) + items.with_optional_visibility(visibility_from_scope).fresh end private @@ -69,10 +75,12 @@ class SnippetsFinder < UnionFinder def init_collection if explore snippets_for_explore + elsif only_personal + personal_snippets elsif project snippets_for_a_single_project else - snippets_for_multiple_projects + snippets_for_personal_and_multiple_projects end end @@ -96,8 +104,9 @@ class SnippetsFinder < UnionFinder # # Each collection is constructed in isolation, allowing for greater control # over the resulting SQL query. - def snippets_for_multiple_projects - queries = [personal_snippets] + def snippets_for_personal_and_multiple_projects + queries = [] + queries << personal_snippets unless only_project if Ability.allowed?(current_user, :read_cross_project) queries << snippets_of_visible_projects @@ -158,7 +167,7 @@ class SnippetsFinder < UnionFinder end def visibility_from_scope - case scope + case scope.to_s when 'are_private' Snippet::PRIVATE when 'are_internal' @@ -169,6 +178,28 @@ class SnippetsFinder < UnionFinder nil end end + + def by_ids(items) + return items unless params[:ids].present? + + items.id_in(params[:ids]) + end + + def author + strong_memoize(:author) do + next unless params[:author].present? + + params[:author].is_a?(User) ? params[:author] : User.find_by_id(params[:author]) + end + end + + def project + strong_memoize(:project) do + next unless params[:project].present? + + params[:project].is_a?(Project) ? 
params[:project] : Project.find_by_id(params[:project]) + end + end end SnippetsFinder.prepend_if_ee('EE::SnippetsFinder') diff --git a/app/helpers/blob_helper.rb b/app/helpers/blob_helper.rb index 912f0b61978..659f9778892 100644 --- a/app/helpers/blob_helper.rb +++ b/app/helpers/blob_helper.rb @@ -141,7 +141,7 @@ module BlobHelper if @build && @entry raw_project_job_artifacts_url(@project, @build, path: @entry.path, **kwargs) elsif @snippet - reliable_raw_snippet_url(@snippet) + raw_snippet_url(@snippet) elsif @blob project_raw_url(@project, @id, **kwargs) end diff --git a/app/helpers/gitlab_routing_helper.rb b/app/helpers/gitlab_routing_helper.rb index 404ea7b00d4..38ca12e6f90 100644 --- a/app/helpers/gitlab_routing_helper.rb +++ b/app/helpers/gitlab_routing_helper.rb @@ -193,6 +193,101 @@ module GitlabRoutingHelper project = schedule.project take_ownership_project_pipeline_schedule_path(project, schedule, *args) end + + def snippet_path(snippet, *args) + if snippet.is_a?(ProjectSnippet) + application_url_helpers.project_snippet_path(snippet.project, snippet, *args) + else + new_args = snippet_query_params(snippet, *args) + application_url_helpers.snippet_path(snippet, *new_args) + end + end + + def snippet_url(snippet, *args) + if snippet.is_a?(ProjectSnippet) + application_url_helpers.project_snippet_url(snippet.project, snippet, *args) + else + new_args = snippet_query_params(snippet, *args) + application_url_helpers.snippet_url(snippet, *new_args) + end + end + + def raw_snippet_path(snippet, *args) + if snippet.is_a?(ProjectSnippet) + application_url_helpers.raw_project_snippet_path(snippet.project, snippet, *args) + else + new_args = snippet_query_params(snippet, *args) + application_url_helpers.raw_snippet_path(snippet, *new_args) + end + end + + def raw_snippet_url(snippet, *args) + if snippet.is_a?(ProjectSnippet) + application_url_helpers.raw_project_snippet_url(snippet.project, snippet, *args) + else + new_args = snippet_query_params(snippet, *args) + application_url_helpers.raw_snippet_url(snippet, *new_args) + end + end + + def snippet_notes_path(snippet, *args) + new_args = snippet_query_params(snippet, *args) + application_url_helpers.snippet_notes_path(snippet, *new_args) + end + + def snippet_notes_url(snippet, *args) + new_args = snippet_query_params(snippet, *args) + application_url_helpers.snippet_notes_url(snippet, *new_args) + end + + def snippet_note_path(snippet, note, *args) + new_args = snippet_query_params(snippet, *args) + application_url_helpers.snippet_note_path(snippet, note, *new_args) + end + + def snippet_note_url(snippet, note, *args) + new_args = snippet_query_params(snippet, *args) + application_url_helpers.snippet_note_url(snippet, note, *new_args) + end + + def toggle_award_emoji_snippet_note_path(snippet, note, *args) + new_args = snippet_query_params(snippet, *args) + application_url_helpers.toggle_award_emoji_snippet_note_path(snippet, note, *new_args) + end + + def toggle_award_emoji_snippet_note_url(snippet, note, *args) + new_args = snippet_query_params(snippet, *args) + application_url_helpers.toggle_award_emoji_snippet_note_url(snippet, note, *new_args) + end + + def toggle_award_emoji_snippet_path(snippet, *args) + new_args = snippet_query_params(snippet, *args) + application_url_helpers.toggle_award_emoji_snippet_path(snippet, *new_args) + end + + def toggle_award_emoji_snippet_url(snippet, *args) + new_args = snippet_query_params(snippet, *args) + application_url_helpers.toggle_award_emoji_snippet_url(snippet, *new_args) + end + + private 
+ + def application_url_helpers + Gitlab::Routing.url_helpers + end + + def snippet_query_params(snippet, *args) + opts = case args.last + when Hash + args.pop + when ActionController::Parameters + args.pop.to_h + else + {} + end + + args << opts + end end GitlabRoutingHelper.include_if_ee('EE::GitlabRoutingHelper') diff --git a/app/helpers/snippets_helper.rb b/app/helpers/snippets_helper.rb index 10e31fb8888..7d7d8646c25 100644 --- a/app/helpers/snippets_helper.rb +++ b/app/helpers/snippets_helper.rb @@ -11,33 +11,9 @@ module SnippetsHelper end end - def reliable_snippet_path(snippet, opts = {}) - reliable_snippet_url(snippet, opts.merge(only_path: true)) - end - - def reliable_raw_snippet_path(snippet, opts = {}) - reliable_raw_snippet_url(snippet, opts.merge(only_path: true)) - end - - def reliable_snippet_url(snippet, opts = {}) - if snippet.project_id? - project_snippet_url(snippet.project, snippet, nil, opts) - else - snippet_url(snippet, nil, opts) - end - end - - def reliable_raw_snippet_url(snippet, opts = {}) - if snippet.project_id? - raw_project_snippet_url(snippet.project, snippet, nil, opts) - else - raw_snippet_url(snippet, nil, opts) - end - end - def download_raw_snippet_button(snippet) link_to(icon('download'), - reliable_raw_snippet_path(snippet, inline: false), + raw_snippet_path(snippet, inline: false), target: '_blank', rel: 'noopener noreferrer', class: "btn btn-sm has-tooltip", @@ -133,7 +109,7 @@ module SnippetsHelper end def snippet_embed_tag(snippet) - content_tag(:script, nil, src: reliable_snippet_url(snippet, format: :js, only_path: false)) + content_tag(:script, nil, src: snippet_url(snippet, format: :js)) end def snippet_badge(snippet) @@ -158,7 +134,7 @@ module SnippetsHelper return if blob.empty? || blob.binary? || blob.stored_externally? 
link_to(external_snippet_icon('doc-code'), - reliable_raw_snippet_url(@snippet), + raw_snippet_url(@snippet), class: 'btn', target: '_blank', rel: 'noopener noreferrer', @@ -167,7 +143,7 @@ module SnippetsHelper def embedded_snippet_download_button link_to(external_snippet_icon('download'), - reliable_raw_snippet_url(@snippet, inline: false), + raw_snippet_url(@snippet, inline: false), class: 'btn', target: '_blank', title: 'Download', diff --git a/app/models/commit.rb b/app/models/commit.rb index aae49c36899..95993089426 100644 --- a/app/models/commit.rb +++ b/app/models/commit.rb @@ -464,8 +464,20 @@ class Commit "commit:#{sha}" end + def expire_note_etag_cache + super + + expire_note_etag_cache_for_related_mrs + end + private + def expire_note_etag_cache_for_related_mrs + MergeRequest.includes(target_project: :namespace).by_commit_sha(id).find_each do |mr| + mr.expire_note_etag_cache + end + end + def commit_reference(from, referable_commit_id, full: false) reference = project.to_reference(from, full: full) diff --git a/app/models/import_failure.rb b/app/models/import_failure.rb new file mode 100644 index 00000000000..998572853d3 --- /dev/null +++ b/app/models/import_failure.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +class ImportFailure < ApplicationRecord + belongs_to :project + + validates :project, presence: true +end diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb index 16d36d42eac..eb98bf3da7f 100644 --- a/app/models/merge_request.rb +++ b/app/models/merge_request.rb @@ -199,6 +199,9 @@ class MergeRequest < ApplicationRecord scope :by_milestone, ->(milestone) { where(milestone_id: milestone) } scope :of_projects, ->(ids) { where(target_project_id: ids) } scope :from_project, ->(project) { where(source_project_id: project.id) } + scope :from_and_to_forks, ->(project) do + where('source_project_id <> target_project_id AND (source_project_id = ? OR target_project_id = ?)', project.id, project.id) + end scope :merged, -> { with_state(:merged) } scope :closed_and_merged, -> { with_states(:closed, :merged) } scope :open_and_closed, -> { with_states(:opened, :closed) } @@ -1468,6 +1471,10 @@ class MergeRequest < ApplicationRecord all_pipelines.for_sha_or_source_sha(diff_head_sha).first end + def etag_caching_enabled? 
+ true + end + private def with_rebase_lock diff --git a/app/models/project.rb b/app/models/project.rb index 4cfbb963ddf..26392964ced 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -179,6 +179,7 @@ class Project < ApplicationRecord has_one :forked_from_project, through: :fork_network_member has_many :forked_to_members, class_name: 'ForkNetworkMember', foreign_key: 'forked_from_project_id' has_many :forks, through: :forked_to_members, source: :project, inverse_of: :forked_from_project + has_many :fork_network_projects, through: :fork_network, source: :projects has_one :import_state, autosave: true, class_name: 'ProjectImportState', inverse_of: :project has_one :import_export_upload, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent @@ -295,6 +296,8 @@ class Project < ApplicationRecord has_one :pages_metadatum, class_name: 'ProjectPagesMetadatum', inverse_of: :project + has_many :import_failures, inverse_of: :project + accepts_nested_attributes_for :variables, allow_destroy: true accepts_nested_attributes_for :project_feature, update_only: true accepts_nested_attributes_for :import_data @@ -715,6 +718,10 @@ class Project < ApplicationRecord Feature.enabled?(:project_daily_statistics, self, default_enabled: true) end + def unlink_forks_upon_visibility_decrease_enabled? + Feature.enabled?(:unlink_fork_network_upon_visibility_decrease, self) + end + def empty_repo? repository.empty? end @@ -1533,6 +1540,7 @@ class Project < ApplicationRecord # update visibility_level of forks def update_forks_visibility_level + return if unlink_forks_upon_visibility_decrease_enabled? return unless visibility_level < visibility_level_before_last_save forks.each do |forked_project| diff --git a/app/services/metrics/dashboard/grafana_metric_embed_service.rb b/app/services/metrics/dashboard/grafana_metric_embed_service.rb index 60591e9a6f3..44b58ad9729 100644 --- a/app/services/metrics/dashboard/grafana_metric_embed_service.rb +++ b/app/services/metrics/dashboard/grafana_metric_embed_service.rb @@ -133,7 +133,7 @@ module Metrics def uid_regex base_url = @project.grafana_integration.grafana_url.chomp('/') - %r{(#{Regexp.escape(base_url)}\/d\/(?<uid>\w+)\/)}x + %r{^(#{Regexp.escape(base_url)}\/d\/(?<uid>.+)\/)}x end end diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb index 90e703e7050..cbed794f92e 100644 --- a/app/services/projects/destroy_service.rb +++ b/app/services/projects/destroy_service.rb @@ -31,13 +31,6 @@ module Projects Projects::UnlinkForkService.new(project, current_user).execute - # The project is not necessarily a fork, so update the fork network originating - # from this project - if fork_network = project.root_of_fork_network - fork_network.update(root_project: nil, - deleted_root_project_name: project.full_name) - end - attempt_destroy_transaction(project) system_hook_service.execute_hooks_for(project, :destroy) diff --git a/app/services/projects/overwrite_project_service.rb b/app/services/projects/overwrite_project_service.rb index 696e1b665b2..c5e38f166da 100644 --- a/app/services/projects/overwrite_project_service.rb +++ b/app/services/projects/overwrite_project_service.rb @@ -7,7 +7,9 @@ module Projects Project.transaction do move_before_destroy_relationships(source_project) - destroy_old_project(source_project) + # Reset is required in order to get the proper + # uncached fork network method calls value. 
+ destroy_old_project(source_project.reset) rename_project(source_project.name, source_project.path) @project diff --git a/app/services/projects/unlink_fork_service.rb b/app/services/projects/unlink_fork_service.rb index 1b8a920268f..e7e0141099e 100644 --- a/app/services/projects/unlink_fork_service.rb +++ b/app/services/projects/unlink_fork_service.rb @@ -2,34 +2,67 @@ module Projects class UnlinkForkService < BaseService - # rubocop: disable CodeReuse/ActiveRecord + # If a fork is given, it: + # + # - Saves LFS objects to the root project + # - Close existing MRs coming from it + # - Is removed from the fork network + # + # If a root of fork(s) is given, it does the same, + # but not updating LFS objects (there'll be no related root to cache it). def execute - return unless @project.forked? + fork_network = @project.fork_network - if fork_source = @project.fork_source - fork_source.lfs_objects.find_each do |lfs_object| - lfs_object.projects << @project unless lfs_object.projects.include?(@project) - end + return unless fork_network - refresh_forks_count(fork_source) - end + save_lfs_objects - merge_requests = @project.fork_network + merge_requests = fork_network .merge_requests .opened - .where.not(target_project: @project) - .from_project(@project) + .from_and_to_forks(@project) - merge_requests.each do |mr| + merge_requests.find_each do |mr| ::MergeRequests::CloseService.new(@project, @current_user).execute(mr) end - @project.fork_network_member.destroy + Project.transaction do + # Get out of the fork network as a member and + # remove references from all its direct forks. + @project.fork_network_member.destroy + @project.forked_to_members.update_all(forked_from_project_id: nil) + + # The project is not necessarily a fork, so update the fork network originating + # from this project + if fork_network = @project.root_of_fork_network + fork_network.update(root_project: nil, deleted_root_project_name: @project.full_name) + end + end + + # When the project getting out of the network is a node with parent + # and children, both the parent and the node needs a cache refresh. + [@project.forked_from_project, @project].compact.each do |project| + refresh_forks_count(project) + end end - # rubocop: enable CodeReuse/ActiveRecord + + private def refresh_forks_count(project) Projects::ForksCountService.new(project).refresh_cache end + + def save_lfs_objects + return unless @project.forked? + + lfs_storage_project = @project.lfs_storage_project + + return unless lfs_storage_project + return if lfs_storage_project == @project # that project is being unlinked + + lfs_storage_project.lfs_objects.find_each do |lfs_object| + lfs_object.projects << @project unless lfs_object.projects.include?(@project) + end + end end end diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb index 2dad1d05a2c..aedd7252f63 100644 --- a/app/services/projects/update_service.rb +++ b/app/services/projects/update_service.rb @@ -65,7 +65,7 @@ module Projects ) project_changed_feature_keys = project.project_feature.previous_changes.keys - if project.previous_changes.include?(:visibility_level) && project.private? + if project.visibility_level_previous_changes && project.private? 
# don't enqueue immediately to prevent todos removal in case of a mistake TodosDestroyer::ConfidentialIssueWorker.perform_in(Todo::WAIT_FOR_DELETE, nil, project.id) TodosDestroyer::ProjectPrivateWorker.perform_in(Todo::WAIT_FOR_DELETE, project.id) @@ -79,6 +79,11 @@ module Projects system_hook_service.execute_hooks_for(project, :update) end + if project.visibility_level_decreased? && project.unlink_forks_upon_visibility_decrease_enabled? + # It's a system-bounded operation, so no extra authorization check is required. + Projects::UnlinkForkService.new(project, current_user).execute + end + update_pages_config if changing_pages_related_config? end diff --git a/app/views/search/results/_snippet_blob.html.haml b/app/views/search/results/_snippet_blob.html.haml index 37f4efee9d2..2dbd2a74602 100644 --- a/app/views/search/results/_snippet_blob.html.haml +++ b/app/views/search/results/_snippet_blob.html.haml @@ -1,7 +1,7 @@ - snippet_blob = chunk_snippet(snippet_blob, @search_term) - snippet = snippet_blob[:snippet_object] - snippet_chunks = snippet_blob[:snippet_chunks] -- snippet_path = reliable_snippet_path(snippet) +- snippet_path = snippet_path(snippet) .search-result-row %span diff --git a/app/views/search/results/_snippet_title.html.haml b/app/views/search/results/_snippet_title.html.haml index 7280146720e..a544e59c405 100644 --- a/app/views/search/results/_snippet_title.html.haml +++ b/app/views/search/results/_snippet_title.html.haml @@ -1,6 +1,6 @@ .search-result-row %h4.snippet-title.term - = link_to reliable_snippet_path(snippet_title) do + = link_to snippet_path(snippet_title) do = truncate(snippet_title.title, length: 60) = snippet_badge(snippet_title) %span.cgray.monospace.tiny.float-right.term diff --git a/app/views/shared/snippets/_snippet.html.haml b/app/views/shared/snippets/_snippet.html.haml index 5602ea37b5c..7d102a1b280 100644 --- a/app/views/shared/snippets/_snippet.html.haml +++ b/app/views/shared/snippets/_snippet.html.haml @@ -5,7 +5,7 @@ = image_tag avatar_icon_for_user(snippet.author), class: "avatar s40 d-none d-sm-block", alt: '' .title - = link_to reliable_snippet_path(snippet) do + = link_to snippet_path(snippet) do = snippet.title - if snippet.file_name.present? %span.snippet-filename.d-none.d-sm-inline-block.ml-2 @@ -14,7 +14,7 @@ %ul.controls %li - = link_to reliable_snippet_path(snippet, anchor: 'notes'), class: ('no-comments' if notes_count.zero?) do + = link_to snippet_path(snippet, anchor: 'notes'), class: ('no-comments' if notes_count.zero?) 
do = icon('comments') = notes_count %li diff --git a/changelogs/unreleased/17580-enable-etag-caching-notes-for-mrs.yml b/changelogs/unreleased/17580-enable-etag-caching-notes-for-mrs.yml new file mode 100644 index 00000000000..65e274c0bb0 --- /dev/null +++ b/changelogs/unreleased/17580-enable-etag-caching-notes-for-mrs.yml @@ -0,0 +1,5 @@ +--- +title: Enable ETag caching for MR notes polling +merge_request: 20440 +author: +type: performance diff --git a/changelogs/unreleased/37057-record-import-failures.yml b/changelogs/unreleased/37057-record-import-failures.yml new file mode 100644 index 00000000000..2358220ef29 --- /dev/null +++ b/changelogs/unreleased/37057-record-import-failures.yml @@ -0,0 +1,5 @@ +--- +title: Collect project import failures instead of failing fast +merge_request: 20727 +author: +type: other diff --git a/changelogs/unreleased/7603-make-it-easy-to-generate-and-share-the-maven-xml-for-a-library.yml b/changelogs/unreleased/7603-make-it-easy-to-generate-and-share-the-maven-xml-for-a-library.yml new file mode 100644 index 00000000000..ed02816f7d1 --- /dev/null +++ b/changelogs/unreleased/7603-make-it-easy-to-generate-and-share-the-maven-xml-for-a-library.yml @@ -0,0 +1,5 @@ +--- +title: Add Maven installation commands to package detail page for Maven packages +merge_request: 20300 +author: +type: added diff --git a/changelogs/unreleased/Replace-BoardService_in_assignee_select_spec-js.yml b/changelogs/unreleased/Replace-BoardService_in_assignee_select_spec-js.yml new file mode 100644 index 00000000000..8bc76beeb8b --- /dev/null +++ b/changelogs/unreleased/Replace-BoardService_in_assignee_select_spec-js.yml @@ -0,0 +1,5 @@ +--- +title: removes references of BoardService +merge_request: 20880 +author: nuwe1 +type: other diff --git a/changelogs/unreleased/Replace-BoardService_in_board_list_common_spec-js.yml b/changelogs/unreleased/Replace-BoardService_in_board_list_common_spec-js.yml new file mode 100644 index 00000000000..faf5c2a0ef0 --- /dev/null +++ b/changelogs/unreleased/Replace-BoardService_in_board_list_common_spec-js.yml @@ -0,0 +1,5 @@ +--- +title: removes references of BoardService +merge_request: 20872 +author: nuwe1 +type: other diff --git a/changelogs/unreleased/Replace-BoardService_in_board_new_issue_spec-js.yml b/changelogs/unreleased/Replace-BoardService_in_board_new_issue_spec-js.yml new file mode 100644 index 00000000000..22830aa8af6 --- /dev/null +++ b/changelogs/unreleased/Replace-BoardService_in_board_new_issue_spec-js.yml @@ -0,0 +1,5 @@ +--- +title: removes references of BoardService +merge_request: 20874 +author: nuwe1 +type: other diff --git a/changelogs/unreleased/fj-add-filters-to-snippets-finder.yml b/changelogs/unreleased/fj-add-filters-to-snippets-finder.yml new file mode 100644 index 00000000000..df8c70152d1 --- /dev/null +++ b/changelogs/unreleased/fj-add-filters-to-snippets-finder.yml @@ -0,0 +1,5 @@ +--- +title: Add more filters to SnippetsFinder +merge_request: 20767 +author: +type: changed diff --git a/changelogs/unreleased/osw-delete-fork-relation-upon-visibility-change.yml b/changelogs/unreleased/osw-delete-fork-relation-upon-visibility-change.yml new file mode 100644 index 00000000000..64a7f6c7427 --- /dev/null +++ b/changelogs/unreleased/osw-delete-fork-relation-upon-visibility-change.yml @@ -0,0 +1,5 @@ +--- +title: Adjust fork network relations upon project visibility change +merge_request: 20466 +author: +type: added diff --git a/changelogs/unreleased/sy-grafana-fix-uid.yml b/changelogs/unreleased/sy-grafana-fix-uid.yml new file 
mode 100644 index 00000000000..778706d540b --- /dev/null +++ b/changelogs/unreleased/sy-grafana-fix-uid.yml @@ -0,0 +1,5 @@ +--- +title: Accept user-defined dashboard uids in Grafana embeds +merge_request: 20486 +author: +type: fixed diff --git a/db/migrate/20191125140458_create_import_failures.rb b/db/migrate/20191125140458_create_import_failures.rb new file mode 100644 index 00000000000..43e8efe90a4 --- /dev/null +++ b/db/migrate/20191125140458_create_import_failures.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +class CreateImportFailures < ActiveRecord::Migration[5.2] + DOWNTIME = false + + def change + create_table :import_failures do |t| + t.integer :relation_index + t.references :project, null: false, index: true + t.datetime_with_timezone :created_at, null: false + t.string :relation_key, limit: 64 + t.string :exception_class, limit: 128 + t.string :correlation_id_value, limit: 128, index: true + t.string :exception_message, limit: 255 + end + end +end diff --git a/db/schema.rb b/db/schema.rb index 57d05abd980..782610b62f5 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -10,7 +10,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 2019_11_24_150431) do +ActiveRecord::Schema.define(version: 2019_11_25_140458) do # These are extensions that must be enabled in order to support this database enable_extension "pg_trgm" @@ -1948,6 +1948,18 @@ ActiveRecord::Schema.define(version: 2019_11_24_150431) do t.index ["updated_at"], name: "index_import_export_uploads_on_updated_at" end + create_table "import_failures", force: :cascade do |t| + t.integer "relation_index" + t.bigint "project_id", null: false + t.datetime_with_timezone "created_at", null: false + t.string "relation_key", limit: 64 + t.string "exception_class", limit: 128 + t.string "correlation_id_value", limit: 128 + t.string "exception_message", limit: 255 + t.index ["correlation_id_value"], name: "index_import_failures_on_correlation_id_value" + t.index ["project_id"], name: "index_import_failures_on_project_id" + end + create_table "index_statuses", id: :serial, force: :cascade do |t| t.integer "project_id", null: false t.datetime "indexed_at" diff --git a/doc/topics/git/useful_git_commands.md b/doc/topics/git/useful_git_commands.md index cfe19c89618..abd06b95b1e 100644 --- a/doc/topics/git/useful_git_commands.md +++ b/doc/topics/git/useful_git_commands.md @@ -167,6 +167,14 @@ With HTTPS: GIT_TRACE_PACKET=1 GIT_TRACE=2 GIT_CURL_VERBOSE=1 git clone <url> ``` +### Debugging with Git embedded traces + +Git includes a complete set of [traces for debugging Git commands](https://git-scm.com/book/en/v2/Git-Internals-Environment-Variables#_debugging), for example: + +- `GIT_TRACE_PERFORMANCE=1`: enables tracing of performance data, showing how long each particular `git` invocation takes. +- `GIT_TRACE_SETUP=1`: enables tracing of what `git` is discovering about the repository and environment it’s interacting with. +- `GIT_TRACE_PACKET=1`: enables packet-level tracing for network operations. 
+ ## Rebasing ### Rebase your branch onto master diff --git a/lib/api/entities.rb b/lib/api/entities.rb index 22c88a7f7f0..d16786e47b2 100644 --- a/lib/api/entities.rb +++ b/lib/api/entities.rb @@ -532,7 +532,7 @@ module API class PersonalSnippet < Snippet expose :raw_url do |snippet| - Gitlab::UrlBuilder.build(snippet) + "/raw" + Gitlab::UrlBuilder.build(snippet, raw: true) end end diff --git a/lib/gitlab/etag_caching/router.rb b/lib/gitlab/etag_caching/router.rb index efddda0ec65..17d9cf08367 100644 --- a/lib/gitlab/etag_caching/router.rb +++ b/lib/gitlab/etag_caching/router.rb @@ -23,6 +23,10 @@ module Gitlab 'issue_notes' ), Gitlab::EtagCaching::Router::Route.new( + %r(#{RESERVED_WORDS_PREFIX}/noteable/merge_request/\d+/notes\z), + 'merge_request_notes' + ), + Gitlab::EtagCaching::Router::Route.new( %r(#{RESERVED_WORDS_PREFIX}/issues/\d+/realtime_changes\z), 'issue_title' ), diff --git a/lib/gitlab/import_export/project_tree_restorer.rb b/lib/gitlab/import_export/project_tree_restorer.rb index c401f96b5c1..c89b8f563dc 100644 --- a/lib/gitlab/import_export/project_tree_restorer.rb +++ b/lib/gitlab/import_export/project_tree_restorer.rb @@ -85,13 +85,16 @@ module Gitlab # we do not care if we process array or hash data_hashes = [data_hashes] unless data_hashes.is_a?(Array) + relation_index = 0 + # consume and remove objects from memory while data_hash = data_hashes.shift - process_project_relation_item!(relation_key, relation_definition, data_hash) + process_project_relation_item!(relation_key, relation_definition, relation_index, data_hash) + relation_index += 1 end end - def process_project_relation_item!(relation_key, relation_definition, data_hash) + def process_project_relation_item!(relation_key, relation_definition, relation_index, data_hash) relation_object = build_relation(relation_key, relation_definition, data_hash) return unless relation_object return if group_model?(relation_object) @@ -100,6 +103,25 @@ module Gitlab relation_object.save! save_id_mapping(relation_key, data_hash, relation_object) + rescue => e + # re-raise if not enabled + raise e unless Feature.enabled?(:import_graceful_failures, @project.group) + + log_import_failure(relation_key, relation_index, e) + end + + def log_import_failure(relation_key, relation_index, exception) + Gitlab::Sentry.track_acceptable_exception(exception, + extra: { project_id: @project.id, relation_key: relation_key, relation_index: relation_index }) + + ImportFailure.create( + project: @project, + relation_key: relation_key, + relation_index: relation_index, + exception_class: exception.class.to_s, + exception_message: exception.message.truncate(255), + correlation_id_value: Labkit::Correlation::CorrelationId.current_id + ) end # Older, serialized CI pipeline exports may only have a diff --git a/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled.rb b/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled.rb index cddd4f18cc3..805283b0f93 100644 --- a/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled.rb +++ b/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled.rb @@ -5,7 +5,7 @@ module Gitlab module PerformanceBar module RedisAdapterWhenPeekEnabled def save(request_id) - super if ::Gitlab::PerformanceBar.enabled_for_request? && request_id.present? + super if ::Gitlab::PerformanceBar.enabled_for_request? 
end end end diff --git a/lib/gitlab/url_builder.rb b/lib/gitlab/url_builder.rb index 038067eeae4..5a9eef8288f 100644 --- a/lib/gitlab/url_builder.rb +++ b/lib/gitlab/url_builder.rb @@ -6,10 +6,10 @@ module Gitlab include GitlabRoutingHelper include ActionView::RecordIdentifier - attr_reader :object + attr_reader :object, :opts - def self.build(object) - new(object).url + def self.build(object, opts = {}) + new(object, opts).url end def url @@ -24,10 +24,8 @@ module Gitlab note_url when WikiPage wiki_page_url - when ProjectSnippet - project_snippet_url(object.project, object) when Snippet - snippet_url(object) + opts[:raw].present? ? raw_snippet_url(object) : snippet_url(object) when Milestone milestone_url(object) when ::Ci::Build @@ -41,8 +39,9 @@ module Gitlab private - def initialize(object) + def initialize(object, opts = {}) @object = object + @opts = opts end def commit_url(opts = {}) @@ -66,13 +65,7 @@ module Gitlab merge_request_url(object.noteable, anchor: dom_id(object)) elsif object.for_snippet? - snippet = object.noteable - - if snippet.is_a?(PersonalSnippet) - snippet_url(snippet, anchor: dom_id(object)) - else - project_snippet_url(snippet.project, snippet, anchor: dom_id(object)) - end + snippet_url(object.noteable, anchor: dom_id(object)) end end diff --git a/lib/gitlab/visibility_level.rb b/lib/gitlab/visibility_level.rb index e2787744f09..a1d462ea9f5 100644 --- a/lib/gitlab/visibility_level.rb +++ b/lib/gitlab/visibility_level.rb @@ -115,6 +115,18 @@ module Gitlab end end + def visibility_level_decreased? + return false unless visibility_level_previous_changes + + before, after = visibility_level_previous_changes + + before && after && after < before + end + + def visibility_level_previous_changes + previous_changes[:visibility_level] + end + def private? visibility_level_value == PRIVATE end diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 0518b7450b3..15c05f1f37d 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -12053,6 +12053,18 @@ msgstr "" msgid "Package was removed" msgstr "" +msgid "PackageRegistry|Copy Maven XML" +msgstr "" + +msgid "PackageRegistry|Copy Maven command" +msgstr "" + +msgid "PackageRegistry|Copy Maven registry XML" +msgstr "" + +msgid "PackageRegistry|Copy and paste this inside your %{codeStart}pom.xml%{codeEnd} %{codeStart}dependencies%{codeEnd} block." +msgstr "" + msgid "PackageRegistry|Copy npm command" msgstr "" @@ -12071,12 +12083,24 @@ msgstr "" msgid "PackageRegistry|Delete package" msgstr "" +msgid "PackageRegistry|For more information on the Maven registry, %{linkStart}see the documentation%{linkEnd}." +msgstr "" + +msgid "PackageRegistry|If you haven't already done so, you will need to add the below to your %{codeStart}pom.xml%{codeEnd} file." +msgstr "" + msgid "PackageRegistry|Installation" msgstr "" msgid "PackageRegistry|Learn how to %{noPackagesLinkStart}publish and share your packages%{noPackagesLinkEnd} with GitLab." 
msgstr "" +msgid "PackageRegistry|Maven Command" +msgstr "" + +msgid "PackageRegistry|Maven XML" +msgstr "" + msgid "PackageRegistry|Package installation" msgstr "" diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb index c8a697cfed4..d4fab48b426 100644 --- a/spec/db/schema_spec.rb +++ b/spec/db/schema_spec.rb @@ -43,6 +43,7 @@ describe 'Database schema' do geo_nodes: %w[oauth_application_id], geo_repository_deleted_events: %w[project_id], geo_upload_deleted_events: %w[upload_id model_id], + import_failures: %w[project_id], identities: %w[user_id], issues: %w[last_edited_by_id state_id], jira_tracker_data: %w[jira_issue_transition_id], diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb index bcb762664f7..8f83cb77709 100644 --- a/spec/finders/snippets_finder_spec.rb +++ b/spec/finders/snippets_finder_spec.rb @@ -60,10 +60,20 @@ describe SnippetsFinder do end context 'filter by author' do - it 'returns all public and internal snippets' do - snippets = described_class.new(create(:user), author: user).execute + context 'when the author is a User object' do + it 'returns all public and internal snippets' do + snippets = described_class.new(create(:user), author: user).execute - expect(snippets).to contain_exactly(internal_personal_snippet, public_personal_snippet) + expect(snippets).to contain_exactly(internal_personal_snippet, public_personal_snippet) + end + end + + context 'when the author is the User id' do + it 'returns all public and internal snippets' do + snippets = described_class.new(create(:user), author: user.id).execute + + expect(snippets).to contain_exactly(internal_personal_snippet, public_personal_snippet) + end end it 'returns internal snippets' do @@ -101,13 +111,33 @@ describe SnippetsFinder do expect(snippets).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet) end + + context 'when author is not valid' do + it 'returns quickly' do + finder = described_class.new(admin, author: 1234) + + expect(finder).not_to receive(:init_collection) + expect(Snippet).to receive(:none).and_call_original + expect(finder.execute).to be_empty + end + end end - context 'project snippets' do - it 'returns public personal and project snippets for unauthorized user' do - snippets = described_class.new(nil, project: project).execute + context 'filter by project' do + context 'when project is a Project object' do + it 'returns public personal and project snippets for unauthorized user' do + snippets = described_class.new(nil, project: project).execute - expect(snippets).to contain_exactly(public_project_snippet) + expect(snippets).to contain_exactly(public_project_snippet) + end + end + + context 'when project is a Project id' do + it 'returns public personal and project snippets for unauthorized user' do + snippets = described_class.new(nil, project: project.id).execute + + expect(snippets).to contain_exactly(public_project_snippet) + end end it 'returns public and internal snippets for non project members' do @@ -175,6 +205,49 @@ describe SnippetsFinder do ) end end + + context 'when project is not valid' do + it 'returns quickly' do + finder = described_class.new(admin, project: 1234) + + expect(finder).not_to receive(:init_collection) + expect(Snippet).to receive(:none).and_call_original + expect(finder.execute).to be_empty + end + end + end + + context 'filter by snippet type' do + context 'when filtering by only_personal snippet' do + it 'returns only personal snippet' do + snippets = 
described_class.new(admin, only_personal: true).execute + + expect(snippets).to contain_exactly(private_personal_snippet, + internal_personal_snippet, + public_personal_snippet) + end + end + + context 'when filtering by only_project snippet' do + it 'returns only project snippet' do + snippets = described_class.new(admin, only_project: true).execute + + expect(snippets).to contain_exactly(private_project_snippet, + internal_project_snippet, + public_project_snippet) + end + end + end + + context 'filtering by ids' do + it 'returns only personal snippet' do + snippets = described_class.new( + admin, ids: [private_personal_snippet.id, + internal_personal_snippet.id] + ).execute + + expect(snippets).to contain_exactly(private_personal_snippet, internal_personal_snippet) + end end context 'explore snippets' do diff --git a/spec/fixtures/lib/gitlab/import_export/with_invalid_records/project.json b/spec/fixtures/lib/gitlab/import_export/with_invalid_records/project.json new file mode 100644 index 00000000000..52b5649ae59 --- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/with_invalid_records/project.json @@ -0,0 +1,38 @@ +{ + "description": "Nisi et repellendus ut enim quo accusamus vel magnam.", + "import_type": "gitlab_project", + "creator_id": 123, + "visibility_level": 10, + "archived": false, + "milestones": [ + { + "id": 1, + "title": null, + "project_id": 8, + "description": 123, + "due_date": null, + "created_at": "NOT A DATE", + "updated_at": "NOT A DATE", + "state": "active", + "iid": 1, + "group_id": null + }, + { + "id": 42, + "title": "A valid milestone", + "project_id": 8, + "description": "Project-level milestone", + "due_date": null, + "created_at": "2016-06-14T15:02:04.415Z", + "updated_at": "2016-06-14T15:02:04.415Z", + "state": "active", + "iid": 1, + "group_id": null + } + ], + "labels": [], + "issues": [], + "services": [], + "snippets": [], + "hooks": [] +} diff --git a/spec/frontend/monitoring/charts/time_series_spec.js b/spec/frontend/monitoring/charts/time_series_spec.js index c561c5edf3c..6fa2d15ccbf 100644 --- a/spec/frontend/monitoring/charts/time_series_spec.js +++ b/spec/frontend/monitoring/charts/time_series_spec.js @@ -48,7 +48,7 @@ describe('Time series component', () => { // Mock data contains 2 panels, pick the first one store.commit(`monitoringDashboard/${types.SET_QUERY_RESULT}`, mockedQueryResultPayload); - [mockGraphData] = store.state.monitoringDashboard.dashboard.panel_groups[0].metrics; + [mockGraphData] = store.state.monitoringDashboard.dashboard.panel_groups[0].panels; makeTimeSeriesChart = (graphData, type) => shallowMount(TimeSeries, { @@ -235,7 +235,7 @@ describe('Time series component', () => { }); it('utilizes all data points', () => { - const { values } = mockGraphData.queries[0].result[0]; + const { values } = mockGraphData.metrics[0].result[0]; expect(chartData.length).toBe(1); expect(seriesData().data.length).toBe(values.length); diff --git a/spec/frontend/monitoring/components/charts/anomaly_spec.js b/spec/frontend/monitoring/components/charts/anomaly_spec.js index 6707d0b1fe8..38aef6e6052 100644 --- a/spec/frontend/monitoring/components/charts/anomaly_spec.js +++ b/spec/frontend/monitoring/components/charts/anomaly_spec.js @@ -17,8 +17,8 @@ const mockProjectPath = `${TEST_HOST}${mockProjectDir}`; jest.mock('~/lib/utils/icon_utils'); // mock getSvgIconPathContent const makeAnomalyGraphData = (datasetName, template = anomalyMockGraphData) => { - const queries = anomalyMockResultValues[datasetName].map((values, index) => ({ - 
...template.queries[index], + const metrics = anomalyMockResultValues[datasetName].map((values, index) => ({ + ...template.metrics[index], result: [ { metrics: {}, @@ -26,7 +26,7 @@ const makeAnomalyGraphData = (datasetName, template = anomalyMockGraphData) => { }, ], })); - return { ...template, queries }; + return { ...template, metrics }; }; describe('Anomaly chart component', () => { @@ -67,19 +67,19 @@ describe('Anomaly chart component', () => { describe('graph-data', () => { it('receives a single "metric" series', () => { const { graphData } = getTimeSeriesProps(); - expect(graphData.queries.length).toBe(1); + expect(graphData.metrics.length).toBe(1); }); it('receives "metric" with all data', () => { const { graphData } = getTimeSeriesProps(); - const query = graphData.queries[0]; - const expectedQuery = makeAnomalyGraphData(dataSetName).queries[0]; + const query = graphData.metrics[0]; + const expectedQuery = makeAnomalyGraphData(dataSetName).metrics[0]; expect(query).toEqual(expectedQuery); }); it('receives the "metric" results', () => { const { graphData } = getTimeSeriesProps(); - const { result } = graphData.queries[0]; + const { result } = graphData.metrics[0]; const { values } = result[0]; const [metricDataset] = dataSet; expect(values).toEqual(expect.any(Array)); @@ -266,12 +266,12 @@ describe('Anomaly chart component', () => { describe('graph-data', () => { it('receives a single "metric" series', () => { const { graphData } = getTimeSeriesProps(); - expect(graphData.queries.length).toBe(1); + expect(graphData.metrics.length).toBe(1); }); it('receives "metric" results and applies the offset to them', () => { const { graphData } = getTimeSeriesProps(); - const { result } = graphData.queries[0]; + const { result } = graphData.metrics[0]; const { values } = result[0]; const [metricDataset] = dataSet; expect(values).toEqual(expect.any(Array)); diff --git a/spec/frontend/monitoring/embed/embed_spec.js b/spec/frontend/monitoring/embed/embed_spec.js index 3e22b0858e6..c5219f6130e 100644 --- a/spec/frontend/monitoring/embed/embed_spec.js +++ b/spec/frontend/monitoring/embed/embed_spec.js @@ -62,7 +62,7 @@ describe('Embed', () => { describe('metrics are available', () => { beforeEach(() => { store.state.monitoringDashboard.dashboard.panel_groups = groups; - store.state.monitoringDashboard.dashboard.panel_groups[0].metrics = metricsData; + store.state.monitoringDashboard.dashboard.panel_groups[0].panels = metricsData; store.state.monitoringDashboard.metricsWithData = metricsWithData; mountComponent(); diff --git a/spec/frontend/monitoring/embed/mock_data.js b/spec/frontend/monitoring/embed/mock_data.js index 1685021fd4b..8941c183807 100644 --- a/spec/frontend/monitoring/embed/mock_data.js +++ b/spec/frontend/monitoring/embed/mock_data.js @@ -42,38 +42,34 @@ export const metrics = [ }, ]; -const queries = [ +const result = [ { - result: [ - { - values: [ - ['Mon', 1220], - ['Tue', 932], - ['Wed', 901], - ['Thu', 934], - ['Fri', 1290], - ['Sat', 1330], - ['Sun', 1320], - ], - }, + values: [ + ['Mon', 1220], + ['Tue', 932], + ['Wed', 901], + ['Thu', 934], + ['Fri', 1290], + ['Sat', 1330], + ['Sun', 1320], ], }, ]; export const metricsData = [ { - queries, metrics: [ { metric_id: 15, + result, }, ], }, { - queries, metrics: [ { metric_id: 16, + result, }, ], }, diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js index c42366ab484..758e86235be 100644 --- a/spec/frontend/monitoring/mock_data.js +++ b/spec/frontend/monitoring/mock_data.js @@ -110,9 
+110,6 @@ export const anomalyMockGraphData = { type: 'anomaly-chart', weight: 3, metrics: [ - // Not used - ], - queries: [ { metricId: '90', id: 'metric', diff --git a/spec/frontend/monitoring/panel_type_spec.js b/spec/frontend/monitoring/panel_type_spec.js index 54a63e7f61f..b30ad747a12 100644 --- a/spec/frontend/monitoring/panel_type_spec.js +++ b/spec/frontend/monitoring/panel_type_spec.js @@ -33,7 +33,7 @@ describe('Panel Type component', () => { let glEmptyChart; // Deep clone object before modifying const graphDataNoResult = JSON.parse(JSON.stringify(graphDataPrometheusQueryRange)); - graphDataNoResult.queries[0].result = []; + graphDataNoResult.metrics[0].result = []; beforeEach(() => { panelType = shallowMount(PanelType, { @@ -143,7 +143,7 @@ describe('Panel Type component', () => { describe('csvText', () => { it('converts metrics data from json to csv', () => { const header = `timestamp,${graphDataPrometheusQueryRange.y_label}`; - const data = graphDataPrometheusQueryRange.queries[0].result[0].values; + const data = graphDataPrometheusQueryRange.metrics[0].result[0].values; const firstRow = `${data[0][0]},${data[0][1]}`; const secondRow = `${data[1][0]},${data[1][1]}`; diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js index fdad290a8d6..42031e01878 100644 --- a/spec/frontend/monitoring/store/mutations_spec.js +++ b/spec/frontend/monitoring/store/mutations_spec.js @@ -27,7 +27,7 @@ describe('Monitoring mutations', () => { it('normalizes values', () => { mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups); const expectedLabel = 'Pod average'; - const { label, query_range } = stateCopy.dashboard.panel_groups[0].metrics[0].metrics[0]; + const { label, query_range } = stateCopy.dashboard.panel_groups[0].panels[0].metrics[0]; expect(label).toEqual(expectedLabel); expect(query_range.length).toBeGreaterThan(0); }); @@ -39,23 +39,12 @@ describe('Monitoring mutations', () => { expect(stateCopy.dashboard.panel_groups[0].panels[0].metrics.length).toEqual(1); expect(stateCopy.dashboard.panel_groups[0].panels[1].metrics.length).toEqual(1); }); - it('assigns queries a metric id', () => { + it('assigns metrics a metric id', () => { mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups); - expect(stateCopy.dashboard.panel_groups[0].metrics[0].queries[0].metricId).toEqual( + expect(stateCopy.dashboard.panel_groups[0].panels[0].metrics[0].metricId).toEqual( '17_system_metrics_kubernetes_container_memory_average', ); }); - describe('dashboard endpoint', () => { - const dashboardGroups = metricsDashboardResponse.dashboard.panel_groups; - it('aliases group panels to metrics for backwards compatibility', () => { - mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups); - expect(stateCopy.dashboard.panel_groups[0].metrics[0]).toBeDefined(); - }); - it('aliases panel metrics to queries for backwards compatibility', () => { - mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups); - expect(stateCopy.dashboard.panel_groups[0].metrics[0].queries).toBeDefined(); - }); - }); }); describe('RECEIVE_DEPLOYMENTS_DATA_SUCCESS', () => { diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js index 98388ac19f8..d562aaaefe9 100644 --- a/spec/frontend/monitoring/store/utils_spec.js +++ b/spec/frontend/monitoring/store/utils_spec.js @@ -1,44 +1,4 @@ -import { groupQueriesByChartInfo, normalizeMetric, uniqMetricsId } from 
'~/monitoring/stores/utils'; - -describe('groupQueriesByChartInfo', () => { - let input; - let output; - - it('groups metrics with the same chart title and y_axis label', () => { - input = [ - { title: 'title', y_label: 'MB', queries: [{}] }, - { title: 'title', y_label: 'MB', queries: [{}] }, - { title: 'new title', y_label: 'MB', queries: [{}] }, - ]; - - output = [ - { - title: 'title', - y_label: 'MB', - queries: [{ metricId: null }, { metricId: null }], - }, - { title: 'new title', y_label: 'MB', queries: [{ metricId: null }] }, - ]; - - expect(groupQueriesByChartInfo(input)).toEqual(output); - }); - - // Functionality associated with the /additional_metrics endpoint - it("associates a chart's stringified metric_id with the metric", () => { - input = [{ id: 3, title: 'new title', y_label: 'MB', queries: [{}] }]; - output = [{ id: 3, title: 'new title', y_label: 'MB', queries: [{ metricId: '3' }] }]; - - expect(groupQueriesByChartInfo(input)).toEqual(output); - }); - - // Functionality associated with the /metrics_dashboard endpoint - it('aliases a stringified metrics_id on the metric to the metricId key', () => { - input = [{ title: 'new title', y_label: 'MB', queries: [{ metric_id: 3 }] }]; - output = [{ title: 'new title', y_label: 'MB', queries: [{ metricId: '3', metric_id: 3 }] }]; - - expect(groupQueriesByChartInfo(input)).toEqual(output); - }); -}); +import { normalizeMetric, uniqMetricsId } from '~/monitoring/stores/utils'; describe('normalizeMetric', () => { [ @@ -54,7 +14,7 @@ describe('normalizeMetric', () => { }, ].forEach(({ args, expected }) => { it(`normalizes metric to "${expected}" with args=${JSON.stringify(args)}`, () => { - expect(normalizeMetric(...args)).toEqual({ metric_id: expected }); + expect(normalizeMetric(...args)).toEqual({ metric_id: expected, metricId: expected }); }); }); }); diff --git a/spec/frontend/vue_shared/components/clipboard_button_spec.js b/spec/frontend/vue_shared/components/clipboard_button_spec.js new file mode 100644 index 00000000000..4fb6924daba --- /dev/null +++ b/spec/frontend/vue_shared/components/clipboard_button_spec.js @@ -0,0 +1,60 @@ +import { shallowMount } from '@vue/test-utils'; +import { GlButton } from '@gitlab/ui'; +import ClipboardButton from '~/vue_shared/components/clipboard_button.vue'; +import Icon from '~/vue_shared/components/icon.vue'; + +describe('clipboard button', () => { + let wrapper; + + const createWrapper = propsData => { + wrapper = shallowMount(ClipboardButton, { + propsData, + sync: false, + attachToDocument: true, + }); + }; + + afterEach(() => { + wrapper.destroy(); + wrapper = null; + }); + + describe('without gfm', () => { + beforeEach(() => { + createWrapper({ + text: 'copy me', + title: 'Copy this value', + cssClass: 'btn-danger', + }); + }); + + it('renders a button for clipboard', () => { + expect(wrapper.find(GlButton).exists()).toBe(true); + expect(wrapper.attributes('data-clipboard-text')).toBe('copy me'); + expect(wrapper.find(Icon).props('name')).toBe('duplicate'); + }); + + it('should have a tooltip with default values', () => { + expect(wrapper.attributes('data-original-title')).toBe('Copy this value'); + }); + + it('should render provided classname', () => { + expect(wrapper.classes()).toContain('btn-danger'); + }); + }); + + describe('with gfm', () => { + it('sets data-clipboard-text with gfm', () => { + createWrapper({ + text: 'copy me', + gfm: '`path/to/file`', + title: 'Copy this value', + cssClass: 'btn-danger', + }); + + expect(wrapper.attributes('data-clipboard-text')).toBe( + 
'{"text":"copy me","gfm":"`path/to/file`"}', + ); + }); + }); +}); diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js new file mode 100644 index 00000000000..bdd18110629 --- /dev/null +++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js @@ -0,0 +1,113 @@ +import _ from 'underscore'; +import { trimText } from 'helpers/text_helper'; +import { shallowMount } from '@vue/test-utils'; +import { GlLink } from '@gitlab/ui'; +import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue'; +import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue'; +import { TEST_HOST } from 'spec/test_constants'; + +describe('User Avatar Link Component', () => { + let wrapper; + + const defaultProps = { + linkHref: `${TEST_HOST}/myavatarurl.com`, + imgSize: 99, + imgSrc: `${TEST_HOST}/myavatarurl.com`, + imgAlt: 'mydisplayname', + imgCssClasses: 'myextraavatarclass', + tooltipText: 'tooltip text', + tooltipPlacement: 'bottom', + username: 'username', + }; + + const createWrapper = props => { + wrapper = shallowMount(UserAvatarLink, { + propsData: { + ...defaultProps, + ...props, + }, + sync: false, + attachToDocument: true, + }); + }; + + beforeEach(() => { + createWrapper(); + }); + + afterEach(() => { + wrapper.destroy(); + wrapper = null; + }); + + it('should return a defined Vue component', () => { + expect(wrapper.isVueInstance()).toBe(true); + }); + + it('should have user-avatar-image registered as child component', () => { + expect(wrapper.vm.$options.components.userAvatarImage).toBeDefined(); + }); + + it('user-avatar-link should have user-avatar-image as child component', () => { + expect(wrapper.find(UserAvatarImage).exists()).toBe(true); + }); + + it('should render GlLink as a child element', () => { + const link = wrapper.find(GlLink); + + expect(link.exists()).toBe(true); + expect(link.attributes('href')).toBe(defaultProps.linkHref); + }); + + it('should return necessary props as defined', () => { + _.each(defaultProps, (val, key) => { + expect(wrapper.vm[key]).toBeDefined(); + }); + }); + + describe('no username', () => { + beforeEach(() => { + createWrapper({ + username: '', + }); + }); + + it('should only render image tag in link', () => { + const childElements = wrapper.vm.$el.childNodes; + + expect(wrapper.find('img')).not.toBe('null'); + + // Vue will render the hidden component as <!----> + expect(childElements[1].tagName).toBeUndefined(); + }); + + it('should render avatar image tooltip', () => { + expect(wrapper.vm.shouldShowUsername).toBe(false); + expect(wrapper.vm.avatarTooltipText).toEqual(defaultProps.tooltipText); + }); + }); + + describe('username', () => { + it('should not render avatar image tooltip', () => { + expect(wrapper.find('.js-user-avatar-image-toolip').exists()).toBe(false); + }); + + it('should render username prop in <span>', () => { + expect(trimText(wrapper.find('.js-user-avatar-link-username').text())).toEqual( + defaultProps.username, + ); + }); + + it('should render text tooltip for <span>', () => { + expect( + wrapper.find('.js-user-avatar-link-username').attributes('data-original-title'), + ).toEqual(defaultProps.tooltipText); + }); + + it('should render text tooltip placement for <span>', () => { + expect(wrapper.find('.js-user-avatar-link-username').attributes('tooltip-placement')).toBe( + defaultProps.tooltipPlacement, + ); + }); + }); +}); diff --git 
a/spec/helpers/award_emoji_helper_spec.rb b/spec/helpers/award_emoji_helper_spec.rb index 2ee27bc5427..2ad6b68a34c 100644 --- a/spec/helpers/award_emoji_helper_spec.rb +++ b/spec/helpers/award_emoji_helper_spec.rb @@ -4,59 +4,69 @@ require 'spec_helper' describe AwardEmojiHelper do describe '.toggle_award_url' do + subject { helper.toggle_award_url(awardable) } + context 'note on personal snippet' do - let(:note) { create(:note_on_personal_snippet) } + let(:snippet) { create(:personal_snippet) } + let(:note) { create(:note_on_personal_snippet, noteable: snippet) } + let(:awardable) { note } + + subject { helper.toggle_award_url(note) } it 'returns correct url' do expected_url = "/snippets/#{note.noteable.id}/notes/#{note.id}/toggle_award_emoji" - expect(helper.toggle_award_url(note)).to eq(expected_url) + expect(subject).to eq(expected_url) end end context 'note on project item' do let(:note) { create(:note_on_project_snippet) } + let(:awardable) { note } it 'returns correct url' do @project = note.noteable.project expected_url = "/#{@project.namespace.path}/#{@project.path}/notes/#{note.id}/toggle_award_emoji" - expect(helper.toggle_award_url(note)).to eq(expected_url) + expect(subject).to eq(expected_url) end end context 'personal snippet' do let(:snippet) { create(:personal_snippet) } + let(:awardable) { snippet } it 'returns correct url' do expected_url = "/snippets/#{snippet.id}/toggle_award_emoji" - expect(helper.toggle_award_url(snippet)).to eq(expected_url) + expect(subject).to eq(expected_url) end end context 'merge request' do let(:merge_request) { create(:merge_request) } + let(:awardable) { merge_request } it 'returns correct url' do @project = merge_request.project expected_url = "/#{@project.namespace.path}/#{@project.path}/merge_requests/#{merge_request.iid}/toggle_award_emoji" - expect(helper.toggle_award_url(merge_request)).to eq(expected_url) + expect(subject).to eq(expected_url) end end context 'issue' do let(:issue) { create(:issue) } + let(:awardable) { issue } it 'returns correct url' do @project = issue.project expected_url = "/#{@project.namespace.path}/#{@project.path}/issues/#{issue.iid}/toggle_award_emoji" - expect(helper.toggle_award_url(issue)).to eq(expected_url) + expect(subject).to eq(expected_url) end end end diff --git a/spec/helpers/gitlab_routing_helper_spec.rb b/spec/helpers/gitlab_routing_helper_spec.rb index 38699108b06..9cc1a9dfd5b 100644 --- a/spec/helpers/gitlab_routing_helper_spec.rb +++ b/spec/helpers/gitlab_routing_helper_spec.rb @@ -112,4 +112,98 @@ describe GitlabRoutingHelper do expect(edit_milestone_path(milestone)).to eq("/#{milestone.project.full_path}/-/milestones/#{milestone.iid}/edit") end end + + context 'snippets' do + let_it_be(:personal_snippet) { create(:personal_snippet) } + let_it_be(:project_snippet) { create(:project_snippet) } + let_it_be(:note) { create(:note_on_personal_snippet, noteable: personal_snippet) } + + describe '#snippet_path' do + it 'returns the personal snippet path' do + expect(snippet_path(personal_snippet)).to eq("/snippets/#{personal_snippet.id}") + end + + it 'returns the project snippet path' do + expect(snippet_path(project_snippet)).to eq("/#{project_snippet.project.full_path}/snippets/#{project_snippet.id}") + end + end + + describe '#snippet_url' do + it 'returns the personal snippet url' do + expect(snippet_url(personal_snippet)).to eq("#{Settings.gitlab['url']}/snippets/#{personal_snippet.id}") + end + + it 'returns the project snippet url' do + expect(snippet_url(project_snippet)).to 
eq("#{Settings.gitlab['url']}/#{project_snippet.project.full_path}/snippets/#{project_snippet.id}") + end + end + + describe '#raw_snippet_path' do + it 'returns the raw personal snippet path' do + expect(raw_snippet_path(personal_snippet)).to eq("/snippets/#{personal_snippet.id}/raw") + end + + it 'returns the raw project snippet path' do + expect(raw_snippet_path(project_snippet)).to eq("/#{project_snippet.project.full_path}/snippets/#{project_snippet.id}/raw") + end + end + + describe '#raw_snippet_url' do + it 'returns the raw personal snippet url' do + expect(raw_snippet_url(personal_snippet)).to eq("#{Settings.gitlab['url']}/snippets/#{personal_snippet.id}/raw") + end + + it 'returns the raw project snippet url' do + expect(raw_snippet_url(project_snippet)).to eq("#{Settings.gitlab['url']}/#{project_snippet.project.full_path}/snippets/#{project_snippet.id}/raw") + end + end + + describe '#snippet_notes_path' do + it 'returns the notes path for the personal snippet' do + expect(snippet_notes_path(personal_snippet)).to eq("/snippets/#{personal_snippet.id}/notes") + end + end + + describe '#snippet_notes_url' do + it 'returns the notes url for the personal snippet' do + expect(snippet_notes_url(personal_snippet)).to eq("#{Settings.gitlab['url']}/snippets/#{personal_snippet.id}/notes") + end + end + + describe '#snippet_note_path' do + it 'returns the note path for the personal snippet' do + expect(snippet_note_path(personal_snippet, note)).to eq("/snippets/#{personal_snippet.id}/notes/#{note.id}") + end + end + + describe '#snippet_note_url' do + it 'returns the note url for the personal snippet' do + expect(snippet_note_url(personal_snippet, note)).to eq("#{Settings.gitlab['url']}/snippets/#{personal_snippet.id}/notes/#{note.id}") + end + end + + describe '#toggle_award_emoji_snippet_note_path' do + it 'returns the note award emoji path for the personal snippet' do + expect(toggle_award_emoji_snippet_note_path(personal_snippet, note)).to eq("/snippets/#{personal_snippet.id}/notes/#{note.id}/toggle_award_emoji") + end + end + + describe '#toggle_award_emoji_snippet_note_url' do + it 'returns the note award emoji url for the personal snippet' do + expect(toggle_award_emoji_snippet_note_url(personal_snippet, note)).to eq("#{Settings.gitlab['url']}/snippets/#{personal_snippet.id}/notes/#{note.id}/toggle_award_emoji") + end + end + + describe '#toggle_award_emoji_snippet_path' do + it 'returns the award emoji path for the personal snippet' do + expect(toggle_award_emoji_snippet_path(personal_snippet)).to eq("/snippets/#{personal_snippet.id}/toggle_award_emoji") + end + end + + describe '#toggle_award_emoji_snippet_url' do + it 'returns the award url for the personal snippet' do + expect(toggle_award_emoji_snippet_url(personal_snippet)).to eq("#{Settings.gitlab['url']}/snippets/#{personal_snippet.id}/toggle_award_emoji") + end + end + end end diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb index d88e151a11c..2a24950502b 100644 --- a/spec/helpers/snippets_helper_spec.rb +++ b/spec/helpers/snippets_helper_spec.rb @@ -9,107 +9,18 @@ describe SnippetsHelper do let_it_be(:public_personal_snippet) { create(:personal_snippet, :public) } let_it_be(:public_project_snippet) { create(:project_snippet, :public) } - describe '#reliable_snippet_path' do - subject { reliable_snippet_path(snippet) } - - context 'personal snippets' do - let(:snippet) { public_personal_snippet } - - context 'public' do - it 'returns a full path' do - expect(subject).to 
eq("/snippets/#{snippet.id}") - end - end - end - - context 'project snippets' do - let(:snippet) { public_project_snippet } - - it 'returns a full path' do - expect(subject).to eq("/#{snippet.project.full_path}/snippets/#{snippet.id}") - end - end - end - - describe '#reliable_snippet_url' do - subject { reliable_snippet_url(snippet) } - - context 'personal snippets' do - let(:snippet) { public_personal_snippet } - - context 'public' do - it 'returns a full url' do - expect(subject).to eq("http://test.host/snippets/#{snippet.id}") - end - end - end - - context 'project snippets' do - let(:snippet) { public_project_snippet } - - it 'returns a full url' do - expect(subject).to eq("http://test.host/#{snippet.project.full_path}/snippets/#{snippet.id}") - end - end - end - - describe '#reliable_raw_snippet_path' do - subject { reliable_raw_snippet_path(snippet) } - - context 'personal snippets' do - let(:snippet) { public_personal_snippet } - - context 'public' do - it 'returns a full path' do - expect(subject).to eq("/snippets/#{snippet.id}/raw") - end - end - end - - context 'project snippets' do - let(:snippet) { public_project_snippet } - - it 'returns a full path' do - expect(subject).to eq("/#{snippet.project.full_path}/snippets/#{snippet.id}/raw") - end - end - end - - describe '#reliable_raw_snippet_url' do - subject { reliable_raw_snippet_url(snippet) } - - context 'personal snippets' do - let(:snippet) { public_personal_snippet } - - context 'public' do - it 'returns a full url' do - expect(subject).to eq("http://test.host/snippets/#{snippet.id}/raw") - end - end - end - - context 'project snippets' do - let(:snippet) { public_project_snippet } - - it 'returns a full url' do - expect(subject).to eq("http://test.host/#{snippet.project.full_path}/snippets/#{snippet.id}/raw") - end - end - end - describe '#embedded_raw_snippet_button' do subject { embedded_raw_snippet_button.to_s } it 'returns view raw button of embedded snippets for personal snippets' do @snippet = create(:personal_snippet, :public) - - expect(subject).to eq(download_link("http://test.host/snippets/#{@snippet.id}/raw")) + expect(subject).to eq(download_link("#{Settings.gitlab['url']}/snippets/#{@snippet.id}/raw")) end it 'returns view raw button of embedded snippets for project snippets' do @snippet = create(:project_snippet, :public) - expect(subject).to eq(download_link("http://test.host/#{@snippet.project.path_with_namespace}/snippets/#{@snippet.id}/raw")) + expect(subject).to eq(download_link("#{Settings.gitlab['url']}/#{@snippet.project.path_with_namespace}/snippets/#{@snippet.id}/raw")) end def download_link(url) @@ -123,13 +34,13 @@ describe SnippetsHelper do it 'returns download button of embedded snippets for personal snippets' do @snippet = create(:personal_snippet, :public) - expect(subject).to eq(download_link("http://test.host/snippets/#{@snippet.id}/raw")) + expect(subject).to eq(download_link("#{Settings.gitlab['url']}/snippets/#{@snippet.id}/raw")) end it 'returns download button of embedded snippets for project snippets' do @snippet = create(:project_snippet, :public) - expect(subject).to eq(download_link("http://test.host/#{@snippet.project.path_with_namespace}/snippets/#{@snippet.id}/raw")) + expect(subject).to eq(download_link("#{Settings.gitlab['url']}/#{@snippet.project.path_with_namespace}/snippets/#{@snippet.id}/raw")) end def download_link(url) @@ -145,7 +56,7 @@ describe SnippetsHelper do context 'public' do it 'returns a script tag with the snippet full url' do - expect(subject).to 
eq(script_embed("http://test.host/snippets/#{snippet.id}")) + expect(subject).to eq(script_embed("#{Settings.gitlab['url']}/snippets/#{snippet.id}")) end end end @@ -154,7 +65,7 @@ describe SnippetsHelper do let(:snippet) { public_project_snippet } it 'returns a script tag with the snippet full url' do - expect(subject).to eq(script_embed("http://test.host/#{snippet.project.path_with_namespace}/snippets/#{snippet.id}")) + expect(subject).to eq(script_embed("#{Settings.gitlab['url']}/#{snippet.project.path_with_namespace}/snippets/#{snippet.id}")) end end diff --git a/spec/javascripts/boards/board_list_common_spec.js b/spec/javascripts/boards/board_list_common_spec.js index ada7589b795..a92d885790d 100644 --- a/spec/javascripts/boards/board_list_common_spec.js +++ b/spec/javascripts/boards/board_list_common_spec.js @@ -9,7 +9,7 @@ import BoardList from '~/boards/components/board_list.vue'; import '~/boards/models/issue'; import '~/boards/models/list'; -import { listObj, boardsMockInterceptor, mockBoardService } from './mock_data'; +import { listObj, boardsMockInterceptor } from './mock_data'; import store from '~/boards/stores'; import boardsStore from '~/boards/stores/boards_store'; @@ -26,7 +26,6 @@ export default function createComponent({ document.body.appendChild(el); const mock = new MockAdapter(axios); mock.onAny().reply(boardsMockInterceptor); - gl.boardService = mockBoardService(); boardsStore.create(); const BoardListComp = Vue.extend(BoardList); diff --git a/spec/javascripts/boards/board_new_issue_spec.js b/spec/javascripts/boards/board_new_issue_spec.js index 76675a78db2..8e4093cc25c 100644 --- a/spec/javascripts/boards/board_new_issue_spec.js +++ b/spec/javascripts/boards/board_new_issue_spec.js @@ -7,7 +7,7 @@ import boardNewIssue from '~/boards/components/board_new_issue.vue'; import boardsStore from '~/boards/stores/boards_store'; import '~/boards/models/list'; -import { listObj, boardsMockInterceptor, mockBoardService } from './mock_data'; +import { listObj, boardsMockInterceptor } from './mock_data'; describe('Issue boards new issue form', () => { let vm; @@ -36,7 +36,6 @@ describe('Issue boards new issue form', () => { mock = new MockAdapter(axios); mock.onAny().reply(boardsMockInterceptor); - gl.boardService = mockBoardService(); boardsStore.create(); list = new List(listObj); diff --git a/spec/javascripts/monitoring/charts/column_spec.js b/spec/javascripts/monitoring/charts/column_spec.js index 27b3d435f08..9676617e8e1 100644 --- a/spec/javascripts/monitoring/charts/column_spec.js +++ b/spec/javascripts/monitoring/charts/column_spec.js @@ -11,7 +11,7 @@ describe('Column component', () => { columnChart = shallowMount(localVue.extend(ColumnChart), { propsData: { graphData: { - queries: [ + metrics: [ { x_label: 'Time', y_label: 'Usage', diff --git a/spec/javascripts/monitoring/mock_data.js b/spec/javascripts/monitoring/mock_data.js index f9cc839bde6..f59c4ee4264 100644 --- a/spec/javascripts/monitoring/mock_data.js +++ b/spec/javascripts/monitoring/mock_data.js @@ -105,22 +105,11 @@ export const graphDataPrometheusQuery = { metrics: [ { id: 'metric_a1', - metric_id: 2, + metricId: '2', query: 'max(go_memstats_alloc_bytes{job="prometheus"}) by (job) /1024/1024', unit: 'MB', label: 'Total Consumption', - prometheus_endpoint_path: - '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024', - }, - ], - queries: [ - { - metricId: null, - id: 'metric_a1', metric_id: 2, - 
query: 'max(go_memstats_alloc_bytes{job="prometheus"}) by (job) /1024/1024', - unit: 'MB', - label: 'Total Consumption', prometheus_endpoint_path: '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024', result: [ @@ -140,24 +129,12 @@ export const graphDataPrometheusQueryRange = { metrics: [ { id: 'metric_a1', - metric_id: 2, + metricId: '2', query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024', unit: 'MB', label: 'Total Consumption', - prometheus_endpoint_path: - '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024', - }, - ], - queries: [ - { - metricId: '10', - id: 'metric_a1', metric_id: 2, - query_range: - 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024', - unit: 'MB', - label: 'Total Consumption', prometheus_endpoint_path: '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024', result: [ @@ -176,8 +153,7 @@ export const graphDataPrometheusQueryRangeMultiTrack = { weight: 3, x_label: 'Status Code', y_label: 'Time', - metrics: [], - queries: [ + metrics: [ { metricId: '1', id: 'response_metrics_nginx_ingress_throughput_status_code', diff --git a/spec/javascripts/monitoring/utils_spec.js b/spec/javascripts/monitoring/utils_spec.js index 202b4ec8f2e..3459b44c7ec 100644 --- a/spec/javascripts/monitoring/utils_spec.js +++ b/spec/javascripts/monitoring/utils_spec.js @@ -314,32 +314,32 @@ describe('isDateTimePickerInputValid', () => { }); describe('graphDataValidatorForAnomalyValues', () => { - let oneQuery; - let threeQueries; - let fourQueries; + let oneMetric; + let threeMetrics; + let fourMetrics; beforeEach(() => { - oneQuery = graphDataPrometheusQuery; - threeQueries = anomalyMockGraphData; + oneMetric = graphDataPrometheusQuery; + threeMetrics = anomalyMockGraphData; - const queries = [...threeQueries.queries]; - queries.push(threeQueries.queries[0]); - fourQueries = { + const metrics = [...threeMetrics.metrics]; + metrics.push(threeMetrics.metrics[0]); + fourMetrics = { ...anomalyMockGraphData, - queries, + metrics, }; }); /* - * Anomaly charts can accept results for exactly 3 queries, + * Anomaly charts can accept results for exactly 3 metrics, */ it('validates passes with the right query format', () => { - expect(graphDataValidatorForAnomalyValues(threeQueries)).toBe(true); + expect(graphDataValidatorForAnomalyValues(threeMetrics)).toBe(true); }); it('validation fails for wrong format, 1 metric', () => { - expect(graphDataValidatorForAnomalyValues(oneQuery)).toBe(false); + expect(graphDataValidatorForAnomalyValues(oneMetric)).toBe(false); }); it('validation fails for wrong format, more than 3 metrics', () => { - expect(graphDataValidatorForAnomalyValues(fourQueries)).toBe(false); + expect(graphDataValidatorForAnomalyValues(fourMetrics)).toBe(false); }); }); diff --git a/spec/javascripts/vue_shared/components/clipboard_button_spec.js b/spec/javascripts/vue_shared/components/clipboard_button_spec.js deleted file mode 100644 index 29a76574b89..00000000000 --- 
a/spec/javascripts/vue_shared/components/clipboard_button_spec.js +++ /dev/null @@ -1,51 +0,0 @@ -import Vue from 'vue'; -import clipboardButton from '~/vue_shared/components/clipboard_button.vue'; -import mountComponent from 'spec/helpers/vue_mount_component_helper'; - -describe('clipboard button', () => { - const Component = Vue.extend(clipboardButton); - let vm; - - afterEach(() => { - vm.$destroy(); - }); - - describe('without gfm', () => { - beforeEach(() => { - vm = mountComponent(Component, { - text: 'copy me', - title: 'Copy this value', - cssClass: 'btn-danger', - }); - }); - - it('renders a button for clipboard', () => { - expect(vm.$el.tagName).toEqual('BUTTON'); - expect(vm.$el.getAttribute('data-clipboard-text')).toEqual('copy me'); - expect(vm.$el).toHaveSpriteIcon('duplicate'); - }); - - it('should have a tooltip with default values', () => { - expect(vm.$el.getAttribute('data-original-title')).toEqual('Copy this value'); - }); - - it('should render provided classname', () => { - expect(vm.$el.classList).toContain('btn-danger'); - }); - }); - - describe('with gfm', () => { - it('sets data-clipboard-text with gfm', () => { - vm = mountComponent(Component, { - text: 'copy me', - gfm: '`path/to/file`', - title: 'Copy this value', - cssClass: 'btn-danger', - }); - - expect(vm.$el.getAttribute('data-clipboard-text')).toEqual( - '{"text":"copy me","gfm":"`path/to/file`"}', - ); - }); - }); -}); diff --git a/spec/javascripts/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_link_spec.js deleted file mode 100644 index 80aa75847ae..00000000000 --- a/spec/javascripts/vue_shared/components/user_avatar/user_avatar_link_spec.js +++ /dev/null @@ -1,109 +0,0 @@ -import _ from 'underscore'; -import Vue from 'vue'; -import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue'; -import { TEST_HOST } from 'spec/test_constants'; - -describe('User Avatar Link Component', function() { - beforeEach(function() { - this.propsData = { - linkHref: `${TEST_HOST}/myavatarurl.com`, - imgSize: 99, - imgSrc: `${TEST_HOST}/myavatarurl.com`, - imgAlt: 'mydisplayname', - imgCssClasses: 'myextraavatarclass', - tooltipText: 'tooltip text', - tooltipPlacement: 'bottom', - username: 'username', - }; - - const UserAvatarLinkComponent = Vue.extend(UserAvatarLink); - - this.userAvatarLink = new UserAvatarLinkComponent({ - propsData: this.propsData, - }).$mount(); - - [this.userAvatarImage] = this.userAvatarLink.$children; - }); - - it('should return a defined Vue component', function() { - expect(this.userAvatarLink).toBeDefined(); - }); - - it('should have user-avatar-image registered as child component', function() { - expect(this.userAvatarLink.$options.components.userAvatarImage).toBeDefined(); - }); - - it('user-avatar-link should have user-avatar-image as child component', function() { - expect(this.userAvatarImage).toBeDefined(); - }); - - it('should render <a> as a child element', function() { - const link = this.userAvatarLink.$el; - - expect(link.tagName).toBe('A'); - expect(link.href).toBe(this.propsData.linkHref); - }); - - it('renders imgSrc with imgSize as image', function() { - const { imgSrc, imgSize } = this.propsData; - const image = this.userAvatarLink.$el.querySelector('img'); - - expect(image).not.toBeNull(); - expect(image.src).toBe(`${imgSrc}?width=${imgSize}`); - }); - - it('should return necessary props as defined', function() { - _.each(this.propsData, (val, key) => { - 
expect(this.userAvatarLink[key]).toBeDefined(); - }); - }); - - describe('no username', function() { - beforeEach(function(done) { - this.userAvatarLink.username = ''; - - Vue.nextTick(done); - }); - - it('should only render image tag in link', function() { - const childElements = this.userAvatarLink.$el.childNodes; - - expect(this.userAvatarLink.$el.querySelector('img')).not.toBe('null'); - - // Vue will render the hidden component as <!----> - expect(childElements[1].tagName).toBeUndefined(); - }); - - it('should render avatar image tooltip', function() { - expect(this.userAvatarLink.shouldShowUsername).toBe(false); - expect(this.userAvatarLink.avatarTooltipText).toEqual(this.propsData.tooltipText); - }); - }); - - describe('username', function() { - it('should not render avatar image tooltip', function() { - expect(this.userAvatarLink.$el.querySelector('.js-user-avatar-image-toolip')).toBeNull(); - }); - - it('should render username prop in <span>', function() { - expect( - this.userAvatarLink.$el.querySelector('.js-user-avatar-link-username').innerText.trim(), - ).toEqual(this.propsData.username); - }); - - it('should render text tooltip for <span>', function() { - expect( - this.userAvatarLink.$el.querySelector('.js-user-avatar-link-username').dataset - .originalTitle, - ).toEqual(this.propsData.tooltipText); - }); - - it('should render text tooltip placement for <span>', function() { - expect( - this.userAvatarLink.$el - .querySelector('.js-user-avatar-link-username') - .getAttribute('tooltip-placement'), - ).toEqual(this.propsData.tooltipPlacement); - }); - }); -}); diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb index 8fcd4eb3c21..e25ce4df4aa 100644 --- a/spec/lib/gitlab/etag_caching/router_spec.rb +++ b/spec/lib/gitlab/etag_caching/router_spec.rb @@ -12,6 +12,15 @@ describe Gitlab::EtagCaching::Router do expect(result.name).to eq 'issue_notes' end + it 'matches MR notes endpoint' do + result = described_class.match( + '/my-group/and-subgroup/here-comes-the-project/noteable/merge_request/1/notes' + ) + + expect(result).to be_present + expect(result.name).to eq 'merge_request_notes' + end + it 'matches issue title endpoint' do result = described_class.match( '/my-group/my-project/issues/123/realtime_changes' diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 5612b0dc270..64d1a98ae71 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -365,6 +365,7 @@ project: - root_of_fork_network - fork_network_member - fork_network +- fork_network_projects - custom_attributes - lfs_file_locks - project_badges @@ -434,6 +435,7 @@ project: - upstream_project_subscriptions - downstream_project_subscriptions - service_desk_setting +- import_failures award_emoji: - awardable - user diff --git a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb index 2d8a603172d..d0e5ca2dde3 100644 --- a/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project_tree_restorer_spec.rb @@ -362,7 +362,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do expect(restored_project_json).to eq(true) end - it_behaves_like 'restores project correctly', + it_behaves_like 'restores project successfully', issues: 1, labels: 2, label_with_priorities: 'A project label', @@ -375,7 +375,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer 
do create(:ci_build, token: 'abcd') end - it_behaves_like 'restores project correctly', + it_behaves_like 'restores project successfully', issues: 1, labels: 2, label_with_priorities: 'A project label', @@ -452,7 +452,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do expect(restored_project_json).to eq(true) end - it_behaves_like 'restores project correctly', + it_behaves_like 'restores project successfully', issues: 2, labels: 2, label_with_priorities: 'A project label', @@ -633,4 +633,46 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do end end end + + context 'JSON with invalid records' do + let(:user) { create(:user) } + let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') } + let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) } + let(:restored_project_json) { project_tree_restorer.restore } + + context 'when some failures occur' do + context 'because a relation fails to be processed' do + let(:correlation_id) { 'my-correlation-id' } + + before do + setup_import_export_config('with_invalid_records') + + Labkit::Correlation::CorrelationId.use_id(correlation_id) do + expect(restored_project_json).to eq(true) + end + end + + it_behaves_like 'restores project successfully', + issues: 0, + labels: 0, + label_with_priorities: nil, + milestones: 1, + first_issue_labels: 0, + services: 0, + import_failures: 1 + + it 'records the failures in the database' do + import_failure = ImportFailure.last + + expect(import_failure.project_id).to eq(project.id) + expect(import_failure.relation_key).to eq('milestones') + expect(import_failure.relation_index).to be_present + expect(import_failure.exception_class).to eq('ActiveRecord::RecordInvalid') + expect(import_failure.exception_message).to be_present + expect(import_failure.correlation_id_value).to eq('my-correlation-id') + expect(import_failure.created_at).to be_present + end + end + end + end end diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb index 08d3c638f9e..0aab02b6c4c 100644 --- a/spec/lib/gitlab/url_builder_spec.rb +++ b/spec/lib/gitlab/url_builder_spec.rb @@ -59,6 +59,26 @@ describe Gitlab::UrlBuilder do end end + context 'when passing a ProjectSnippet' do + it 'returns a proper URL' do + project_snippet = create(:project_snippet) + + url = described_class.build(project_snippet) + + expect(url).to eq "#{Settings.gitlab['url']}/#{project_snippet.project.full_path}/snippets/#{project_snippet.id}" + end + end + + context 'when passing a PersonalSnippet' do + it 'returns a proper URL' do + personal_snippet = create(:personal_snippet) + + url = described_class.build(personal_snippet) + + expect(url).to eq "#{Settings.gitlab['url']}/snippets/#{personal_snippet.id}" + end + end + context 'when passing a Note' do context 'on a Commit' do it 'returns a proper URL' do diff --git a/spec/lib/gitlab/visibility_level_spec.rb b/spec/lib/gitlab/visibility_level_spec.rb index 75dc7d8e6d1..16a05af2216 100644 --- a/spec/lib/gitlab/visibility_level_spec.rb +++ b/spec/lib/gitlab/visibility_level_spec.rb @@ -95,4 +95,28 @@ describe Gitlab::VisibilityLevel do expect(described_class.valid_level?(described_class::PUBLIC)).to be_truthy end end + + describe '#visibility_level_decreased?' 
do + let(:project) { create(:project, :internal) } + + context 'when visibility level decreases' do + before do + project.update!(visibility_level: described_class::PRIVATE) + end + + it 'returns true' do + expect(project.visibility_level_decreased?).to be(true) + end + end + + context 'when visibility level does not decrease' do + before do + project.update!(visibility_level: described_class::PUBLIC) + end + + it 'returns false' do + expect(project.visibility_level_decreased?).to be(false) + end + end + end end diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index 79ea29c84a4..f3d270cdf7e 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -33,6 +33,21 @@ describe MergeRequest do end end + describe '.from_and_to_forks' do + it 'returns only MRs from and to forks (with no internal MRs)' do + project = create(:project) + fork = fork_project(project) + fork_2 = fork_project(project) + mr_from_fork = create(:merge_request, source_project: fork, target_project: project) + mr_to_fork = create(:merge_request, source_project: project, target_project: fork) + + create(:merge_request, source_project: fork, target_project: fork_2) + create(:merge_request, source_project: project, target_project: project) + + expect(described_class.from_and_to_forks(project)).to contain_exactly(mr_from_fork, mr_to_fork) + end + end + describe 'locking' do using RSpec::Parameterized::TableSyntax diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb index 74f2fc1bb61..a6d9ecaa7c5 100644 --- a/spec/models/note_spec.rb +++ b/spec/models/note_spec.rb @@ -1048,20 +1048,20 @@ describe Note do describe 'expiring ETag cache' do let(:note) { build(:note_on_issue) } - def expect_expiration(note) + def expect_expiration(noteable) expect_any_instance_of(Gitlab::EtagCaching::Store) .to receive(:touch) - .with("/#{note.project.namespace.to_param}/#{note.project.to_param}/noteable/issue/#{note.noteable.id}/notes") + .with("/#{noteable.project.namespace.to_param}/#{noteable.project.to_param}/noteable/#{noteable.class.name.underscore}/#{noteable.id}/notes") end it "expires cache for note's issue when note is saved" do - expect_expiration(note) + expect_expiration(note.noteable) note.save! end it "expires cache for note's issue when note is destroyed" do - expect_expiration(note) + expect_expiration(note.noteable) note.destroy! end @@ -1076,28 +1076,54 @@ describe Note do end end - describe '#with_notes_filter' do - let!(:comment) { create(:note) } - let!(:system_note) { create(:note, system: true) } + context 'for merge requests' do + let_it_be(:merge_request) { create(:merge_request) } - context 'when notes filter is nil' do - subject { described_class.with_notes_filter(nil) } + context 'when adding a note to the MR' do + let(:note) { build(:note, noteable: merge_request, project: merge_request.project) } - it { is_expected.to include(comment, system_note) } + it 'expires the MR note etag cache' do + expect_expiration(merge_request) + + note.save! + end end - context 'when notes filter is set to all notes' do - subject { described_class.with_notes_filter(UserPreference::NOTES_FILTERS[:all_notes]) } + context 'when adding a note to a commit on the MR' do + let(:note) { build(:note_on_commit, commit_id: merge_request.commits.first.id, project: merge_request.project) } - it { is_expected.to include(comment, system_note) } + it 'expires the MR note etag cache' do + expect_expiration(merge_request) + + note.save! 
+ end end + end + end - context 'when notes filter is set to only comments' do - subject { described_class.with_notes_filter(UserPreference::NOTES_FILTERS[:only_comments]) } + describe '#with_notes_filter' do + let!(:comment) { create(:note) } + let!(:system_note) { create(:note, system: true) } - it { is_expected.to include(comment) } - it { is_expected.not_to include(system_note) } - end + subject { described_class.with_notes_filter(filter) } + + context 'when notes filter is nil' do + let(:filter) { nil } + + it { is_expected.to include(comment, system_note) } + end + + context 'when notes filter is set to all notes' do + let(:filter) { UserPreference::NOTES_FILTERS[:all_notes] } + + it { is_expected.to include(comment, system_note) } + end + + context 'when notes filter is set to only comments' do + let(:filter) { UserPreference::NOTES_FILTERS[:only_comments] } + + it { is_expected.to include(comment) } + it { is_expected.not_to include(system_note) } end end diff --git a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb index f200c636aac..a772b911d8a 100644 --- a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb +++ b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb @@ -175,3 +175,64 @@ describe Metrics::Dashboard::GrafanaMetricEmbedService do end end end + +describe Metrics::Dashboard::GrafanaUidParser do + let_it_be(:grafana_integration) { create(:grafana_integration) } + let_it_be(:project) { grafana_integration.project } + + subject { described_class.new(grafana_url, project).parse } + + context 'with a Grafana-defined uid' do + let(:grafana_url) { grafana_integration.grafana_url + '/d/XDaNK6amz/?panelId=1' } + + it { is_expected.to eq 'XDaNK6amz' } + end + + context 'with a user-defined uid' do + let(:grafana_url) { grafana_integration.grafana_url + '/d/pgbouncer-main/pgbouncer-overview?panelId=1' } + + it { is_expected.to eq 'pgbouncer-main' } + end + + context 'when a uid is not present' do + let(:grafana_url) { grafana_integration.grafana_url } + + it { is_expected.to be nil } + end + + context 'when the url starts with unrelated content' do + let(:grafana_url) { 'js:' + grafana_integration.grafana_url } + + it { is_expected.to be nil } + end +end + +describe Metrics::Dashboard::DatasourceNameParser do + include GrafanaApiHelpers + + let(:grafana_url) { valid_grafana_dashboard_link('https://gitlab.grafana.net') } + let(:grafana_dashboard) { JSON.parse(fixture_file('grafana/dashboard_response.json'), symbolize_names: true) } + + subject { described_class.new(grafana_url, grafana_dashboard).parse } + + it { is_expected.to eq 'GitLab Omnibus' } + + context 'when the panelId is missing from the url' do + let(:grafana_url) { 'https:/gitlab.grafana.net/d/jbdbks/' } + + it { is_expected.to be nil } + end + + context 'when the panel is not present' do + # We're looking for panelId of 8, but only 6 is present + let(:grafana_dashboard) { { dashboard: { panels: [{ id: 6 }] } } } + + it { is_expected.to be nil } + end + + context 'when the dashboard panel does not have a datasource' do + let(:grafana_dashboard) { { dashboard: { panels: [{ id: 8 }] } } } + + it { is_expected.to be nil } + end +end diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb index 642986bb176..d8ba042af35 100644 --- a/spec/services/projects/destroy_service_spec.rb +++ b/spec/services/projects/destroy_service_spec.rb @@ -296,9 +296,12 
@@ describe Projects::DestroyService do end context 'as the root of a fork network' do - let!(:fork_network) { create(:fork_network, root_project: project) } + let!(:fork_1) { fork_project(project, user) } + let!(:fork_2) { fork_project(project, user) } it 'updates the fork network with the project name' do + fork_network = project.fork_network + destroy_project(project, user) fork_network.reload diff --git a/spec/services/projects/unlink_fork_service_spec.rb b/spec/services/projects/unlink_fork_service_spec.rb index a1175bf7123..a6bdc69cdca 100644 --- a/spec/services/projects/unlink_fork_service_spec.rb +++ b/spec/services/projects/unlink_fork_service_spec.rb @@ -2,13 +2,13 @@ require 'spec_helper' -describe Projects::UnlinkForkService do +describe Projects::UnlinkForkService, :use_clean_rails_memory_store_caching do include ProjectForksHelper subject { described_class.new(forked_project, user) } let(:project) { create(:project, :public) } - let(:forked_project) { fork_project(project, user) } + let!(:forked_project) { fork_project(project, user) } let(:user) { create(:user) } context 'with opened merge request on the source project' do @@ -86,4 +86,169 @@ describe Projects::UnlinkForkService do expect { subject.execute }.not_to raise_error end end + + context 'when given project is a source of forks' do + let!(:forked_project_2) { fork_project(project, user) } + let!(:fork_of_fork) { fork_project(forked_project, user) } + + subject { described_class.new(project, user) } + + context 'with opened merge requests from fork back to root project' do + let!(:merge_request) { create(:merge_request, source_project: project, target_project: forked_project) } + let!(:merge_request2) { create(:merge_request, source_project: project, target_project: fork_project(project)) } + let!(:merge_request_in_fork) { create(:merge_request, source_project: forked_project, target_project: forked_project) } + + let(:mr_close_service) { MergeRequests::CloseService.new(project, user) } + + before do + allow(MergeRequests::CloseService).to receive(:new) + .with(project, user) + .and_return(mr_close_service) + end + + it 'closes all pending merge requests' do + expect(mr_close_service).to receive(:execute).with(merge_request) + expect(mr_close_service).to receive(:execute).with(merge_request2) + + subject.execute + end + + it 'does not close merge requests that do not come from the project being unlinked' do + expect(mr_close_service).not_to receive(:execute).with(merge_request_in_fork) + + subject.execute + end + end + + it 'removes its link to the fork network and updates direct network members' do + expect(project.fork_network_member).to be_present + expect(project.fork_network).to be_present + expect(project.forked_to_members.count).to eq(2) + expect(forked_project.forked_to_members.count).to eq(1) + expect(fork_of_fork.forked_to_members.count).to eq(0) + + subject.execute + + project.reload + forked_project.reload + fork_of_fork.reload + + expect(project.fork_network_member).to be_nil + expect(project.fork_network).to be_nil + expect(forked_project.fork_network).to have_attributes(root_project_id: nil, + deleted_root_project_name: project.full_name) + expect(project.forked_to_members.count).to eq(0) + expect(forked_project.forked_to_members.count).to eq(1) + expect(fork_of_fork.forked_to_members.count).to eq(0) + end + + it 'refreshes the forks count cache of the given project' do + expect(project.forks_count).to eq(2) + + subject.execute + + expect(project.forks_count).to be_zero + end + + context 'when given 
project is a fork of an unlinked parent' do + let!(:fork_of_fork) { fork_project(forked_project, user) } + let(:lfs_object) { create(:lfs_object) } + + before do + lfs_object.projects << project + end + + it 'saves lfs objects to the root project' do + # Remove parent from network + described_class.new(forked_project, user).execute + + described_class.new(fork_of_fork, user).execute + + expect(lfs_object.projects).to include(fork_of_fork) + end + end + + context 'and is node with a parent' do + subject { described_class.new(forked_project, user) } + + context 'with opened merge requests from and to given project' do + let!(:mr_from_parent) { create(:merge_request, source_project: project, target_project: forked_project) } + let!(:mr_to_parent) { create(:merge_request, source_project: forked_project, target_project: project) } + let!(:mr_to_child) { create(:merge_request, source_project: forked_project, target_project: fork_of_fork) } + let!(:mr_from_child) { create(:merge_request, source_project: fork_of_fork, target_project: forked_project) } + let!(:merge_request_in_fork) { create(:merge_request, source_project: forked_project, target_project: forked_project) } + + let(:mr_close_service) { MergeRequests::CloseService.new(forked_project, user) } + + before do + allow(MergeRequests::CloseService).to receive(:new) + .with(forked_project, user) + .and_return(mr_close_service) + end + + it 'close all pending merge requests' do + merge_requests = [mr_from_parent, mr_to_parent, mr_from_child, mr_to_child] + + merge_requests.each do |mr| + expect(mr_close_service).to receive(:execute).with(mr).and_call_original + end + + subject.execute + + merge_requests = MergeRequest.where(id: merge_requests) + + expect(merge_requests).to all(have_attributes(state: 'closed')) + end + + it 'does not close merge requests which do not come from the project being unlinked' do + expect(mr_close_service).not_to receive(:execute).with(merge_request_in_fork) + + subject.execute + end + end + + it 'refreshes the forks count cache of the parent and the given project' do + expect(project.forks_count).to eq(2) + expect(forked_project.forks_count).to eq(1) + + subject.execute + + expect(project.forks_count).to eq(1) + expect(forked_project.forks_count).to eq(0) + end + + it 'removes its link to the fork network and updates direct network members' do + expect(project.fork_network).to be_present + expect(forked_project.fork_network).to be_present + expect(fork_of_fork.fork_network).to be_present + + expect(project.forked_to_members.count).to eq(2) + expect(forked_project.forked_to_members.count).to eq(1) + expect(fork_of_fork.forked_to_members.count).to eq(0) + + subject.execute + project.reload + forked_project.reload + fork_of_fork.reload + + expect(project.fork_network).to be_present + expect(forked_project.fork_network).to be_nil + expect(fork_of_fork.fork_network).to be_present + + expect(project.forked_to_members.count).to eq(1) # 1 child is gone + expect(forked_project.forked_to_members.count).to eq(0) + expect(fork_of_fork.forked_to_members.count).to eq(0) + end + end + end + + context 'when given project is not part of a fork network' do + let!(:project_without_forks) { create(:project, :public) } + + subject { described_class.new(project_without_forks, user) } + + it 'does not raise errors' do + expect { subject.execute }.not_to raise_error + end + end end diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb index c848a5397e1..3092fb7116a 100644 --- 
+++ b/spec/services/projects/update_service_spec.rb
@@ -16,7 +16,14 @@ describe Projects::UpdateService do
     let(:admin) { create(:admin) }
 
     context 'when changing visibility level' do
-      context 'when visibility_level is INTERNAL' do
+      def expect_to_call_unlink_fork_service
+        service = Projects::UnlinkForkService.new(project, user)
+
+        expect(Projects::UnlinkForkService).to receive(:new).with(project, user).and_return(service)
+        expect(service).to receive(:execute).and_call_original
+      end
+
+      context 'when visibility_level changes to INTERNAL' do
         it 'updates the project to internal' do
           expect(TodosDestroyer::ProjectPrivateWorker).not_to receive(:perform_in)
@@ -25,9 +32,21 @@ describe Projects::UpdateService do
           expect(result).to eq({ status: :success })
           expect(project).to be_internal
         end
+
+        context 'and project is PUBLIC' do
+          before do
+            project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+          end
+
+          it 'unlinks project from fork network' do
+            expect_to_call_unlink_fork_service
+
+            update_project(project, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+          end
+        end
       end
 
-      context 'when visibility_level is PUBLIC' do
+      context 'when visibility_level changes to PUBLIC' do
         it 'updates the project to public' do
           expect(TodosDestroyer::ProjectPrivateWorker).not_to receive(:perform_in)
@@ -36,9 +55,17 @@ describe Projects::UpdateService do
           expect(result).to eq({ status: :success })
           expect(project).to be_public
         end
+
+        context 'and project is PRIVATE' do
+          it 'does not unlink project from fork network' do
+            expect(Projects::UnlinkForkService).not_to receive(:new)
+
+            update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+          end
+        end
       end
 
-      context 'when visibility_level is PRIVATE' do
+      context 'when visibility_level changes to PRIVATE' do
         before do
           project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
         end
@@ -52,6 +79,30 @@ describe Projects::UpdateService do
           expect(result).to eq({ status: :success })
           expect(project).to be_private
         end
+
+        context 'and project is PUBLIC' do
+          before do
+            project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+          end
+
+          it 'unlinks project from fork network' do
+            expect_to_call_unlink_fork_service
+
+            update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+          end
+        end
+
+        context 'and project is INTERNAL' do
+          before do
+            project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+          end
+
+          it 'unlinks project from fork network' do
+            expect_to_call_unlink_fork_service
+
+            update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+          end
+        end
       end
 
       context 'when visibility levels are restricted to PUBLIC only' do
@@ -107,28 +158,48 @@ describe Projects::UpdateService do
       let(:project) { create(:project, :internal) }
       let(:forked_project) { fork_project(project) }
 
-      it 'updates forks visibility level when parent set to more restrictive' do
-        opts = { visibility_level: Gitlab::VisibilityLevel::PRIVATE }
+      context 'and unlink forks feature flag is off' do
+        before do
+          stub_feature_flags(unlink_fork_network_upon_visibility_decrease: false)
+        end
+
+        it 'updates forks visibility level when parent set to more restrictive' do
+          opts = { visibility_level: Gitlab::VisibilityLevel::PRIVATE }
+
+          expect(project).to be_internal
+          expect(forked_project).to be_internal
+
+          expect(update_project(project, admin, opts)).to eq({ status: :success })
+
+          expect(project).to be_private
+          expect(forked_project.reload).to be_private
+        end
+
+        it 'does not update forks visibility level when parent set to less restrictive' do
+          opts = { visibility_level: Gitlab::VisibilityLevel::PUBLIC }
 
-        expect(project).to be_internal
-        expect(forked_project).to be_internal
+          expect(project).to be_internal
+          expect(forked_project).to be_internal
 
-        expect(update_project(project, admin, opts)).to eq({ status: :success })
+          expect(update_project(project, admin, opts)).to eq({ status: :success })
 
-        expect(project).to be_private
-        expect(forked_project.reload).to be_private
+          expect(project).to be_public
+          expect(forked_project.reload).to be_internal
+        end
       end
 
-      it 'does not update forks visibility level when parent set to less restrictive' do
-        opts = { visibility_level: Gitlab::VisibilityLevel::PUBLIC }
+      context 'and unlink forks feature flag is on' do
+        it 'does not change visibility of forks' do
+          opts = { visibility_level: Gitlab::VisibilityLevel::PRIVATE }
 
-        expect(project).to be_internal
-        expect(forked_project).to be_internal
+          expect(project).to be_internal
+          expect(forked_project).to be_internal
 
-        expect(update_project(project, admin, opts)).to eq({ status: :success })
+          expect(update_project(project, admin, opts)).to eq({ status: :success })
 
-        expect(project).to be_public
-        expect(forked_project.reload).to be_internal
+          expect(project).to be_private
+          expect(forked_project.reload).to be_internal
+        end
       end
     end
diff --git a/spec/support/shared_examples/lib/gitlab/import_export/project_tree_restorer_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/project_tree_restorer_shared_examples.rb
index f26a8554055..2fee58a9f30 100644
--- a/spec/support/shared_examples/lib/gitlab/import_export/project_tree_restorer_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/import_export/project_tree_restorer_shared_examples.rb
@@ -2,7 +2,7 @@
 # Shared examples for ProjectTreeRestorer (shared to allow the testing
 # of EE-specific features)
-RSpec.shared_examples 'restores project correctly' do |**results|
+RSpec.shared_examples 'restores project successfully' do |**results|
   it 'restores the project' do
     expect(shared.errors).to be_empty
     expect(restored_project_json).to be_truthy
@@ -34,4 +34,8 @@ RSpec.shared_examples 'restores project correctly' do |**results|
     expect(project.import_type).to be_nil
     expect(project.creator_id).not_to eq 123
   end
+
+  it 'records exact number of import failures' do
+    expect(project.import_failures.size).to eq(results.fetch(:import_failures, 0))
+  end
 end
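Usage note on the renamed shared example group above: a restorer spec pulls the examples in with it_behaves_like and passes expected counts through the **results hash, so the new import_failures key falls back to 0 via results.fetch(:import_failures, 0) unless the caller overrides it. A minimal sketch follows; the describe target and the elided setup are placeholders and not part of this commit, only the shared example name and the import_failures key come from the diff.

describe Gitlab::ImportExport::ProjectTreeRestorer do
  # ... setup that defines `project`, `shared` and `restored_project_json`,
  # as the shared examples expect (omitted here) ...

  # Only :import_failures is shown; any other result keys the group accepts
  # would be passed in the same hash.
  it_behaves_like 'restores project successfully', import_failures: 0
end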