-rw-r--r--  .gitlab-ci.yml | 6
-rw-r--r--  app/assets/javascripts/feature_highlight/feature_highlight.js | 61
-rw-r--r--  app/assets/javascripts/feature_highlight/feature_highlight_helper.js | 57
-rw-r--r--  app/assets/javascripts/feature_highlight/feature_highlight_options.js | 12
-rw-r--r--  app/assets/javascripts/lib/utils/pretty_time.js | 107
-rw-r--r--  app/assets/javascripts/main.js | 1
-rw-r--r--  app/assets/javascripts/new_sidebar.js | 11
-rw-r--r--  app/assets/javascripts/sidebar/components/time_tracking/collapsed_state.js | 5
-rw-r--r--  app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.js | 17
-rw-r--r--  app/assets/stylesheets/framework.scss | 1
-rw-r--r--  app/assets/stylesheets/framework/buttons.scss | 17
-rw-r--r--  app/assets/stylesheets/framework/feature_highlight.scss | 94
-rw-r--r--  app/assets/stylesheets/new_sidebar.scss | 6
-rw-r--r--  app/controllers/projects/uploads_controller.rb | 2
-rw-r--r--  app/finders/groups_finder.rb | 2
-rw-r--r--  app/models/ci/pipeline.rb | 3
-rw-r--r--  app/models/environment.rb | 4
-rw-r--r--  app/models/repository.rb | 94
-rw-r--r--  app/services/commits/change_service.rb | 6
-rw-r--r--  app/uploaders/avatar_uploader.rb | 2
-rw-r--r--  app/uploaders/gitlab_uploader.rb | 2
-rw-r--r--  app/views/feature_highlight/_issue_boards.svg | 98
-rw-r--r--  app/views/layouts/nav/sidebar/_project.html.haml | 14
-rw-r--r--  app/views/projects/empty.html.haml | 2
-rw-r--r--  app/views/shared/icons/_thumbs_up.svg | 1
-rw-r--r--  changelogs/unreleased/37890-auto-devops-banner-is-not-shown-when-the-repository-is-empty-new-project.yml | 5
-rw-r--r--  changelogs/unreleased/do-not-perform-disk-check.yml | 5
-rw-r--r--  changelogs/unreleased/reoganize-deployment-indexes.yml | 5
-rw-r--r--  config/initializers/postgresql_opclasses_support.rb | 2
-rw-r--r--  db/migrate/20170912113435_clean_stages_statuses_migration.rb | 26
-rw-r--r--  db/migrate/20170918222253_reorganize_deployments_indexes.rb | 28
-rw-r--r--  db/migrate/20170918223303_add_deployments_index_for_last_deployment.rb | 21
-rw-r--r--  db/schema.rb | 7
-rw-r--r--  doc/ci/yaml/README.md | 10
-rw-r--r--  lib/gitlab/ci/build/policy.rb | 15
-rw-r--r--  lib/gitlab/ci/build/policy/kubernetes.rb | 19
-rw-r--r--  lib/gitlab/ci/build/policy/refs.rb | 43
-rw-r--r--  lib/gitlab/ci/build/policy/specification.rb | 25
-rw-r--r--  lib/gitlab/ci/yaml_processor.rb | 120
-rw-r--r--  lib/gitlab/git.rb | 9
-rw-r--r--  lib/gitlab/git/commit.rb | 4
-rw-r--r--  lib/gitlab/git/operation_service.rb | 4
-rw-r--r--  lib/gitlab/git/repository.rb | 85
-rw-r--r--  scripts/schema_changed.sh | 10
-rw-r--r--  spec/features/projects/user_edits_files_spec.rb | 16
-rw-r--r--  spec/javascripts/feature_highlight/feature_highlight_helper_spec.js | 219
-rw-r--r--  spec/javascripts/feature_highlight/feature_highlight_options_spec.js | 45
-rw-r--r--  spec/javascripts/feature_highlight/feature_highlight_spec.js | 122
-rw-r--r--  spec/javascripts/pretty_time_spec.js | 282
-rw-r--r--  spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/ci/build/policy/refs_spec.rb | 87
-rw-r--r--  spec/lib/gitlab/ci/build/policy_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 251
-rw-r--r--  spec/migrations/clean_stages_statuses_migration_spec.rb | 51
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 1
-rw-r--r--  spec/models/repository_spec.rb | 24
-rw-r--r--  spec/requests/api/groups_spec.rb | 3
57 files changed, 920 insertions, 1316 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index cc3c170cb23..375757086a6 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -516,6 +516,12 @@ db:seed_fu-mysql:
<<: *db-seed_fu
<<: *use-mysql
+db:check-schema-pg:
+ <<: *db-migrate-reset
+ <<: *use-pg
+ script:
+ - source scripts/schema_changed.sh
+
# Frontend-related jobs
gitlab:assets:compile:
<<: *dedicated-runner
diff --git a/app/assets/javascripts/feature_highlight/feature_highlight.js b/app/assets/javascripts/feature_highlight/feature_highlight.js
deleted file mode 100644
index 800ca05cd11..00000000000
--- a/app/assets/javascripts/feature_highlight/feature_highlight.js
+++ /dev/null
@@ -1,61 +0,0 @@
-import Cookies from 'js-cookie';
-import _ from 'underscore';
-import {
- getCookieName,
- getSelector,
- hidePopover,
- setupDismissButton,
- mouseenter,
- mouseleave,
-} from './feature_highlight_helper';
-
-export const setupFeatureHighlightPopover = (id, debounceTimeout = 300) => {
- const $selector = $(getSelector(id));
- const $parent = $selector.parent();
- const $popoverContent = $parent.siblings('.feature-highlight-popover-content');
- const hideOnScroll = hidePopover.bind($selector);
- const debouncedMouseleave = _.debounce(mouseleave, debounceTimeout);
-
- $selector
- // Setup popover
- .data('content', $popoverContent.prop('outerHTML'))
- .popover({
- html: true,
- // Override the existing template to add custom CSS classes
- template: `
- <div class="popover feature-highlight-popover" role="tooltip">
- <div class="arrow"></div>
- <div class="popover-content"></div>
- </div>
- `,
- })
- .on('mouseenter', mouseenter)
- .on('mouseleave', debouncedMouseleave)
- .on('inserted.bs.popover', setupDismissButton)
- .on('show.bs.popover', () => {
- window.addEventListener('scroll', hideOnScroll);
- })
- .on('hide.bs.popover', () => {
- window.removeEventListener('scroll', hideOnScroll);
- })
- // Display feature highlight
- .removeAttr('disabled');
-};
-
-export const shouldHighlightFeature = (id) => {
- const element = document.querySelector(getSelector(id));
- const previouslyDismissed = Cookies.get(getCookieName(id)) === 'true';
-
- return element && !previouslyDismissed;
-};
-
-export const highlightFeatures = (highlightOrder) => {
- const featureId = highlightOrder.find(shouldHighlightFeature);
-
- if (featureId) {
- setupFeatureHighlightPopover(featureId);
- return true;
- }
-
- return false;
-};
diff --git a/app/assets/javascripts/feature_highlight/feature_highlight_helper.js b/app/assets/javascripts/feature_highlight/feature_highlight_helper.js
deleted file mode 100644
index 9f741355cd7..00000000000
--- a/app/assets/javascripts/feature_highlight/feature_highlight_helper.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import Cookies from 'js-cookie';
-
-export const getCookieName = cookieId => `feature-highlighted-${cookieId}`;
-export const getSelector = highlightId => `.js-feature-highlight[data-highlight=${highlightId}]`;
-
-export const showPopover = function showPopover() {
- if (this.hasClass('js-popover-show')) {
- return false;
- }
- this.popover('show');
- this.addClass('disable-animation js-popover-show');
-
- return true;
-};
-
-export const hidePopover = function hidePopover() {
- if (!this.hasClass('js-popover-show')) {
- return false;
- }
- this.popover('hide');
- this.removeClass('disable-animation js-popover-show');
-
- return true;
-};
-
-export const dismiss = function dismiss(cookieId) {
- Cookies.set(getCookieName(cookieId), true);
- hidePopover.call(this);
- this.hide();
-};
-
-export const mouseleave = function mouseleave() {
- if (!$('.popover:hover').length > 0) {
- const $featureHighlight = $(this);
- hidePopover.call($featureHighlight);
- }
-};
-
-export const mouseenter = function mouseenter() {
- const $featureHighlight = $(this);
-
- const showedPopover = showPopover.call($featureHighlight);
- if (showedPopover) {
- $('.popover')
- .on('mouseleave', mouseleave.bind($featureHighlight));
- }
-};
-
-export const setupDismissButton = function setupDismissButton() {
- const popoverId = this.getAttribute('aria-describedby');
- const cookieId = this.dataset.highlight;
- const $popover = $(this);
- const dismissWrapper = dismiss.bind($popover, cookieId);
-
- $(`#${popoverId} .dismiss-feature-highlight`)
- .on('click', dismissWrapper);
-};
diff --git a/app/assets/javascripts/feature_highlight/feature_highlight_options.js b/app/assets/javascripts/feature_highlight/feature_highlight_options.js
deleted file mode 100644
index fd48f2e87cc..00000000000
--- a/app/assets/javascripts/feature_highlight/feature_highlight_options.js
+++ /dev/null
@@ -1,12 +0,0 @@
-import { highlightFeatures } from './feature_highlight';
-import bp from '../breakpoints';
-
-const highlightOrder = ['issue-boards'];
-
-export default function domContentLoaded(order) {
- if (bp.getBreakpointSize() === 'lg') {
- highlightFeatures(order);
- }
-}
-
-document.addEventListener('DOMContentLoaded', domContentLoaded.bind(this, highlightOrder));
diff --git a/app/assets/javascripts/lib/utils/pretty_time.js b/app/assets/javascripts/lib/utils/pretty_time.js
index 227bf65b560..b1ffd797f7e 100644
--- a/app/assets/javascripts/lib/utils/pretty_time.js
+++ b/app/assets/javascripts/lib/utils/pretty_time.js
@@ -1,68 +1,61 @@
import _ from 'underscore';
-(() => {
- /*
- * TODO: Make these methods more configurable (e.g. stringifyTime condensed or
- * non-condensed, abbreviateTimelengths)
- * */
-
- const utils = window.gl.utils = gl.utils || {};
- const prettyTime = utils.prettyTime = {
- /*
- * Accepts seconds and returns a timeObject { weeks: #, days: #, hours: #, minutes: # }
- * Seconds can be negative or positive, zero or non-zero. Can be configured for any day
- * or week length.
- */
- parseSeconds(seconds, { daysPerWeek = 5, hoursPerDay = 8 } = {}) {
- const DAYS_PER_WEEK = daysPerWeek;
- const HOURS_PER_DAY = hoursPerDay;
- const MINUTES_PER_HOUR = 60;
- const MINUTES_PER_WEEK = DAYS_PER_WEEK * HOURS_PER_DAY * MINUTES_PER_HOUR;
- const MINUTES_PER_DAY = HOURS_PER_DAY * MINUTES_PER_HOUR;
-
- const timePeriodConstraints = {
- weeks: MINUTES_PER_WEEK,
- days: MINUTES_PER_DAY,
- hours: MINUTES_PER_HOUR,
- minutes: 1,
- };
+/*
+ * TODO: Make these methods more configurable (e.g. stringifyTime condensed or
+ * non-condensed, abbreviateTimelengths)
+ * */
+
+/*
+ * Accepts seconds and returns a timeObject { weeks: #, days: #, hours: #, minutes: # }
+ * Seconds can be negative or positive, zero or non-zero. Can be configured for any day
+ * or week length.
+*/
+
+export function parseSeconds(seconds, { daysPerWeek = 5, hoursPerDay = 8 } = {}) {
+ const DAYS_PER_WEEK = daysPerWeek;
+ const HOURS_PER_DAY = hoursPerDay;
+ const MINUTES_PER_HOUR = 60;
+ const MINUTES_PER_WEEK = DAYS_PER_WEEK * HOURS_PER_DAY * MINUTES_PER_HOUR;
+ const MINUTES_PER_DAY = HOURS_PER_DAY * MINUTES_PER_HOUR;
+
+ const timePeriodConstraints = {
+ weeks: MINUTES_PER_WEEK,
+ days: MINUTES_PER_DAY,
+ hours: MINUTES_PER_HOUR,
+ minutes: 1,
+ };
- let unorderedMinutes = prettyTime.secondsToMinutes(seconds);
+ let unorderedMinutes = Math.abs(seconds / MINUTES_PER_HOUR);
- return _.mapObject(timePeriodConstraints, (minutesPerPeriod) => {
- const periodCount = Math.floor(unorderedMinutes / minutesPerPeriod);
+ return _.mapObject(timePeriodConstraints, (minutesPerPeriod) => {
+ const periodCount = Math.floor(unorderedMinutes / minutesPerPeriod);
- unorderedMinutes -= (periodCount * minutesPerPeriod);
+ unorderedMinutes -= (periodCount * minutesPerPeriod);
- return periodCount;
- });
- },
+ return periodCount;
+ });
+}
- /*
- * Accepts a timeObject and returns a condensed string representation of it
- * (e.g. '1w 2d 3h 1m' or '1h 30m'). Zero value units are not included.
- */
+/*
+* Accepts a timeObject (see parseSeconds) and returns a condensed string representation of it
+* (e.g. '1w 2d 3h 1m' or '1h 30m'). Zero value units are not included.
+*/
- stringifyTime(timeObject) {
- const reducedTime = _.reduce(timeObject, (memo, unitValue, unitName) => {
- const isNonZero = !!unitValue;
- return isNonZero ? `${memo} ${unitValue}${unitName.charAt(0)}` : memo;
- }, '').trim();
- return reducedTime.length ? reducedTime : '0m';
- },
+export function stringifyTime(timeObject) {
+ const reducedTime = _.reduce(timeObject, (memo, unitValue, unitName) => {
+ const isNonZero = !!unitValue;
+ return isNonZero ? `${memo} ${unitValue}${unitName.charAt(0)}` : memo;
+ }, '').trim();
+ return reducedTime.length ? reducedTime : '0m';
+}
- /*
- * Accepts a time string of any size (e.g. '1w 2d 3h 5m' or '1w 2d') and returns
- * the first non-zero unit/value pair.
- */
+/*
+* Accepts a time string of any size (e.g. '1w 2d 3h 5m' or '1w 2d') and returns
+* the first non-zero unit/value pair.
+*/
- abbreviateTime(timeStr) {
- return timeStr.split(' ')
- .filter(unitStr => unitStr.charAt(0) !== '0')[0];
- },
+export function abbreviateTime(timeStr) {
+ return timeStr.split(' ')
+ .filter(unitStr => unitStr.charAt(0) !== '0')[0];
+}
- secondsToMinutes(seconds) {
- return Math.abs(seconds / 60);
- },
- };
-})(window.gl || (window.gl = {}));
diff --git a/app/assets/javascripts/main.js b/app/assets/javascripts/main.js
index 0f84470828a..c2a104df749 100644
--- a/app/assets/javascripts/main.js
+++ b/app/assets/javascripts/main.js
@@ -101,7 +101,6 @@ import './label_manager';
import './labels';
import './labels_select';
import './layout_nav';
-import './feature_highlight/feature_highlight_options';
import LazyLoader from './lazy_loader';
import './line_highlighter';
import './logo';
diff --git a/app/assets/javascripts/new_sidebar.js b/app/assets/javascripts/new_sidebar.js
index f2eb2338a1e..997550b37fb 100644
--- a/app/assets/javascripts/new_sidebar.js
+++ b/app/assets/javascripts/new_sidebar.js
@@ -11,6 +11,7 @@ export default class NewNavSidebar {
initDomElements() {
this.$page = $('.page-with-sidebar');
this.$sidebar = $('.nav-sidebar');
+ this.$innerScroll = $('.nav-sidebar-inner-scroll', this.$sidebar);
this.$overlay = $('.mobile-overlay');
this.$openSidebar = $('.toggle-mobile-nav');
this.$closeSidebar = $('.close-nav-button');
@@ -55,6 +56,16 @@ export default class NewNavSidebar {
this.$page.toggleClass('page-with-icon-sidebar', breakpoint === 'sm' ? true : collapsed);
}
NewNavSidebar.setCollapsedCookie(collapsed);
+
+ this.toggleSidebarOverflow();
+ }
+
+ toggleSidebarOverflow() {
+ if (this.$innerScroll.prop('scrollHeight') > this.$innerScroll.prop('offsetHeight')) {
+ this.$innerScroll.css('overflow-y', 'scroll');
+ } else {
+ this.$innerScroll.css('overflow-y', '');
+ }
}
render() {
diff --git a/app/assets/javascripts/sidebar/components/time_tracking/collapsed_state.js b/app/assets/javascripts/sidebar/components/time_tracking/collapsed_state.js
index 0da265053bd..a9fbc7f1a2f 100644
--- a/app/assets/javascripts/sidebar/components/time_tracking/collapsed_state.js
+++ b/app/assets/javascripts/sidebar/components/time_tracking/collapsed_state.js
@@ -1,6 +1,5 @@
import stopwatchSvg from 'icons/_icon_stopwatch.svg';
-
-import '../../../lib/utils/pretty_time';
+import { abbreviateTime } from '../../../lib/utils/pretty_time';
export default {
name: 'time-tracking-collapsed-state',
@@ -79,7 +78,7 @@ export default {
},
methods: {
abbreviateTime(timeStr) {
- return gl.utils.prettyTime.abbreviateTime(timeStr);
+ return abbreviateTime(timeStr);
},
},
template: `
diff --git a/app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.js b/app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.js
index 40f5c89c5bb..fd0d4570d68 100644
--- a/app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.js
+++ b/app/assets/javascripts/sidebar/components/time_tracking/comparison_pane.js
@@ -1,6 +1,4 @@
-import '../../../lib/utils/pretty_time';
-
-const prettyTime = gl.utils.prettyTime;
+import { parseSeconds, stringifyTime } from '../../../lib/utils/pretty_time';
export default {
name: 'time-tracking-comparison-pane',
@@ -23,12 +21,12 @@ export default {
},
},
computed: {
- parsedRemaining() {
+ parsedTimeRemaining() {
const diffSeconds = this.timeEstimate - this.timeSpent;
- return prettyTime.parseSeconds(diffSeconds);
+ return parseSeconds(diffSeconds);
},
timeRemainingHumanReadable() {
- return prettyTime.stringifyTime(this.parsedRemaining);
+ return stringifyTime(this.parsedTimeRemaining);
},
timeRemainingTooltip() {
const prefix = this.timeRemainingMinutes < 0 ? 'Over by' : 'Time remaining:';
@@ -44,13 +42,6 @@ export default {
timeRemainingStatusClass() {
return this.timeEstimate >= this.timeSpent ? 'within_estimate' : 'over_estimate';
},
- /* Parsed time values */
- parsedEstimate() {
- return prettyTime.parseSeconds(this.timeEstimate);
- },
- parsedSpent() {
- return prettyTime.parseSeconds(this.timeSpent);
- },
},
template: `
<div class="time-tracking-comparison-pane">
diff --git a/app/assets/stylesheets/framework.scss b/app/assets/stylesheets/framework.scss
index 35e7a10379f..923d14f2c3d 100644
--- a/app/assets/stylesheets/framework.scss
+++ b/app/assets/stylesheets/framework.scss
@@ -52,4 +52,3 @@
@import "framework/snippets";
@import "framework/memory_graph";
@import "framework/responsive-tables";
-@import "framework/feature_highlight";
diff --git a/app/assets/stylesheets/framework/buttons.scss b/app/assets/stylesheets/framework/buttons.scss
index 4f208df4216..d178bc17462 100644
--- a/app/assets/stylesheets/framework/buttons.scss
+++ b/app/assets/stylesheets/framework/buttons.scss
@@ -46,15 +46,6 @@
}
}
-@mixin btn-svg {
- svg {
- height: 15px;
- width: 15px;
- position: relative;
- top: 2px;
- }
-}
-
@mixin btn-color($light, $border-light, $normal, $border-normal, $dark, $border-dark, $color) {
background-color: $light;
border-color: $border-light;
@@ -132,7 +123,6 @@
.btn {
@include btn-default;
@include btn-white;
- @include btn-svg;
color: $gl-text-color;
@@ -231,6 +221,13 @@
}
}
+ svg {
+ height: 15px;
+ width: 15px;
+ position: relative;
+ top: 2px;
+ }
+
svg,
.fa {
&:not(:last-child) {
diff --git a/app/assets/stylesheets/framework/feature_highlight.scss b/app/assets/stylesheets/framework/feature_highlight.scss
deleted file mode 100644
index ebae473df50..00000000000
--- a/app/assets/stylesheets/framework/feature_highlight.scss
+++ /dev/null
@@ -1,94 +0,0 @@
-.feature-highlight {
- position: relative;
- margin-left: $gl-padding;
- width: 20px;
- height: 20px;
- cursor: pointer;
-
- &::before {
- content: '';
- display: block;
- position: absolute;
- top: 6px;
- left: 6px;
- width: 8px;
- height: 8px;
- background-color: $blue-500;
- border-radius: 50%;
- box-shadow: 0 0 0 rgba($blue-500, 0.4);
- animation: pulse-highlight 2s infinite;
- }
-
- &:hover::before,
- &.disable-animation::before {
- animation: none;
- }
-
- &[disabled]::before {
- display: none;
- }
-}
-
-.is-showing-fly-out {
- .feature-highlight {
- display: none;
- }
-}
-
-.feature-highlight-popover-content {
- display: none;
-
- hr {
- margin: $gl-padding * 0.5 0;
- }
-
- .btn-link {
- @include btn-svg;
-
- svg path {
- fill: currentColor;
- }
- }
-
- .dismiss-feature-highlight {
- padding: 0;
- }
-
- svg:first-child {
- width: 100%;
- background-color: $indigo-50;
- border-top-left-radius: 2px;
- border-top-right-radius: 2px;
- border-bottom: 1px solid darken($gray-normal, 8%);
- }
-}
-
-.popover .feature-highlight-popover-content {
- display: block;
-}
-
-.feature-highlight-popover {
- padding: 0;
-
- .popover-content {
- padding: 0;
- }
-}
-
-.feature-highlight-popover-sub-content {
- padding: 9px 14px;
-}
-
-@include keyframes(pulse-highlight) {
- 0% {
- box-shadow: 0 0 0 0 rgba($blue-200, 0.4);
- }
-
- 70% {
- box-shadow: 0 0 0 10px transparent;
- }
-
- 100% {
- box-shadow: 0 0 0 0 transparent;
- }
-}
diff --git a/app/assets/stylesheets/new_sidebar.scss b/app/assets/stylesheets/new_sidebar.scss
index 8030854e527..4d5e3d1eceb 100644
--- a/app/assets/stylesheets/new_sidebar.scss
+++ b/app/assets/stylesheets/new_sidebar.scss
@@ -192,7 +192,11 @@ $new-sidebar-collapsed-width: 50px;
.nav-sidebar-inner-scroll {
height: 100%;
width: 100%;
- overflow: scroll;
+ overflow: auto;
+
+ @media (min-width: $screen-sm-min) {
+ overflow: hidden;
+ }
}
.with-performance-bar .nav-sidebar {
diff --git a/app/controllers/projects/uploads_controller.rb b/app/controllers/projects/uploads_controller.rb
index 6966a7c5fee..4d2fb17a19b 100644
--- a/app/controllers/projects/uploads_controller.rb
+++ b/app/controllers/projects/uploads_controller.rb
@@ -28,7 +28,7 @@ class Projects::UploadsController < Projects::ApplicationController
end
def image_or_video?
- uploader && uploader.file.exists? && uploader.image_or_video?
+ uploader && uploader.exists? && uploader.image_or_video?
end
def uploader_class
diff --git a/app/finders/groups_finder.rb b/app/finders/groups_finder.rb
index 88d71b0a87b..0c4c4b10fb6 100644
--- a/app/finders/groups_finder.rb
+++ b/app/finders/groups_finder.rb
@@ -57,7 +57,7 @@ class GroupsFinder < UnionFinder
end
def owned_groups
- current_user&.groups || Group.none
+ current_user&.owned_groups || Group.none
end
def include_public_groups?
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 8d017b9b3b1..acaa028eaa2 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -31,6 +31,7 @@ module Ci
has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
delegate :id, to: :project, prefix: true
+ delegate :full_path, to: :project, prefix: true
validates :source, exclusion: { in: %w(unknown), unless: :importing? }, on: :create
validates :sha, presence: { unless: :importing? }
@@ -336,7 +337,7 @@ module Ci
return @config_processor if defined?(@config_processor)
@config_processor ||= begin
- Gitlab::Ci::YamlProcessor.new(ci_yaml_file, project.full_path)
+ Gitlab::Ci::YamlProcessor.new(ci_yaml_file)
rescue Gitlab::Ci::YamlProcessor::ValidationError, Psych::SyntaxError => e
self.yaml_errors = e.message
nil
diff --git a/app/models/environment.rb b/app/models/environment.rb
index 44e39e21442..b6868ccbe8f 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -6,9 +6,7 @@ class Environment < ActiveRecord::Base
belongs_to :project, required: true, validate: true
- has_many :deployments,
- -> (env) { where(project_id: env.project_id) },
- dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :deployments, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :last_deployment, -> { order('deployments.id DESC') }, class_name: 'Deployment'
diff --git a/app/models/repository.rb b/app/models/repository.rb
index 9d1de4f4306..f11cf1b065d 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -834,10 +834,6 @@ class Repository
}
end
- def user_to_committer(user)
- Gitlab::Git.committer_hash(email: user.email, name: user.name)
- end
-
def can_be_merged?(source_sha, target_branch)
our_commit = rugged.branches[target_branch].target
their_commit = rugged.lookup(source_sha)
@@ -859,54 +855,34 @@ class Repository
end
def revert(
- user, commit, branch_name,
+ user, commit, branch_name, message,
start_branch_name: nil, start_project: project)
- with_branch(
- user,
- branch_name,
- start_branch_name: start_branch_name,
- start_repository: start_project.repository.raw_repository) do |start_commit|
-
- revert_tree_id = check_revert_content(commit, start_commit.sha)
- unless revert_tree_id
- raise Repository::CreateTreeError.new('Failed to revert commit')
- end
- committer = user_to_committer(user)
-
- create_commit(message: commit.revert_message(user),
- author: committer,
- committer: committer,
- tree: revert_tree_id,
- parents: [start_commit.sha])
+ with_cache_hooks do
+ raw_repository.revert(
+ user: user,
+ commit: commit.raw,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_project.repository.raw_repository
+ )
end
end
def cherry_pick(
- user, commit, branch_name,
+ user, commit, branch_name, message,
start_branch_name: nil, start_project: project)
- with_branch(
- user,
- branch_name,
- start_branch_name: start_branch_name,
- start_repository: start_project.repository.raw_repository) do |start_commit|
- cherry_pick_tree_id = check_cherry_pick_content(commit, start_commit.sha)
- unless cherry_pick_tree_id
- raise Repository::CreateTreeError.new('Failed to cherry-pick commit')
- end
-
- committer = user_to_committer(user)
-
- create_commit(message: commit.cherry_pick_message(user),
- author: {
- email: commit.author_email,
- name: commit.author_name,
- time: commit.authored_date
- },
- committer: committer,
- tree: cherry_pick_tree_id,
- parents: [start_commit.sha])
+ with_cache_hooks do
+ raw_repository.cherry_pick(
+ user: user,
+ commit: commit.raw,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_project.repository.raw_repository
+ )
end
end
@@ -918,36 +894,6 @@ class Repository
end
end
- def check_revert_content(target_commit, source_sha)
- args = [target_commit.sha, source_sha]
- args << { mainline: 1 } if target_commit.merge_commit?
-
- revert_index = rugged.revert_commit(*args)
- return false if revert_index.conflicts?
-
- tree_id = revert_index.write_tree(rugged)
- return false unless diff_exists?(source_sha, tree_id)
-
- tree_id
- end
-
- def check_cherry_pick_content(target_commit, source_sha)
- args = [target_commit.sha, source_sha]
- args << 1 if target_commit.merge_commit?
-
- cherry_pick_index = rugged.cherrypick_commit(*args)
- return false if cherry_pick_index.conflicts?
-
- tree_id = cherry_pick_index.write_tree(rugged)
- return false unless diff_exists?(source_sha, tree_id)
-
- tree_id
- end
-
- def diff_exists?(sha1, sha2)
- rugged.diff(sha1, sha2).size > 0
- end
-
def merged_to_root_ref?(branch_name)
branch_commit = commit(branch_name)
root_ref_commit = commit(root_ref)
diff --git a/app/services/commits/change_service.rb b/app/services/commits/change_service.rb
index 85c2fcf9ea6..b9d0173a2d0 100644
--- a/app/services/commits/change_service.rb
+++ b/app/services/commits/change_service.rb
@@ -12,14 +12,18 @@ module Commits
raise NotImplementedError unless repository.respond_to?(action)
# rubocop:disable GitlabSecurity/PublicSend
+ message = @commit.public_send(:"#{action}_message", current_user)
+
+ # rubocop:disable GitlabSecurity/PublicSend
repository.public_send(
action,
current_user,
@commit,
@branch_name,
+ message,
start_project: @start_project,
start_branch_name: @start_branch)
- rescue Repository::CreateTreeError
+ rescue Gitlab::Git::Repository::CreateTreeError
error_msg = "Sorry, we cannot #{action.to_s.dasherize} this #{@commit.change_type_title(current_user)} automatically.
This #{@commit.change_type_title(current_user)} may already have been #{action.to_s.dasherize}ed, or a more recent commit may have updated some of its content."
raise ChangeError, error_msg
diff --git a/app/uploaders/avatar_uploader.rb b/app/uploaders/avatar_uploader.rb
index 66d3bcb998a..cbb79376d5f 100644
--- a/app/uploaders/avatar_uploader.rb
+++ b/app/uploaders/avatar_uploader.rb
@@ -9,7 +9,7 @@ class AvatarUploader < GitlabUploader
end
def exists?
- model.avatar.file && model.avatar.file.exists?
+ model.avatar.file && model.avatar.file.present?
end
# We set move_to_store and move_to_cache to 'false' to prevent stealing
diff --git a/app/uploaders/gitlab_uploader.rb b/app/uploaders/gitlab_uploader.rb
index 05a2091633a..7f72b3ce471 100644
--- a/app/uploaders/gitlab_uploader.rb
+++ b/app/uploaders/gitlab_uploader.rb
@@ -51,7 +51,7 @@ class GitlabUploader < CarrierWave::Uploader::Base
end
def exists?
- file.try(:exists?)
+ file.present?
end
# Override this if you don't want to save files by default to the Rails.root directory
diff --git a/app/views/feature_highlight/_issue_boards.svg b/app/views/feature_highlight/_issue_boards.svg
deleted file mode 100644
index 1522c9d51c9..00000000000
--- a/app/views/feature_highlight/_issue_boards.svg
+++ /dev/null
@@ -1,98 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="214" height="102" viewBox="0 0 214 102" xmlns:xlink="http://www.w3.org/1999/xlink">
- <defs>
- <path id="b" d="M2,0 L46,0 C47.1045695,-2.02906125e-16 48,0.8954305 48,2 L48,27 C48,28.1045695 47.1045695,29 46,29 L2,29 C0.8954305,29 1.3527075e-16,28.1045695 0,27 L0,2 C-1.3527075e-16,0.8954305 0.8954305,2.02906125e-16 2,0 Z"/>
- <filter id="a" width="102.1%" height="106.9%" x="-1%" y="-1.7%" filterUnits="objectBoundingBox">
- <feOffset dy="1" in="SourceAlpha" result="shadowOffsetOuter1"/>
- <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.05 0" in="shadowOffsetOuter1"/>
- </filter>
- <path id="d" d="M2,0 L46,0 C47.1045695,-2.02906125e-16 48,0.8954305 48,2 L48,26 C48,27.1045695 47.1045695,28 46,28 L2,28 C0.8954305,28 1.3527075e-16,27.1045695 0,26 L0,2 C-1.3527075e-16,0.8954305 0.8954305,2.02906125e-16 2,0 Z"/>
- <filter id="c" width="102.1%" height="107.1%" x="-1%" y="-1.8%" filterUnits="objectBoundingBox">
- <feOffset dy="1" in="SourceAlpha" result="shadowOffsetOuter1"/>
- <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.05 0" in="shadowOffsetOuter1"/>
- </filter>
- <path id="e" d="M5,0 L53,0 C55.7614237,-5.07265313e-16 58,2.23857625 58,5 L58,91 C58,93.7614237 55.7614237,96 53,96 L5,96 C2.23857625,96 3.38176876e-16,93.7614237 0,91 L0,5 C-3.38176876e-16,2.23857625 2.23857625,5.07265313e-16 5,0 Z"/>
- <path id="h" d="M2,0 L46,0 C47.1045695,-2.02906125e-16 48,0.8954305 48,2 L48,26 C48,27.1045695 47.1045695,28 46,28 L2,28 C0.8954305,28 1.3527075e-16,27.1045695 0,26 L0,2 C-1.3527075e-16,0.8954305 0.8954305,2.02906125e-16 2,0 Z"/>
- <filter id="g" width="102.1%" height="107.1%" x="-1%" y="-1.8%" filterUnits="objectBoundingBox">
- <feOffset dy="1" in="SourceAlpha" result="shadowOffsetOuter1"/>
- <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.05 0" in="shadowOffsetOuter1"/>
- </filter>
- <path id="j" d="M2,0 L46,0 C47.1045695,-2.02906125e-16 48,0.8954305 48,2 L48,26 C48,27.1045695 47.1045695,28 46,28 L2,28 C0.8954305,28 1.3527075e-16,27.1045695 0,26 L0,2 C-1.3527075e-16,0.8954305 0.8954305,2.02906125e-16 2,0 Z"/>
- <filter id="i" width="102.1%" height="107.1%" x="-1%" y="-1.8%" filterUnits="objectBoundingBox">
- <feOffset dy="1" in="SourceAlpha" result="shadowOffsetOuter1"/>
- <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.05 0" in="shadowOffsetOuter1"/>
- </filter>
- <path id="l" d="M2,0 L46,0 C47.1045695,-2.02906125e-16 48,0.8954305 48,2 L48,26 C48,27.1045695 47.1045695,28 46,28 L2,28 C0.8954305,28 1.3527075e-16,27.1045695 0,26 L0,2 C-1.3527075e-16,0.8954305 0.8954305,2.02906125e-16 2,0 Z"/>
- <filter id="k" width="102.1%" height="107.1%" x="-1%" y="-1.8%" filterUnits="objectBoundingBox">
- <feOffset dy="1" in="SourceAlpha" result="shadowOffsetOuter1"/>
- <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.05 0" in="shadowOffsetOuter1"/>
- </filter>
- <path id="n" d="M2,0 L46,0 C47.1045695,-2.02906125e-16 48,0.8954305 48,2 L48,26 C48,27.1045695 47.1045695,28 46,28 L2,28 C0.8954305,28 1.3527075e-16,27.1045695 0,26 L0,2 C-1.3527075e-16,0.8954305 0.8954305,2.02906125e-16 2,0 Z"/>
- <filter id="m" width="102.1%" height="107.1%" x="-1%" y="-1.8%" filterUnits="objectBoundingBox">
- <feOffset dy="1" in="SourceAlpha" result="shadowOffsetOuter1"/>
- <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.05 0" in="shadowOffsetOuter1"/>
- </filter>
- <path id="p" d="M2,0 L46,0 C47.1045695,-2.02906125e-16 48,0.8954305 48,2 L48,26 C48,27.1045695 47.1045695,28 46,28 L2,28 C0.8954305,28 1.3527075e-16,27.1045695 0,26 L0,2 C-1.3527075e-16,0.8954305 0.8954305,2.02906125e-16 2,0 Z"/>
- <filter id="o" width="102.1%" height="107.1%" x="-1%" y="-1.8%" filterUnits="objectBoundingBox">
- <feOffset dy="1" in="SourceAlpha" result="shadowOffsetOuter1"/>
- <feColorMatrix values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.05 0" in="shadowOffsetOuter1"/>
- </filter>
- </defs>
- <g fill="none" fill-rule="evenodd">
- <path fill="#D6D4DE" d="M14,21 L62,21 C64.7614237,21 67,23.2385763 67,26 L67,112 C67,114.761424 64.7614237,117 62,117 L14,117 C11.2385763,117 9,114.761424 9,112 L9,26 C9,23.2385763 11.2385763,21 14,21 Z"/>
- <g transform="translate(11 23)">
- <path fill="#FFFFFF" d="M5,0 L53,0 C55.7614237,-5.07265313e-16 58,2.23857625 58,5 L58,91 C58,93.7614237 55.7614237,96 53,96 L5,96 C2.23857625,96 3.38176876e-16,93.7614237 0,91 L0,5 C-3.38176876e-16,2.23857625 2.23857625,5.07265313e-16 5,0 Z"/>
- <path fill="#FC6D26" d="M4,0 L54,0 C56.209139,-4.05812251e-16 58,1.790861 58,4 L0,4 C-2.705415e-16,1.790861 1.790861,4.05812251e-16 4,0 Z"/>
- <g transform="translate(5 10)">
- <use fill="black" filter="url(#a)" xlink:href="#b"/>
- <use fill="#F9F9F9" xlink:href="#b"/>
- </g>
- <g transform="translate(5 42)">
- <use fill="black" filter="url(#c)" xlink:href="#d"/>
- <use fill="#FEF0E8" xlink:href="#d"/>
- <path fill="#FEE1D3" d="M9,8 L33,8 C34.1045695,8 35,8.8954305 35,10 C35,11.1045695 34.1045695,12 33,12 L9,12 C7.8954305,12 7,11.1045695 7,10 C7,8.8954305 7.8954305,8 9,8 Z"/>
- <path fill="#FDC4A8" d="M9,17 L17,17 C18.1045695,17 19,17.8954305 19,19 C19,20.1045695 18.1045695,21 17,21 L9,21 C7.8954305,21 7,20.1045695 7,19 C7,17.8954305 7.8954305,17 9,17 Z"/>
- <path fill="#FC6D26" d="M24,17 L32,17 C33.1045695,17 34,17.8954305 34,19 C34,20.1045695 33.1045695,21 32,21 L24,21 C22.8954305,21 22,20.1045695 22,19 C22,17.8954305 22.8954305,17 24,17 Z"/>
- </g>
- </g>
- <path fill="#D6D4DE" d="M148,26 L196,26 C198.761424,26 201,28.2385763 201,31 L201,117 C201,119.761424 198.761424,122 196,122 L148,122 C145.238576,122 143,119.761424 143,117 L143,31 C143,28.2385763 145.238576,26 148,26 Z"/>
- <g transform="translate(145 28)">
- <mask id="f" fill="white">
- <use xlink:href="#e"/>
- </mask>
- <use fill="#FFFFFF" xlink:href="#e"/>
- <path fill="#FC6D26" d="M4,0 L54,0 C56.209139,-4.05812251e-16 58,1.790861 58,4 L0,4 C-2.705415e-16,1.790861 1.790861,4.05812251e-16 4,0 Z" mask="url(#f)"/>
- <g transform="translate(5 10)">
- <use fill="black" filter="url(#g)" xlink:href="#h"/>
- <use fill="#F9F9F9" xlink:href="#h"/>
- </g>
- <g transform="translate(5 42)">
- <use fill="black" filter="url(#i)" xlink:href="#j"/>
- <use fill="#FEF0E8" xlink:href="#j"/>
- <path fill="#FEE1D3" d="M9 8L33 8C34.1045695 8 35 8.8954305 35 10 35 11.1045695 34.1045695 12 33 12L9 12C7.8954305 12 7 11.1045695 7 10 7 8.8954305 7.8954305 8 9 8zM9 17L13 17C14.1045695 17 15 17.8954305 15 19 15 20.1045695 14.1045695 21 13 21L9 21C7.8954305 21 7 20.1045695 7 19 7 17.8954305 7.8954305 17 9 17z"/>
- <path fill="#FC6D26" d="M20,17 L24,17 C25.1045695,17 26,17.8954305 26,19 C26,20.1045695 25.1045695,21 24,21 L20,21 C18.8954305,21 18,20.1045695 18,19 C18,17.8954305 18.8954305,17 20,17 Z"/>
- <path fill="#FDC4A8" d="M31,17 L35,17 C36.1045695,17 37,17.8954305 37,19 C37,20.1045695 36.1045695,21 35,21 L31,21 C29.8954305,21 29,20.1045695 29,19 C29,17.8954305 29.8954305,17 31,17 Z"/>
- </g>
- </g>
- <path fill="#D6D4DE" d="M81,14 L129,14 C131.761424,14 134,16.2385763 134,19 L134,105 C134,107.761424 131.761424,110 129,110 L81,110 C78.2385763,110 76,107.761424 76,105 L76,19 C76,16.2385763 78.2385763,14 81,14 Z"/>
- <g transform="translate(78 16)">
- <path fill="#FFFFFF" d="M5,0 L53,0 C55.7614237,-5.07265313e-16 58,2.23857625 58,5 L58,91 C58,93.7614237 55.7614237,96 53,96 L5,96 C2.23857625,96 3.38176876e-16,93.7614237 0,91 L0,5 C-3.38176876e-16,2.23857625 2.23857625,5.07265313e-16 5,0 Z"/>
- <g transform="translate(5 10)">
- <use fill="black" filter="url(#k)" xlink:href="#l"/>
- <use fill="#EFEDF8" xlink:href="#l"/>
- <path fill="#E1DBF1" d="M9,8 L33,8 C34.1045695,8 35,8.8954305 35,10 C35,11.1045695 34.1045695,12 33,12 L9,12 C7.8954305,12 7,11.1045695 7,10 C7,8.8954305 7.8954305,8 9,8 Z"/>
- <path fill="#6B4FBB" d="M9,17 L13,17 C14.1045695,17 15,17.8954305 15,19 C15,20.1045695 14.1045695,21 13,21 L9,21 C7.8954305,21 7,20.1045695 7,19 C7,17.8954305 7.8954305,17 9,17 Z"/>
- <path fill="#C3B8E3" d="M20,17 L28,17 C29.1045695,17 30,17.8954305 30,19 C30,20.1045695 29.1045695,21 28,21 L20,21 C18.8954305,21 18,20.1045695 18,19 C18,17.8954305 18.8954305,17 20,17 Z"/>
- </g>
- <g transform="translate(5 42)">
- <use fill="black" filter="url(#m)" xlink:href="#n"/>
- <use fill="#F9F9F9" xlink:href="#n"/>
- </g>
- <g transform="translate(5 74)">
- <rect width="34" height="4" x="7" y="7" fill="#E1DBF1" rx="2"/>
- <use fill="black" filter="url(#o)" xlink:href="#p"/>
- <use fill="#F9F9F9" xlink:href="#p"/>
- </g>
- <path fill="#6B4FBB" d="M4,0 L54,0 C56.209139,-4.05812251e-16 58,1.790861 58,4 L0,4 C-2.705415e-16,1.790861 1.790861,4.05812251e-16 4,0 Z"/>
- </g>
- </g>
-</svg>
diff --git a/app/views/layouts/nav/sidebar/_project.html.haml b/app/views/layouts/nav/sidebar/_project.html.haml
index 29f1fc6b354..8ec2e2c79fc 100644
--- a/app/views/layouts/nav/sidebar/_project.html.haml
+++ b/app/views/layouts/nav/sidebar/_project.html.haml
@@ -117,20 +117,6 @@
= link_to project_boards_path(@project), title: boards_link_text do
%span
= boards_link_text
- .feature-highlight.js-feature-highlight{ disabled: true, data: { trigger: 'manual', container: 'body', toggle: 'popover', placement: 'right', highlight: 'issue-boards' } }
- .feature-highlight-popover-content
- = render 'feature_highlight/issue_boards.svg'
- .feature-highlight-popover-sub-content
- %span= _('Use')
- = link_to 'Issue Boards', project_boards_path(@project)
- %span= _('to create customized software development workflows like')
- %strong= _('Scrum')
- %span= _('or')
- %strong= _('Kanban')
- %hr
- %button.btn-link.dismiss-feature-highlight{ type: 'button' }
- %span= _("Got it! Don't show this again")
- = custom_icon('thumbs_up')
= nav_link(controller: :labels) do
= link_to project_labels_path(@project), title: 'Labels' do
diff --git a/app/views/projects/empty.html.haml b/app/views/projects/empty.html.haml
index 5e980314307..d5b83b53ebb 100644
--- a/app/views/projects/empty.html.haml
+++ b/app/views/projects/empty.html.haml
@@ -27,6 +27,8 @@
- if can?(current_user, :push_code, @project)
%div{ class: container_class }
+ - if show_auto_devops_callout?(@project)
+ = render 'shared/auto_devops_callout'
.prepend-top-20
.empty_wrapper
%h3.page-title-empty
diff --git a/app/views/shared/icons/_thumbs_up.svg b/app/views/shared/icons/_thumbs_up.svg
deleted file mode 100644
index 7267462418e..00000000000
--- a/app/views/shared/icons/_thumbs_up.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8.33 5h5.282a2 2 0 0 1 1.963 2.38l-.563 2.905a3 3 0 0 1-.243.732l-1.104 2.286A3 3 0 0 1 10.964 15H7a3 3 0 0 1-3-3V5.7a2 2 0 0 1 .436-1.247l3.11-3.9A.632.632 0 0 1 8.486.5l.138.137a1 1 0 0 1 .28.87L8.33 5zM1 6h2v7H1a1 1 0 0 1-1-1V7a1 1 0 0 1 1-1z"/></svg>
diff --git a/changelogs/unreleased/37890-auto-devops-banner-is-not-shown-when-the-repository-is-empty-new-project.yml b/changelogs/unreleased/37890-auto-devops-banner-is-not-shown-when-the-repository-is-empty-new-project.yml
new file mode 100644
index 00000000000..2dddfa0b882
--- /dev/null
+++ b/changelogs/unreleased/37890-auto-devops-banner-is-not-shown-when-the-repository-is-empty-new-project.yml
@@ -0,0 +1,5 @@
+---
+title: Fix Auto DevOps banner to be shown on empty projects
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/do-not-perform-disk-check.yml b/changelogs/unreleased/do-not-perform-disk-check.yml
new file mode 100644
index 00000000000..cc139ee2c9e
--- /dev/null
+++ b/changelogs/unreleased/do-not-perform-disk-check.yml
@@ -0,0 +1,5 @@
+---
+title: File uploaders do not perform hard check, only soft check
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/reoganize-deployment-indexes.yml b/changelogs/unreleased/reoganize-deployment-indexes.yml
new file mode 100644
index 00000000000..87734b4fe4b
--- /dev/null
+++ b/changelogs/unreleased/reoganize-deployment-indexes.yml
@@ -0,0 +1,5 @@
+---
+title: Reorganize indexes for the "deployments" table
+merge_request:
+author:
+type: other
diff --git a/config/initializers/postgresql_opclasses_support.rb b/config/initializers/postgresql_opclasses_support.rb
index 820cc89ef57..c2f3023b330 100644
--- a/config/initializers/postgresql_opclasses_support.rb
+++ b/config/initializers/postgresql_opclasses_support.rb
@@ -127,7 +127,7 @@ module ActiveRecord
orders = desc_order_columns.any? ? Hash[desc_order_columns.map {|order_column| [order_column, :desc]}] : {}
where = inddef.scan(/WHERE (.+)$/).flatten[0]
using = inddef.scan(/USING (.+?) /).flatten[0].to_sym
- opclasses = Hash[inddef.scan(/\((.+)\)$/).flatten[0].split(',').map do |column_and_opclass|
+ opclasses = Hash[inddef.scan(/\((.+?)\)(?:$| WHERE )/).flatten[0].split(',').map do |column_and_opclass|
column, opclass = column_and_opclass.split(' ').map(&:strip)
[column, opclass] if opclass
end.compact]
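
For illustration, the effect of tightening the opclass-parsing regex on a partial index definition. The `inddef` string below is a representative Postgres index definition assumed for this sketch, not taken from the commit; it shows why the old greedy pattern produced the bogus opclasses entry fixed in db/schema.rb further down.

    inddef = 'CREATE INDEX index_for_ci_builds_retried_migration ON ci_builds USING btree (id) WHERE (retried IS NULL)'

    # Old, greedy pattern: runs through to the last ")" and swallows the WHERE
    # clause, which is how the `opclasses: {"id)"=>"WHERE"}` entry ended up in db/schema.rb.
    old_capture = inddef.scan(/\((.+)\)$/).flatten[0]
    # New pattern: non-greedy, and stops at either end-of-string or " WHERE ".
    new_capture = inddef.scan(/\((.+?)\)(?:$| WHERE )/).flatten[0]

    puts old_capture.inspect # => "id) WHERE (retried IS NULL"
    puts new_capture.inspect # => "id"
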
diff --git a/db/migrate/20170912113435_clean_stages_statuses_migration.rb b/db/migrate/20170912113435_clean_stages_statuses_migration.rb
new file mode 100644
index 00000000000..fc091d7894e
--- /dev/null
+++ b/db/migrate/20170912113435_clean_stages_statuses_migration.rb
@@ -0,0 +1,26 @@
+class CleanStagesStatusesMigration < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ class Stage < ActiveRecord::Base
+ include ::EachBatch
+ self.table_name = 'ci_stages'
+ end
+
+ def up
+ Gitlab::BackgroundMigration.steal('MigrateStageStatus')
+
+ Stage.where('status IS NULL').each_batch(of: 50) do |batch|
+ range = batch.pluck('MIN(id)', 'MAX(id)').first
+
+ Gitlab::BackgroundMigration::MigrateStageStatus.new.perform(*range)
+ end
+ end
+
+ def down
+ # noop
+ end
+end
diff --git a/db/migrate/20170918222253_reorganize_deployments_indexes.rb b/db/migrate/20170918222253_reorganize_deployments_indexes.rb
new file mode 100644
index 00000000000..139427ed2b9
--- /dev/null
+++ b/db/migrate/20170918222253_reorganize_deployments_indexes.rb
@@ -0,0 +1,28 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class ReorganizeDeploymentsIndexes < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_index_if_not_exists :deployments, [:environment_id, :iid, :project_id]
+ remove_index_if_exists :deployments, [:project_id, :environment_id, :iid]
+ end
+
+ def down
+ add_index_if_not_exists :deployments, [:project_id, :environment_id, :iid]
+ remove_index_if_exists :deployments, [:environment_id, :iid, :project_id]
+ end
+
+ def add_index_if_not_exists(table, columns)
+ add_concurrent_index(table, columns) unless index_exists?(table, columns)
+ end
+
+ def remove_index_if_exists(table, columns)
+ remove_concurrent_index(table, columns) if index_exists?(table, columns)
+ end
+end
diff --git a/db/migrate/20170918223303_add_deployments_index_for_last_deployment.rb b/db/migrate/20170918223303_add_deployments_index_for_last_deployment.rb
new file mode 100644
index 00000000000..b91efb86d98
--- /dev/null
+++ b/db/migrate/20170918223303_add_deployments_index_for_last_deployment.rb
@@ -0,0 +1,21 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddDeploymentsIndexForLastDeployment < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ TO_INDEX = [:deployments, %i[environment_id id]].freeze
+
+ def up
+ add_concurrent_index(*TO_INDEX)
+ end
+
+ def down
+ remove_concurrent_index(*TO_INDEX)
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 2d8c33591f0..3ec430c0078 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 20170914135630) do
+ActiveRecord::Schema.define(version: 20170918223303) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
@@ -256,7 +256,7 @@ ActiveRecord::Schema.define(version: 20170914135630) do
add_index "ci_builds", ["commit_id", "status", "type"], name: "index_ci_builds_on_commit_id_and_status_and_type", using: :btree
add_index "ci_builds", ["commit_id", "type", "name", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_name_and_ref", using: :btree
add_index "ci_builds", ["commit_id", "type", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_ref", using: :btree
- add_index "ci_builds", ["id"], name: "index_for_ci_builds_retried_migration", where: "(retried IS NULL)", using: :btree, opclasses: {"id)"=>"WHERE"}
+ add_index "ci_builds", ["id"], name: "index_for_ci_builds_retried_migration", where: "(retried IS NULL)", using: :btree
add_index "ci_builds", ["project_id"], name: "index_ci_builds_on_project_id", using: :btree
add_index "ci_builds", ["protected"], name: "index_ci_builds_on_protected", using: :btree
add_index "ci_builds", ["runner_id"], name: "index_ci_builds_on_runner_id", using: :btree
@@ -506,7 +506,8 @@ ActiveRecord::Schema.define(version: 20170914135630) do
end
add_index "deployments", ["created_at"], name: "index_deployments_on_created_at", using: :btree
- add_index "deployments", ["project_id", "environment_id", "iid"], name: "index_deployments_on_project_id_and_environment_id_and_iid", using: :btree
+ add_index "deployments", ["environment_id", "id"], name: "index_deployments_on_environment_id_and_id", using: :btree
+ add_index "deployments", ["environment_id", "iid", "project_id"], name: "index_deployments_on_environment_id_and_iid_and_project_id", using: :btree
add_index "deployments", ["project_id", "iid"], name: "index_deployments_on_project_id_and_iid", unique: true, using: :btree
create_table "emails", force: :cascade do |t|
diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md
index f69d71a5c39..aad81843299 100644
--- a/doc/ci/yaml/README.md
+++ b/doc/ci/yaml/README.md
@@ -252,6 +252,8 @@ The `cache:key` variable can use any of the [predefined variables](../variables/
The default key is **default** across the project, therefore everything is
shared between each pipelines and jobs by default, starting from GitLab 9.0.
+>**Note:** The `cache:key` variable cannot contain the `/` character.
+
---
**Example configurations**
@@ -276,7 +278,7 @@ To enable per-job and per-branch caching:
```yaml
cache:
- key: "$CI_JOB_NAME/$CI_COMMIT_REF_NAME"
+ key: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME"
untracked: true
```
@@ -284,7 +286,7 @@ To enable per-branch and per-stage caching:
```yaml
cache:
- key: "$CI_JOB_STAGE/$CI_COMMIT_REF_NAME"
+ key: "$CI_JOB_STAGE-$CI_COMMIT_REF_NAME"
untracked: true
```
@@ -293,7 +295,7 @@ If you use **Windows Batch** to run your shell scripts you need to replace
```yaml
cache:
- key: "%CI_JOB_STAGE%/%CI_COMMIT_REF_NAME%"
+ key: "%CI_JOB_STAGE%-%CI_COMMIT_REF_NAME%"
untracked: true
```
@@ -302,7 +304,7 @@ If you use **Windows PowerShell** to run your shell scripts you need to replace
```yaml
cache:
- key: "$env:CI_JOB_STAGE/$env:CI_COMMIT_REF_NAME"
+ key: "$env:CI_JOB_STAGE-$env:CI_COMMIT_REF_NAME"
untracked: true
```
diff --git a/lib/gitlab/ci/build/policy.rb b/lib/gitlab/ci/build/policy.rb
new file mode 100644
index 00000000000..d10cc7802d4
--- /dev/null
+++ b/lib/gitlab/ci/build/policy.rb
@@ -0,0 +1,15 @@
+module Gitlab
+ module Ci
+ module Build
+ module Policy
+ def self.fabricate(specs)
+ specifications = specs.to_h.map do |spec, value|
+ self.const_get(spec.to_s.camelize).new(value)
+ end
+
+ specifications.compact
+ end
+ end
+ end
+ end
+end
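
A minimal standalone sketch of the dispatch `Policy.fabricate` performs: each key of a job's only/except hash is camelized into a class name in the `Policy` namespace and instantiated with its value. The classes below are stand-ins so the snippet runs on its own, and `split('_').map(&:capitalize).join` stands in for ActiveSupport's `camelize`.

    module Policy
      class Refs
        def initialize(refs)
          @refs = Array(refs)
        end
      end

      class Kubernetes
        def initialize(spec)
          @spec = spec
        end
      end

      # Map each spec key to a policy class and build one instance per entry.
      def self.fabricate(specs)
        specs.to_h.map do |spec, value|
          const_get(spec.to_s.split('_').map(&:capitalize).join).new(value)
        end
      end
    end

    p Policy.fabricate(refs: %w[master tags], kubernetes: 'active')
    # => [#<Policy::Refs @refs=["master", "tags"]>, #<Policy::Kubernetes @spec="active">]
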
diff --git a/lib/gitlab/ci/build/policy/kubernetes.rb b/lib/gitlab/ci/build/policy/kubernetes.rb
new file mode 100644
index 00000000000..b20d374288f
--- /dev/null
+++ b/lib/gitlab/ci/build/policy/kubernetes.rb
@@ -0,0 +1,19 @@
+module Gitlab
+ module Ci
+ module Build
+ module Policy
+ class Kubernetes < Policy::Specification
+ def initialize(spec)
+ unless spec.to_sym == :active
+ raise UnknownPolicyError
+ end
+ end
+
+ def satisfied_by?(pipeline)
+ pipeline.has_kubernetes_active?
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/build/policy/refs.rb b/lib/gitlab/ci/build/policy/refs.rb
new file mode 100644
index 00000000000..eadc0948d2f
--- /dev/null
+++ b/lib/gitlab/ci/build/policy/refs.rb
@@ -0,0 +1,43 @@
+module Gitlab
+ module Ci
+ module Build
+ module Policy
+ class Refs < Policy::Specification
+ def initialize(refs)
+ @patterns = Array(refs)
+ end
+
+ def satisfied_by?(pipeline)
+ @patterns.any? do |pattern|
+ pattern, path = pattern.split('@', 2)
+
+ matches_path?(path, pipeline) &&
+ matches_pattern?(pattern, pipeline)
+ end
+ end
+
+ private
+
+ def matches_path?(path, pipeline)
+ return true unless path
+
+ pipeline.project_full_path == path
+ end
+
+ def matches_pattern?(pattern, pipeline)
+ return true if pipeline.tag? && pattern == 'tags'
+ return true if pipeline.branch? && pattern == 'branches'
+ return true if pipeline.source == pattern
+ return true if pipeline.source&.pluralize == pattern
+
+ if pattern.first == "/" && pattern.last == "/"
+ Regexp.new(pattern[1...-1]) =~ pipeline.ref
+ else
+ pattern == pipeline.ref
+ end
+ end
+ end
+ end
+ end
+ end
+end
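
As an aside, a standalone sketch of the ref-matching rules the new `Refs` policy encodes, using a plain Struct in place of `Ci::Pipeline`. The real class also matches a pluralized pipeline source and an optional `@group/project` path suffix, which are omitted here.

    Pipeline = Struct.new(:ref, :tag, :source) do
      def tag?
        tag
      end

      def branch?
        !tag
      end
    end

    def matches_pattern?(pattern, pipeline)
      return true if pipeline.tag? && pattern == 'tags'
      return true if pipeline.branch? && pattern == 'branches'
      return true if pipeline.source == pattern

      # Patterns wrapped in slashes are treated as regular expressions.
      if pattern.start_with?('/') && pattern.end_with?('/')
        Regexp.new(pattern[1...-1]) =~ pipeline.ref
      else
        pattern == pipeline.ref
      end
    end

    pipeline = Pipeline.new('feature/login', false, 'push')
    puts matches_pattern?('branches', pipeline)        # true
    puts !!matches_pattern?('/^feature\//', pipeline)  # true (regex form)
    puts matches_pattern?('master', pipeline)          # false
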
diff --git a/lib/gitlab/ci/build/policy/specification.rb b/lib/gitlab/ci/build/policy/specification.rb
new file mode 100644
index 00000000000..c317291f29d
--- /dev/null
+++ b/lib/gitlab/ci/build/policy/specification.rb
@@ -0,0 +1,25 @@
+module Gitlab
+ module Ci
+ module Build
+ module Policy
+ ##
+ # Abstract class that defines an interface of job policy
+ # specification.
+ #
+ # Used for job's only/except policy configuration.
+ #
+ class Specification
+ UnknownPolicyError = Class.new(StandardError)
+
+ def initialize(spec)
+ @spec = spec
+ end
+
+ def satisfied_by?(pipeline)
+ raise NotImplementedError
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb
index 7582964b24e..0bd78b03448 100644
--- a/lib/gitlab/ci/yaml_processor.rb
+++ b/lib/gitlab/ci/yaml_processor.rb
@@ -5,12 +5,11 @@ module Gitlab
include Gitlab::Ci::Config::Entry::LegacyValidationHelpers
- attr_reader :path, :cache, :stages, :jobs
+ attr_reader :cache, :stages, :jobs
- def initialize(config, path = nil)
+ def initialize(config)
@ci_config = Gitlab::Ci::Config.new(config)
@config = @ci_config.to_hash
- @path = path
unless @ci_config.valid?
raise ValidationError, @ci_config.errors.first
@@ -21,28 +20,12 @@ module Gitlab
raise ValidationError, e.message
end
- def builds_for_stage_and_ref(stage, ref, tag = false, source = nil)
- jobs_for_stage_and_ref(stage, ref, tag, source).map do |name, _|
- build_attributes(name)
- end
- end
-
def builds
@jobs.map do |name, _|
build_attributes(name)
end
end
- def stage_seeds(pipeline)
- seeds = @stages.uniq.map do |stage|
- builds = pipeline_stage_builds(stage, pipeline)
-
- Gitlab::Ci::Stage::Seed.new(pipeline, stage, builds) if builds.any?
- end
-
- seeds.compact
- end
-
def build_attributes(name)
job = @jobs[name.to_sym] || {}
@@ -70,6 +53,32 @@ module Gitlab
}.compact }
end
+ def pipeline_stage_builds(stage, pipeline)
+ selected_jobs = @jobs.select do |_, job|
+ next unless job[:stage] == stage
+
+ only_specs = Gitlab::Ci::Build::Policy
+ .fabricate(job.fetch(:only, {}))
+ except_specs = Gitlab::Ci::Build::Policy
+ .fabricate(job.fetch(:except, {}))
+
+ only_specs.all? { |spec| spec.satisfied_by?(pipeline) } &&
+ except_specs.none? { |spec| spec.satisfied_by?(pipeline) }
+ end
+
+ selected_jobs.map { |_, job| build_attributes(job[:name]) }
+ end
+
+ def stage_seeds(pipeline)
+ seeds = @stages.uniq.map do |stage|
+ builds = pipeline_stage_builds(stage, pipeline)
+
+ Gitlab::Ci::Stage::Seed.new(pipeline, stage, builds) if builds.any?
+ end
+
+ seeds.compact
+ end
+
def self.validation_message(content)
return 'Please provide content of .gitlab-ci.yml' if content.blank?
@@ -83,34 +92,6 @@ module Gitlab
private
- def pipeline_stage_builds(stage, pipeline)
- builds = builds_for_stage_and_ref(
- stage, pipeline.ref, pipeline.tag?, pipeline.source)
-
- builds.select do |build|
- job = @jobs[build.fetch(:name).to_sym]
- has_kubernetes = pipeline.has_kubernetes_active?
- only_kubernetes = job.dig(:only, :kubernetes)
- except_kubernetes = job.dig(:except, :kubernetes)
-
- [!only_kubernetes && !except_kubernetes,
- only_kubernetes && has_kubernetes,
- except_kubernetes && !has_kubernetes].any?
- end
- end
-
- def jobs_for_ref(ref, tag = false, source = nil)
- @jobs.select do |_, job|
- process?(job.dig(:only, :refs), job.dig(:except, :refs), ref, tag, source)
- end
- end
-
- def jobs_for_stage_and_ref(stage, ref, tag = false, source = nil)
- jobs_for_ref(ref, tag, source).select do |_, job|
- job[:stage] == stage
- end
- end
-
def initial_parsing
##
# Global config
@@ -203,51 +184,6 @@ module Gitlab
raise ValidationError, "#{name} job: on_stop job #{on_stop} needs to have action stop defined"
end
end
-
- def process?(only_params, except_params, ref, tag, source)
- if only_params.present?
- return false unless matching?(only_params, ref, tag, source)
- end
-
- if except_params.present?
- return false if matching?(except_params, ref, tag, source)
- end
-
- true
- end
-
- def matching?(patterns, ref, tag, source)
- patterns.any? do |pattern|
- pattern, path = pattern.split('@', 2)
- matches_path?(path) && matches_pattern?(pattern, ref, tag, source)
- end
- end
-
- def matches_path?(path)
- return true unless path
-
- path == self.path
- end
-
- def matches_pattern?(pattern, ref, tag, source)
- return true if tag && pattern == 'tags'
- return true if !tag && pattern == 'branches'
- return true if source_to_pattern(source) == pattern
-
- if pattern.first == "/" && pattern.last == "/"
- Regexp.new(pattern[1...-1]) =~ ref
- else
- pattern == ref
- end
- end
-
- def source_to_pattern(source)
- if %w[api external web].include?(source)
- source
- else
- source&.pluralize
- end
- end
end
end
end
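
The combination rule in the new `pipeline_stage_builds` is: a job is selected when every `only` policy is satisfied by the pipeline and no `except` policy is. A minimal sketch with lambdas standing in for the fabricated policy objects:

    only_specs   = [->(pipeline) { pipeline[:ref] == 'master' }]
    except_specs = [->(pipeline) { pipeline[:source] == 'schedule' }]

    pipeline = { ref: 'master', source: 'push' }

    selected = only_specs.all? { |spec| spec.call(pipeline) } &&
               except_specs.none? { |spec| spec.call(pipeline) }

    puts selected # => true
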
diff --git a/lib/gitlab/git.rb b/lib/gitlab/git.rb
index b4b6326cfdd..c78fe63f9b5 100644
--- a/lib/gitlab/git.rb
+++ b/lib/gitlab/git.rb
@@ -57,6 +57,15 @@ module Gitlab
def version
Gitlab::VersionInfo.parse(Gitlab::Popen.popen(%W(#{Gitlab.config.git.bin_path} --version)).first)
end
+
+ def check_namespace!(*objects)
+ expected_namespace = self.name + '::'
+ objects.each do |object|
+ unless object.class.name.start_with?(expected_namespace)
+ raise ArgumentError, "expected object in #{expected_namespace}, got #{object}"
+ end
+ end
+ end
end
end
end
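
A self-contained sketch of the guard `check_namespace!` provides, using a dummy module so it runs outside GitLab; in this commit the helper replaces the explicit `is_a?(Gitlab::Git::Repository)` checks elsewhere in the diff.

    module MyLib
      # Raise unless every argument's class lives under this module's namespace.
      def self.check_namespace!(*objects)
        expected_namespace = self.name + '::'
        objects.each do |object|
          unless object.class.name.start_with?(expected_namespace)
            raise ArgumentError, "expected object in #{expected_namespace}, got #{object}"
          end
        end
      end

      class Repository; end
    end

    MyLib.check_namespace!(MyLib::Repository.new)  # passes
    MyLib.check_namespace!('a plain string')       # raises ArgumentError
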
diff --git a/lib/gitlab/git/commit.rb b/lib/gitlab/git/commit.rb
index 1f370686186..1957c254c28 100644
--- a/lib/gitlab/git/commit.rb
+++ b/lib/gitlab/git/commit.rb
@@ -413,6 +413,10 @@ module Gitlab
end
end
+ def merge_commit?
+ parent_ids.size > 1
+ end
+
private
def init_from_hash(hash)
diff --git a/lib/gitlab/git/operation_service.rb b/lib/gitlab/git/operation_service.rb
index dcdec818f5e..6f054ed3c6c 100644
--- a/lib/gitlab/git/operation_service.rb
+++ b/lib/gitlab/git/operation_service.rb
@@ -15,9 +15,7 @@ module Gitlab
end
# Refactoring aid
- unless new_repository.is_a?(Gitlab::Git::Repository)
- raise "expected a Gitlab::Git::Repository, got #{new_repository}"
- end
+ Gitlab::Git.check_namespace!(new_repository)
@repository = new_repository
end
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 18210bcab4e..4b000bd31e2 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -19,6 +19,7 @@ module Gitlab
InvalidRef = Class.new(StandardError)
GitError = Class.new(StandardError)
DeleteBranchError = Class.new(StandardError)
+ CreateTreeError = Class.new(StandardError)
class << self
# Unlike `new`, `create` takes the storage path, not the storage name
@@ -684,6 +685,88 @@ module Gitlab
nil
end
+ def revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ OperationService.new(user, self).with_branch(
+ branch_name,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ ) do |start_commit|
+
+ Gitlab::Git.check_namespace!(commit, start_repository)
+
+ revert_tree_id = check_revert_content(commit, start_commit.sha)
+ raise CreateTreeError unless revert_tree_id
+
+ committer = user_to_committer(user)
+
+ create_commit(message: message,
+ author: committer,
+ committer: committer,
+ tree: revert_tree_id,
+ parents: [start_commit.sha])
+ end
+ end
+
+ def check_revert_content(target_commit, source_sha)
+ args = [target_commit.sha, source_sha]
+ args << { mainline: 1 } if target_commit.merge_commit?
+
+ revert_index = rugged.revert_commit(*args)
+ return false if revert_index.conflicts?
+
+ tree_id = revert_index.write_tree(rugged)
+ return false unless diff_exists?(source_sha, tree_id)
+
+ tree_id
+ end
+
+ def cherry_pick(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ OperationService.new(user, self).with_branch(
+ branch_name,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ ) do |start_commit|
+
+ Gitlab::Git.check_namespace!(commit, start_repository)
+
+ cherry_pick_tree_id = check_cherry_pick_content(commit, start_commit.sha)
+ raise CreateTreeError unless cherry_pick_tree_id
+
+ committer = user_to_committer(user)
+
+ create_commit(message: message,
+ author: {
+ email: commit.author_email,
+ name: commit.author_name,
+ time: commit.authored_date
+ },
+ committer: committer,
+ tree: cherry_pick_tree_id,
+ parents: [start_commit.sha])
+ end
+ end
+
+ def check_cherry_pick_content(target_commit, source_sha)
+ args = [target_commit.sha, source_sha]
+ args << 1 if target_commit.merge_commit?
+
+ cherry_pick_index = rugged.cherrypick_commit(*args)
+ return false if cherry_pick_index.conflicts?
+
+ tree_id = cherry_pick_index.write_tree(rugged)
+ return false unless diff_exists?(source_sha, tree_id)
+
+ tree_id
+ end
+
+ def diff_exists?(sha1, sha2)
+ rugged.diff(sha1, sha2).size > 0
+ end
+
+ def user_to_committer(user)
+ Gitlab::Git.committer_hash(email: user.email, name: user.name)
+ end
+
def create_commit(params = {})
params[:message].delete!("\r")
@@ -835,7 +918,7 @@ module Gitlab
end
def with_repo_branch_commit(start_repository, start_branch_name)
- raise "expected Gitlab::Git::Repository, got #{start_repository}" unless start_repository.is_a?(Gitlab::Git::Repository)
+ Gitlab::Git.check_namespace!(start_repository)
return yield nil if start_repository.empty_repo?
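A hedged sketch of how the new revert helper might be called from higher-level code; the user, commit and branch values below are assumptions, and a conflicting or empty three-way merge raises the new CreateTreeError instead of creating a commit:

    repository.revert(
      user: current_user,                 # object responding to #email and #name
      commit: commit_to_revert,           # Gitlab::Git::Commit being reverted
      branch_name: 'revert-abc123',       # branch that receives the new commit
      message: 'Revert "Broken change"',
      start_branch_name: 'master',
      start_repository: repository
    )
    # cherry_pick takes the same keywords but keeps the original commit's author.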
diff --git a/scripts/schema_changed.sh b/scripts/schema_changed.sh
new file mode 100644
index 00000000000..5de2b35571d
--- /dev/null
+++ b/scripts/schema_changed.sh
@@ -0,0 +1,10 @@
+function schema_changed() {
+ if [[ ! -z `git diff --name-only -- db/schema.rb` ]]; then
+ echo "db/schema.rb after rake db:migrate:reset is different from one in the repository"
+ exit 1
+ else
+ echo "db/schema.rb after rake db:migrate:reset matches one in the repository"
+ fi
+}
+
+schema_changed
diff --git a/spec/features/projects/user_edits_files_spec.rb b/spec/features/projects/user_edits_files_spec.rb
index 3129aad8473..19954313c23 100644
--- a/spec/features/projects/user_edits_files_spec.rb
+++ b/spec/features/projects/user_edits_files_spec.rb
@@ -20,8 +20,7 @@ describe 'User edits files' do
it 'inserts a content of a file', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
-
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
@@ -38,8 +37,7 @@ describe 'User edits files' do
it 'commits an edited file', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
-
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -56,7 +54,7 @@ describe 'User edits files' do
click_link('.gitignore')
find('.js-edit-blob').click
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -67,15 +65,13 @@ describe 'User edits files' do
click_link('Changes')
- wait_for_requests
expect(page).to have_content('*.rbca')
end
it 'shows the diff of an edited file', js: true do
click_link('.gitignore')
find('.js-edit-blob').click
-
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
click_link('Preview changes')
@@ -104,7 +100,7 @@ describe 'User edits files' do
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
)
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
@@ -120,7 +116,7 @@ describe 'User edits files' do
click_link('Fork')
- wait_for_requests
+ find('.file-editor', match: :first)
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
diff --git a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js b/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js
deleted file mode 100644
index 114d282e48a..00000000000
--- a/spec/javascripts/feature_highlight/feature_highlight_helper_spec.js
+++ /dev/null
@@ -1,219 +0,0 @@
-import Cookies from 'js-cookie';
-import {
- getCookieName,
- getSelector,
- showPopover,
- hidePopover,
- dismiss,
- mouseleave,
- mouseenter,
- setupDismissButton,
-} from '~/feature_highlight/feature_highlight_helper';
-
-describe('feature highlight helper', () => {
- describe('getCookieName', () => {
- it('returns `feature-highlighted-` prefix', () => {
- const cookieId = 'cookieId';
- expect(getCookieName(cookieId)).toEqual(`feature-highlighted-${cookieId}`);
- });
- });
-
- describe('getSelector', () => {
- it('returns js-feature-highlight selector', () => {
- const highlightId = 'highlightId';
- expect(getSelector(highlightId)).toEqual(`.js-feature-highlight[data-highlight=${highlightId}]`);
- });
- });
-
- describe('showPopover', () => {
- it('returns true when popover is shown', () => {
- const context = {
- hasClass: () => false,
- popover: () => {},
- addClass: () => {},
- };
-
- expect(showPopover.call(context)).toEqual(true);
- });
-
- it('returns false when popover is already shown', () => {
- const context = {
- hasClass: () => true,
- };
-
- expect(showPopover.call(context)).toEqual(false);
- });
-
- it('shows popover', (done) => {
- const context = {
- hasClass: () => false,
- popover: () => {},
- addClass: () => {},
- };
-
- spyOn(context, 'popover').and.callFake((method) => {
- expect(method).toEqual('show');
- done();
- });
-
- showPopover.call(context);
- });
-
- it('adds disable-animation and js-popover-show class', (done) => {
- const context = {
- hasClass: () => false,
- popover: () => {},
- addClass: () => {},
- };
-
- spyOn(context, 'addClass').and.callFake((classNames) => {
- expect(classNames).toEqual('disable-animation js-popover-show');
- done();
- });
-
- showPopover.call(context);
- });
- });
-
- describe('hidePopover', () => {
- it('returns true when popover is hidden', () => {
- const context = {
- hasClass: () => true,
- popover: () => {},
- removeClass: () => {},
- };
-
- expect(hidePopover.call(context)).toEqual(true);
- });
-
- it('returns false when popover is already hidden', () => {
- const context = {
- hasClass: () => false,
- };
-
- expect(hidePopover.call(context)).toEqual(false);
- });
-
- it('hides popover', (done) => {
- const context = {
- hasClass: () => true,
- popover: () => {},
- removeClass: () => {},
- };
-
- spyOn(context, 'popover').and.callFake((method) => {
- expect(method).toEqual('hide');
- done();
- });
-
- hidePopover.call(context);
- });
-
- it('removes disable-animation and js-popover-show class', (done) => {
- const context = {
- hasClass: () => true,
- popover: () => {},
- removeClass: () => {},
- };
-
- spyOn(context, 'removeClass').and.callFake((classNames) => {
- expect(classNames).toEqual('disable-animation js-popover-show');
- done();
- });
-
- hidePopover.call(context);
- });
- });
-
- describe('dismiss', () => {
- const context = {
- hide: () => {},
- };
-
- beforeEach(() => {
- spyOn(Cookies, 'set').and.callFake(() => {});
- spyOn(hidePopover, 'call').and.callFake(() => {});
- spyOn(context, 'hide').and.callFake(() => {});
- dismiss.call(context);
- });
-
- it('sets cookie to true', () => {
- expect(Cookies.set).toHaveBeenCalled();
- });
-
- it('calls hide popover', () => {
- expect(hidePopover.call).toHaveBeenCalled();
- });
-
- it('calls hide', () => {
- expect(context.hide).toHaveBeenCalled();
- });
- });
-
- describe('mouseleave', () => {
- it('calls hide popover if .popover:hover is false', () => {
- const fakeJquery = {
- length: 0,
- };
-
- spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
- spyOn(hidePopover, 'call');
- mouseleave();
- expect(hidePopover.call).toHaveBeenCalled();
- });
-
- it('does not call hide popover if .popover:hover is true', () => {
- const fakeJquery = {
- length: 1,
- };
-
- spyOn($.fn, 'init').and.callFake(selector => (selector === '.popover:hover' ? fakeJquery : $.fn));
- spyOn(hidePopover, 'call');
- mouseleave();
- expect(hidePopover.call).not.toHaveBeenCalled();
- });
- });
-
- describe('mouseenter', () => {
- const context = {};
-
- it('shows popover', () => {
- spyOn(showPopover, 'call').and.returnValue(false);
- mouseenter.call(context);
- expect(showPopover.call).toHaveBeenCalled();
- });
-
- it('registers mouseleave event if popover is showed', (done) => {
- spyOn(showPopover, 'call').and.returnValue(true);
- spyOn($.fn, 'on').and.callFake((eventName) => {
- expect(eventName).toEqual('mouseleave');
- done();
- });
- mouseenter.call(context);
- });
-
- it('does not register mouseleave event if popover is not showed', () => {
- spyOn(showPopover, 'call').and.returnValue(false);
- const spy = spyOn($.fn, 'on').and.callFake(() => {});
- mouseenter.call(context);
- expect(spy).not.toHaveBeenCalled();
- });
- });
-
- describe('setupDismissButton', () => {
- it('registers click event callback', (done) => {
- const context = {
- getAttribute: () => 'popoverId',
- dataset: {
- highlight: 'cookieId',
- },
- };
-
- spyOn($.fn, 'on').and.callFake((event) => {
- expect(event).toEqual('click');
- done();
- });
- setupDismissButton.call(context);
- });
- });
-});
diff --git a/spec/javascripts/feature_highlight/feature_highlight_options_spec.js b/spec/javascripts/feature_highlight/feature_highlight_options_spec.js
deleted file mode 100644
index 7feb361edec..00000000000
--- a/spec/javascripts/feature_highlight/feature_highlight_options_spec.js
+++ /dev/null
@@ -1,45 +0,0 @@
-import domContentLoaded from '~/feature_highlight/feature_highlight_options';
-import bp from '~/breakpoints';
-
-describe('feature highlight options', () => {
- describe('domContentLoaded', () => {
- const highlightOrder = [];
-
- beforeEach(() => {
- // Check for when highlightFeatures is called
- spyOn(highlightOrder, 'find').and.callFake(() => {});
- });
-
- it('should not call highlightFeatures when breakpoint is xs', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('xs');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).not.toHaveBeenCalled();
- });
-
- it('should not call highlightFeatures when breakpoint is sm', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('sm');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).not.toHaveBeenCalled();
- });
-
- it('should not call highlightFeatures when breakpoint is md', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('md');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).not.toHaveBeenCalled();
- });
-
- it('should call highlightFeatures when breakpoint is lg', () => {
- spyOn(bp, 'getBreakpointSize').and.returnValue('lg');
-
- domContentLoaded(highlightOrder);
- expect(bp.getBreakpointSize).toHaveBeenCalled();
- expect(highlightOrder.find).toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/javascripts/feature_highlight/feature_highlight_spec.js b/spec/javascripts/feature_highlight/feature_highlight_spec.js
deleted file mode 100644
index 6abe8425ee7..00000000000
--- a/spec/javascripts/feature_highlight/feature_highlight_spec.js
+++ /dev/null
@@ -1,122 +0,0 @@
-import Cookies from 'js-cookie';
-import * as featureHighlightHelper from '~/feature_highlight/feature_highlight_helper';
-import * as featureHighlight from '~/feature_highlight/feature_highlight';
-
-describe('feature highlight', () => {
- describe('setupFeatureHighlightPopover', () => {
- const selector = '.js-feature-highlight[data-highlight=test]';
- beforeEach(() => {
- setFixtures(`
- <div>
- <div class="js-feature-highlight" data-highlight="test" disabled>
- Trigger
- </div>
- </div>
- <div class="feature-highlight-popover-content">
- Content
- <div class="dismiss-feature-highlight">
- Dismiss
- </div>
- </div>
- `);
- spyOn(window, 'addEventListener');
- spyOn(window, 'removeEventListener');
- featureHighlight.setupFeatureHighlightPopover('test', 0);
- });
-
- it('setups popover content', () => {
- const $popoverContent = $('.feature-highlight-popover-content');
- const outerHTML = $popoverContent.prop('outerHTML');
-
- expect($(selector).data('content')).toEqual(outerHTML);
- });
-
- it('setups mouseenter', () => {
- const showSpy = spyOn(featureHighlightHelper.showPopover, 'call');
- $(selector).trigger('mouseenter');
-
- expect(showSpy).toHaveBeenCalled();
- });
-
- it('setups debounced mouseleave', (done) => {
- const hideSpy = spyOn(featureHighlightHelper.hidePopover, 'call');
- $(selector).trigger('mouseleave');
-
- // Even though we've set the debounce to 0ms, setTimeout is needed for the debounce
- setTimeout(() => {
- expect(hideSpy).toHaveBeenCalled();
- done();
- }, 0);
- });
-
- it('setups inserted.bs.popover', () => {
- $(selector).trigger('mouseenter');
- const popoverId = $(selector).attr('aria-describedby');
- const spyEvent = spyOnEvent(`#${popoverId} .dismiss-feature-highlight`, 'click');
-
- $(`#${popoverId} .dismiss-feature-highlight`).click();
- expect(spyEvent).toHaveBeenTriggered();
- });
-
- it('setups show.bs.popover', () => {
- $(selector).trigger('show.bs.popover');
- expect(window.addEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function));
- });
-
- it('setups hide.bs.popover', () => {
- $(selector).trigger('hide.bs.popover');
- expect(window.removeEventListener).toHaveBeenCalledWith('scroll', jasmine.any(Function));
- });
-
- it('removes disabled attribute', () => {
- expect($('.js-feature-highlight').is(':disabled')).toEqual(false);
- });
-
- it('displays popover', () => {
- expect($(selector).attr('aria-describedby')).toBeFalsy();
- $(selector).trigger('mouseenter');
- expect($(selector).attr('aria-describedby')).toBeTruthy();
- });
- });
-
- describe('shouldHighlightFeature', () => {
- it('should return false if element is not found', () => {
- spyOn(document, 'querySelector').and.returnValue(null);
- spyOn(Cookies, 'get').and.returnValue(null);
-
- expect(featureHighlight.shouldHighlightFeature()).toBeFalsy();
- });
-
- it('should return false if previouslyDismissed', () => {
- spyOn(document, 'querySelector').and.returnValue(document.createElement('div'));
- spyOn(Cookies, 'get').and.returnValue('true');
-
- expect(featureHighlight.shouldHighlightFeature()).toBeFalsy();
- });
-
- it('should return true if element is found and not previouslyDismissed', () => {
- spyOn(document, 'querySelector').and.returnValue(document.createElement('div'));
- spyOn(Cookies, 'get').and.returnValue(null);
-
- expect(featureHighlight.shouldHighlightFeature()).toBeTruthy();
- });
- });
-
- describe('highlightFeatures', () => {
- it('calls setupFeatureHighlightPopover if shouldHighlightFeature returns true', () => {
- // Mimic shouldHighlightFeature set to true
- const highlightOrder = ['issue-boards'];
- spyOn(highlightOrder, 'find').and.returnValue(highlightOrder[0]);
-
- expect(featureHighlight.highlightFeatures(highlightOrder)).toEqual(true);
- });
-
- it('does not call setupFeatureHighlightPopover if shouldHighlightFeature returns false', () => {
- // Mimic shouldHighlightFeature set to false
- const highlightOrder = ['issue-boards'];
- spyOn(highlightOrder, 'find').and.returnValue(null);
-
- expect(featureHighlight.highlightFeatures(highlightOrder)).toEqual(false);
- });
- });
-});
diff --git a/spec/javascripts/pretty_time_spec.js b/spec/javascripts/pretty_time_spec.js
index 0a6c479a95b..084ffe08917 100644
--- a/spec/javascripts/pretty_time_spec.js
+++ b/spec/javascripts/pretty_time_spec.js
@@ -1,215 +1,133 @@
-import '~/lib/utils/pretty_time';
+import { parseSeconds, abbreviateTime, stringifyTime } from '~/lib/utils/pretty_time';
-(() => {
- const prettyTime = gl.utils.prettyTime;
+function assertTimeUnits(obj, minutes, hours, days, weeks) {
+ expect(obj.minutes).toBe(minutes);
+ expect(obj.hours).toBe(hours);
+ expect(obj.days).toBe(days);
+ expect(obj.weeks).toBe(weeks);
+}
- describe('prettyTime methods', function () {
- describe('parseSeconds', function () {
- it('should correctly parse a negative value', function () {
- const parser = prettyTime.parseSeconds;
+describe('prettyTime methods', () => {
+ describe('parseSeconds', () => {
+ it('should correctly parse a negative value', () => {
+ const zeroSeconds = parseSeconds(-1000);
- const zeroSeconds = parser(-1000);
-
- expect(zeroSeconds.minutes).toBe(16);
- expect(zeroSeconds.hours).toBe(0);
- expect(zeroSeconds.days).toBe(0);
- expect(zeroSeconds.weeks).toBe(0);
- });
-
- it('should correctly parse a zero value', function () {
- const parser = prettyTime.parseSeconds;
-
- const zeroSeconds = parser(0);
-
- expect(zeroSeconds.minutes).toBe(0);
- expect(zeroSeconds.hours).toBe(0);
- expect(zeroSeconds.days).toBe(0);
- expect(zeroSeconds.weeks).toBe(0);
- });
-
- it('should correctly parse a small non-zero second values', function () {
- const parser = prettyTime.parseSeconds;
-
- const subOneMinute = parser(10);
-
- expect(subOneMinute.minutes).toBe(0);
- expect(subOneMinute.hours).toBe(0);
- expect(subOneMinute.days).toBe(0);
- expect(subOneMinute.weeks).toBe(0);
-
- const aboveOneMinute = parser(100);
-
- expect(aboveOneMinute.minutes).toBe(1);
- expect(aboveOneMinute.hours).toBe(0);
- expect(aboveOneMinute.days).toBe(0);
- expect(aboveOneMinute.weeks).toBe(0);
-
- const manyMinutes = parser(1000);
-
- expect(manyMinutes.minutes).toBe(16);
- expect(manyMinutes.hours).toBe(0);
- expect(manyMinutes.days).toBe(0);
- expect(manyMinutes.weeks).toBe(0);
- });
-
- it('should correctly parse large second values', function () {
- const parser = prettyTime.parseSeconds;
-
- const aboveOneHour = parser(4800);
-
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
-
- const aboveOneDay = parser(110000);
-
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(6);
- expect(aboveOneDay.days).toBe(3);
- expect(aboveOneDay.weeks).toBe(0);
-
- const aboveOneWeek = parser(25000000);
-
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(0);
- expect(aboveOneWeek.days).toBe(3);
- expect(aboveOneWeek.weeks).toBe(173);
- });
+ assertTimeUnits(zeroSeconds, 16, 0, 0, 0);
+ });
- it('should correctly accept a custom param for hoursPerDay', function () {
- const parser = prettyTime.parseSeconds;
- const config = { hoursPerDay: 24 };
+ it('should correctly parse a zero value', () => {
+ const zeroSeconds = parseSeconds(0);
- const aboveOneHour = parser(4800, config);
+ assertTimeUnits(zeroSeconds, 0, 0, 0, 0);
+ });
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
+  it('should correctly parse small non-zero second values', () => {
+ const subOneMinute = parseSeconds(10);
+ const aboveOneMinute = parseSeconds(100);
+ const manyMinutes = parseSeconds(1000);
- const aboveOneDay = parser(110000, config);
+ assertTimeUnits(subOneMinute, 0, 0, 0, 0);
+ assertTimeUnits(aboveOneMinute, 1, 0, 0, 0);
+ assertTimeUnits(manyMinutes, 16, 0, 0, 0);
+ });
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(6);
- expect(aboveOneDay.days).toBe(1);
- expect(aboveOneDay.weeks).toBe(0);
+ it('should correctly parse large second values', () => {
+ const aboveOneHour = parseSeconds(4800);
+ const aboveOneDay = parseSeconds(110000);
+ const aboveOneWeek = parseSeconds(25000000);
- const aboveOneWeek = parser(25000000, config);
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 6, 3, 0);
+ assertTimeUnits(aboveOneWeek, 26, 0, 3, 173);
+ });
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(8);
- expect(aboveOneWeek.days).toBe(4);
+ it('should correctly accept a custom param for hoursPerDay', () => {
+ const config = { hoursPerDay: 24 };
- expect(aboveOneWeek.weeks).toBe(57);
- });
+ const aboveOneHour = parseSeconds(4800, config);
+ const aboveOneDay = parseSeconds(110000, config);
+ const aboveOneWeek = parseSeconds(25000000, config);
- it('should correctly accept a custom param for daysPerWeek', function () {
- const parser = prettyTime.parseSeconds;
- const config = { daysPerWeek: 7 };
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 6, 1, 0);
+ assertTimeUnits(aboveOneWeek, 26, 8, 4, 57);
+ });
- const aboveOneHour = parser(4800, config);
+ it('should correctly accept a custom param for daysPerWeek', () => {
+ const config = { daysPerWeek: 7 };
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
+ const aboveOneHour = parseSeconds(4800, config);
+ const aboveOneDay = parseSeconds(110000, config);
+ const aboveOneWeek = parseSeconds(25000000, config);
- const aboveOneDay = parser(110000, config);
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 6, 3, 0);
+ assertTimeUnits(aboveOneWeek, 26, 0, 0, 124);
+ });
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(6);
- expect(aboveOneDay.days).toBe(3);
- expect(aboveOneDay.weeks).toBe(0);
+ it('should correctly accept custom params for daysPerWeek and hoursPerDay', () => {
+ const config = { daysPerWeek: 55, hoursPerDay: 14 };
- const aboveOneWeek = parser(25000000, config);
+ const aboveOneHour = parseSeconds(4800, config);
+ const aboveOneDay = parseSeconds(110000, config);
+ const aboveOneWeek = parseSeconds(25000000, config);
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(0);
- expect(aboveOneWeek.days).toBe(0);
+ assertTimeUnits(aboveOneHour, 20, 1, 0, 0);
+ assertTimeUnits(aboveOneDay, 33, 2, 2, 0);
+ assertTimeUnits(aboveOneWeek, 26, 0, 1, 9);
+ });
+ });
- expect(aboveOneWeek.weeks).toBe(124);
- });
+ describe('stringifyTime', () => {
+ it('should stringify values with all non-zero units', () => {
+ const timeObject = {
+ weeks: 1,
+ days: 4,
+ hours: 7,
+ minutes: 20,
+ };
- it('should correctly accept custom params for daysPerWeek and hoursPerDay', function () {
- const parser = prettyTime.parseSeconds;
- const config = { daysPerWeek: 55, hoursPerDay: 14 };
+ const timeString = stringifyTime(timeObject);
- const aboveOneHour = parser(4800, config);
+ expect(timeString).toBe('1w 4d 7h 20m');
+ });
- expect(aboveOneHour.minutes).toBe(20);
- expect(aboveOneHour.hours).toBe(1);
- expect(aboveOneHour.days).toBe(0);
- expect(aboveOneHour.weeks).toBe(0);
+ it('should stringify values with some non-zero units', () => {
+ const timeObject = {
+ weeks: 0,
+ days: 4,
+ hours: 0,
+ minutes: 20,
+ };
- const aboveOneDay = parser(110000, config);
+ const timeString = stringifyTime(timeObject);
- expect(aboveOneDay.minutes).toBe(33);
- expect(aboveOneDay.hours).toBe(2);
- expect(aboveOneDay.days).toBe(2);
- expect(aboveOneDay.weeks).toBe(0);
+ expect(timeString).toBe('4d 20m');
+ });
- const aboveOneWeek = parser(25000000, config);
+ it('should stringify values with no non-zero units', () => {
+ const timeObject = {
+ weeks: 0,
+ days: 0,
+ hours: 0,
+ minutes: 0,
+ };
- expect(aboveOneWeek.minutes).toBe(26);
- expect(aboveOneWeek.hours).toBe(0);
- expect(aboveOneWeek.days).toBe(1);
+ const timeString = stringifyTime(timeObject);
- expect(aboveOneWeek.weeks).toBe(9);
- });
+ expect(timeString).toBe('0m');
});
+ });
- describe('stringifyTime', function () {
- it('should stringify values with all non-zero units', function () {
- const timeObject = {
- weeks: 1,
- days: 4,
- hours: 7,
- minutes: 20,
- };
-
- const timeString = prettyTime.stringifyTime(timeObject);
-
- expect(timeString).toBe('1w 4d 7h 20m');
- });
-
- it('should stringify values with some non-zero units', function () {
- const timeObject = {
- weeks: 0,
- days: 4,
- hours: 0,
- minutes: 20,
- };
-
- const timeString = prettyTime.stringifyTime(timeObject);
-
- expect(timeString).toBe('4d 20m');
- });
-
- it('should stringify values with no non-zero units', function () {
- const timeObject = {
- weeks: 0,
- days: 0,
- hours: 0,
- minutes: 0,
- };
-
- const timeString = prettyTime.stringifyTime(timeObject);
-
- expect(timeString).toBe('0m');
- });
+ describe('abbreviateTime', () => {
+ it('should abbreviate stringified times for weeks', () => {
+ const fullTimeString = '1w 3d 4h 5m';
+ expect(abbreviateTime(fullTimeString)).toBe('1w');
});
- describe('abbreviateTime', function () {
- it('should abbreviate stringified times for weeks', function () {
- const fullTimeString = '1w 3d 4h 5m';
- expect(prettyTime.abbreviateTime(fullTimeString)).toBe('1w');
- });
-
- it('should abbreviate stringified times for non-weeks', function () {
- const fullTimeString = '0w 3d 4h 5m';
- expect(prettyTime.abbreviateTime(fullTimeString)).toBe('3d');
- });
+ it('should abbreviate stringified times for non-weeks', () => {
+ const fullTimeString = '0w 3d 4h 5m';
+ expect(abbreviateTime(fullTimeString)).toBe('3d');
});
});
-})(window.gl || (window.gl = {}));
+});
diff --git a/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
new file mode 100644
index 00000000000..15eb01eb472
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/policy/kubernetes_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Policy::Kubernetes do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when kubernetes service is active' do
+ set(:project) { create(:kubernetes_project) }
+
+ it 'is satisfied by a kubernetes pipeline' do
+ expect(described_class.new('active'))
+ .to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when kubernetes service is inactive' do
+ set(:project) { create(:project) }
+
+ it 'is not satisfied by a pipeline without kubernetes available' do
+ expect(described_class.new('active'))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when kubernetes policy is invalid' do
+ it 'raises an error' do
+ expect { described_class.new('unknown') }
+ .to raise_error(described_class::UnknownPolicyError)
+ end
+ end
+end
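Taken together, the spec above implies a small runtime contract; an illustrative sketch, with `pipeline` standing in for a real CI pipeline record:

    policy = Gitlab::Ci::Build::Policy::Kubernetes.new('active')
    policy.satisfied_by?(pipeline)  # true only when the project has Kubernetes available
    Gitlab::Ci::Build::Policy::Kubernetes.new('unknown')
    # => raises Gitlab::Ci::Build::Policy::Kubernetes::UnknownPolicyError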
diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
new file mode 100644
index 00000000000..7211187e511
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
@@ -0,0 +1,87 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Policy::Refs do
+ describe '#satisfied_by?' do
+ context 'when matching ref' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'master') }
+
+ it 'is satisfied when pipeline branch matches' do
+ expect(described_class.new(%w[master deploy]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when pipeline branch does not match' do
+ expect(described_class.new(%w[feature fix]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+    context 'when matching tags' do
+ context 'when pipeline runs for a tag' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'feature', tag: true)
+ end
+
+ it 'is satisfied when tags matcher is specified' do
+ expect(described_class.new(%w[master tags]))
+ .to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when pipeline is not created for a tag' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'feature', tag: false)
+ end
+
+ it 'is not satisfied when tag match is specified' do
+ expect(described_class.new(%w[master tags]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+ end
+
+ context 'when also matching a path' do
+ let(:pipeline) do
+ build_stubbed(:ci_pipeline, ref: 'master')
+ end
+
+      it 'is satisfied when provided path matches specified one' do
+ expect(described_class.new(%W[master@#{pipeline.project_full_path}]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when path differs' do
+ expect(described_class.new(%w[master@some/fork/repository]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+    context 'when matching a source' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, source: :push) }
+
+      it 'is satisfied when provided source keyword matches' do
+ expect(described_class.new(%w[pushes]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when provided source keyword does not match' do
+ expect(described_class.new(%w[triggers]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+
+ context 'when matching a ref by a regular expression' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'docs-something') }
+
+ it 'is satisfied when regexp matches pipeline ref' do
+ expect(described_class.new(['/docs-.*/']))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied when regexp does not match pipeline ref' do
+ expect(described_class.new(['/fix-.*/']))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
+ end
+end
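The ref patterns exercised above can be summarised in one place; a short sketch under the same assumptions as the spec (the `pipeline` and project path are placeholders):

    # Branch names, the special `tags`/`branches` keywords, `ref@path` forms,
    # pipeline sources such as `pushes`/`triggers`, and /regexp/ strings are accepted.
    Gitlab::Ci::Build::Policy::Refs.new(%w[master tags]).satisfied_by?(pipeline)
    Gitlab::Ci::Build::Policy::Refs.new(%w[master@group/project]).satisfied_by?(pipeline)
    Gitlab::Ci::Build::Policy::Refs.new(['/docs-.*/']).satisfied_by?(pipeline)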
diff --git a/spec/lib/gitlab/ci/build/policy_spec.rb b/spec/lib/gitlab/ci/build/policy_spec.rb
new file mode 100644
index 00000000000..20ee3dd3e89
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/policy_spec.rb
@@ -0,0 +1,37 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Policy do
+ let(:policy) { spy('policy specification') }
+
+ before do
+ stub_const("#{described_class}::Something", policy)
+ end
+
+ describe '.fabricate' do
+ context 'when policy exists' do
+ it 'fabricates and initializes relevant policy' do
+ specs = described_class.fabricate(something: 'some value')
+
+ expect(specs).to be_an Array
+ expect(specs).to be_one
+ expect(policy).to have_received(:new).with('some value')
+ end
+ end
+
+ context 'when some policies are not defined' do
+      it 'raises an error for an undefined policy' do
+ expect { described_class.fabricate(unknown: 'first') }
+ .to raise_error(NameError)
+ end
+ end
+
+ context 'when passing a nil value as specs' do
+ it 'returns an empty array' do
+ specs = described_class.fabricate(nil)
+
+ expect(specs).to be_an Array
+ expect(specs).to be_empty
+ end
+ end
+ end
+end
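A sketch of how .fabricate is presumably consumed when an only:/except: hash is turned into policy objects; the refs/kubernetes keys are an assumption based on the sibling policy classes in this commit:

    # Each recognised key becomes one policy instance; nil yields an empty array.
    Gitlab::Ci::Build::Policy.fabricate(refs: %w[master], kubernetes: 'active')
    Gitlab::Ci::Build::Policy.fabricate(nil)  # => []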
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 2278230f338..d72f8553f55 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -3,8 +3,7 @@ require 'spec_helper'
module Gitlab
module Ci
describe YamlProcessor, :lib do
- subject { described_class.new(config, path) }
- let(:path) { 'path' }
+ subject { described_class.new(config) }
describe 'our current .gitlab-ci.yml' do
let(:config) { File.read("#{Rails.root}/.gitlab-ci.yml") }
@@ -17,7 +16,7 @@ module Gitlab
end
describe '#build_attributes' do
- subject { described_class.new(config, path).build_attributes(:rspec) }
+ subject { described_class.new(config).build_attributes(:rspec) }
describe 'coverage entry' do
describe 'code coverage regexp' do
@@ -167,8 +166,6 @@ module Gitlab
end
context 'when kubernetes policy is specified' do
- let(:pipeline) { create(:ci_empty_pipeline) }
-
let(:config) do
YAML.dump(
spinach: { stage: 'test', script: 'spinach' },
@@ -204,7 +201,7 @@ module Gitlab
end
end
- describe "#builds_for_stage_and_ref" do
+ describe "#pipeline_stage_builds" do
let(:type) { 'test' }
it "returns builds if no branch specified" do
@@ -213,10 +210,10 @@ module Gitlab
rspec: { script: "rspec" }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref(type, "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -241,9 +238,9 @@ module Gitlab
rspec: { script: "rspec", only: ["deploy"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0)
end
it "does not return builds if only has regexp with another branch" do
@@ -252,9 +249,9 @@ module Gitlab
rspec: { script: "rspec", only: ["/^deploy$/"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0)
end
it "returns builds if only has specified this branch" do
@@ -263,9 +260,9 @@ module Gitlab
rspec: { script: "rspec", only: ["master"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
end
it "returns builds if only has a list of branches including specified" do
@@ -274,9 +271,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: %w(master deploy) }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
end
it "returns builds if only has a branches keyword specified" do
@@ -285,9 +282,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: ["branches"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
end
it "does not return builds if only has a tags keyword" do
@@ -296,9 +293,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: ["tags"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
end
it "returns builds if only has special keywords specified and source matches" do
@@ -315,9 +312,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: [possibility[:keyword]] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1)
end
end
@@ -335,21 +332,27 @@ module Gitlab
rspec: { script: "rspec", type: type, only: [possibility[:keyword]] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(0)
end
end
it "returns builds if only has current repository path" do
+ seed_pipeline = pipeline(ref: 'deploy')
+
config = YAML.dump({
before_script: ["pwd"],
- rspec: { script: "rspec", type: type, only: ["branches@path"] }
+ rspec: {
+ script: "rspec",
+ type: type,
+ only: ["branches@#{seed_pipeline.project_full_path}"]
+ }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(1)
end
it "does not return builds if only has different repository path" do
@@ -358,9 +361,9 @@ module Gitlab
rspec: { script: "rspec", type: type, only: ["branches@fork"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
end
it "returns build only for specified type" do
@@ -371,11 +374,11 @@ module Gitlab
production: { script: "deploy", type: "deploy", only: ["master@path", "deploy"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, 'fork')
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("deploy", "deploy").size).to eq(2)
- expect(config_processor.builds_for_stage_and_ref("test", "deploy").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("deploy", "master").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "deploy")).size).to eq(2)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "deploy")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("deploy", pipeline(ref: "master")).size).to eq(1)
end
context 'for invalid value' do
@@ -418,9 +421,9 @@ module Gitlab
rspec: { script: "rspec", except: ["deploy"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
end
it "returns builds if except has regexp with another branch" do
@@ -429,9 +432,9 @@ module Gitlab
rspec: { script: "rspec", except: ["/^deploy$/"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(1)
end
it "does not return builds if except has specified this branch" do
@@ -440,9 +443,9 @@ module Gitlab
rspec: { script: "rspec", except: ["master"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "master").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "master")).size).to eq(0)
end
it "does not return builds if except has a list of branches including specified" do
@@ -451,9 +454,9 @@ module Gitlab
rspec: { script: "rspec", type: type, except: %w(master deploy) }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
end
it "does not return builds if except has a branches keyword specified" do
@@ -462,9 +465,9 @@ module Gitlab
rspec: { script: "rspec", type: type, except: ["branches"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(0)
end
it "returns builds if except has a tags keyword" do
@@ -473,9 +476,9 @@ module Gitlab
rspec: { script: "rspec", type: type, except: ["tags"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
end
it "does not return builds if except has special keywords specified and source matches" do
@@ -492,9 +495,9 @@ module Gitlab
rspec: { script: "rspec", type: type, except: [possibility[:keyword]] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(0)
end
end
@@ -512,21 +515,27 @@ module Gitlab
rspec: { script: "rspec", type: type, except: [possibility[:keyword]] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy", false, possibility[:source]).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: 'deploy', tag: false, source: possibility[:source])).size).to eq(1)
end
end
it "does not return builds if except has current repository path" do
+ seed_pipeline = pipeline(ref: 'deploy')
+
config = YAML.dump({
before_script: ["pwd"],
- rspec: { script: "rspec", type: type, except: ["branches@path"] }
+ rspec: {
+ script: "rspec",
+ type: type,
+ except: ["branches@#{seed_pipeline.project_full_path}"]
+ }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds(type, seed_pipeline).size).to eq(0)
end
it "returns builds if except has different repository path" do
@@ -535,24 +544,28 @@ module Gitlab
rspec: { script: "rspec", type: type, except: ["branches@fork"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref(type, "deploy").size).to eq(1)
+ expect(config_processor.pipeline_stage_builds(type, pipeline(ref: "deploy")).size).to eq(1)
end
it "returns build except specified type" do
+ master_pipeline = pipeline(ref: 'master')
+ test_pipeline = pipeline(ref: 'test')
+ deploy_pipeline = pipeline(ref: 'deploy')
+
config = YAML.dump({
before_script: ["pwd"],
- rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@fork"] },
+ rspec: { script: "rspec", type: "test", except: ["master", "deploy", "test@#{test_pipeline.project_full_path}"] },
staging: { script: "deploy", type: "deploy", except: ["master"] },
- production: { script: "deploy", type: "deploy", except: ["master@fork"] }
+ production: { script: "deploy", type: "deploy", except: ["master@#{master_pipeline.project_full_path}"] }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, 'fork')
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("deploy", "deploy").size).to eq(2)
- expect(config_processor.builds_for_stage_and_ref("test", "test").size).to eq(0)
- expect(config_processor.builds_for_stage_and_ref("deploy", "master").size).to eq(0)
+ expect(config_processor.pipeline_stage_builds("deploy", deploy_pipeline).size).to eq(2)
+ expect(config_processor.pipeline_stage_builds("test", test_pipeline).size).to eq(0)
+ expect(config_processor.pipeline_stage_builds("deploy", master_pipeline).size).to eq(0)
end
context 'for invalid value' do
@@ -591,9 +604,9 @@ module Gitlab
describe "Scripts handling" do
let(:config_data) { YAML.dump(config) }
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data, path) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data) }
- subject { config_processor.builds_for_stage_and_ref("test", "master").first }
+ subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first }
describe "before_script" do
context "in global context" do
@@ -674,10 +687,10 @@ module Gitlab
before_script: ["pwd"],
rspec: { script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -709,10 +722,10 @@ module Gitlab
command: ["/usr/local/bin/init", "run"] }, "docker:dind"],
script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -742,10 +755,10 @@ module Gitlab
before_script: ["pwd"],
rspec: { script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -771,10 +784,10 @@ module Gitlab
before_script: ["pwd"],
rspec: { image: "ruby:2.5", services: ["postgresql", "docker:dind"], script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -797,7 +810,7 @@ module Gitlab
end
describe 'Variables' do
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config), path) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { config_processor.builds.first[:yaml_variables] }
@@ -918,9 +931,9 @@ module Gitlab
rspec: { script: "rspec", when: when_state }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- builds = config_processor.builds_for_stage_and_ref("test", "master")
+ builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master"))
expect(builds.size).to eq(1)
expect(builds.first[:when]).to eq(when_state)
end
@@ -951,8 +964,8 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
key: 'key',
@@ -970,8 +983,8 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
key: 'key',
@@ -990,8 +1003,8 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first[:options][:cache]).to eq(
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first[:options][:cache]).to eq(
paths: ["test/"],
untracked: false,
key: 'local',
@@ -1019,8 +1032,8 @@ module Gitlab
config_processor = Gitlab::Ci::YamlProcessor.new(config)
- expect(config_processor.builds_for_stage_and_ref("test", "master").size).to eq(1)
- expect(config_processor.builds_for_stage_and_ref("test", "master").first).to eq({
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).size).to eq(1)
+ expect(config_processor.pipeline_stage_builds("test", pipeline(ref: "master")).first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
@@ -1055,9 +1068,9 @@ module Gitlab
}
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config, path)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
- builds = config_processor.builds_for_stage_and_ref("test", "master")
+ builds = config_processor.pipeline_stage_builds("test", pipeline(ref: "master"))
expect(builds.size).to eq(1)
expect(builds.first[:options][:artifacts][:when]).to eq(when_state)
end
@@ -1072,7 +1085,7 @@ module Gitlab
end
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
- let(:builds) { processor.builds_for_stage_and_ref('deploy', 'master') }
+ let(:builds) { processor.pipeline_stage_builds('deploy', pipeline(ref: 'master')) }
context 'when a production environment is specified' do
let(:environment) { 'production' }
@@ -1229,7 +1242,7 @@ module Gitlab
describe "Hidden jobs" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
- subject { config_processor.builds_for_stage_and_ref("test", "master") }
+ subject { config_processor.pipeline_stage_builds("test", pipeline(ref: "master")) }
shared_examples 'hidden_job_handling' do
it "doesn't create jobs that start with dot" do
@@ -1277,7 +1290,7 @@ module Gitlab
describe "YAML Alias/Anchor" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
- subject { config_processor.builds_for_stage_and_ref("build", "master") }
+ subject { config_processor.pipeline_stage_builds("build", pipeline(ref: "master")) }
shared_examples 'job_templates_handling' do
it "is correctly supported for jobs" do
@@ -1377,182 +1390,182 @@ EOT
it "returns errors if tags parameter is invalid" do
config = YAML.dump({ rspec: { script: "test", tags: "mysql" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec tags should be an array of strings")
end
it "returns errors if before_script parameter is invalid" do
config = YAML.dump({ before_script: "bundle update", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "before_script config should be an array of strings")
end
it "returns errors if job before_script parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", before_script: [10, "test"] } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array of strings")
end
it "returns errors if after_script parameter is invalid" do
config = YAML.dump({ after_script: "bundle update", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "after_script config should be an array of strings")
end
it "returns errors if job after_script parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", after_script: [10, "test"] } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array of strings")
end
it "returns errors if image parameter is invalid" do
config = YAML.dump({ image: ["test"], rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "image config should be a hash or a string")
end
it "returns errors if job name is blank" do
config = YAML.dump({ '' => { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:job name can't be blank")
end
it "returns errors if job name is non-string" do
config = YAML.dump({ 10 => { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:10 name should be a symbol")
end
it "returns errors if job image parameter is invalid" do
config = YAML.dump({ rspec: { script: "test", image: ["test"] } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:image config should be a hash or a string")
end
it "returns errors if services parameter is not an array" do
config = YAML.dump({ services: "test", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "services config should be a array")
end
it "returns errors if services parameter is not an array of strings" do
config = YAML.dump({ services: [10, "test"], rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "service config should be a hash or a string")
end
it "returns errors if job services parameter is not an array" do
config = YAML.dump({ rspec: { script: "test", services: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:services config should be a array")
end
it "returns errors if job services parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", services: [10, "test"] } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "service config should be a hash or a string")
end
it "returns error if job configuration is invalid" do
config = YAML.dump({ extra: "bundle update" })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:extra config should be a hash")
end
it "returns errors if services configuration is not correct" do
config = YAML.dump({ extra: { script: 'rspec', services: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:extra:services config should be a array")
end
it "returns errors if there are no jobs defined" do
config = YAML.dump({ before_script: ["bundle update"] })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
end
it "returns errors if there are no visible jobs defined" do
config = YAML.dump({ before_script: ["bundle update"], '.hidden'.to_sym => { script: 'ls' } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
end
it "returns errors if job allow_failure parameter is not an boolean" do
config = YAML.dump({ rspec: { script: "test", allow_failure: "string" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec allow failure should be a boolean value")
end
it "returns errors if job stage is not a string" do
config = YAML.dump({ rspec: { script: "test", type: 1 } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:type config should be a string")
end
it "returns errors if job stage is not a pre-defined stage" do
config = YAML.dump({ rspec: { script: "test", type: "acceptance" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be build, test, deploy")
end
it "returns errors if job stage is not a defined stage" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be build, test")
end
it "returns errors if stages is not an array" do
config = YAML.dump({ stages: "test", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
end
it "returns errors if stages is not an array of strings" do
config = YAML.dump({ stages: [true, "test"], rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
end
it "returns errors if variables is not a map" do
config = YAML.dump({ variables: "test", rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
end
it "returns errors if variables is not a map of key-value strings" do
config = YAML.dump({ variables: { test: false }, rspec: { script: "test" } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
end
it "returns errors if job when is not on_success, on_failure or always" do
config = YAML.dump({ rspec: { script: "test", when: 1 } })
expect do
- Gitlab::Ci::YamlProcessor.new(config, path)
+ Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec when should be on_success, on_failure, always or manual")
end
@@ -1694,6 +1707,10 @@ EOT
end
end
end
+
+ def pipeline(**attributes)
+ build_stubbed(:ci_empty_pipeline, **attributes)
+ end
end
end
end
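Note: the hunks above all make the same change — Gitlab::Ci::YamlProcessor.new is now called with the config string alone, dropping the former path argument, while the error-message assertions stay as they were. As a rough, self-contained sketch of the expect { ... }.to raise_error(Class, message) pattern these examples rely on (FakeProcessor below is a hypothetical stand-in, not the real processor):

require 'yaml'
require 'rspec/autorun'

# Hypothetical stand-in for the real YAML processor; it only knows how to
# reject an empty job list so the assertion pattern can be shown end to end.
class FakeProcessor
  class ValidationError < StandardError; end

  def initialize(config)
    @config = YAML.safe_load(config) || {}

    if @config.empty?
      raise ValidationError, 'jobs config should contain at least one visible job'
    end
  end
end

RSpec.describe FakeProcessor do
  it 'raises a validation error when no jobs are defined' do
    config = YAML.dump({})

    expect { described_class.new(config) }
      .to raise_error(FakeProcessor::ValidationError,
                      'jobs config should contain at least one visible job')
  end
end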
diff --git a/spec/migrations/clean_stages_statuses_migration_spec.rb b/spec/migrations/clean_stages_statuses_migration_spec.rb
new file mode 100644
index 00000000000..38705f8eaae
--- /dev/null
+++ b/spec/migrations/clean_stages_statuses_migration_spec.rb
@@ -0,0 +1,51 @@
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20170912113435_clean_stages_statuses_migration.rb')
+
+describe CleanStagesStatusesMigration, :migration, :sidekiq, :redis do
+ let(:migration) { spy('migration') }
+
+ before do
+ allow(Gitlab::BackgroundMigration::MigrateStageStatus)
+ .to receive(:new).and_return(migration)
+ end
+
+ context 'when there are pending background migrations' do
+ it 'processes pending jobs synchronously' do
+ Sidekiq::Testing.disable! do
+ BackgroundMigrationWorker
+ .perform_in(2.minutes, 'MigrateStageStatus', [1, 1])
+ BackgroundMigrationWorker
+ .perform_async('MigrateStageStatus', [1, 1])
+
+ migrate!
+
+ expect(migration).to have_received(:perform).with(1, 1).twice
+ end
+ end
+ end
+
+ context 'when there are no background migrations pending' do
+ it 'does nothing' do
+ Sidekiq::Testing.disable! do
+ migrate!
+
+ expect(migration).not_to have_received(:perform)
+ end
+ end
+ end
+
+ context 'when there are still unmigrated stages afterwards' do
+ let(:stages) { table('ci_stages') }
+
+ before do
+ stages.create!(status: nil, name: 'build')
+ stages.create!(status: nil, name: 'test')
+ end
+
+ it 'migrates statuses sequentially in batches' do
+ migrate!
+
+ expect(migration).to have_received(:perform).once
+ end
+ end
+end
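Note: the new migration spec above stubs the background migration with an RSpec spy and then verifies the calls with have_received. A minimal, generic sketch of that stub-and-verify pattern, assuming made-up Worker and Migration classes rather than the GitLab ones:

require 'rspec/autorun'

# Hypothetical classes used only to demonstrate the spy / have_received pattern.
class Migration
  def perform(from, to); end
end

class Worker
  def self.run(migration, from, to)
    migration.perform(from, to)
  end
end

RSpec.describe Worker do
  it 'performs the migration exactly once for the given range' do
    migration = spy('migration')
    allow(Migration).to receive(:new).and_return(migration)

    described_class.run(Migration.new, 1, 1)

    expect(migration).to have_received(:perform).with(1, 1).once
  end
end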
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 77f0be6b120..9c1e460ab20 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -26,6 +26,7 @@ describe Ci::Pipeline, :mailer do
it { is_expected.to respond_to :git_author_name }
it { is_expected.to respond_to :git_author_email }
it { is_expected.to respond_to :short_sha }
+ it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
describe '#source' do
context 'when creating new pipeline' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 60cd7e70055..76bb658b10d 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1311,24 +1311,25 @@ describe Repository, models: true do
describe '#revert' do
let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
+ let(:message) { 'revert message' }
context 'when there is a conflict' do
it 'raises an error' do
- expect { repository.revert(user, new_image_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.revert(user, new_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit was already reverted' do
it 'raises an error' do
- repository.revert(user, update_image_commit, 'master')
+ repository.revert(user, update_image_commit, 'master', message)
- expect { repository.revert(user, update_image_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.revert(user, update_image_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit can be reverted' do
it 'reverts the changes' do
- expect(repository.revert(user, update_image_commit, 'master')).to be_truthy
+ expect(repository.revert(user, update_image_commit, 'master', message)).to be_truthy
end
end
@@ -1337,7 +1338,7 @@ describe Repository, models: true do
merge_commit
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).to be_present
- repository.revert(user, merge_commit, 'master')
+ repository.revert(user, merge_commit, 'master', message)
expect(repository.blob_at_branch('master', 'files/ruby/feature.rb')).not_to be_present
end
end
@@ -1347,24 +1348,25 @@ describe Repository, models: true do
let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') }
let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') }
+ let(:message) { 'cherry-pick message' }
context 'when there is a conflict' do
it 'raises an error' do
- expect { repository.cherry_pick(user, conflict_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.cherry_pick(user, conflict_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit was already cherry-picked' do
it 'raises an error' do
- repository.cherry_pick(user, pickable_commit, 'master')
+ repository.cherry_pick(user, pickable_commit, 'master', message)
- expect { repository.cherry_pick(user, pickable_commit, 'master') }.to raise_error(/Failed to/)
+ expect { repository.cherry_pick(user, pickable_commit, 'master', message) }.to raise_error(Gitlab::Git::Repository::CreateTreeError)
end
end
context 'when commit can be cherry-picked' do
it 'cherry-picks the changes' do
- expect(repository.cherry_pick(user, pickable_commit, 'master')).to be_truthy
+ expect(repository.cherry_pick(user, pickable_commit, 'master', message)).to be_truthy
end
end
@@ -1372,11 +1374,11 @@ describe Repository, models: true do
it 'cherry-picks the changes' do
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).to be_nil
- cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome')
+ cherry_pick_commit_sha = repository.cherry_pick(user, pickable_merge, 'improve/awesome', message)
cherry_pick_commit_message = project.commit(cherry_pick_commit_sha).message
expect(repository.blob_at_branch('improve/awesome', 'foo/bar/.gitkeep')).not_to be_nil
- expect(cherry_pick_commit_message).to include('cherry picked from')
+ expect(cherry_pick_commit_message).to eq(message)
end
end
end
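Note: in the repository spec the revert and cherry_pick calls now take an explicit commit message, and the conflict examples expect the specific Gitlab::Git::Repository::CreateTreeError class instead of matching a /Failed to/ message. A small sketch of asserting on an error class rather than a message pattern, using a hypothetical TreeBuilder in place of the real repository:

require 'rspec/autorun'

# Hypothetical stand-in; it only illustrates raising a named error class on
# conflict and returning the caller-supplied message on success.
class TreeBuilder
  class CreateTreeError < StandardError; end

  def revert(commit, message)
    raise CreateTreeError, 'Failed to create tree' if commit == :conflicting

    message
  end
end

RSpec.describe TreeBuilder do
  it 'raises a specific error class when the revert conflicts' do
    expect { subject.revert(:conflicting, 'revert message') }
      .to raise_error(TreeBuilder::CreateTreeError)
  end

  it 'uses the supplied message when the revert succeeds' do
    expect(subject.revert(:clean, 'revert message')).to eq('revert message')
  end
end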
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 42f0079e173..1671a046fdf 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -159,11 +159,14 @@ describe API::Groups do
context 'when using owned in the request' do
it 'returns an array of groups the user owns' do
+ group1.add_master(user2)
+
get api('/groups', user2), owned: true
expect(response).to have_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
expect(json_response.first['name']).to eq(group2.name)
end
end
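Note: the added lines make the owned-groups assertion stricter — user2 is also made a master of group1, yet only the group it owns should be returned, hence the new length check. A rough sketch of that ownership filter, with plain structs standing in for the GitLab models:

require 'rspec/autorun'

# Plain structs standing in for the GitLab Group model; the filter keeps
# only groups owned by the user, ignoring mere master membership.
Group = Struct.new(:name, :owner, :masters)

def owned_groups(groups, user)
  groups.select { |group| group.owner == user }
end

RSpec.describe 'owned groups filter' do
  it 'excludes groups where the user is only a master' do
    group1 = Group.new('group1', 'someone else', ['user2'])
    group2 = Group.new('group2', 'user2', [])

    result = owned_groups([group1, group2], 'user2')

    expect(result.length).to eq(1)
    expect(result.first.name).to eq('group2')
  end
end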