-rw-r--r--  .gitlab/ci/review.gitlab-ci.yml  10
-rw-r--r--  app/assets/javascripts/clusters/stores/clusters_store.js  1
-rw-r--r--  app/controllers/autocomplete_controller.rb  2
-rw-r--r--  app/controllers/boards/issues_controller.rb  2
-rw-r--r--  app/controllers/concerns/issuable_actions.rb  22
-rw-r--r--  app/controllers/concerns/notes_actions.rb  2
-rw-r--r--  app/controllers/projects/notes_controller.rb  2
-rw-r--r--  app/controllers/snippets/notes_controller.rb  4
-rw-r--r--  app/finders/notes_finder.rb  42
-rw-r--r--  app/models/ci/build.rb  18
-rw-r--r--  app/models/ci/build_need.rb  14
-rw-r--r--  app/models/ci/pipeline.rb  4
-rw-r--r--  app/models/commit_status.rb  8
-rw-r--r--  app/models/concerns/ci/metadatable.rb  1
-rw-r--r--  app/models/concerns/relative_positioning.rb  4
-rw-r--r--  app/models/merge_request.rb  2
-rw-r--r--  app/models/namespace/aggregation_schedule.rb  14
-rw-r--r--  app/serializers/user_serializer.rb  17
-rw-r--r--  app/services/ci/process_pipeline_service.rb  40
-rw-r--r--  app/services/ci/retry_build_service.rb  2
-rw-r--r--  app/services/merge_requests/mergeability_check_service.rb  45
-rw-r--r--  app/workers/all_queues.yml  1
-rw-r--r--  app/workers/build_process_worker.rb  16
-rw-r--r--  changelogs/unreleased/implement-dag.yml  5
-rw-r--r--  changelogs/unreleased/jupyter-fixes-v1.yml  5
-rw-r--r--  changelogs/unreleased/osw-avoid-errors-due-to-concurrent-calls.yml  5
-rw-r--r--  db/migrate/20190731084415_add_build_need.rb  20
-rw-r--r--  db/schema.rb  9
-rw-r--r--  doc/development/testing_guide/review_apps.md  31
-rw-r--r--  lib/api/helpers/notes_helpers.rb  2
-rw-r--r--  lib/gitlab/ci/config/entry/job.rb  20
-rw-r--r--  lib/gitlab/ci/config/normalizer.rb  64
-rw-r--r--  lib/gitlab/ci/yaml_processor.rb  18
-rw-r--r--  lib/gitlab/exclusive_lease_helpers.rb  5
-rw-r--r--  lib/gitlab/project_search_results.rb  2
-rw-r--r--  spec/controllers/autocomplete_controller_spec.rb  14
-rw-r--r--  spec/controllers/concerns/issuable_actions_spec.rb  69
-rw-r--r--  spec/controllers/projects/issues_controller_spec.rb  22
-rw-r--r--  spec/controllers/projects/merge_requests_controller_spec.rb  16
-rw-r--r--  spec/controllers/projects/notes_controller_spec.rb  2
-rw-r--r--  spec/factories/ci/build_need.rb  8
-rw-r--r--  spec/finders/notes_finder_spec.rb  123
-rw-r--r--  spec/fixtures/api/schemas/entities/discussion.json  67
-rw-r--r--  spec/fixtures/api/schemas/entities/discussions.json  4
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb  53
-rw-r--r--  spec/lib/gitlab/ci/config/normalizer_spec.rb  59
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb  80
-rw-r--r--  spec/lib/gitlab/exclusive_lease_helpers_spec.rb  17
-rw-r--r--  spec/models/ci/build_need_spec.rb  13
-rw-r--r--  spec/models/ci/build_spec.rb  69
-rw-r--r--  spec/models/namespace/aggregation_schedule_spec.rb  34
-rw-r--r--  spec/requests/api/merge_requests_spec.rb  2
-rw-r--r--  spec/serializers/user_serializer_spec.rb  30
-rw-r--r--  spec/services/ci/process_pipeline_service_spec.rb  92
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb  1
-rw-r--r--  spec/services/merge_requests/mergeability_check_service_spec.rb  79
-rw-r--r--  spec/support/shared_examples/discussions_provider_shared_examples.rb  15
-rw-r--r--  spec/support/shared_examples/relative_positioning_shared_examples.rb  22
-rw-r--r--  spec/workers/build_process_worker_spec.rb  26
-rw-r--r--  vendor/jupyter/values.yaml  4
60 files changed, 1133 insertions, 247 deletions
diff --git a/.gitlab/ci/review.gitlab-ci.yml b/.gitlab/ci/review.gitlab-ci.yml
index aa722f6ef33..29ed41ce0c6 100644
--- a/.gitlab/ci/review.gitlab-ci.yml
+++ b/.gitlab/ci/review.gitlab-ci.yml
@@ -116,17 +116,21 @@ schedule:review-deploy:
<<: *review-schedules-only
review-stop:
- <<: *review-base
+ <<: *review-only
+ extends: .single-script-job-dedicated-runner
+ image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-charts-build-base
stage: review
when: manual
allow_failure: true
variables:
- GIT_DEPTH: "1"
+ SCRIPT_NAME: review_apps/review-apps.sh
environment:
<<: *review-environment
action: stop
script:
- - source scripts/review_apps/review-apps.sh
+ - wget $CI_PROJECT_URL/raw/$CI_COMMIT_SHA/scripts/utils.sh
+ - source utils.sh
+ - source $(basename $SCRIPT_NAME)
- delete
.review-qa-base: &review-qa-base
diff --git a/app/assets/javascripts/clusters/stores/clusters_store.js b/app/assets/javascripts/clusters/stores/clusters_store.js
index f64f0ca616f..ada5a49e246 100644
--- a/app/assets/javascripts/clusters/stores/clusters_store.js
+++ b/app/assets/javascripts/clusters/stores/clusters_store.js
@@ -171,6 +171,7 @@ export default class ClusterStore {
this.state.applications.cert_manager.email || serverAppEntry.email;
} else if (appId === JUPYTER) {
this.state.applications.jupyter.hostname =
+ this.state.applications.jupyter.hostname ||
serverAppEntry.hostname ||
(this.state.applications.ingress.externalIp
? `jupyter.${this.state.applications.ingress.externalIp}.nip.io`
diff --git a/app/controllers/autocomplete_controller.rb b/app/controllers/autocomplete_controller.rb
index 091327931c2..f111c7ca8cc 100644
--- a/app/controllers/autocomplete_controller.rb
+++ b/app/controllers/autocomplete_controller.rb
@@ -16,7 +16,7 @@ class AutocompleteController < ApplicationController
.new(params: params, current_user: current_user, project: project, group: group)
.execute
- render json: UserSerializer.new.represent(users)
+ render json: UserSerializer.new(params).represent(users, project: project)
end
def user
diff --git a/app/controllers/boards/issues_controller.rb b/app/controllers/boards/issues_controller.rb
index 90528f75ffd..1d1a72d21f1 100644
--- a/app/controllers/boards/issues_controller.rb
+++ b/app/controllers/boards/issues_controller.rb
@@ -26,7 +26,7 @@ module Boards
list_service = Boards::Issues::ListService.new(board_parent, current_user, filter_params)
issues = list_service.execute
issues = issues.page(params[:page]).per(params[:per] || 20).without_count
- Issue.move_to_end(issues) if Gitlab::Database.read_write?
+ Issue.move_nulls_to_end(issues) if Gitlab::Database.read_write?
issues = issues.preload(:milestone,
:assignees,
project: [
diff --git a/app/controllers/concerns/issuable_actions.rb b/app/controllers/concerns/issuable_actions.rb
index 6fa2f75be33..398cb728e05 100644
--- a/app/controllers/concerns/issuable_actions.rb
+++ b/app/controllers/concerns/issuable_actions.rb
@@ -98,13 +98,12 @@ module IssuableActions
render json: { notice: "#{quantity} #{resource_name.pluralize(quantity)} updated" }
end
- # rubocop: disable CodeReuse/ActiveRecord
+ # rubocop:disable CodeReuse/ActiveRecord
def discussions
- notes = issuable.discussion_notes
- .inc_relations_for_view
- .with_notes_filter(notes_filter)
- .includes(:noteable)
- .fresh
+ notes = NotesFinder.new(current_user, finder_params_for_issuable).execute
+ .inc_relations_for_view
+ .includes(:noteable)
+ .fresh
if notes_filter != UserPreference::NOTES_FILTERS[:only_comments]
notes = ResourceEvents::MergeIntoNotesService.new(issuable, current_user).execute(notes)
@@ -117,7 +116,7 @@ module IssuableActions
render json: discussion_serializer.represent(discussions, context: self)
end
- # rubocop: enable CodeReuse/ActiveRecord
+ # rubocop:enable CodeReuse/ActiveRecord
private
@@ -222,4 +221,13 @@ module IssuableActions
def parent
@project || @group # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
+
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def finder_params_for_issuable
+ {
+ target: @issuable,
+ notes_filter: notes_filter
+ }.tap { |new_params| new_params[:project] = project if respond_to?(:project, true) }
+ end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
end
diff --git a/app/controllers/concerns/notes_actions.rb b/app/controllers/concerns/notes_actions.rb
index 0098c4cdf4c..d2a961efff7 100644
--- a/app/controllers/concerns/notes_actions.rb
+++ b/app/controllers/concerns/notes_actions.rb
@@ -243,7 +243,7 @@ module NotesActions
end
def notes_finder
- @notes_finder ||= NotesFinder.new(project, current_user, finder_params)
+ @notes_finder ||= NotesFinder.new(current_user, finder_params)
end
def note_serializer
diff --git a/app/controllers/projects/notes_controller.rb b/app/controllers/projects/notes_controller.rb
index 3152a38fd8e..65d9b074eee 100644
--- a/app/controllers/projects/notes_controller.rb
+++ b/app/controllers/projects/notes_controller.rb
@@ -68,7 +68,7 @@ class Projects::NotesController < Projects::ApplicationController
alias_method :awardable, :note
def finder_params
- params.merge(last_fetched_at: last_fetched_at, notes_filter: notes_filter)
+ params.merge(project: project, last_fetched_at: last_fetched_at, notes_filter: notes_filter)
end
def authorize_admin_note!
diff --git a/app/controllers/snippets/notes_controller.rb b/app/controllers/snippets/notes_controller.rb
index 612897f27e6..551b37cb3d3 100644
--- a/app/controllers/snippets/notes_controller.rb
+++ b/app/controllers/snippets/notes_controller.rb
@@ -27,7 +27,9 @@ class Snippets::NotesController < ApplicationController
alias_method :noteable, :snippet
def finder_params
- params.merge(last_fetched_at: last_fetched_at, target_id: snippet.id, target_type: 'personal_snippet')
+ params.merge(last_fetched_at: last_fetched_at, target_id: snippet.id, target_type: 'personal_snippet').tap do |merged_params|
+ merged_params[:project] = project if respond_to?(:project)
+ end
end
def authorize_read_snippet!
diff --git a/app/finders/notes_finder.rb b/app/finders/notes_finder.rb
index 8f610d7dddb..f7d9100bb78 100644
--- a/app/finders/notes_finder.rb
+++ b/app/finders/notes_finder.rb
@@ -3,6 +3,8 @@
class NotesFinder
FETCH_OVERLAP = 5.seconds
+ attr_reader :target_type
+
# Used to filter Notes
# When used with target_type and target_id this returns notes specifically for the controller
#
@@ -10,15 +12,17 @@ class NotesFinder
# current_user - which user check authorizations with
# project - which project to look for notes on
# params:
+ # target: noteable
# target_type: string
# target_id: integer
# last_fetched_at: time
# search: string
#
- def initialize(project, current_user, params = {})
- @project = project
+ def initialize(current_user, params = {})
+ @project = params[:project]
@current_user = current_user
- @params = params
+ @params = params.dup
+ @target_type = @params[:target_type]
end
def execute
@@ -32,7 +36,27 @@ class NotesFinder
def target
return @target if defined?(@target)
- target_type = @params[:target_type]
+ if target_given?
+ use_explicit_target
+ else
+ find_target_by_type_and_ids
+ end
+ end
+
+ private
+
+ def target_given?
+ @params.key?(:target)
+ end
+
+ def use_explicit_target
+ @target = @params[:target]
+ @target_type = @target.class.name.underscore
+
+ @target
+ end
+
+ def find_target_by_type_and_ids
target_id = @params[:target_id]
target_iid = @params[:target_iid]
@@ -45,13 +69,11 @@ class NotesFinder
@project.commit(target_id)
end
else
- noteables_for_type_by_id(target_type, target_id, target_iid)
+ noteable_for_type_by_id(target_type, target_id, target_iid)
end
end
- private
-
- def noteables_for_type_by_id(type, id, iid)
+ def noteable_for_type_by_id(type, id, iid)
query = if id
{ id: id }
else
@@ -77,10 +99,6 @@ class NotesFinder
search(notes)
end
- def target_type
- @params[:target_type]
- end
-
# rubocop: disable CodeReuse/ActiveRecord
def notes_of_any_type
types = %w(commit issue merge_request snippet)
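A minimal usage sketch of the constructor change above, based on the call sites updated elsewhere in this diff (variable names are illustrative):

    # Before: the project was a positional argument.
    #   NotesFinder.new(project, current_user, target_type: 'issue', target_id: issue.id)
    #
    # After: the project travels in the params hash, and an already-loaded
    # noteable can be passed directly as `target:`.
    NotesFinder.new(current_user, project: project, target_type: 'issue', target_id: issue.id).execute
    NotesFinder.new(current_user, project: project, target: issue, notes_filter: notes_filter).execute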
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 07813e03f3a..dd2bfc42af9 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -38,6 +38,7 @@ module Ci
has_one :deployment, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id
+ has_many :needs, class_name: 'Ci::BuildNeed', foreign_key: :build_id, inverse_of: :build
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id
@@ -50,6 +51,7 @@ module Ci
accepts_nested_attributes_for :runner_session
accepts_nested_attributes_for :job_variables
+ accepts_nested_attributes_for :needs
delegate :url, to: :runner_session, prefix: true, allow_nil: true
delegate :terminal_specification, to: :runner_session, allow_nil: true
@@ -713,11 +715,21 @@ module Ci
depended_jobs = depends_on_builds
- return depended_jobs unless options[:dependencies].present?
+      # select only the jobs listed in `dependencies`
+ if options[:dependencies].present?
+ depended_jobs = depended_jobs.select do |job|
+ options[:dependencies].include?(job.name)
+ end
+ end
- depended_jobs.select do |job|
- options[:dependencies].include?(job.name)
+ # find all jobs that are needed by this one
+ if options[:needs].present?
+ depended_jobs = depended_jobs.select do |job|
+ options[:needs].include?(job.name)
+ end
end
+
+ depended_jobs
end
def empty_dependencies?
diff --git a/app/models/ci/build_need.rb b/app/models/ci/build_need.rb
new file mode 100644
index 00000000000..6531dfd332f
--- /dev/null
+++ b/app/models/ci/build_need.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Ci
+ class BuildNeed < ApplicationRecord
+ extend Gitlab::Ci::Model
+
+ belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id, inverse_of: :needs
+
+ validates :build, presence: true
+ validates :name, presence: true, length: { maximum: 128 }
+
+ scope :scoped_build, -> { where('ci_builds.id=ci_build_needs.build_id') }
+ end
+end
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 1c76f401690..3515f0b83ee 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -611,8 +611,8 @@ module Ci
end
# rubocop: disable CodeReuse/ServiceClass
- def process!
- Ci::ProcessPipelineService.new(project, user).execute(self)
+ def process!(trigger_build_name = nil)
+ Ci::ProcessPipelineService.new(project, user).execute(self, trigger_build_name)
end
# rubocop: enable CodeReuse/ServiceClass
diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb
index be6f3e9c5b0..d7eb78db5b8 100644
--- a/app/models/commit_status.rb
+++ b/app/models/commit_status.rb
@@ -43,6 +43,12 @@ class CommitStatus < ApplicationRecord
scope :after_stage, -> (index) { where('stage_idx > ?', index) }
scope :processables, -> { where(type: %w[Ci::Build Ci::Bridge]) }
+ scope :with_needs, -> (names = nil) do
+ needs = Ci::BuildNeed.scoped_build.select(1)
+ needs = needs.where(name: names) if names
+ where('EXISTS (?)', needs).preload(:needs)
+ end
+
# We use `CommitStatusEnums.failure_reasons` here so that EE can more easily
# extend this `Hash` with new values.
enum_with_nil failure_reason: ::CommitStatusEnums.failure_reasons
@@ -116,7 +122,7 @@ class CommitStatus < ApplicationRecord
commit_status.run_after_commit do
if pipeline_id
if complete? || manual?
- PipelineProcessWorker.perform_async(pipeline_id)
+ BuildProcessWorker.perform_async(id)
else
PipelineUpdateWorker.perform_async(pipeline_id)
end
diff --git a/app/models/concerns/ci/metadatable.rb b/app/models/concerns/ci/metadatable.rb
index 9eed9492b37..304cc71e9dc 100644
--- a/app/models/concerns/ci/metadatable.rb
+++ b/app/models/concerns/ci/metadatable.rb
@@ -29,6 +29,7 @@ module Ci
def degenerate!
self.class.transaction do
self.update!(options: nil, yaml_variables: nil)
+ self.needs.all.delete_all
self.metadata&.destroy
end
end
diff --git a/app/models/concerns/relative_positioning.rb b/app/models/concerns/relative_positioning.rb
index 9cd7b8d6258..4a1441805fc 100644
--- a/app/models/concerns/relative_positioning.rb
+++ b/app/models/concerns/relative_positioning.rb
@@ -34,7 +34,7 @@ module RelativePositioning
end
class_methods do
- def move_to_end(objects)
+ def move_nulls_to_end(objects)
objects = objects.reject(&:relative_position)
return if objects.empty?
@@ -43,7 +43,7 @@ module RelativePositioning
self.transaction do
objects.each do |object|
- relative_position = position_between(max_relative_position, MAX_POSITION)
+ relative_position = position_between(max_relative_position || START_POSITION, MAX_POSITION)
object.relative_position = relative_position
max_relative_position = relative_position
object.save(touch: false)
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index 8ade91933a4..5e8a6a7d5e5 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -752,7 +752,7 @@ class MergeRequest < ApplicationRecord
end
def check_mergeability
- MergeRequests::MergeabilityCheckService.new(self).execute
+ MergeRequests::MergeabilityCheckService.new(self).execute(retry_lease: false)
end
# rubocop: enable CodeReuse/ServiceClass
diff --git a/app/models/namespace/aggregation_schedule.rb b/app/models/namespace/aggregation_schedule.rb
index 0bef352cf24..61a7eb4b576 100644
--- a/app/models/namespace/aggregation_schedule.rb
+++ b/app/models/namespace/aggregation_schedule.rb
@@ -6,21 +6,13 @@ class Namespace::AggregationSchedule < ApplicationRecord
self.primary_key = :namespace_id
- DEFAULT_LEASE_TIMEOUT = 3.hours
+ DEFAULT_LEASE_TIMEOUT = 1.5.hours.to_i
REDIS_SHARED_KEY = 'gitlab:update_namespace_statistics_delay'.freeze
belongs_to :namespace
after_create :schedule_root_storage_statistics
- def self.delay_timeout
- redis_timeout = Gitlab::Redis::SharedState.with do |redis|
- redis.get(REDIS_SHARED_KEY)
- end
-
- redis_timeout.nil? ? DEFAULT_LEASE_TIMEOUT : redis_timeout.to_i
- end
-
def schedule_root_storage_statistics
run_after_commit_or_now do
try_obtain_lease do
@@ -28,7 +20,7 @@ class Namespace::AggregationSchedule < ApplicationRecord
.perform_async(namespace_id)
Namespaces::RootStatisticsWorker
- .perform_in(self.class.delay_timeout, namespace_id)
+ .perform_in(DEFAULT_LEASE_TIMEOUT, namespace_id)
end
end
end
@@ -37,7 +29,7 @@ class Namespace::AggregationSchedule < ApplicationRecord
# Used by ExclusiveLeaseGuard
def lease_timeout
- self.class.delay_timeout
+ DEFAULT_LEASE_TIMEOUT
end
# Used by ExclusiveLeaseGuard
diff --git a/app/serializers/user_serializer.rb b/app/serializers/user_serializer.rb
index 2111e1b5667..d988caea92d 100644
--- a/app/serializers/user_serializer.rb
+++ b/app/serializers/user_serializer.rb
@@ -2,4 +2,21 @@
class UserSerializer < BaseSerializer
entity UserEntity
+
+ def represent(resource, opts = {}, entity = nil)
+ if params[:merge_request_iid]
+ merge_request = opts[:project].merge_requests.find_by_iid!(params[:merge_request_iid])
+ preload_max_member_access(merge_request.project, Array(resource))
+
+ super(resource, opts.merge(merge_request: merge_request), MergeRequestAssigneeEntity)
+ else
+ super
+ end
+ end
+
+ private
+
+ def preload_max_member_access(project, users)
+ project.team.max_member_access_for_user_ids(users.map(&:id))
+ end
end
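A sketch of how this serializer change is driven from AutocompleteController above (variables are illustrative; the autocomplete controller spec below asserts the resulting `can_merge` key):

    # With a merge_request_iid in the params, users are rendered through
    # MergeRequestAssigneeEntity for that merge request, so each entry also
    # carries merge-related fields such as `can_merge`.
    UserSerializer
      .new(merge_request_iid: merge_request.iid)
      .represent(users, project: project)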
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index 207cc5017d0..e46615bcf75 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -4,19 +4,23 @@ module Ci
class ProcessPipelineService < BaseService
attr_reader :pipeline
- def execute(pipeline)
+ def execute(pipeline, trigger_build_name = nil)
@pipeline = pipeline
update_retried
- new_builds =
+ success =
stage_indexes_of_created_processables.flat_map do |index|
process_stage(index)
- end
+ end.any?
+
+      # we evaluate builds with dependent `needs`
+      # only when another job has finished
+ success = process_builds_with_needs(trigger_build_name) || success
@pipeline.update_status
- new_builds.any?
+ success
end
private
@@ -36,6 +40,28 @@ module Ci
end
end
+ def process_builds_with_needs(trigger_build_name)
+ return false unless trigger_build_name
+ return false unless Feature.enabled?(:ci_dag_support, project)
+
+ created_processables
+ .with_needs(trigger_build_name)
+ .find_each
+ .map(&method(:process_build_with_needs))
+ .any?
+ end
+
+ def process_build_with_needs(build)
+ current_status = status_for_build_needs(build.needs.map(&:name))
+
+ return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
+
+ Gitlab::OptimisticLocking.retry_lock(build) do |subject|
+ Ci::ProcessBuildService.new(project, @user)
+ .execute(subject, current_status)
+ end
+ end
+
# rubocop: disable CodeReuse/ActiveRecord
def status_for_prior_stages(index)
pipeline.builds.where('stage_idx < ?', index).latest.status || 'success'
@@ -43,6 +69,12 @@ module Ci
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
+ def status_for_build_needs(needs)
+ pipeline.builds.where(name: needs).latest.status || 'success'
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ # rubocop: disable CodeReuse/ActiveRecord
def stage_indexes_of_created_processables
created_processables.order(:stage_idx).pluck(Arel.sql('DISTINCT stage_idx'))
end
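A sketch of the new DAG processing path, tying this service to the BuildProcessWorker added later in this diff (the job name is illustrative):

    # When a job completes, CommitStatus now enqueues BuildProcessWorker with
    # the build id (see commit_status.rb above), and the worker calls:
    build.pipeline.process!(build.name)
    # => ProcessPipelineService#execute(pipeline, 'build:linux'), which, behind
    #    the :ci_dag_support feature flag, re-evaluates only the created
    #    processables declaring `needs: ['build:linux', ...]` and processes each
    #    of them once all of its needs have reached a completed status.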
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index fab8a179843..338495ba030 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -5,7 +5,7 @@ module Ci
CLONE_ACCESSORS = %i[pipeline project ref tag options name
allow_failure stage stage_id stage_idx trigger_request
yaml_variables when environment coverage_regex
- description tag_list protected].freeze
+ description tag_list protected needs].freeze
def execute(build)
reprocess!(build).tap do |new_build|
diff --git a/app/services/merge_requests/mergeability_check_service.rb b/app/services/merge_requests/mergeability_check_service.rb
index 9fa50c9448f..962e2327b3e 100644
--- a/app/services/merge_requests/mergeability_check_service.rb
+++ b/app/services/merge_requests/mergeability_check_service.rb
@@ -3,6 +3,7 @@
module MergeRequests
class MergeabilityCheckService < ::BaseService
include Gitlab::Utils::StrongMemoize
+ include Gitlab::ExclusiveLeaseHelpers
delegate :project, to: :@merge_request
delegate :repository, to: :project
@@ -21,13 +22,35 @@ module MergeRequests
# where we need the current state of the merge ref in repository, the `recheck`
# argument is required.
#
+    # retry_lease - Concurrent calls wait for up to ~10 seconds for the lease
+    #   to be granted (i.e. for the other process to finish running). Returns
+    #   an error ServiceResponse if the lease is not granted during this time.
+ #
# Returns a ServiceResponse indicating merge_status is/became can_be_merged
# and the merge-ref is synced. Success in case of being/becoming mergeable,
# error otherwise.
- def execute(recheck: false)
+ def execute(recheck: false, retry_lease: true)
return ServiceResponse.error(message: 'Invalid argument') unless merge_request
return ServiceResponse.error(message: 'Unsupported operation') if Gitlab::Database.read_only?
+ return check_mergeability(recheck) unless merge_ref_auto_sync_lock_enabled?
+
+ in_write_lock(retry_lease: retry_lease) do |retried|
+ # When multiple calls are waiting for the same lock (retry_lease),
+ # it's possible that when granted, the MR status was already updated for
+ # that object, therefore we reset if there was a lease retry.
+ merge_request.reset if retried
+
+ check_mergeability(recheck)
+ end
+ rescue FailedToObtainLockError => error
+ ServiceResponse.error(message: error.message)
+ end
+
+ private
+
+ attr_reader :merge_request
+ def check_mergeability(recheck)
recheck! if recheck
update_merge_status
@@ -46,9 +69,21 @@ module MergeRequests
ServiceResponse.success(payload: payload)
end
- private
+ # It's possible for this service to send concurrent requests to Gitaly in order
+    # to "git update-ref" the same ref. Therefore we take a lightweight
+    # exclusive lease here.
+ #
+ def in_write_lock(retry_lease:, &block)
+ lease_key = "mergeability_check:#{merge_request.id}"
- attr_reader :merge_request
+ lease_opts = {
+ ttl: 1.minute,
+ retries: retry_lease ? 10 : 0,
+ sleep_sec: retry_lease ? 1.second : 0
+ }
+
+ in_lock(lease_key, lease_opts, &block)
+ end
def payload
strong_memoize(:payload) do
@@ -116,5 +151,9 @@ module MergeRequests
def merge_ref_auto_sync_enabled?
Feature.enabled?(:merge_ref_auto_sync, project, default_enabled: true)
end
+
+ def merge_ref_auto_sync_lock_enabled?
+ Feature.enabled?(:merge_ref_auto_sync_lock, project, default_enabled: true)
+ end
end
end
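A sketch of the two call shapes, assuming a loaded merge_request (the behaviour follows the new `in_write_lock` above):

    service = MergeRequests::MergeabilityCheckService.new(merge_request)

    # Background/API callers: wait for a concurrent check to finish (the lease
    # is retried), then recheck against the possibly refreshed merge request.
    service.execute(recheck: true)

    # Model callers such as MergeRequest#check_mergeability: fail fast rather
    # than block; an error ServiceResponse is returned while the lease is held.
    service.execute(retry_lease: false)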
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index 991a177018e..400becdd023 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -88,6 +88,7 @@
- pipeline_processing:ci_build_prepare
- pipeline_processing:build_queue
- pipeline_processing:build_success
+- pipeline_processing:build_process
- pipeline_processing:pipeline_process
- pipeline_processing:pipeline_success
- pipeline_processing:pipeline_update
diff --git a/app/workers/build_process_worker.rb b/app/workers/build_process_worker.rb
new file mode 100644
index 00000000000..19e590ee1d7
--- /dev/null
+++ b/app/workers/build_process_worker.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class BuildProcessWorker
+ include ApplicationWorker
+ include PipelineQueue
+
+ queue_namespace :pipeline_processing
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def perform(build_id)
+ CommitStatus.find_by(id: build_id).try do |build|
+ build.pipeline.process!(build.name)
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+end
diff --git a/changelogs/unreleased/implement-dag.yml b/changelogs/unreleased/implement-dag.yml
new file mode 100644
index 00000000000..72f3f9a510c
--- /dev/null
+++ b/changelogs/unreleased/implement-dag.yml
@@ -0,0 +1,5 @@
+---
+title: "Support creating DAGs in CI config through the `needs` key"
+merge_request: 31328
+author:
+type: added
diff --git a/changelogs/unreleased/jupyter-fixes-v1.yml b/changelogs/unreleased/jupyter-fixes-v1.yml
new file mode 100644
index 00000000000..7a34f273c90
--- /dev/null
+++ b/changelogs/unreleased/jupyter-fixes-v1.yml
@@ -0,0 +1,5 @@
+---
+title: Jupyter fixes
+merge_request: 31332
+author: Amit Rathi
+type: fixed
diff --git a/changelogs/unreleased/osw-avoid-errors-due-to-concurrent-calls.yml b/changelogs/unreleased/osw-avoid-errors-due-to-concurrent-calls.yml
new file mode 100644
index 00000000000..17ff1b012cf
--- /dev/null
+++ b/changelogs/unreleased/osw-avoid-errors-due-to-concurrent-calls.yml
@@ -0,0 +1,5 @@
+---
+title: Add exclusive lease to mergeability check process
+merge_request: 31082
+author:
+type: fixed
diff --git a/db/migrate/20190731084415_add_build_need.rb b/db/migrate/20190731084415_add_build_need.rb
new file mode 100644
index 00000000000..45b8abb480d
--- /dev/null
+++ b/db/migrate/20190731084415_add_build_need.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddBuildNeed < ActiveRecord::Migration[5.2]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ create_table :ci_build_needs, id: :serial do |t|
+ t.integer :build_id, null: false
+ t.text :name, null: false
+
+ t.index [:build_id, :name], unique: true
+ t.foreign_key :ci_builds, column: :build_id, on_delete: :cascade
+ end
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 804f77b91de..3d229c2353b 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 2019_07_29_090456) do
+ActiveRecord::Schema.define(version: 2019_07_31_084415) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
@@ -454,6 +454,12 @@ ActiveRecord::Schema.define(version: 2019_07_29_090456) do
t.index ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true
end
+ create_table "ci_build_needs", id: :serial, force: :cascade do |t|
+ t.integer "build_id", null: false
+ t.text "name", null: false
+ t.index ["build_id", "name"], name: "index_ci_build_needs_on_build_id_and_name", unique: true
+ end
+
create_table "ci_build_trace_chunks", force: :cascade do |t|
t.integer "build_id", null: false
t.integer "chunk_index", null: false
@@ -3636,6 +3642,7 @@ ActiveRecord::Schema.define(version: 2019_07_29_090456) do
add_foreign_key "boards", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "boards", "projects", name: "fk_f15266b5f9", on_delete: :cascade
add_foreign_key "chat_teams", "namespaces", on_delete: :cascade
+ add_foreign_key "ci_build_needs", "ci_builds", column: "build_id", on_delete: :cascade
add_foreign_key "ci_build_trace_chunks", "ci_builds", column: "build_id", on_delete: :cascade
add_foreign_key "ci_build_trace_section_names", "projects", on_delete: :cascade
add_foreign_key "ci_build_trace_sections", "ci_build_trace_section_names", column: "section_name_id", name: "fk_264e112c66", on_delete: :cascade
diff --git a/doc/development/testing_guide/review_apps.md b/doc/development/testing_guide/review_apps.md
index 96761622cfe..d07a7045390 100644
--- a/doc/development/testing_guide/review_apps.md
+++ b/doc/development/testing_guide/review_apps.md
@@ -115,6 +115,28 @@ On every [pipeline][gitlab-pipeline] in the `qa` stage, the
browser performance testing using a
[Sitespeed.io Container](../../user/project/merge_requests/browser_performance_testing.md).
+## Cluster configuration
+
+### Node pools
+
+Both `review-apps-ce` and `review-apps-ee` clusters are currently set up with
+two node pools:
+
+- a node pool of non-preemptible `n1-standard-2` (2 vCPU, 7.5 GB memory) nodes
+ dedicated to the `tiller` deployment (see below) with a single node.
+- a node pool of preemptible `n1-standard-2` (2 vCPU, 7.5 GB memory) nodes,
+ with a minimum of 1 node and a maximum of 250 nodes.
+
+### Helm/Tiller
+
+The `tiller` deployment (the Helm server) is deployed to a dedicated node pool
+that has the `app=helm` label and a specific
+[taint](https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/)
+to prevent other pods from being scheduled on this node pool.
+
+This is to ensure Tiller isn't affected by "noisy" neighbors that could put
+their node under pressure.
+
## How to:
### Log into my Review App
@@ -241,15 +263,6 @@ thousands of unused Docker images.**
CNG-mirror project to store these Docker images so that we can just wipe out
the registry at some point, and use a new fresh, empty one.
-**How big are the Kubernetes clusters (`review-apps-ce` and `review-apps-ee`)?**
-
- > The clusters are currently set up with a single pool of preemptible nodes,
- with a minimum of 1 node and a maximum of 500 nodes.
-
-**What are the machine running on the cluster?**
-
- > We're currently using `n1-standard-1` (1 vCPU, 3.75 GB memory) machines.
-
**How do we secure this from abuse? Apps are open to the world so we need to
find a way to limit it to only us.**
diff --git a/lib/api/helpers/notes_helpers.rb b/lib/api/helpers/notes_helpers.rb
index b03ac7deb71..7124ac0c5c3 100644
--- a/lib/api/helpers/notes_helpers.rb
+++ b/lib/api/helpers/notes_helpers.rb
@@ -76,7 +76,7 @@ module API
def find_noteable(parent_type, parent_id, noteable_type, noteable_id)
params = params_by_noteable_type_and_id(noteable_type, noteable_id)
- noteable = NotesFinder.new(user_project, current_user, params).target
+ noteable = NotesFinder.new(current_user, params.merge(project: user_project)).target
noteable = nil unless can?(current_user, noteable_read_ability_name(noteable), noteable)
noteable || not_found!(noteable_type)
end
diff --git a/lib/gitlab/ci/config/entry/job.rb b/lib/gitlab/ci/config/entry/job.rb
index 5ab795359b8..2fd76bc3690 100644
--- a/lib/gitlab/ci/config/entry/job.rb
+++ b/lib/gitlab/ci/config/entry/job.rb
@@ -13,7 +13,7 @@ module Gitlab
ALLOWED_KEYS = %i[tags script only except type image services
allow_failure type stage when start_in artifacts cache
- dependencies before_script after_script variables
+ dependencies needs before_script after_script variables
environment coverage retry parallel extends].freeze
validations do
@@ -34,11 +34,22 @@ module Gitlab
message: 'should be on_success, on_failure, ' \
'always, manual or delayed' }
validates :dependencies, array_of_strings: true
+ validates :needs, array_of_strings: true
validates :extends, array_of_strings_or_string: true
end
validates :start_in, duration: { limit: '1 day' }, if: :delayed?
validates :start_in, absence: true, unless: :delayed?
+
+ validate do
+ next unless dependencies.present?
+ next unless needs.present?
+
+ missing_needs = dependencies - needs
+ if missing_needs.any?
+ errors.add(:dependencies, "the #{missing_needs.join(", ")} should be part of needs")
+ end
+ end
end
entry :before_script, Entry::Script,
@@ -95,10 +106,10 @@ module Gitlab
helpers :before_script, :script, :stage, :type, :after_script,
:cache, :image, :services, :only, :except, :variables,
:artifacts, :environment, :coverage, :retry,
- :parallel
+ :parallel, :needs
attributes :script, :tags, :allow_failure, :when, :dependencies,
- :retry, :parallel, :extends, :start_in
+ :needs, :retry, :parallel, :extends, :start_in
def self.matching?(name, config)
!name.to_s.start_with?('.') &&
@@ -178,7 +189,8 @@ module Gitlab
parallel: parallel_defined? ? parallel_value.to_i : nil,
artifacts: artifacts_value,
after_script: after_script_value,
- ignore: ignored? }
+ ignore: ignored?,
+ needs: needs_defined? ? needs_value : nil }
end
end
end
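A spec-style sketch of the new cross-validation, mirroring how the entry is built in job_spec.rb (job names are illustrative):

    entry = Gitlab::Ci::Config::Entry::Job.new(
      { script: 'rspec', needs: %w[build], dependencies: %w[build assets] },
      name: :rspec
    )
    entry.compose!

    entry.valid?
    # => false -- 'assets' appears in `dependencies` but not in `needs`,
    #    which the validator above now rejects.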
diff --git a/lib/gitlab/ci/config/normalizer.rb b/lib/gitlab/ci/config/normalizer.rb
index 99356226ef9..09f9bf5f69f 100644
--- a/lib/gitlab/ci/config/normalizer.rb
+++ b/lib/gitlab/ci/config/normalizer.rb
@@ -4,61 +4,63 @@ module Gitlab
module Ci
class Config
class Normalizer
+ include Gitlab::Utils::StrongMemoize
+
def initialize(jobs_config)
@jobs_config = jobs_config
end
def normalize_jobs
- extract_parallelized_jobs!
- return @jobs_config if @parallelized_jobs.empty?
+ return @jobs_config if parallelized_jobs.empty?
+
+ expand_parallelize_jobs do |job_name, config|
+ if config[:dependencies]
+ config[:dependencies] = expand_names(config[:dependencies])
+ end
- parallelized_config = parallelize_jobs
- parallelize_dependencies(parallelized_config)
+ if config[:needs]
+ config[:needs] = expand_names(config[:needs])
+ end
+
+ config
+ end
end
private
- def extract_parallelized_jobs!
- @parallelized_jobs = {}
+ def expand_names(job_names)
+ return unless job_names
- @jobs_config.each do |job_name, config|
- if config[:parallel]
- @parallelized_jobs[job_name] = self.class.parallelize_job_names(job_name, config[:parallel])
- end
+ job_names.flat_map do |job_name|
+ parallelized_jobs[job_name.to_sym] || job_name
end
-
- @parallelized_jobs
end
- def parallelize_jobs
- @jobs_config.each_with_object({}) do |(job_name, config), hash|
- if @parallelized_jobs.key?(job_name)
- @parallelized_jobs[job_name].each { |name, index| hash[name.to_sym] = config.merge(name: name, instance: index) }
- else
- hash[job_name] = config
- end
+ def parallelized_jobs
+ strong_memoize(:parallelized_jobs) do
+ @jobs_config.each_with_object({}) do |(job_name, config), hash|
+ next unless config[:parallel]
- hash
+ hash[job_name] = self.class.parallelize_job_names(job_name, config[:parallel])
+ end
end
end
- def parallelize_dependencies(parallelized_config)
- parallelized_job_names = @parallelized_jobs.keys.map(&:to_s)
- parallelized_config.each_with_object({}) do |(job_name, config), hash|
- if config[:dependencies] && (intersection = config[:dependencies] & parallelized_job_names).any?
- parallelized_deps = intersection.flat_map { |dep| @parallelized_jobs[dep.to_sym].map(&:first) }
- deps = config[:dependencies] - intersection + parallelized_deps
- hash[job_name] = config.merge(dependencies: deps)
+ def expand_parallelize_jobs
+ @jobs_config.each_with_object({}) do |(job_name, config), hash|
+ if parallelized_jobs.key?(job_name)
+ parallelized_jobs[job_name].each_with_index do |name, index|
+ hash[name.to_sym] =
+ yield(name, config.merge(name: name, instance: index + 1))
+ end
else
- hash[job_name] = config
+ hash[job_name] = yield(job_name, config)
end
-
- hash
end
end
def self.parallelize_job_names(name, total)
- Array.new(total) { |index| ["#{name} #{index + 1}/#{total}", index + 1] }
+ Array.new(total) { |index| "#{name} #{index + 1}/#{total}" }
end
end
end
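A sketch of what the reworked normalizer produces when `needs` points at a parallelized job (job names and counts are illustrative):

    jobs = {
      rspec:  { script: 'rspec', parallel: 2 },
      deploy: { script: 'deploy', needs: %w[rspec] }
    }

    Gitlab::Ci::Config::Normalizer.new(jobs).normalize_jobs[:deploy][:needs]
    # => ["rspec 1/2", "rspec 2/2"]
    # `dependencies` entries are expanded the same way; names that are not
    # parallelized pass through untouched.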
diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb
index a0bbf3c23a2..998130e5bd0 100644
--- a/lib/gitlab/ci/yaml_processor.rb
+++ b/lib/gitlab/ci/yaml_processor.rb
@@ -40,6 +40,7 @@ module Gitlab
environment: job[:environment_name],
coverage_regex: job[:coverage],
yaml_variables: yaml_variables(name),
+ needs_attributes: job[:needs]&.map { |need| { name: need } },
options: {
image: job[:image],
services: job[:services],
@@ -108,6 +109,7 @@ module Gitlab
validate_job_stage!(name, job)
validate_job_dependencies!(name, job)
+ validate_job_needs!(name, job)
validate_job_environment!(name, job)
end
end
@@ -152,6 +154,22 @@ module Gitlab
end
end
+ def validate_job_needs!(name, job)
+ return unless job[:needs]
+
+ stage_index = @stages.index(job[:stage])
+
+ job[:needs].each do |need|
+ raise ValidationError, "#{name} job: undefined need: #{need}" unless @jobs[need.to_sym]
+
+ needs_stage_index = @stages.index(@jobs[need.to_sym][:stage])
+
+ unless needs_stage_index.present? && needs_stage_index < stage_index
+ raise ValidationError, "#{name} job: need #{need} is not defined in prior stages"
+ end
+ end
+ end
+
def validate_job_environment!(name, job)
return unless job[:environment]
return unless job[:environment].is_a?(Hash)
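A sketch of the `needs:` key end to end through YamlProcessor, in the spec-style heredoc form used by yaml_processor_spec.rb (stage and job names are illustrative):

    config = <<~YAML
      stages: [build, test]

      build:linux:
        stage: build
        script: ./build.sh

      rspec:
        stage: test
        script: rspec
        needs: ["build:linux"]
    YAML

    processor = Gitlab::Ci::YamlProcessor.new(config)
    processor.build_attributes('rspec')[:needs_attributes]
    # => [{ name: "build:linux" }]

    # Referencing an undefined job, or one in the same or a later stage, raises
    # Gitlab::Ci::YamlProcessor::ValidationError with the messages added above
    # ("<job> job: undefined need: <need>" /
    #  "<job> job: need <need> is not defined in prior stages").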
diff --git a/lib/gitlab/exclusive_lease_helpers.rb b/lib/gitlab/exclusive_lease_helpers.rb
index 7961d4bbd6e..61eb030563d 100644
--- a/lib/gitlab/exclusive_lease_helpers.rb
+++ b/lib/gitlab/exclusive_lease_helpers.rb
@@ -15,17 +15,18 @@ module Gitlab
raise ArgumentError, 'Key needs to be specified' unless key
lease = Gitlab::ExclusiveLease.new(key, timeout: ttl)
+ retried = false
until uuid = lease.try_obtain
# Keep trying until we obtain the lease. To prevent hammering Redis too
# much we'll wait for a bit.
sleep(sleep_sec)
- break if (retries -= 1) < 0
+ (retries -= 1) < 0 ? break : retried ||= true
end
raise FailedToObtainLockError, 'Failed to obtain a lock' unless uuid
- yield
+ yield(retried)
ensure
Gitlab::ExclusiveLease.cancel(key, uuid)
end
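A usage sketch of the new block arity; the key, TTL and retry values mirror the mergeability service above (the inclusion site is illustrative):

    include Gitlab::ExclusiveLeaseHelpers

    in_lock("mergeability_check:#{merge_request.id}",
            ttl: 1.minute, retries: 10, sleep_sec: 1.second) do |retried|
      # `retried` is true when the lease was obtained only after waiting for
      # another holder, so the record may have been updated in the meantime.
      merge_request.reset if retried

      check_mergeability(recheck)
    end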
diff --git a/lib/gitlab/project_search_results.rb b/lib/gitlab/project_search_results.rb
index 0f3b97e2317..827f4f77f36 100644
--- a/lib/gitlab/project_search_results.rb
+++ b/lib/gitlab/project_search_results.rb
@@ -108,7 +108,7 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord
def notes_finder(type)
- NotesFinder.new(project, @current_user, search: query, target_type: type).execute.user.order('updated_at DESC')
+ NotesFinder.new(@current_user, search: query, target_type: type, project: project).execute.user.order('updated_at DESC')
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index eaa5d6cd073..6cdd61e7abd 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -222,6 +222,20 @@ describe AutocompleteController do
expect(response_user_ids).to contain_exactly(non_member.id)
end
end
+
+ context 'merge_request_iid parameter included' do
+ before do
+ sign_in(user)
+ end
+
+ it 'includes can_merge option to users' do
+ merge_request = create(:merge_request, source_project: project)
+
+ get(:users, params: { merge_request_iid: merge_request.iid, project_id: project.id })
+
+ expect(json_response.first).to have_key('can_merge')
+ end
+ end
end
context 'GET projects' do
diff --git a/spec/controllers/concerns/issuable_actions_spec.rb b/spec/controllers/concerns/issuable_actions_spec.rb
new file mode 100644
index 00000000000..7b0b4497f3f
--- /dev/null
+++ b/spec/controllers/concerns/issuable_actions_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IssuableActions do
+ let(:project) { double('project') }
+ let(:user) { double('user') }
+ let(:issuable) { double('issuable') }
+ let(:finder_params_for_issuable) { {} }
+ let(:notes_result) { double('notes_result') }
+ let(:discussion_serializer) { double('discussion_serializer') }
+
+ let(:controller) do
+ klass = Class.new do
+ attr_reader :current_user, :project, :issuable
+
+ def self.before_action(action, params = nil)
+ end
+
+ include IssuableActions
+
+ def initialize(issuable, project, user, finder_params)
+ @issuable = issuable
+ @project = project
+ @current_user = user
+ @finder_params = finder_params
+ end
+
+ def finder_params_for_issuable
+ @finder_params
+ end
+
+ def params
+ {
+ notes_filter: 1
+ }
+ end
+
+ def prepare_notes_for_rendering(notes)
+ []
+ end
+
+ def render(options)
+ end
+ end
+
+ klass.new(issuable, project, user, finder_params_for_issuable)
+ end
+
+ describe '#discussions' do
+ before do
+ allow(user).to receive(:set_notes_filter)
+ allow(user).to receive(:user_preference)
+ allow(discussion_serializer).to receive(:represent)
+ end
+
+ it 'instantiates and calls NotesFinder as expected' do
+ expect(Discussion).to receive(:build_collection).and_return([])
+ expect(DiscussionSerializer).to receive(:new).and_return(discussion_serializer)
+ expect(NotesFinder).to receive(:new).with(user, finder_params_for_issuable).and_call_original
+
+ expect_any_instance_of(NotesFinder).to receive(:execute).and_return(notes_result)
+
+ expect(notes_result).to receive_messages(inc_relations_for_view: notes_result, includes: notes_result, fresh: notes_result)
+
+ controller.discussions
+ end
+ end
+end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 32d14dce936..0f885d776e1 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1260,6 +1260,28 @@ describe Projects::IssuesController do
sign_in(user)
end
+ context do
+ it_behaves_like 'discussions provider' do
+ let!(:author) { create(:user) }
+ let!(:project) { create(:project) }
+
+ let!(:issue) { create(:issue, project: project, author: user) }
+
+ let!(:note_on_issue1) { create(:discussion_note_on_issue, noteable: issue, project: issue.project, author: create(:user)) }
+ let!(:note_on_issue2) { create(:discussion_note_on_issue, noteable: issue, project: issue.project, author: create(:user)) }
+
+ let(:requested_iid) { issue.iid }
+ let(:expected_discussion_count) { 3 }
+ let(:expected_discussion_ids) do
+ [
+ issue.notes.first.discussion_id,
+ note_on_issue1.discussion_id,
+ note_on_issue2.discussion_id
+ ]
+ end
+ end
+ end
+
it 'returns discussion json' do
get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 57a432de3f5..b1dc6a65dd4 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -1210,6 +1210,22 @@ describe Projects::MergeRequestsController do
end
end
end
+
+ context do
+ it_behaves_like 'discussions provider' do
+ let!(:author) { create(:user) }
+ let!(:project) { create(:project) }
+
+ let!(:merge_request) { create(:merge_request, source_project: project) }
+
+ let!(:mr_note1) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
+ let!(:mr_note2) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
+
+ let(:requested_iid) { merge_request.iid }
+ let(:expected_discussion_count) { 2 }
+ let(:expected_discussion_ids) { [mr_note1.discussion_id, mr_note2.discussion_id] }
+ end
+ end
end
describe 'GET edit' do
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 98aea9056dc..9ab565dc2e8 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -43,7 +43,7 @@ describe Projects::NotesController do
request.headers['X-Last-Fetched-At'] = last_fetched_at
expect(NotesFinder).to receive(:new)
- .with(anything, anything, hash_including(last_fetched_at: last_fetched_at))
+ .with(anything, hash_including(last_fetched_at: last_fetched_at))
.and_call_original
get :index, params: request_params
diff --git a/spec/factories/ci/build_need.rb b/spec/factories/ci/build_need.rb
new file mode 100644
index 00000000000..568aff45a91
--- /dev/null
+++ b/spec/factories/ci/build_need.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_build_need, class: Ci::BuildNeed do
+ build factory: :ci_build
+ sequence(:name) { |n| "build_#{n}" }
+ end
+end
diff --git a/spec/finders/notes_finder_spec.rb b/spec/finders/notes_finder_spec.rb
index 87bde4ca2f6..88906adfeeb 100644
--- a/spec/finders/notes_finder_spec.rb
+++ b/spec/finders/notes_finder_spec.rb
@@ -14,7 +14,7 @@ describe NotesFinder do
let!(:system_note) { create(:note_on_issue, project: project, system: true) }
it 'returns only user notes when using only_comments filter' do
- finder = described_class.new(project, user, notes_filter: UserPreference::NOTES_FILTERS[:only_comments])
+ finder = described_class.new(user, project: project, notes_filter: UserPreference::NOTES_FILTERS[:only_comments])
notes = finder.execute
@@ -22,7 +22,7 @@ describe NotesFinder do
end
it 'returns only system notes when using only_activity filters' do
- finder = described_class.new(project, user, notes_filter: UserPreference::NOTES_FILTERS[:only_activity])
+ finder = described_class.new(user, project: project, notes_filter: UserPreference::NOTES_FILTERS[:only_activity])
notes = finder.execute
@@ -30,7 +30,7 @@ describe NotesFinder do
end
it 'gets all notes' do
- finder = described_class.new(project, user, notes_filter: UserPreference::NOTES_FILTERS[:all_activity])
+ finder = described_class.new(user, project: project, notes_filter: UserPreference::NOTES_FILTERS[:all_activity])
notes = finder.execute
@@ -41,7 +41,7 @@ describe NotesFinder do
it 'finds notes on merge requests' do
create(:note_on_merge_request, project: project)
- notes = described_class.new(project, user).execute
+ notes = described_class.new(user, project: project).execute
expect(notes.count).to eq(1)
end
@@ -49,7 +49,7 @@ describe NotesFinder do
it 'finds notes on snippets' do
create(:note_on_project_snippet, project: project)
- notes = described_class.new(project, user).execute
+ notes = described_class.new(user, project: project).execute
expect(notes.count).to eq(1)
end
@@ -59,13 +59,13 @@ describe NotesFinder do
note = create(:note_on_commit, project: project)
params = { target_type: 'commit', target_id: note.noteable.id }
- notes = described_class.new(project, create(:user), params).execute
+ notes = described_class.new(create(:user), params).execute
expect(notes.count).to eq(0)
end
it 'succeeds when no notes found' do
- notes = described_class.new(project, create(:user)).execute
+ notes = described_class.new(create(:user), project: project).execute
expect(notes.count).to eq(0)
end
@@ -82,7 +82,7 @@ describe NotesFinder do
it 'publicly excludes notes on merge requests' do
create(:note_on_merge_request, project: project)
- notes = described_class.new(project, create(:user)).execute
+ notes = described_class.new(create(:user), project: project).execute
expect(notes.count).to eq(0)
end
@@ -90,7 +90,7 @@ describe NotesFinder do
it 'publicly excludes notes on issues' do
create(:note_on_issue, project: project)
- notes = described_class.new(project, create(:user)).execute
+ notes = described_class.new(create(:user), project: project).execute
expect(notes.count).to eq(0)
end
@@ -98,7 +98,7 @@ describe NotesFinder do
it 'publicly excludes notes on snippets' do
create(:note_on_project_snippet, project: project)
- notes = described_class.new(project, create(:user)).execute
+ notes = described_class.new(create(:user), project: project).execute
expect(notes.count).to eq(0)
end
@@ -110,7 +110,7 @@ describe NotesFinder do
let!(:note2) { create :note_on_commit, project: project }
it 'finds only notes for the selected type' do
- notes = described_class.new(project, user, target_type: 'issue').execute
+ notes = described_class.new(user, project: project, target_type: 'issue').execute
expect(notes).to eq([note1])
end
@@ -118,56 +118,51 @@ describe NotesFinder do
context 'for target' do
let(:project) { create(:project, :repository) }
- let(:note1) { create :note_on_commit, project: project }
- let(:note2) { create :note_on_commit, project: project }
+ let!(:note1) { create :note_on_commit, project: project }
+ let!(:note2) { create :note_on_commit, project: project }
let(:commit) { note1.noteable }
- let(:params) { { target_id: commit.id, target_type: 'commit', last_fetched_at: 1.hour.ago.to_i } }
-
- before do
- note1
- note2
- end
+ let(:params) { { project: project, target_id: commit.id, target_type: 'commit', last_fetched_at: 1.hour.ago.to_i } }
it 'finds all notes' do
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes.size).to eq(2)
end
it 'finds notes on merge requests' do
note = create(:note_on_merge_request, project: project)
- params = { target_type: 'merge_request', target_id: note.noteable.id }
+ params = { project: project, target_type: 'merge_request', target_id: note.noteable.id }
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes).to include(note)
end
it 'finds notes on snippets' do
note = create(:note_on_project_snippet, project: project)
- params = { target_type: 'snippet', target_id: note.noteable.id }
+ params = { project: project, target_type: 'snippet', target_id: note.noteable.id }
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes.count).to eq(1)
end
it 'finds notes on personal snippets' do
note = create(:note_on_personal_snippet)
- params = { target_type: 'personal_snippet', target_id: note.noteable_id }
+ params = { project: project, target_type: 'personal_snippet', target_id: note.noteable_id }
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes.count).to eq(1)
end
it 'raises an exception for an invalid target_type' do
params[:target_type] = 'invalid'
- expect { described_class.new(project, user, params).execute }.to raise_error("invalid target_type '#{params[:target_type]}'")
+ expect { described_class.new(user, params).execute }.to raise_error("invalid target_type '#{params[:target_type]}'")
end
it 'filters out old notes' do
note2.update_attribute(:updated_at, 2.hours.ago)
- notes = described_class.new(project, user, params).execute
+ notes = described_class.new(user, params).execute
expect(notes).to eq([note1])
end
@@ -175,25 +170,47 @@ describe NotesFinder do
let(:confidential_issue) { create(:issue, :confidential, project: project, author: user) }
let!(:confidential_note) { create(:note, noteable: confidential_issue, project: confidential_issue.project) }
- let(:params) { { target_id: confidential_issue.id, target_type: 'issue', last_fetched_at: 1.hour.ago.to_i } }
+ let(:params) { { project: confidential_issue.project, target_id: confidential_issue.id, target_type: 'issue', last_fetched_at: 1.hour.ago.to_i } }
it 'returns notes if user can see the issue' do
- expect(described_class.new(project, user, params).execute).to eq([confidential_note])
+ expect(described_class.new(user, params).execute).to eq([confidential_note])
end
it 'raises an error if user can not see the issue' do
user = create(:user)
- expect { described_class.new(project, user, params).execute }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { described_class.new(user, params).execute }.to raise_error(ActiveRecord::RecordNotFound)
end
it 'raises an error for project members with guest role' do
user = create(:user)
project.add_guest(user)
- expect { described_class.new(project, user, params).execute }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { described_class.new(user, params).execute }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
+
+ context 'for explicit target' do
+ let(:project) { create(:project, :repository) }
+ let!(:note1) { create :note_on_commit, project: project, created_at: 1.day.ago, updated_at: 2.hours.ago }
+ let!(:note2) { create :note_on_commit, project: project }
+ let(:commit) { note1.noteable }
+ let(:params) { { project: project, target: commit } }
+
+ it 'returns the expected notes' do
+ expect(described_class.new(user, params).execute).to eq([note1, note2])
+ end
+
+ it 'returns the expected notes when last_fetched_at is given' do
+ params = { project: project, target: commit, last_fetched_at: 1.hour.ago.to_i }
+ expect(described_class.new(user, params).execute).to eq([note2])
+ end
+
+ it 'fails when nil is provided' do
+ params = { project: project, target: nil }
+ expect { described_class.new(user, params).execute }.to raise_error(RuntimeError)
+ end
+ end
end
describe '.search' do
@@ -201,17 +218,17 @@ describe NotesFinder do
let(:note) { create(:note_on_issue, note: 'WoW', project: project) }
it 'returns notes with matching content' do
- expect(described_class.new(note.project, nil, search: note.note).execute).to eq([note])
+ expect(described_class.new(nil, project: note.project, search: note.note).execute).to eq([note])
end
it 'returns notes with matching content regardless of the casing' do
- expect(described_class.new(note.project, nil, search: 'WOW').execute).to eq([note])
+ expect(described_class.new(nil, project: note.project, search: 'WOW').execute).to eq([note])
end
it 'returns commit notes user can access' do
note = create(:note_on_commit, project: project)
- expect(described_class.new(note.project, create(:user), search: note.note).execute).to eq([note])
+ expect(described_class.new(create(:user), project: note.project, search: note.note).execute).to eq([note])
end
context "confidential issues" do
@@ -220,27 +237,27 @@ describe NotesFinder do
let(:confidential_note) { create(:note, note: "Random", noteable: confidential_issue, project: confidential_issue.project) }
it "returns notes with matching content if user can see the issue" do
- expect(described_class.new(confidential_note.project, user, search: confidential_note.note).execute).to eq([confidential_note])
+ expect(described_class.new(user, project: confidential_note.project, search: confidential_note.note).execute).to eq([confidential_note])
end
it "does not return notes with matching content if user can not see the issue" do
user = create(:user)
- expect(described_class.new(confidential_note.project, user, search: confidential_note.note).execute).to be_empty
+ expect(described_class.new(user, project: confidential_note.project, search: confidential_note.note).execute).to be_empty
end
it "does not return notes with matching content for project members with guest role" do
user = create(:user)
project.add_guest(user)
- expect(described_class.new(confidential_note.project, user, search: confidential_note.note).execute).to be_empty
+ expect(described_class.new(user, project: confidential_note.project, search: confidential_note.note).execute).to be_empty
end
it "does not return notes with matching content for unauthenticated users" do
- expect(described_class.new(confidential_note.project, nil, search: confidential_note.note).execute).to be_empty
+ expect(described_class.new(nil, project: confidential_note.project, search: confidential_note.note).execute).to be_empty
end
end
context 'inlines SQL filters on subqueries for performance' do
- let(:sql) { described_class.new(note.project, nil, search: note.note).execute.to_sql }
+ let(:sql) { described_class.new(nil, project: note.project, search: note.note).execute.to_sql }
let(:number_of_noteable_types) { 4 }
specify 'project_id check' do
@@ -254,11 +271,11 @@ describe NotesFinder do
end
describe '#target' do
- subject { described_class.new(project, user, params) }
+ subject { described_class.new(user, params) }
context 'for a issue target' do
let(:issue) { create(:issue, project: project) }
- let(:params) { { target_type: 'issue', target_id: issue.id } }
+ let(:params) { { project: project, target_type: 'issue', target_id: issue.id } }
it 'returns the issue' do
expect(subject.target).to eq(issue)
@@ -267,7 +284,7 @@ describe NotesFinder do
context 'for a merge request target' do
let(:merge_request) { create(:merge_request, source_project: project) }
- let(:params) { { target_type: 'merge_request', target_id: merge_request.id } }
+ let(:params) { { project: project, target_type: 'merge_request', target_id: merge_request.id } }
it 'returns the merge_request' do
expect(subject.target).to eq(merge_request)
@@ -276,7 +293,7 @@ describe NotesFinder do
context 'for a snippet target' do
let(:snippet) { create(:project_snippet, project: project) }
- let(:params) { { target_type: 'snippet', target_id: snippet.id } }
+ let(:params) { { project: project, target_type: 'snippet', target_id: snippet.id } }
it 'returns the snippet' do
expect(subject.target).to eq(snippet)
@@ -286,7 +303,7 @@ describe NotesFinder do
context 'for a commit target' do
let(:project) { create(:project, :repository) }
let(:commit) { project.commit }
- let(:params) { { target_type: 'commit', target_id: commit.id } }
+ let(:params) { { project: project, target_type: 'commit', target_id: commit.id } }
it 'returns the commit' do
expect(subject.target).to eq(commit)
@@ -298,24 +315,24 @@ describe NotesFinder do
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
it 'finds issues by iid' do
- iid_params = { target_type: 'issue', target_iid: issue.iid }
- expect(described_class.new(project, user, iid_params).target).to eq(issue)
+ iid_params = { project: project, target_type: 'issue', target_iid: issue.iid }
+ expect(described_class.new(user, iid_params).target).to eq(issue)
end
it 'finds merge requests by iid' do
- iid_params = { target_type: 'merge_request', target_iid: merge_request.iid }
- expect(described_class.new(project, user, iid_params).target).to eq(merge_request)
+ iid_params = { project: project, target_type: 'merge_request', target_iid: merge_request.iid }
+ expect(described_class.new(user, iid_params).target).to eq(merge_request)
end
it 'returns nil if both target_id and target_iid are not given' do
- params_without_any_id = { target_type: 'issue' }
- expect(described_class.new(project, user, params_without_any_id).target).to be_nil
+ params_without_any_id = { project: project, target_type: 'issue' }
+ expect(described_class.new(user, params_without_any_id).target).to be_nil
end
it 'prioritizes target_id over target_iid' do
issue2 = create(:issue, project: project)
- iid_params = { target_type: 'issue', target_id: issue2.id, target_iid: issue.iid }
- expect(described_class.new(project, user, iid_params).target).to eq(issue2)
+ iid_params = { project: project, target_type: 'issue', target_id: issue2.id, target_iid: issue.iid }
+ expect(described_class.new(user, iid_params).target).to eq(issue2)
end
end
end
diff --git a/spec/fixtures/api/schemas/entities/discussion.json b/spec/fixtures/api/schemas/entities/discussion.json
new file mode 100644
index 00000000000..bcc1db79e83
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/discussion.json
@@ -0,0 +1,67 @@
+{
+ "type": "object",
+ "required" : [
+ "id",
+ "notes",
+ "individual_note"
+ ],
+ "properties" : {
+ "id": { "type": "string" },
+ "individual_note": { "type": "boolean" },
+ "notes": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties" : {
+ "id": { "type": "string" },
+ "type": { "type": ["string", "null"] },
+ "body": { "type": "string" },
+ "attachment": { "type": ["string", "null"]},
+ "award_emoji": { "type": "array" },
+ "author": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" },
+ "status_tooltip_html": { "type": ["string", "null"] },
+ "path": { "type": "string" }
+ },
+ "additionalProperties": false
+ },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "system": { "type": "boolean" },
+ "noteable_id": { "type": "integer" },
+ "noteable_iid": { "type": "integer" },
+ "noteable_type": { "type": "string" },
+ "resolved": { "type": "boolean" },
+ "resolvable": { "type": "boolean" },
+ "resolved_by": { "type": ["string", "null"] },
+ "note": { "type": "string" },
+ "note_html": { "type": "string" },
+ "current_user": { "type": "object" },
+ "suggestions": { "type": "array" },
+ "discussion_id": { "type": "string" },
+ "emoji_awardable": { "type": "boolean" },
+ "report_abuse_path": { "type": "string" },
+ "noteable_note_url": { "type": "string" },
+ "resolve_path": { "type": "string" },
+ "resolve_with_issue_path": { "type": "string" },
+ "cached_markdown_version": { "type": "integer" },
+ "human_access": { "type": ["string", "null"] },
+ "toggle_award_path": { "type": "string" },
+ "path": { "type": "string" }
+ },
+ "required": [
+ "id", "attachment", "author", "created_at", "updated_at",
+ "system", "noteable_id", "noteable_type"
+ ],
+ "additionalProperties": false
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/entities/discussions.json b/spec/fixtures/api/schemas/entities/discussions.json
new file mode 100644
index 00000000000..5a837429776
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/discussions.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "discussion.json" }
+}
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index d5861d5dd07..800ef122203 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -86,6 +86,22 @@ describe Gitlab::Ci::Config::Entry::Job do
it { expect(entry).to be_valid }
end
end
+
+ context 'when it has needs' do
+ let(:config) do
+ { script: 'echo', needs: ['another-job'] }
+ end
+
+ it { expect(entry).to be_valid }
+
+ context 'when it has dependencies' do
+ let(:config) do
+ { script: 'echo', dependencies: ['another-job'], needs: ['another-job'] }
+ end
+
+ it { expect(entry).to be_valid }
+ end
+ end
end
context 'when entry value is not correct' do
@@ -223,6 +239,43 @@ describe Gitlab::Ci::Config::Entry::Job do
expect(entry.errors).to include 'job start in must be blank'
end
end
+
+ context 'when it has dependencies' do
+ context 'that are not an array of strings' do
+ let(:config) do
+ { script: 'echo', dependencies: 'build-job' }
+ end
+
+ it 'returns error about invalid type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job dependencies should be an array of strings'
+ end
+ end
+ end
+
+ context 'when it has needs' do
+ context 'that are not an array of strings' do
+ let(:config) do
+ { script: 'echo', needs: 'build-job' }
+ end
+
+ it 'returns error about invalid type' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job needs should be an array of strings'
+ end
+ end
+
+ context 'when it has dependencies that are not a subset of needs' do
+ let(:config) do
+ { script: 'echo', dependencies: ['another-job'], needs: ['build-job'] }
+ end
+
+ it 'returns error about invalid data' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job dependencies the another-job should be part of needs'
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/normalizer_spec.rb b/spec/lib/gitlab/ci/config/normalizer_spec.rb
index cd880177170..6b766cc37bf 100644
--- a/spec/lib/gitlab/ci/config/normalizer_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer_spec.rb
@@ -49,37 +49,44 @@ describe Gitlab::Ci::Config::Normalizer do
end
end
- context 'when jobs depend on parallelized jobs' do
- let(:config) { { job_name => job_config, other_job: { script: 'echo 1', dependencies: [job_name.to_s] } } }
-
- it 'parallelizes dependencies' do
- job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
-
- expect(subject[:other_job][:dependencies]).to include(*job_names)
+ %i[dependencies needs].each do |context|
+ context "when job has #{context} on parallelized jobs" do
+ let(:config) do
+ {
+ job_name => job_config,
+ other_job: { script: 'echo 1', context => [job_name.to_s] }
+ }
+ end
+
+ it "parallelizes #{context}" do
+ job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
+
+ expect(subject[:other_job][context]).to include(*job_names)
+ end
+
+ it "does not include original job name in #{context}" do
+ expect(subject[:other_job][context]).not_to include(job_name)
+ end
end
- it 'does not include original job name in dependencies' do
- expect(subject[:other_job][:dependencies]).not_to include(job_name)
- end
- end
+ context "when there are #{context} which are both parallelized and not" do
+ let(:config) do
+ {
+ job_name => job_config,
+ other_job: { script: 'echo 1' },
+ final_job: { script: 'echo 1', context => [job_name.to_s, "other_job"] }
+ }
+ end
- context 'when there are dependencies which are both parallelized and not' do
- let(:config) do
- {
- job_name => job_config,
- other_job: { script: 'echo 1' },
- final_job: { script: 'echo 1', dependencies: [job_name.to_s, "other_job"] }
- }
- end
-
- it 'parallelizes dependencies' do
- job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
+ it "parallelizes #{context}" do
+ job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
- expect(subject[:final_job][:dependencies]).to include(*job_names)
- end
+ expect(subject[:final_job][context]).to include(*job_names)
+ end
- it 'includes the regular job in dependencies' do
- expect(subject[:final_job][:dependencies]).to include('other_job')
+ it "includes the regular job in #{context}" do
+ expect(subject[:final_job][context]).to include('other_job')
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index d3676ee03bf..4ffa1fc9fd8 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1112,6 +1112,86 @@ module Gitlab
end
end
+ describe "Needs" do
+ let(:needs) { }
+ let(:dependencies) { }
+
+ let(:config) do
+ {
+ build1: { stage: 'build', script: 'test' },
+ build2: { stage: 'build', script: 'test' },
+ test1: { stage: 'test', script: 'test', needs: needs, dependencies: dependencies },
+ test2: { stage: 'test', script: 'test' },
+ deploy: { stage: 'test', script: 'test' }
+ }
+ end
+
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+
+ context 'no needs' do
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'needs on builds' do
+ let(:needs) { %w(build1 build2) }
+
+ it "does create jobs with valid specification" do
+ expect(subject.builds.size).to eq(5)
+ expect(subject.builds[0]).to eq(
+ stage: "build",
+ stage_idx: 0,
+ name: "build1",
+ options: {
+ script: ["test"]
+ },
+ when: "on_success",
+ allow_failure: false,
+ yaml_variables: []
+ )
+ expect(subject.builds[2]).to eq(
+ stage: "test",
+ stage_idx: 1,
+ name: "test1",
+ options: {
+ script: ["test"]
+ },
+ needs_attributes: [
+ { name: "build1" },
+ { name: "build2" }
+ ],
+ when: "on_success",
+ allow_failure: false,
+ yaml_variables: []
+ )
+ end
+ end
+
+ context 'needs on builds defined as symbols' do
+ let(:needs) { [:build1, :build2] }
+
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'undefined need' do
+ let(:needs) { ['undefined'] }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: undefined need: undefined') }
+ end
+
+ context 'needs on deploy' do
+ let(:needs) { ['deploy'] }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: need deploy is not defined in prior stages') }
+ end
+
+ context 'needs and dependencies that do not match' do
+ let(:needs) { %w(build1) }
+ let(:dependencies) { %w(build2) }
+
+ it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies the build2 should be part of needs') }
+ end
+ end
+
describe "Hidden jobs" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
subject { config_processor.stage_builds_attributes("test") }
diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
index 5107e1efbbd..c3b706fc538 100644
--- a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
@@ -25,13 +25,13 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do
end
it 'calls the given block' do
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
end
it 'calls the given block continuously' do
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
end
it 'cancels the exclusive lease after the block' do
@@ -68,6 +68,15 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do
expect { subject }.to raise_error('Failed to obtain a lock')
end
+
+ context 'when lease is granted after retry' do
+ it 'yields the block with true' do
+ expect(lease).to receive(:try_obtain).exactly(3).times { nil }
+ expect(lease).to receive(:try_obtain).once { unique_key }
+
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(true)
+ end
+ end
end
context 'when sleep second is specified' do
diff --git a/spec/models/ci/build_need_spec.rb b/spec/models/ci/build_need_spec.rb
new file mode 100644
index 00000000000..450dd550a8f
--- /dev/null
+++ b/spec/models/ci/build_need_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::BuildNeed, model: true do
+ let(:build_need) { build(:ci_build_need) }
+
+ it { is_expected.to belong_to(:build) }
+
+ it { is_expected.to validate_presence_of(:build) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(128) }
+end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 17c7c05324a..9369f393b5e 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -19,7 +19,8 @@ describe Ci::Build do
it { is_expected.to belong_to(:runner) }
it { is_expected.to belong_to(:trigger_request) }
it { is_expected.to belong_to(:erased_by) }
- it { is_expected.to have_many(:trace_sections)}
+ it { is_expected.to have_many(:trace_sections) }
+ it { is_expected.to have_many(:needs) }
it { is_expected.to have_one(:deployment) }
it { is_expected.to have_one(:runner_session) }
it { is_expected.to have_many(:job_variables) }
@@ -182,6 +183,30 @@ describe Ci::Build do
end
end
+ describe '.with_needs' do
+ let!(:build) { create(:ci_build) }
+ let!(:build_need_a) { create(:ci_build_need, build: build) }
+ let!(:build_need_b) { create(:ci_build_need, build: build) }
+
+ context 'when passing a build name' do
+ subject { described_class.with_needs(build_need_a.name) }
+
+ it { is_expected.to contain_exactly(build) }
+ end
+
+ context 'when not passing any build name' do
+ subject { described_class.with_needs }
+
+ it { is_expected.to contain_exactly(build) }
+ end
+
+ context 'when the build name does not match' do
+ subject { described_class.with_needs('undefined') }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '#enqueue' do
let(:build) { create(:ci_build, :created) }
@@ -595,6 +620,46 @@ describe Ci::Build do
expect(staging.depends_on_builds.map(&:id))
.to contain_exactly(build.id, retried_rspec.id, rubocop_test.id)
end
+
+ describe '#dependencies' do
+ let(:dependencies) { }
+ let(:needs) { }
+
+ let!(:final) do
+ create(:ci_build,
+ pipeline: pipeline, name: 'final',
+ stage_idx: 3, stage: 'deploy', options: {
+ dependencies: dependencies,
+ needs: needs
+ }
+ )
+ end
+
+ subject { final.dependencies }
+
+ context 'when dependencies are defined' do
+ let(:dependencies) { %w(rspec staging) }
+
+ it { is_expected.to contain_exactly(rspec_test, staging) }
+ end
+
+ context 'when needs are defined' do
+ let(:needs) { %w(build rspec staging) }
+
+ it { is_expected.to contain_exactly(build, rspec_test, staging) }
+ end
+
+ context 'when needs and dependencies are defined' do
+ let(:dependencies) { %w(rspec staging) }
+ let(:needs) { %w(build rspec staging) }
+
+ it { is_expected.to contain_exactly(rspec_test, staging) }
+ end
+
+ context 'when neither dependencies nor needs are defined' do
+ it { is_expected.to contain_exactly(build, rspec_test, rubocop_test, staging) }
+ end
+ end
end
describe '#triggered_by?' do
@@ -3614,6 +3679,7 @@ describe Ci::Build do
before do
build.ensure_metadata
+ build.needs.create!(name: 'another-job')
end
it 'drops metadata' do
@@ -3621,6 +3687,7 @@ describe Ci::Build do
expect(build.reload).to be_degenerated
expect(build.metadata).to be_nil
+ expect(build.needs).to be_empty
end
end
diff --git a/spec/models/namespace/aggregation_schedule_spec.rb b/spec/models/namespace/aggregation_schedule_spec.rb
index 0f1283717e0..38bf8089411 100644
--- a/spec/models/namespace/aggregation_schedule_spec.rb
+++ b/spec/models/namespace/aggregation_schedule_spec.rb
@@ -7,24 +7,6 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
it { is_expected.to belong_to :namespace }
- describe '.delay_timeout' do
- context 'when timeout is set on redis' do
- it 'uses personalized timeout' do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(described_class::REDIS_SHARED_KEY, 1.hour)
- end
-
- expect(described_class.delay_timeout).to eq(1.hour)
- end
- end
-
- context 'when timeout is not set on redis' do
- it 'uses default timeout' do
- expect(described_class.delay_timeout).to eq(3.hours)
- end
- end
- end
-
describe '#schedule_root_storage_statistics' do
let(:namespace) { create(:namespace) }
let(:aggregation_schedule) { namespace.build_aggregation_schedule }
@@ -87,21 +69,5 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
aggregation_schedule.schedule_root_storage_statistics
end
end
-
- context 'with a personalized lease timeout' do
- before do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(described_class::REDIS_SHARED_KEY, 1.hour)
- end
- end
-
- it 'uses a personalized time' do
- expect(Namespaces::RootStatisticsWorker)
- .to receive(:perform_in)
- .with(1.hour, aggregation_schedule.namespace_id)
-
- aggregation_schedule.save!
- end
- end
end
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 7a6f1cd548c..15d6db42760 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1571,7 +1571,7 @@ describe API::MergeRequests do
end
end
- describe "GET /projects/:id/merge_requests/:merge_request_iid/merge_ref" do
+ describe "GET /projects/:id/merge_requests/:merge_request_iid/merge_ref", :clean_gitlab_redis_shared_state do
before do
merge_request.mark_as_unchecked!
end
diff --git a/spec/serializers/user_serializer_spec.rb b/spec/serializers/user_serializer_spec.rb
new file mode 100644
index 00000000000..2e4a8c644fe
--- /dev/null
+++ b/spec/serializers/user_serializer_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe UserSerializer do
+ let(:user1) { create(:user) }
+ let(:user2) { create(:user) }
+
+ context 'serializer with merge request context' do
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:serializer) { described_class.new(merge_request_iid: merge_request.iid) }
+
+ before do
+ allow(project).to(
+ receive_message_chain(:merge_requests, :find_by_iid!)
+ .with(merge_request.iid).and_return(merge_request)
+ )
+
+ project.add_maintainer(user1)
+ end
+
+ it 'returns users with the can_merge option' do
+ serialized_user1, serialized_user2 = serializer.represent([user1, user2], project: project).as_json
+
+ expect(serialized_user1).to include("id" => user1.id, "can_merge" => true)
+ expect(serialized_user2).to include("id" => user2.id, "can_merge" => false)
+ end
+ end
+end
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index cadb519ccee..77f108b6ab8 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -700,6 +700,94 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
+ context 'when pipeline with needs is created' do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
+ let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
+ let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1) }
+ let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1) }
+ let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
+
+ let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
+ let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
+
+ let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
+ let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
+
+ it 'runs builds out of stage order when linux:* finishes first' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # We follow only the linux path
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running pending created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
+
+ linux_rspec.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
+
+ linux_rubocop.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+
+ context 'when feature ci_dag_support is disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it 'follows the stage order when linux:build finishes first' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # We follow only the linux path
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running created created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+
+ expect(stages).to eq(%w(success pending created))
+ expect(builds.success).to contain_exactly(linux_build, mac_build)
+ expect(builds.pending).to contain_exactly(
+ linux_rspec, linux_rubocop, mac_rspec, mac_rubocop)
+
+ linux_rspec.reset.success!
+ linux_rubocop.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+ end
+ end
+
def process_pipeline
described_class.new(pipeline.project, user).execute(pipeline)
end
@@ -712,6 +800,10 @@ describe Ci::ProcessPipelineService, '#execute' do
all_builds.where.not(status: [:created, :skipped])
end
+ def stages
+ pipeline.reset.stages.map(&:status)
+ end
+
def builds_names
builds.pluck(:name)
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 915288cd916..fe7c6fe4700 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -67,6 +67,7 @@ describe Ci::RetryBuildService do
end
create(:ci_job_variable, job: build)
+ create(:ci_build_need, build: build)
build.reload
end
diff --git a/spec/services/merge_requests/mergeability_check_service_spec.rb b/spec/services/merge_requests/mergeability_check_service_spec.rb
index 6e827f2ea5b..a864da0a6fb 100644
--- a/spec/services/merge_requests/mergeability_check_service_spec.rb
+++ b/spec/services/merge_requests/mergeability_check_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe MergeRequests::MergeabilityCheckService do
+describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shared_state do
shared_examples_for 'unmergeable merge request' do
it 'updates or keeps merge status as cannot_be_merged' do
subject
@@ -60,24 +60,69 @@ describe MergeRequests::MergeabilityCheckService do
subject { described_class.new(merge_request).execute }
+ def execute_within_threads(amount:, retry_lease: true)
+ threads = []
+
+ amount.times do
+ # Let's use a different object for each thread to get closer
+ # to a real-world scenario.
+ mr = MergeRequest.find(merge_request.id)
+
+ threads << Thread.new do
+ described_class.new(mr).execute(retry_lease: retry_lease)
+ end
+ end
+
+ threads.each(&:join)
+ threads
+ end
+
before do
project.add_developer(merge_request.author)
end
it_behaves_like 'mergeable merge request'
- context 'when multiple calls to the service' do
- it 'returns success' do
- subject
- result = subject
+ context 'when lock is disabled' do
+ before do
+ stub_feature_flags(merge_ref_auto_sync_lock: false)
+ end
- expect(result).to be_a(ServiceResponse)
- expect(result.success?).to be(true)
+ it_behaves_like 'mergeable merge request'
+ end
+
+ context 'when concurrent calls' do
+ it 'waits for the first lock and returns the "cached" result in subsequent calls' do
+ threads = execute_within_threads(amount: 3)
+ results = threads.map { |t| t.value.status }
+
+ expect(results).to contain_exactly(:success, :success, :success)
+ end
+
+ it 'writes the merge-ref once' do
+ service = instance_double(MergeRequests::MergeToRefService)
+
+ expect(MergeRequests::MergeToRefService).to receive(:new).once { service }
+ expect(service).to receive(:execute).once.and_return(success: true)
+
+ execute_within_threads(amount: 3)
end
- it 'second call does not change the merge-ref' do
- expect { subject }.to change(merge_request, :merge_ref_head).from(nil)
- expect { subject }.not_to change(merge_request.merge_ref_head, :id)
+ it 'resets one merge request upon execution' do
+ expect_any_instance_of(MergeRequest).to receive(:reset).once
+
+ execute_within_threads(amount: 2)
+ end
+
+ context 'when retry_lease flag is false' do
+ it 'succeeds on the first call and returns a lock error for subsequent concurrent calls' do
+ threads = execute_within_threads(amount: 3, retry_lease: false)
+ results = threads.map { |t| [t.value.status, t.value.message] }
+
+ expect(results).to contain_exactly([:error, 'Failed to obtain a lock'],
+ [:error, 'Failed to obtain a lock'],
+ [:success, nil])
+ end
end
end
@@ -102,8 +147,7 @@ describe MergeRequests::MergeabilityCheckService do
context 'when broken' do
before do
- allow(merge_request).to receive(:broken?) { true }
- allow(project.repository).to receive(:can_be_merged?) { false }
+ expect(merge_request).to receive(:broken?) { true }
end
it_behaves_like 'unmergeable merge request'
@@ -117,10 +161,13 @@ describe MergeRequests::MergeabilityCheckService do
end
end
- context 'when it has conflicts' do
- before do
- allow(merge_request).to receive(:broken?) { false }
- allow(project.repository).to receive(:can_be_merged?) { false }
+ context 'when it cannot be merged on git' do
+ let(:merge_request) do
+ create(:merge_request,
+ merge_status: :unchecked,
+ source_branch: 'conflict-resolvable',
+ source_project: project,
+ target_branch: 'conflict-start')
end
it_behaves_like 'unmergeable merge request'
diff --git a/spec/support/shared_examples/discussions_provider_shared_examples.rb b/spec/support/shared_examples/discussions_provider_shared_examples.rb
new file mode 100644
index 00000000000..77cf1ac3f51
--- /dev/null
+++ b/spec/support/shared_examples/discussions_provider_shared_examples.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+shared_examples 'discussions provider' do
+ it 'returns the expected discussions' do
+ get :discussions, params: { namespace_id: project.namespace, project_id: project, id: requested_iid }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('entities/discussions')
+
+ expect(json_response.size).to eq(expected_discussion_count)
+ expect(json_response.pluck('id')).to eq(expected_discussion_ids)
+ end
+end
diff --git a/spec/support/shared_examples/relative_positioning_shared_examples.rb b/spec/support/shared_examples/relative_positioning_shared_examples.rb
index 5ee62644c54..1c53e2602eb 100644
--- a/spec/support/shared_examples/relative_positioning_shared_examples.rb
+++ b/spec/support/shared_examples/relative_positioning_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples "a class that supports relative positioning" do
+RSpec.shared_examples 'a class that supports relative positioning' do
let(:item1) { create(factory, default_params) }
let(:item2) { create(factory, default_params) }
let(:new_item) { create(factory, default_params) }
@@ -9,24 +9,32 @@ RSpec.shared_examples "a class that supports relative positioning" do
create(factory, params.merge(default_params))
end
- describe '.move_to_end' do
- it 'moves the object to the end' do
- item1.update(relative_position: 5)
- item2.update(relative_position: 15)
+ describe '.move_nulls_to_end' do
+ it 'moves items with null relative_position to the end' do
+ skip("#{item1} has a default relative position") if item1.relative_position
+ skip("#{item2} has a default relative position") if item2.relative_position
- described_class.move_to_end([item1, item2])
+ described_class.move_nulls_to_end([item1, item2])
expect(item2.prev_relative_position).to eq item1.relative_position
expect(item1.prev_relative_position).to eq nil
expect(item2.next_relative_position).to eq nil
end
+ it 'moves the item near the start position when there are no existing positions' do
+ skip("#{item1} has a default relative position") if item1.relative_position
+
+ described_class.move_nulls_to_end([item1])
+
+ expect(item1.relative_position).to eq(described_class::START_POSITION + described_class::IDEAL_DISTANCE)
+ end
+
it 'does not perform any moves if all items have their relative_position set' do
item1.update!(relative_position: 1)
expect(item1).not_to receive(:save)
- described_class.move_to_end([item1])
+ described_class.move_nulls_to_end([item1])
end
end
diff --git a/spec/workers/build_process_worker_spec.rb b/spec/workers/build_process_worker_spec.rb
new file mode 100644
index 00000000000..cceca40717c
--- /dev/null
+++ b/spec/workers/build_process_worker_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe BuildProcessWorker do
+ describe '#perform' do
+ context 'when build exists' do
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it 'processes build' do
+ expect_any_instance_of(Ci::Pipeline).to receive(:process!)
+ .with(build.name)
+
+ described_class.new.perform(build.id)
+ end
+ end
+
+ context 'when build does not exist' do
+ it 'does not raise exception' do
+ expect { described_class.new.perform(123) }
+ .not_to raise_error
+ end
+ end
+ end
+end
diff --git a/vendor/jupyter/values.yaml b/vendor/jupyter/values.yaml
index 2aadd3dbe1e..852c6c3f2b0 100644
--- a/vendor/jupyter/values.yaml
+++ b/vendor/jupyter/values.yaml
@@ -57,3 +57,7 @@ ingress:
annotations:
kubernetes.io/ingress.class: "nginx"
kubernetes.io/tls-acme: "true"
+
+proxy:
+ service:
+ type: ClusterIP