Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb  2
-rw-r--r--  spec/services/auto_merge_service_spec.rb  4
-rw-r--r--  spec/services/award_emojis/add_service_spec.rb  6
-rw-r--r--  spec/services/award_emojis/destroy_service_spec.rb  6
-rw-r--r--  spec/services/award_emojis/toggle_service_spec.rb  6
-rw-r--r--  spec/services/boards/issues/move_service_spec.rb  16
-rw-r--r--  spec/services/boards/list_service_spec.rb  1
-rw-r--r--  spec/services/ci/create_pipeline_service/custom_config_content_spec.rb  29
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb  144
-rw-r--r--  spec/services/ci/ensure_stage_service_spec.rb  4
-rw-r--r--  spec/services/ci/expire_pipeline_cache_service_spec.rb  6
-rw-r--r--  spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb  91
-rw-r--r--  spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb  12
-rw-r--r--  spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb  12
-rw-r--r--  spec/services/ci/pipeline_processing/shared_processing_service.rb  940
-rw-r--r--  spec/services/ci/prepare_build_service_spec.rb  4
-rw-r--r--  spec/services/ci/process_pipeline_service_spec.rb  894
-rw-r--r--  spec/services/ci/register_job_service_spec.rb  6
-rw-r--r--  spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb  64
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb  28
-rw-r--r--  spec/services/ci/retry_pipeline_service_spec.rb  2
-rw-r--r--  spec/services/ci/run_scheduled_build_service_spec.rb  12
-rw-r--r--  spec/services/clusters/applications/check_installation_progress_service_spec.rb  6
-rw-r--r--  spec/services/clusters/applications/create_service_spec.rb  34
-rw-r--r--  spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb  43
-rw-r--r--  spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb  12
-rw-r--r--  spec/services/container_expiration_policy_service_spec.rb  31
-rw-r--r--  spec/services/create_snippet_service_spec.rb  117
-rw-r--r--  spec/services/deployments/after_create_service_spec.rb  48
-rw-r--r--  spec/services/deployments/link_merge_requests_service_spec.rb  30
-rw-r--r--  spec/services/error_tracking/list_issues_service_spec.rb  4
-rw-r--r--  spec/services/error_tracking/list_projects_service_spec.rb  4
-rw-r--r--  spec/services/external_pull_requests/create_pipeline_service_spec.rb  4
-rw-r--r--  spec/services/git/branch_push_service_spec.rb  6
-rw-r--r--  spec/services/groups/auto_devops_service_spec.rb  4
-rw-r--r--  spec/services/issues/referenced_merge_requests_service_spec.rb  16
-rw-r--r--  spec/services/issues/reorder_service_spec.rb  6
-rw-r--r--  spec/services/issues/zoom_link_service_spec.rb  4
-rw-r--r--  spec/services/merge_requests/conflicts/list_service_spec.rb  4
-rw-r--r--  spec/services/merge_requests/create_from_issue_service_spec.rb  4
-rw-r--r--  spec/services/merge_requests/create_pipeline_service_spec.rb  4
-rw-r--r--  spec/services/merge_requests/get_urls_service_spec.rb  7
-rw-r--r--  spec/services/merge_requests/merge_service_spec.rb  4
-rw-r--r--  spec/services/merge_requests/merge_to_ref_service_spec.rb  4
-rw-r--r--  spec/services/merge_requests/push_options_handler_service_spec.rb  12
-rw-r--r--  spec/services/merge_requests/rebase_service_spec.rb  9
-rw-r--r--  spec/services/metrics/dashboard/clone_dashboard_service_spec.rb  197
-rw-r--r--  spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb  6
-rw-r--r--  spec/services/metrics/dashboard/default_embed_service_spec.rb  6
-rw-r--r--  spec/services/metrics/dashboard/dynamic_embed_service_spec.rb  6
-rw-r--r--  spec/services/metrics/dashboard/project_dashboard_service_spec.rb  6
-rw-r--r--  spec/services/metrics/dashboard/system_dashboard_service_spec.rb  6
-rw-r--r--  spec/services/metrics/sample_metrics_service_spec.rb  9
-rw-r--r--  spec/services/milestones/promote_service_spec.rb  4
-rw-r--r--  spec/services/milestones/transfer_service_spec.rb  4
-rw-r--r--  spec/services/namespaces/statistics_refresher_service_spec.rb  9
-rw-r--r--  spec/services/notes/create_service_spec.rb  20
-rw-r--r--  spec/services/notes/destroy_service_spec.rb  4
-rw-r--r--  spec/services/notes/resolve_service_spec.rb  4
-rw-r--r--  spec/services/notification_service_spec.rb  20
-rw-r--r--  spec/services/pages_domains/create_acme_order_service_spec.rb  26
-rw-r--r--  spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb  6
-rw-r--r--  spec/services/projects/container_repository/cleanup_tags_service_spec.rb  6
-rw-r--r--  spec/services/projects/container_repository/delete_tags_service_spec.rb  6
-rw-r--r--  spec/services/projects/container_repository/destroy_service_spec.rb  4
-rw-r--r--  spec/services/projects/create_service_spec.rb  4
-rw-r--r--  spec/services/projects/detect_repository_languages_service_spec.rb  4
-rw-r--r--  spec/services/projects/fork_service_spec.rb  13
-rw-r--r--  spec/services/projects/gitlab_projects_import_service_spec.rb  2
-rw-r--r--  spec/services/projects/housekeeping_service_spec.rb  2
-rw-r--r--  spec/services/projects/import_export/export_service_spec.rb  8
-rw-r--r--  spec/services/projects/lfs_pointers/lfs_import_service_spec.rb  12
-rw-r--r--  spec/services/projects/open_merge_requests_count_service_spec.rb  2
-rw-r--r--  spec/services/projects/operations/update_service_spec.rb  4
-rw-r--r--  spec/services/projects/participants_service_spec.rb  28
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb  66
-rw-r--r--  spec/services/prometheus/adapter_service_spec.rb  51
-rw-r--r--  spec/services/prometheus/proxy_service_spec.rb  10
-rw-r--r--  spec/services/prometheus/proxy_variable_substitution_service_spec.rb  146
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb  5
-rw-r--r--  spec/services/releases/update_service_spec.rb  38
-rw-r--r--  spec/services/resource_events/change_labels_service_spec.rb  4
-rw-r--r--  spec/services/resource_events/merge_into_notes_service_spec.rb  10
-rw-r--r--  spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb  21
-rw-r--r--  spec/services/snippets/create_service_spec.rb  170
-rw-r--r--  spec/services/snippets/destroy_service_spec.rb  77
-rw-r--r--  spec/services/snippets/update_service_spec.rb  123
-rw-r--r--  spec/services/spam/mark_as_spam_service_spec.rb  52
-rw-r--r--  spec/services/spam_service_spec.rb  4
-rw-r--r--  spec/services/suggestions/apply_service_spec.rb  31
-rw-r--r--  spec/services/system_note_service_spec.rb  119
-rw-r--r--  spec/services/system_notes/commit_service_spec.rb  6
-rw-r--r--  spec/services/system_notes/issuables_service_spec.rb  8
-rw-r--r--  spec/services/system_notes/time_tracking_service_spec.rb  129
-rw-r--r--  spec/services/task_list_toggle_service_spec.rb  44
-rw-r--r--  spec/services/template_engines/liquid_service_spec.rb  126
-rw-r--r--  spec/services/update_snippet_service_spec.rb  80
-rw-r--r--  spec/services/users/activity_service_spec.rb  4
-rw-r--r--  spec/services/users/destroy_service_spec.rb  16
-rw-r--r--  spec/services/users/update_service_spec.rb  7
100 files changed, 2894 insertions, 1587 deletions
diff --git a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
index f2cda999932..e03d87e9d49 100644
--- a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
+++ b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
@@ -34,7 +34,7 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
it { is_expected.to be_truthy }
- context 'when the head piipeline succeeded' do
+ context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }
it { is_expected.to be_falsy }
diff --git a/spec/services/auto_merge_service_spec.rb b/spec/services/auto_merge_service_spec.rb
index 50dfc49a59c..221cf695331 100644
--- a/spec/services/auto_merge_service_spec.rb
+++ b/spec/services/auto_merge_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe AutoMergeService do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user) }
describe '.all_strategies' do
diff --git a/spec/services/award_emojis/add_service_spec.rb b/spec/services/award_emojis/add_service_spec.rb
index 8364e662735..4bcb5fa039f 100644
--- a/spec/services/award_emojis/add_service_spec.rb
+++ b/spec/services/award_emojis/add_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe AwardEmojis::AddService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:awardable) { create(:note, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:awardable) { create(:note, project: project) }
let(:name) { 'thumbsup' }
subject(:service) { described_class.new(awardable, name, user) }
diff --git a/spec/services/award_emojis/destroy_service_spec.rb b/spec/services/award_emojis/destroy_service_spec.rb
index 6d54c037464..f411345560e 100644
--- a/spec/services/award_emojis/destroy_service_spec.rb
+++ b/spec/services/award_emojis/destroy_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe AwardEmojis::DestroyService do
- set(:user) { create(:user) }
- set(:awardable) { create(:note) }
- set(:project) { awardable.project }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:awardable) { create(:note) }
+ let_it_be(:project) { awardable.project }
let(:name) { 'thumbsup' }
let!(:award_from_other_user) do
create(:award_emoji, name: name, awardable: awardable, user: create(:user))
diff --git a/spec/services/award_emojis/toggle_service_spec.rb b/spec/services/award_emojis/toggle_service_spec.rb
index a8d110d04f7..069bdfcb99f 100644
--- a/spec/services/award_emojis/toggle_service_spec.rb
+++ b/spec/services/award_emojis/toggle_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe AwardEmojis::ToggleService do
- set(:user) { create(:user) }
- set(:project) { create(:project, :public) }
- set(:awardable) { create(:note, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:awardable) { create(:note, project: project) }
let(:name) { 'thumbsup' }
subject(:service) { described_class.new(awardable, name, user) }
diff --git a/spec/services/boards/issues/move_service_spec.rb b/spec/services/boards/issues/move_service_spec.rb
index cf84ec8fd4c..b9ebbc30c1a 100644
--- a/spec/services/boards/issues/move_service_spec.rb
+++ b/spec/services/boards/issues/move_service_spec.rb
@@ -54,14 +54,14 @@ describe Boards::Issues::MoveService do
end
describe '#execute_multiple' do
- set(:group) { create(:group) }
- set(:user) { create(:user) }
- set(:project) { create(:project, namespace: group) }
- set(:board1) { create(:board, group: group) }
- set(:development) { create(:group_label, group: group, name: 'Development') }
- set(:testing) { create(:group_label, group: group, name: 'Testing') }
- set(:list1) { create(:list, board: board1, label: development, position: 0) }
- set(:list2) { create(:list, board: board1, label: testing, position: 1) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:board1) { create(:board, group: group) }
+ let_it_be(:development) { create(:group_label, group: group, name: 'Development') }
+ let_it_be(:testing) { create(:group_label, group: group, name: 'Testing') }
+ let_it_be(:list1) { create(:list, board: board1, label: development, position: 0) }
+ let_it_be(:list2) { create(:list, board: board1, label: testing, position: 1) }
let(:params) { { board_id: board1.id, from_list_id: list1.id, to_list_id: list2.id } }
before do
diff --git a/spec/services/boards/list_service_spec.rb b/spec/services/boards/list_service_spec.rb
index c9d372ea166..4eb023907fa 100644
--- a/spec/services/boards/list_service_spec.rb
+++ b/spec/services/boards/list_service_spec.rb
@@ -10,6 +10,7 @@ describe Boards::ListService do
subject(:service) { described_class.new(parent, double) }
it_behaves_like 'boards list service'
+ it_behaves_like 'multiple boards list service'
end
context 'when board parent is a group' do
diff --git a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
new file mode 100644
index 00000000000..33cd6e164b0
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Ci::CreatePipelineService do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:admin) }
+ let(:ref) { 'refs/heads/master' }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+
+ context 'custom config content' do
+ let(:bridge) do
+ double(:bridge, yaml_for_downstream: <<~YML
+ rspec:
+ script: rspec
+ custom:
+ script: custom
+ YML
+ )
+ end
+
+ subject { service.execute(:push, bridge: bridge) }
+
+ it 'creates a pipeline using the content passed in as param' do
+ expect(subject).to be_persisted
+ expect(subject.builds.map(&:name)).to eq %w[rspec custom]
+ expect(subject.config_source).to eq 'bridge_source'
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 04e57b1a2d4..d6cc233088d 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Ci::CreatePipelineService do
include ProjectForksHelper
- set(:project) { create(:project, :repository) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
let(:user) { create(:admin) }
let(:ref_name) { 'refs/heads/master' }
@@ -362,11 +362,11 @@ describe Ci::CreatePipelineService do
context 'when build that is not marked as interruptible is running' do
it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
- pipeline_on_previous_commit
- .builds
- .find_by_name('build_2_1')
- .tap(&:enqueue!)
- .run!
+ build_2_1 = pipeline_on_previous_commit
+ .builds.find_by_name('build_2_1')
+
+ build_2_1.enqueue!
+ build_2_1.reset.run!
pipeline
@@ -377,12 +377,12 @@ describe Ci::CreatePipelineService do
end
context 'when an uninterruptible build is running' do
- it 'does not cancel running outdated pipelines', :sidekiq_might_not_need_inline do
- pipeline_on_previous_commit
- .builds
- .find_by_name('build_3_1')
- .tap(&:enqueue!)
- .run!
+ it 'does not cancel running outdated pipelines', :sidekiq_inline do
+ build_3_1 = pipeline_on_previous_commit
+ .builds.find_by_name('build_3_1')
+
+ build_3_1.enqueue!
+ build_3_1.reset.run!
pipeline
@@ -493,12 +493,13 @@ describe Ci::CreatePipelineService do
before do
stub_ci_pipeline_yaml_file(nil)
allow_any_instance_of(Project).to receive(:auto_devops_enabled?).and_return(true)
+ create(:project_auto_devops, project: project)
end
it 'pull it from Auto-DevOps' do
pipeline = execute_service
expect(pipeline).to be_auto_devops_source
- expect(pipeline.builds.map(&:name)).to eq %w[test code_quality build]
+ expect(pipeline.builds.map(&:name)).to match_array(%w[test code_quality build])
end
end
@@ -914,6 +915,44 @@ describe Ci::CreatePipelineService do
end
end
+ context 'with resource group' do
+ context 'when resource group is defined' do
+ before do
+ config = YAML.dump(
+ test: { stage: 'test', script: 'ls', resource_group: resource_group_key }
+ )
+
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ let(:resource_group_key) { 'iOS' }
+
+ it 'persists the association correctly' do
+ result = execute_service
+ deploy_job = result.builds.find_by_name!(:test)
+ resource_group = project.resource_groups.find_by_key!(resource_group_key)
+
+ expect(result).to be_persisted
+ expect(deploy_job.resource_group.key).to eq(resource_group_key)
+ expect(project.resource_groups.count).to eq(1)
+ expect(resource_group.builds.count).to eq(1)
+ expect(resource_group.resources.count).to eq(1)
+ expect(resource_group.resources.first.build).to eq(nil)
+ end
+
+ context 'when resource group key includes predefined variables' do
+ let(:resource_group_key) { '$CI_COMMIT_REF_NAME-$CI_JOB_NAME' }
+
+ it 'interpolates the variables into the key correctly' do
+ result = execute_service
+
+ expect(result).to be_persisted
+ expect(project.resource_groups.exists?(key: 'master-test')).to eq(true)
+ end
+ end
+ end
+ end
+
context 'with timeout' do
context 'when builds with custom timeouts are configured' do
before do
@@ -930,6 +969,70 @@ describe Ci::CreatePipelineService do
end
end
+ context 'with release' do
+ shared_examples_for 'a successful release pipeline' do
+ before do
+ stub_feature_flags(ci_release_generation: true)
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ it 'is valid config' do
+ pipeline = execute_service
+ build = pipeline.builds.first
+ expect(pipeline).to be_kind_of(Ci::Pipeline)
+ expect(pipeline).to be_valid
+ expect(pipeline.yaml_errors).not_to be_present
+ expect(pipeline).to be_persisted
+ expect(build).to be_kind_of(Ci::Build)
+ expect(build.options).to eq(config[:release].except(:stage, :only).with_indifferent_access)
+ end
+ end
+
+ context 'simple example' do
+ it_behaves_like 'a successful release pipeline' do
+ let(:config) do
+ {
+ release: {
+ script: ["make changelog | tee release_changelog.txt"],
+ release: {
+ tag_name: "v0.06",
+ description: "./release_changelog.txt"
+ }
+ }
+ }
+ end
+ end
+ end
+
+ context 'example with all release metadata' do
+ it_behaves_like 'a successful release pipeline' do
+ let(:config) do
+ {
+ release: {
+ script: ["make changelog | tee release_changelog.txt"],
+ release: {
+ name: "Release $CI_TAG_NAME",
+ tag_name: "v0.06",
+ description: "./release_changelog.txt",
+ assets: {
+ links: [
+ {
+ name: "cool-app.zip",
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
+ },
+ {
+ url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.exe"
+ }
+ ]
+ }
+ }
+ }
+ }
+ end
+ end
+ end
+ end
+
shared_examples 'when ref is protected' do
let(:user) { create(:user) }
@@ -1020,21 +1123,6 @@ describe Ci::CreatePipelineService do
it_behaves_like 'when ref is protected'
end
- context 'when ref is not protected' do
- context 'when trigger belongs to no one' do
- let(:user) {}
- let(:trigger) { create(:ci_trigger, owner: nil) }
- let(:trigger_request) { create(:ci_trigger_request, trigger: trigger) }
- let(:pipeline) { execute_service(trigger_request: trigger_request) }
-
- it 'creates an unprotected pipeline' do
- expect(pipeline).to be_persisted
- expect(pipeline).not_to be_protected
- expect(Ci::Pipeline.count).to eq(1)
- end
- end
- end
-
context 'when pipeline is running for a tag' do
before do
config = YAML.dump(test: { script: 'test', only: ['branches'] },
diff --git a/spec/services/ci/ensure_stage_service_spec.rb b/spec/services/ci/ensure_stage_service_spec.rb
index 43bbd2130a4..de07a1ae238 100644
--- a/spec/services/ci/ensure_stage_service_spec.rb
+++ b/spec/services/ci/ensure_stage_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Ci::EnsureStageService, '#execute' do
- set(:project) { create(:project) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
let(:stage) { create(:ci_stage_entity) }
let(:job) { build(:ci_build) }
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index ff2d286465a..c0226654fd9 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Ci::ExpirePipelineCacheService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
subject { described_class.new }
describe '#execute' do
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
new file mode 100644
index 00000000000..c29c56c2b04
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection do
+ using RSpec::Parameterized::TableSyntax
+
+ set(:pipeline) { create(:ci_pipeline) }
+ set(:build_a) { create(:ci_build, :success, name: 'build-a', stage: 'build', stage_idx: 0, pipeline: pipeline) }
+ set(:build_b) { create(:ci_build, :failed, name: 'build-b', stage: 'build', stage_idx: 0, pipeline: pipeline) }
+ set(:test_a) { create(:ci_build, :running, name: 'test-a', stage: 'test', stage_idx: 1, pipeline: pipeline) }
+ set(:test_b) { create(:ci_build, :pending, name: 'test-b', stage: 'test', stage_idx: 1, pipeline: pipeline) }
+ set(:deploy) { create(:ci_build, :created, name: 'deploy', stage: 'deploy', stage_idx: 2, pipeline: pipeline) }
+
+ let(:collection) { described_class.new(pipeline) }
+
+ describe '#set_processable_status' do
+ it 'does update existing status of processable' do
+ collection.set_processable_status(test_a.id, 'success', 100)
+
+ expect(collection.status_for_names(['test-a'])).to eq('success')
+ end
+
+ it 'ignores a missing processable' do
+ collection.set_processable_status(-1, 'failed', 100)
+ end
+ end
+
+ describe '#status_of_all' do
+ it 'returns composite status of the collection' do
+ expect(collection.status_of_all).to eq('running')
+ end
+ end
+
+ describe '#status_for_names' do
+ where(:names, :status) do
+ %w[build-a] | 'success'
+ %w[build-a build-b] | 'failed'
+ %w[build-a test-a] | 'running'
+ end
+
+ with_them do
+ it 'returns composite status of given names' do
+ expect(collection.status_for_names(names)).to eq(status)
+ end
+ end
+ end
+
+ describe '#status_for_prior_stage_position' do
+ where(:stage, :status) do
+ 0 | 'success'
+ 1 | 'failed'
+ 2 | 'running'
+ end
+
+ with_them do
+ it 'returns composite status for processables in prior stages' do
+ expect(collection.status_for_prior_stage_position(stage)).to eq(status)
+ end
+ end
+ end
+
+ describe '#status_for_stage_position' do
+ where(:stage, :status) do
+ 0 | 'failed'
+ 1 | 'running'
+ 2 | 'created'
+ end
+
+ with_them do
+ it 'returns composite status for processables at a given stage' do
+ expect(collection.status_for_stage_position(stage)).to eq(status)
+ end
+ end
+ end
+
+ describe '#created_processable_ids_for_stage_position' do
+ it 'returns IDs of processables at a given stage position' do
+ expect(collection.created_processable_ids_for_stage_position(0)).to be_empty
+ expect(collection.created_processable_ids_for_stage_position(1)).to be_empty
+ expect(collection.created_processable_ids_for_stage_position(2)).to contain_exactly(deploy.id)
+ end
+ end
+
+ describe '#processing_processables' do
+ it 'returns processables marked as processing' do
+ expect(collection.processing_processables.map { |processable| processable[:id]} )
+ .to contain_exactly(build_a.id, build_b.id, test_a.id, test_b.id, deploy.id)
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
new file mode 100644
index 00000000000..38686b41a22
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative 'shared_processing_service.rb'
+
+describe Ci::PipelineProcessing::AtomicProcessingService do
+ before do
+ stub_feature_flags(ci_atomic_processing: true)
+ end
+
+ it_behaves_like 'Pipeline Processing Service'
+end
diff --git a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
new file mode 100644
index 00000000000..2da1eb19818
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_relative 'shared_processing_service.rb'
+
+describe Ci::PipelineProcessing::LegacyProcessingService do
+ before do
+ stub_feature_flags(ci_atomic_processing: false)
+ end
+
+ it_behaves_like 'Pipeline Processing Service'
+end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
new file mode 100644
index 00000000000..cae5ae3f09d
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -0,0 +1,940 @@
+# frozen_string_literal: true
+
+shared_examples 'Pipeline Processing Service' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+
+ let(:pipeline) do
+ create(:ci_empty_pipeline, ref: 'master', project: project)
+ end
+
+ before do
+ stub_ci_pipeline_to_return_yaml_file
+
+ stub_not_protect_default_branch
+
+ project.add_developer(user)
+ end
+
+ context 'when simple pipeline is defined' do
+ before do
+ create_build('linux', stage_idx: 0)
+ create_build('mac', stage_idx: 0)
+ create_build('rspec', stage_idx: 1)
+ create_build('rubocop', stage_idx: 1)
+ create_build('deploy', stage_idx: 2)
+ end
+
+ it 'processes a pipeline', :sidekiq_inline do
+ expect(process_pipeline).to be_truthy
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(2)
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(4)
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(5)
+ end
+
+ it 'does not process pipeline if existing stage is running' do
+ expect(process_pipeline).to be_truthy
+ expect(builds.pending.count).to eq(2)
+
+ expect(process_pipeline).to be_falsey
+ expect(builds.pending.count).to eq(2)
+ end
+ end
+
+ context 'custom stage with first job allowed to fail' do
+ before do
+ create_build('clean_job', stage_idx: 0, allow_failure: true)
+ create_build('test_job', stage_idx: 1, allow_failure: true)
+ end
+
+ it 'automatically triggers a next stage when build finishes', :sidekiq_inline do
+ expect(process_pipeline).to be_truthy
+ expect(builds_statuses).to eq ['pending']
+
+ fail_running_or_pending
+
+ expect(builds_statuses).to eq %w(failed pending)
+
+ fail_running_or_pending
+
+ expect(pipeline.reload).to be_success
+ end
+ end
+
+ context 'when optional manual actions are defined', :sidekiq_inline do
+ before do
+ create_build('build', stage_idx: 0)
+ create_build('test', stage_idx: 1)
+ create_build('test_failure', stage_idx: 2, when: 'on_failure')
+ create_build('deploy', stage_idx: 3)
+ create_build('production', stage_idx: 3, when: 'manual', allow_failure: true)
+ create_build('cleanup', stage_idx: 4, when: 'always')
+ create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true)
+ end
+
+ context 'when builds are successful' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production)
+ expect(builds_statuses).to eq %w(success success pending manual)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production cleanup clear:cache)
+ expect(builds_statuses).to eq %w(success success success manual pending manual)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success success success manual success manual)
+ expect(pipeline.reload.status).to eq 'success'
+ end
+ end
+
+ context 'when test job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure)
+ expect(builds_statuses).to eq %w(success failed pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success failed success success)
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+ end
+
+ context 'when test and test_failure jobs fail' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure)
+ expect(builds_statuses).to eq %w(success failed pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed failed pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed failed success)
+ expect(pipeline.reload.status).to eq('failed')
+ end
+ end
+
+ context 'when deploy job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production)
+ expect(builds_statuses).to eq %w(success success pending manual)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production cleanup)
+ expect(builds_statuses).to eq %w(success success failed manual pending)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success success failed manual success)
+ expect(pipeline.reload).to be_failed
+ end
+ end
+
+ context 'when build is canceled in the second stage' do
+ it 'does not schedule builds after build has been canceled' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds.running_or_pending).not_to be_empty
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ cancel_running_or_pending
+
+ expect(builds.running_or_pending).to be_empty
+ expect(builds_names).to eq %w[build test]
+ expect(builds_statuses).to eq %w[success canceled]
+ expect(pipeline.reload).to be_canceled
+ end
+ end
+
+ context 'when listing optional manual actions' do
+ it 'returns only for skipped builds' do
+ # currently all builds are created
+ expect(process_pipeline).to be_truthy
+ expect(manual_actions).to be_empty
+
+ # succeed stage build
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_empty
+
+ # succeed stage test
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_one # production
+
+ # succeed stage deploy
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_many # production and clear cache
+ end
+ end
+ end
+
+ context 'when delayed jobs are defined', :sidekiq_inline do
+ context 'when the scene is timed incremental rollout' do
+ before do
+ create_build('build', stage_idx: 0)
+ create_build('rollout10%', **delayed_options, stage_idx: 1)
+ create_build('rollout100%', **delayed_options, stage_idx: 2)
+ create_build('cleanup', stage_idx: 3)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ context 'when builds are successful' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('rollout10%')
+ end
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
+
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('rollout100%')
+ end
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' })
+ expect(pipeline.reload.status).to eq 'success'
+ end
+ end
+
+ context 'when build job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'failed' })
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+ end
+
+ context 'when rollout 10% is unscheduled' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ unschedule
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' })
+ expect(pipeline.reload.status).to eq 'manual'
+ end
+
+ context 'when user plays rollout 10%' do
+ it 'schedules rollout100%' do
+ process_pipeline
+ succeed_pending
+ unschedule
+ play_manual_action('rollout10%')
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
+ expect(pipeline.reload.status).to eq 'scheduled'
+ end
+ end
+ end
+
+ context 'when rollout 10% fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('rollout10%')
+ end
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' })
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+
+ context 'when user retries rollout 10%' do
+ it 'does not schedule rollout10% again' do
+ process_pipeline
+ succeed_pending
+ enqueue_scheduled('rollout10%')
+ fail_running_or_pending
+ retry_build('rollout10%')
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+ end
+
+ context 'when rollout 10% is played immediately' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ play_manual_action('rollout10%')
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+ end
+
+ context 'when only one scheduled job exists in a pipeline' do
+ before do
+ create_build('delayed', **delayed_options, stage_idx: 0)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
+
+ expect(pipeline.reload.status).to eq 'scheduled'
+ end
+ end
+
+ context 'when there are two delayed jobs in a stage' do
+ before do
+ create_build('delayed1', **delayed_options, stage_idx: 0)
+ create_build('delayed2', **delayed_options, stage_idx: 0)
+ create_build('job', stage_idx: 1)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'blocks the stage until all scheduled jobs finished' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' })
+
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('delayed1')
+ end
+
+ expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+
+ context 'when a delayed job is allowed to fail' do
+ before do
+ create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0)
+ create_build('job', stage_idx: 1)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'blocks the stage and continues after it failed' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
+
+ Timecop.travel 2.minutes.from_now do
+ enqueue_scheduled('delayed')
+ end
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' })
+ expect(pipeline.reload.status).to eq 'pending'
+ end
+ end
+ end
+
+ context 'when an exception is raised during a persistent ref creation' do
+ before do
+ successful_build('test', stage_idx: 0)
+
+ allow_next_instance_of(Ci::PersistentRef) do |instance|
+ allow(instance).to receive(:delete_refs) { raise ArgumentError }
+ end
+ end
+
+ it 'processes the pipeline' do
+ expect { process_pipeline }.not_to raise_error
+ end
+ end
+
+ context 'when there are manual actions in earlier stages' do
+ context 'when first stage has only optional manual actions' do
+ before do
+ create_build('build', stage_idx: 0, when: 'manual', allow_failure: true)
+ create_build('check', stage_idx: 1)
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'starts from the second stage' do
+ expect(all_builds_statuses).to eq %w[manual pending created]
+ end
+ end
+
+ context 'when second stage has only optional manual actions' do
+ before do
+ create_build('check', stage_idx: 0)
+ create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'skips second stage and continues on third stage', :sidekiq_inline do
+ expect(all_builds_statuses).to eq(%w[pending created created])
+
+ builds.first.success
+
+ expect(all_builds_statuses).to eq(%w[success manual pending])
+ end
+ end
+ end
+
+ context 'when there are only manual actions in stages' do
+ before do
+ create_build('image', stage_idx: 0, when: 'manual', allow_failure: true)
+ create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
+ create_build('deploy', stage_idx: 2, when: 'manual')
+ create_build('check', stage_idx: 3)
+
+ process_pipeline
+ end
+
+ it 'processes all jobs until blocking actions encountered' do
+ expect(all_builds_statuses).to eq(%w[manual manual manual created])
+ expect(all_builds_names).to eq(%w[image build deploy check])
+
+ expect(pipeline.reload).to be_blocked
+ end
+ end
+
+ context 'when there is only one manual action' do
+ before do
+ create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true)
+
+ process_pipeline
+ end
+
+ it 'skips the pipeline' do
+ expect(pipeline.reload).to be_skipped
+ end
+
+ context 'when the action was played' do
+ before do
+ play_manual_action('deploy')
+ end
+
+ it 'queues the action and pipeline', :sidekiq_inline do
+ expect(all_builds_statuses).to eq(%w[pending])
+
+ expect(pipeline.reload).to be_pending
+ end
+ end
+ end
+
+ context 'when blocking manual actions are defined', :sidekiq_inline do
+ before do
+ create_build('code:test', stage_idx: 0)
+ create_build('staging:deploy', stage_idx: 1, when: 'manual')
+ create_build('staging:test', stage_idx: 2, when: 'on_success')
+ create_build('production:deploy', stage_idx: 3, when: 'manual')
+ create_build('production:test', stage_idx: 4, when: 'always')
+ end
+
+ context 'when first stage succeeds' do
+ it 'blocks pipeline on stage with first manual action' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+ expect(pipeline.reload.status).to eq 'pending'
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy]
+ expect(builds_statuses).to eq %w[success manual]
+ expect(pipeline.reload).to be_manual
+ end
+ end
+
+ context 'when first stage fails' do
+ it 'does not take blocking action into account' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+ expect(pipeline.reload.status).to eq 'pending'
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w[code:test production:test]
+ expect(builds_statuses).to eq %w[failed pending]
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w[failed success]
+ expect(pipeline.reload).to be_failed
+ end
+ end
+
+ context 'when pipeline is promoted sequentially up to the end' do
+ before do
+ # Users need ability to merge into a branch in order to trigger
+ # protected manual actions.
+ #
+ create(:protected_branch, :developers_can_merge,
+ name: 'master', project: project)
+ end
+
+ it 'properly processes entire pipeline' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy]
+ expect(builds_statuses).to eq %w[success manual]
+ expect(pipeline.reload).to be_manual
+
+ play_manual_action('staging:deploy')
+
+ expect(builds_statuses).to eq %w[success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test]
+ expect(builds_statuses).to eq %w[success success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy]
+ expect(builds_statuses).to eq %w[success success success manual]
+
+ expect(pipeline.reload).to be_manual
+ expect(pipeline.reload).to be_blocked
+ expect(pipeline.reload).not_to be_active
+ expect(pipeline.reload).not_to be_complete
+
+ play_manual_action('production:deploy')
+
+ expect(builds_statuses).to eq %w[success success success pending]
+ expect(pipeline.reload).to be_running
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy production:test]
+ expect(builds_statuses).to eq %w[success success success success pending]
+ expect(pipeline.reload).to be_running
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy production:test]
+ expect(builds_statuses).to eq %w[success success success success success]
+ expect(pipeline.reload).to be_success
+ end
+ end
+ end
+
+ context 'when second stage has only on_failure jobs', :sidekiq_inline do
+ before do
+ create_build('check', stage_idx: 0)
+ create_build('build', stage_idx: 1, when: 'on_failure')
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'skips second stage and continues on third stage' do
+ expect(all_builds_statuses).to eq(%w[pending created created])
+
+ builds.first.success
+
+ expect(all_builds_statuses).to eq(%w[success skipped pending])
+ end
+ end
+
+ context 'when failed build in the middle stage is retried', :sidekiq_inline do
+ context 'when failed build is the only unsuccessful build in the stage' do
+ before do
+ create_build('build:1', stage_idx: 0)
+ create_build('build:2', stage_idx: 0)
+ create_build('test:1', stage_idx: 1)
+ create_build('test:2', stage_idx: 1)
+ create_build('deploy:1', stage_idx: 2)
+ create_build('deploy:2', stage_idx: 2)
+ end
+
+ it 'does trigger builds in the next stage' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build:1', 'build:2']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
+
+ pipeline.builds.find_by(name: 'test:1').success!
+ pipeline.builds.find_by(name: 'test:2').drop!
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
+
+ Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).reset.success!
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2',
+ 'test:2', 'deploy:1', 'deploy:2']
+ end
+ end
+ end
+
+ context 'when builds with auto-retries are configured', :sidekiq_inline do
+ before do
+ create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 })
+ create_build('test:1', stage_idx: 1, user: user, when: :on_failure)
+ create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 })
+ end
+
+ it 'automatically retries builds in a valid order' do
+ expect(process_pipeline).to be_truthy
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1]
+ expect(builds_statuses).to eq %w[failed pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1 test:2]
+ expect(builds_statuses).to eq %w[failed success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1 test:2]
+ expect(builds_statuses).to eq %w[failed success success]
+
+ expect(pipeline.reload).to be_success
+ end
+ end
+
+ context 'when pipeline with needs is created', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
+ let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
+ let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1) }
+ let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1) }
+ let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
+
+ let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
+ let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
+
+ let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
+ let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
+
+ it 'when linux:* finishes first it runs it out of order' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # we follow the single path of linux
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running pending created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
+
+ linux_rspec.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
+
+ linux_rubocop.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+
+ context 'when feature ci_dag_support is disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it 'when linux:build finishes first it follows stages' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # we follow the single path of linux
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running created created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+
+ expect(stages).to eq(%w(success pending created))
+ expect(builds.success).to contain_exactly(linux_build, mac_build)
+ expect(builds.pending).to contain_exactly(
+ linux_rspec, linux_rubocop, mac_rspec, mac_rubocop)
+
+ linux_rspec.reset.success!
+ linux_rubocop.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+ end
+
+ context 'when one of the jobs is run on a failure' do
+ let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure') }
+
+ let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
+
+ context 'when another job in build phase fails first' do
+ context 'when ci_dag_support is enabled' do
+ it 'does skip linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ mac_build.reset.drop!
+ linux_build.reset.success!
+
+ expect(linux_notify.reset).to be_skipped
+ end
+ end
+
+ context 'when ci_dag_support is disabled' do
+ before do
+ stub_feature_flags(ci_dag_support: false)
+ end
+
+ it 'does run linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ mac_build.reset.drop!
+ linux_build.reset.success!
+
+ expect(linux_notify.reset).to be_pending
+ end
+ end
+ end
+
+ context 'when linux:build job fails first' do
+ it 'does run linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ linux_build.reset.drop!
+
+ expect(linux_notify.reset).to be_pending
+ end
+ end
+ end
+ end
+
+ def process_pipeline
+ described_class.new(pipeline).execute
+ end
+
+ def all_builds
+ pipeline.builds.order(:stage_idx, :id)
+ end
+
+ def builds
+ all_builds.where.not(status: [:created, :skipped])
+ end
+
+ def stages
+ pipeline.reset.stages.map(&:status)
+ end
+
+ def builds_names
+ builds.pluck(:name)
+ end
+
+ def builds_names_and_statuses
+ builds.each_with_object({}) do |b, h|
+ h[b.name.to_sym] = b.status
+ h
+ end
+ end
+
+ def all_builds_names
+ all_builds.pluck(:name)
+ end
+
+ def builds_statuses
+ builds.pluck(:status)
+ end
+
+ def all_builds_statuses
+ all_builds.pluck(:status)
+ end
+
+ def succeed_pending
+ builds.pending.each do |build|
+ build.reset.success
+ end
+ end
+
+ def succeed_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.success
+ end
+ end
+
+ def fail_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.drop
+ end
+ end
+
+ def cancel_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.cancel
+ end
+ end
+
+ def play_manual_action(name)
+ builds.find_by(name: name).play(user)
+ end
+
+ def enqueue_scheduled(name)
+ builds.scheduled.find_by(name: name).enqueue_scheduled
+ end
+
+ def retry_build(name)
+ Ci::Build.retry(builds.find_by(name: name), user)
+ end
+
+ def manual_actions
+ pipeline.manual_actions.reload
+ end
+
+ def create_build(name, **opts)
+ create(:ci_build, :created, pipeline: pipeline, name: name, **with_stage_opts(opts))
+ end
+
+ def successful_build(name, **opts)
+ create(:ci_build, :success, pipeline: pipeline, name: name, **with_stage_opts(opts))
+ end
+
+ def with_stage_opts(opts)
+ { stage: "stage-#{opts[:stage_idx].to_i}" }.merge(opts)
+ end
+
+ def delayed_options
+ { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } }
+ end
+
+ def unschedule
+ pipeline.builds.scheduled.map(&:unschedule)
+ end
+end
diff --git a/spec/services/ci/prepare_build_service_spec.rb b/spec/services/ci/prepare_build_service_spec.rb
index 3c3d8b90bb0..02928b58ff8 100644
--- a/spec/services/ci/prepare_build_service_spec.rb
+++ b/spec/services/ci/prepare_build_service_spec.rb
@@ -14,7 +14,7 @@ describe Ci::PrepareBuildService do
shared_examples 'build enqueueing' do
it 'enqueues the build' do
- expect(build).to receive(:enqueue).once
+ expect(build).to receive(:enqueue_preparing).once
subject
end
@@ -34,7 +34,7 @@ describe Ci::PrepareBuildService do
context 'prerequisites fail to complete' do
before do
- allow(build).to receive(:enqueue).and_return(false)
+ allow(build).to receive(:enqueue_preparing).and_return(false)
end
it 'drops the build' do
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index ba5891c8694..40ae1c4029b 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Ci::ProcessPipelineService, '#execute' do
+describe Ci::ProcessPipelineService do
let(:user) { create(:user) }
let(:project) { create(:project) }
@@ -18,658 +18,6 @@ describe Ci::ProcessPipelineService, '#execute' do
project.add_developer(user)
end
- context 'when simple pipeline is defined' do
- before do
- create_build('linux', stage_idx: 0)
- create_build('mac', stage_idx: 0)
- create_build('rspec', stage_idx: 1)
- create_build('rubocop', stage_idx: 1)
- create_build('deploy', stage_idx: 2)
- end
-
- it 'processes a pipeline', :sidekiq_might_not_need_inline do
- expect(process_pipeline).to be_truthy
-
- succeed_pending
-
- expect(builds.success.count).to eq(2)
-
- succeed_pending
-
- expect(builds.success.count).to eq(4)
-
- succeed_pending
-
- expect(builds.success.count).to eq(5)
- end
-
- it 'does not process pipeline if existing stage is running' do
- expect(process_pipeline).to be_truthy
- expect(builds.pending.count).to eq(2)
-
- expect(process_pipeline).to be_falsey
- expect(builds.pending.count).to eq(2)
- end
- end
-
- context 'custom stage with first job allowed to fail' do
- before do
- create_build('clean_job', stage_idx: 0, allow_failure: true)
- create_build('test_job', stage_idx: 1, allow_failure: true)
- end
-
- it 'automatically triggers a next stage when build finishes', :sidekiq_might_not_need_inline do
- expect(process_pipeline).to be_truthy
- expect(builds_statuses).to eq ['pending']
-
- fail_running_or_pending
-
- expect(builds_statuses).to eq %w(failed pending)
-
- fail_running_or_pending
-
- expect(pipeline.reload).to be_success
- end
- end
-
- context 'when optional manual actions are defined', :sidekiq_might_not_need_inline do
- before do
- create_build('build', stage_idx: 0)
- create_build('test', stage_idx: 1)
- create_build('test_failure', stage_idx: 2, when: 'on_failure')
- create_build('deploy', stage_idx: 3)
- create_build('production', stage_idx: 3, when: 'manual', allow_failure: true)
- create_build('cleanup', stage_idx: 4, when: 'always')
- create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true)
- end
-
- context 'when builds are successful' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production)
- expect(builds_statuses).to eq %w(success success pending manual)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production cleanup clear:cache)
- expect(builds_statuses).to eq %w(success success success manual pending manual)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success success success manual success manual)
- expect(pipeline.reload.status).to eq 'success'
- end
- end
-
- context 'when test job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure)
- expect(builds_statuses).to eq %w(success failed pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed success pending)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success failed success success)
- expect(pipeline.reload.status).to eq 'failed'
- end
- end
-
- context 'when test and test_failure jobs fail' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure)
- expect(builds_statuses).to eq %w(success failed pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed failed pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed failed success)
- expect(pipeline.reload.status).to eq('failed')
- end
- end
-
- context 'when deploy job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production)
- expect(builds_statuses).to eq %w(success success pending manual)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production cleanup)
- expect(builds_statuses).to eq %w(success success failed manual pending)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success success failed manual success)
- expect(pipeline.reload).to be_failed
- end
- end
-
- context 'when build is canceled in the second stage' do
- it 'does not schedule builds after build has been canceled' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds.running_or_pending).not_to be_empty
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- cancel_running_or_pending
-
- expect(builds.running_or_pending).to be_empty
- expect(builds_names).to eq %w[build test]
- expect(builds_statuses).to eq %w[success canceled]
- expect(pipeline.reload).to be_canceled
- end
- end
-
- context 'when listing optional manual actions' do
- it 'returns only for skipped builds' do
- # currently all builds are created
- expect(process_pipeline).to be_truthy
- expect(manual_actions).to be_empty
-
- # succeed stage build
- succeed_running_or_pending
-
- expect(manual_actions).to be_empty
-
- # succeed stage test
- succeed_running_or_pending
-
- expect(manual_actions).to be_one # production
-
- # succeed stage deploy
- succeed_running_or_pending
-
- expect(manual_actions).to be_many # production and clear cache
- end
- end
- end
-
- context 'when delayed jobs are defined', :sidekiq_might_not_need_inline do
- context 'when the scenario is a timed incremental rollout' do
- before do
- create_build('build', stage_idx: 0)
- create_build('rollout10%', **delayed_options, stage_idx: 1)
- create_build('rollout100%', **delayed_options, stage_idx: 2)
- create_build('cleanup', stage_idx: 3)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- context 'when builds are successful' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- enqueue_scheduled('rollout10%')
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
-
- enqueue_scheduled('rollout100%')
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' })
- expect(pipeline.reload.status).to eq 'success'
- end
- end
-
- context 'when build job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'failed' })
- expect(pipeline.reload.status).to eq 'failed'
- end
- end
-
- context 'when rollout 10% is unscheduled' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- unschedule
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' })
- expect(pipeline.reload.status).to eq 'manual'
- end
-
- context 'when user plays rollout 10%' do
- it 'schedules rollout100%' do
- process_pipeline
- succeed_pending
- unschedule
- play_manual_action('rollout10%')
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
- expect(pipeline.reload.status).to eq 'scheduled'
- end
- end
- end
-
- context 'when rollout 10% fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- enqueue_scheduled('rollout10%')
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' })
- expect(pipeline.reload.status).to eq 'failed'
- end
-
- context 'when user retries rollout 10%' do
- it 'does not schedule rollout10% again' do
- process_pipeline
- succeed_pending
- enqueue_scheduled('rollout10%')
- fail_running_or_pending
- retry_build('rollout10%')
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
- end
-
- context 'when rollout 10% is played immediately' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- play_manual_action('rollout10%')
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
- end
-
- context 'when only one scheduled job exists in a pipeline' do
- before do
- create_build('delayed', **delayed_options, stage_idx: 0)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
-
- expect(pipeline.reload.status).to eq 'scheduled'
- end
- end
-
- context 'when there are two delayed jobs in a stage' do
- before do
- create_build('delayed1', **delayed_options, stage_idx: 0)
- create_build('delayed2', **delayed_options, stage_idx: 0)
- create_build('job', stage_idx: 1)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'blocks the stage until all scheduled jobs finished' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' })
-
- enqueue_scheduled('delayed1')
-
- expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
-
- context 'when a delayed job is allowed to fail' do
- before do
- create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0)
- create_build('job', stage_idx: 1)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'blocks the stage and continues after it failed' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
-
- enqueue_scheduled('delayed')
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' })
- expect(pipeline.reload.status).to eq 'pending'
- end
- end
- end
-
- context 'when an exception is raised during a persistent ref creation' do
- before do
- successful_build('test', stage_idx: 0)
-
- allow_next_instance_of(Ci::PersistentRef) do |instance|
- allow(instance).to receive(:delete_refs) { raise ArgumentError }
- end
- end
-
- it 'processes the pipeline' do
- expect { process_pipeline }.not_to raise_error
- end
- end
-
- context 'when there are manual actions in earlier stages' do
- context 'when first stage has only optional manual actions' do
- before do
- create_build('build', stage_idx: 0, when: 'manual', allow_failure: true)
- create_build('check', stage_idx: 1)
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'starts from the second stage' do
- expect(all_builds_statuses).to eq %w[manual pending created]
- end
- end
-
- context 'when second stage has only optional manual actions' do
- before do
- create_build('check', stage_idx: 0)
- create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'skips second stage and continues on third stage', :sidekiq_might_not_need_inline do
- expect(all_builds_statuses).to eq(%w[pending created created])
-
- builds.first.success
-
- expect(all_builds_statuses).to eq(%w[success manual pending])
- end
- end
- end
-
- context 'when there are only manual actions in stages' do
- before do
- create_build('image', stage_idx: 0, when: 'manual', allow_failure: true)
- create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
- create_build('deploy', stage_idx: 2, when: 'manual')
- create_build('check', stage_idx: 3)
-
- process_pipeline
- end
-
- it 'processes all jobs until blocking actions encountered' do
- expect(all_builds_statuses).to eq(%w[manual manual manual created])
- expect(all_builds_names).to eq(%w[image build deploy check])
-
- expect(pipeline.reload).to be_blocked
- end
- end
-
- context 'when there is only one manual action' do
- before do
- create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true)
-
- process_pipeline
- end
-
- it 'skips the pipeline' do
- expect(pipeline.reload).to be_skipped
- end
-
- context 'when the action was played' do
- before do
- play_manual_action('deploy')
- end
-
- it 'queues the action and pipeline', :sidekiq_might_not_need_inline do
- expect(all_builds_statuses).to eq(%w[pending])
-
- expect(pipeline.reload).to be_pending
- end
- end
- end
-
- context 'when blocking manual actions are defined', :sidekiq_might_not_need_inline do
- before do
- create_build('code:test', stage_idx: 0)
- create_build('staging:deploy', stage_idx: 1, when: 'manual')
- create_build('staging:test', stage_idx: 2, when: 'on_success')
- create_build('production:deploy', stage_idx: 3, when: 'manual')
- create_build('production:test', stage_idx: 4, when: 'always')
- end
-
- context 'when first stage succeeds' do
- it 'blocks pipeline on stage with first manual action' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
- expect(pipeline.reload.status).to eq 'pending'
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy]
- expect(builds_statuses).to eq %w[success manual]
- expect(pipeline.reload).to be_manual
- end
- end
-
- context 'when first stage fails' do
- it 'does not take blocking action into account' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
- expect(pipeline.reload.status).to eq 'pending'
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w[code:test production:test]
- expect(builds_statuses).to eq %w[failed pending]
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w[failed success]
- expect(pipeline.reload).to be_failed
- end
- end
-
- context 'when pipeline is promoted sequentially up to the end' do
- before do
- # Users need ability to merge into a branch in order to trigger
- # protected manual actions.
- #
- create(:protected_branch, :developers_can_merge,
- name: 'master', project: project)
- end
-
- it 'properly processes entire pipeline' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy]
- expect(builds_statuses).to eq %w[success manual]
- expect(pipeline.reload).to be_manual
-
- play_manual_action('staging:deploy')
-
- expect(builds_statuses).to eq %w[success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test]
- expect(builds_statuses).to eq %w[success success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy]
- expect(builds_statuses).to eq %w[success success success manual]
-
- expect(pipeline.reload).to be_manual
- expect(pipeline.reload).to be_blocked
- expect(pipeline.reload).not_to be_active
- expect(pipeline.reload).not_to be_complete
-
- play_manual_action('production:deploy')
-
- expect(builds_statuses).to eq %w[success success success pending]
- expect(pipeline.reload).to be_running
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy production:test]
- expect(builds_statuses).to eq %w[success success success success pending]
- expect(pipeline.reload).to be_running
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy production:test]
- expect(builds_statuses).to eq %w[success success success success success]
- expect(pipeline.reload).to be_success
- end
- end
- end
-
- context 'when second stage has only on_failure jobs', :sidekiq_might_not_need_inline do
- before do
- create_build('check', stage_idx: 0)
- create_build('build', stage_idx: 1, when: 'on_failure')
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'skips second stage and continues on third stage' do
- expect(all_builds_statuses).to eq(%w[pending created created])
-
- builds.first.success
-
- expect(all_builds_statuses).to eq(%w[success skipped pending])
- end
- end
-
- context 'when failed build in the middle stage is retried', :sidekiq_might_not_need_inline do
- context 'when failed build is the only unsuccessful build in the stage' do
- before do
- create_build('build:1', stage_idx: 0)
- create_build('build:2', stage_idx: 0)
- create_build('test:1', stage_idx: 1)
- create_build('test:2', stage_idx: 1)
- create_build('deploy:1', stage_idx: 2)
- create_build('deploy:2', stage_idx: 2)
- end
-
- it 'does trigger builds in the next stage' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build:1', 'build:2']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
-
- pipeline.builds.find_by(name: 'test:1').success
- pipeline.builds.find_by(name: 'test:2').drop
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
-
- Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).success
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2',
- 'test:2', 'deploy:1', 'deploy:2']
- end
- end
- end
-
context 'updates a list of retried builds' do
subject { described_class.retried.order(:id) }
@@ -685,251 +33,15 @@ describe Ci::ProcessPipelineService, '#execute' do
end
end
- context 'when builds with auto-retries are configured', :sidekiq_might_not_need_inline do
- before do
- create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 })
- create_build('test:1', stage_idx: 1, user: user, when: :on_failure)
- create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 })
- end
-
- it 'automatically retries builds in a valid order' do
- expect(process_pipeline).to be_truthy
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1]
- expect(builds_statuses).to eq %w[failed pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1 test:2]
- expect(builds_statuses).to eq %w[failed success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1 test:2]
- expect(builds_statuses).to eq %w[failed success success]
-
- expect(pipeline.reload).to be_success
- end
- end
-
- context 'when pipeline with needs is created', :sidekiq_might_not_need_inline do
- let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
- let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
- let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
- let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1) }
- let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1) }
- let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1) }
- let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
-
- let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
- let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
-
- let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
- let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
-
- it 'when linux:* finishes first it runs it out of order' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created created))
- expect(builds.pending).to contain_exactly(linux_build, mac_build)
-
- # we follow the single path of linux
- linux_build.reset.success!
-
- expect(stages).to eq(%w(running pending created))
- expect(builds.success).to contain_exactly(linux_build)
- expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
-
- linux_rspec.reset.success!
-
- expect(stages).to eq(%w(running running created))
- expect(builds.success).to contain_exactly(linux_build, linux_rspec)
- expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
-
- linux_rubocop.reset.success!
-
- expect(stages).to eq(%w(running running created))
- expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
- expect(builds.pending).to contain_exactly(mac_build)
-
- mac_build.reset.success!
- mac_rspec.reset.success!
- mac_rubocop.reset.success!
-
- expect(stages).to eq(%w(success success pending))
- expect(builds.success).to contain_exactly(
- linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
- expect(builds.pending).to contain_exactly(deploy)
- end
-
- context 'when feature ci_dag_support is disabled' do
- before do
- stub_feature_flags(ci_dag_support: false)
- end
-
- it 'when linux:build finishes first it follows stages' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created created))
- expect(builds.pending).to contain_exactly(linux_build, mac_build)
-
- # we follow the single path of linux
- linux_build.reset.success!
-
- expect(stages).to eq(%w(running created created))
- expect(builds.success).to contain_exactly(linux_build)
- expect(builds.pending).to contain_exactly(mac_build)
-
- mac_build.reset.success!
-
- expect(stages).to eq(%w(success pending created))
- expect(builds.success).to contain_exactly(linux_build, mac_build)
- expect(builds.pending).to contain_exactly(
- linux_rspec, linux_rubocop, mac_rspec, mac_rubocop)
-
- linux_rspec.reset.success!
- linux_rubocop.reset.success!
- mac_rspec.reset.success!
- mac_rubocop.reset.success!
-
- expect(stages).to eq(%w(success success pending))
- expect(builds.success).to contain_exactly(
- linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
- expect(builds.pending).to contain_exactly(deploy)
- end
- end
-
- context 'when one of the jobs is run on a failure' do
- let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure') }
-
- let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
-
- context 'when another job in build phase fails first' do
- context 'when ci_dag_support is enabled' do
- it 'does skip linux:notify' do
- expect(process_pipeline).to be_truthy
-
- mac_build.reset.drop!
- linux_build.reset.success!
-
- expect(linux_notify.reset).to be_skipped
- end
- end
-
- context 'when ci_dag_support is disabled' do
- before do
- stub_feature_flags(ci_dag_support: false)
- end
-
- it 'does run linux:notify' do
- expect(process_pipeline).to be_truthy
-
- mac_build.reset.drop!
- linux_build.reset.success!
-
- expect(linux_notify.reset).to be_pending
- end
- end
- end
-
- context 'when linux:build job fails first' do
- it 'does run linux:notify' do
- expect(process_pipeline).to be_truthy
-
- linux_build.reset.drop!
-
- expect(linux_notify.reset).to be_pending
- end
- end
- end
- end
-
def process_pipeline
described_class.new(pipeline).execute
end
- def all_builds
- pipeline.builds.order(:stage_idx, :id)
- end
-
- def builds
- all_builds.where.not(status: [:created, :skipped])
- end
-
- def stages
- pipeline.reset.stages.map(&:status)
- end
-
- def builds_names
- builds.pluck(:name)
- end
-
- def builds_names_and_statuses
- builds.each_with_object({}) do |b, h|
- h[b.name.to_sym] = b.status
- h
- end
- end
-
- def all_builds_names
- all_builds.pluck(:name)
- end
-
- def builds_statuses
- builds.pluck(:status)
- end
-
- def all_builds_statuses
- all_builds.pluck(:status)
- end
-
- def succeed_pending
- builds.pending.map(&:success)
- end
-
- def succeed_running_or_pending
- pipeline.builds.running_or_pending.each(&:success)
- end
-
- def fail_running_or_pending
- pipeline.builds.running_or_pending.each(&:drop)
- end
-
- def cancel_running_or_pending
- pipeline.builds.running_or_pending.each(&:cancel)
- end
-
- def play_manual_action(name)
- builds.find_by(name: name).play(user)
- end
-
- def enqueue_scheduled(name)
- builds.scheduled.find_by(name: name).enqueue
- end
-
- def retry_build(name)
- Ci::Build.retry(builds.find_by(name: name), user)
- end
-
- def manual_actions
- pipeline.manual_actions.reload
- end
-
def create_build(name, **opts)
create(:ci_build, :created, pipeline: pipeline, name: name, **opts)
end
- def successful_build(name, **opts)
- create(:ci_build, :success, pipeline: pipeline, name: name, **opts)
- end
-
- def delayed_options
- { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } }
- end
-
- def unschedule
- pipeline.builds.scheduled.map(&:unschedule)
+ def all_builds
+ pipeline.builds.order(:stage_idx, :id)
end
end
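
The helpers removed above (succeed_pending, fail_running_or_pending, builds_names_and_statuses and friends) walk builds through a stage-by-stage state machine so the examples can assert intermediate pipeline states. As a rough, standalone illustration of the gating those assertions describe, here is a toy model of sequential stage processing with when: rules; TinyPipeline, Build and the outcomes hash are made up for this sketch and are not part of Ci::ProcessPipelineService:

# Minimal, self-contained sketch (not the real service): each build carries a
# `when` rule and a stage only runs once the previous stages have finished.
Build = Struct.new(:name, :when_rule, :allow_failure, :status, keyword_init: true)

class TinyPipeline
  def initialize(stages)
    @stages = stages # Array of Arrays of Build, in stage order
  end

  # Runs every stage in order and returns the final pipeline status.
  def process(outcomes)
    prior_failed = false

    @stages.each do |stage|
      stage.each do |build|
        build.status =
          case build.when_rule
          when :manual     then 'manual'
          when :always     then run(build, outcomes)
          when :on_failure then prior_failed ? run(build, outcomes) : 'skipped'
          else                  prior_failed ? 'skipped' : run(build, outcomes)
          end
      end

      prior_failed ||= stage.any? { |b| b.status == 'failed' && !b.allow_failure }
    end

    prior_failed ? 'failed' : 'success'
  end

  private

  # `outcomes` maps build name => 'success'/'failed', standing in for runners.
  def run(build, outcomes)
    outcomes.fetch(build.name, 'success')
  end
end

pipeline = TinyPipeline.new([
  [Build.new(name: 'build', when_rule: :on_success, allow_failure: false)],
  [Build.new(name: 'test', when_rule: :on_success, allow_failure: false)],
  [Build.new(name: 'test_failure', when_rule: :on_failure, allow_failure: false)],
  [Build.new(name: 'cleanup', when_rule: :always, allow_failure: false)]
])

puts pipeline.process('test' => 'failed') # => "failed" (test_failure and cleanup still run)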
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 0339c6cc2d6..0f2d994efd4 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -4,9 +4,9 @@ require 'spec_helper'
module Ci
describe RegisterJobService do
- set(:group) { create(:group) }
- set(:project) { create(:project, group: group, shared_runners_enabled: false, group_runners_enabled: false) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, group: group, shared_runners_enabled: false, group_runners_enabled: false) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let!(:shared_runner) { create(:ci_runner, :instance) }
let!(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
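
The set(:name) helpers being replaced throughout this diff behave like group-level fixtures; their replacement, let_it_be, comes from the test-prof gem and creates the record once per example group rather than once per example, with reload: true re-fetching it before each example. A minimal usage sketch, assuming a Rails test suite with test-prof installed (the described model and factory are placeholders, not taken from this diff):

# Sketch only: assumes the standard test-prof require path, FactoryBot, and a
# hypothetical Project model.
require 'test_prof/recipes/rspec/let_it_be'

RSpec.describe 'a group using let_it_be' do
  # Created once before the whole group (inside a wrapping transaction),
  # instead of once per example as a plain `let` would be.
  # `reload: true` re-fetches the record before each example so state changed
  # inside one example does not leak into the next.
  let_it_be(:project, reload: true) { create(:project) }

  it 'is shared across the examples in this group' do
    expect(project).to be_persisted
  end
end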
diff --git a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
new file mode 100644
index 00000000000..50d312647ae
--- /dev/null
+++ b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::ResourceGroups::AssignResourceFromResourceGroupService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let(:service) { described_class.new(project, user) }
+
+ describe '#execute' do
+ subject { service.execute(resource_group) }
+
+ let(:resource_group) { create(:ci_resource_group, project: project) }
+ let!(:build) { create(:ci_build, :waiting_for_resource, project: project, user: user, resource_group: resource_group) }
+
+ context 'when there is an available resource' do
+ it 'requests resource' do
+ subject
+
+ expect(build.reload).to be_pending
+ expect(build.resource).to be_present
+ end
+
+ context 'when failed to request resource' do
+ before do
+ allow_next_instance_of(Ci::Build) do |build|
+ allow(build).to receive(:enqueue_waiting_for_resource) { false }
+ end
+ end
+
+ it 'has a build waiting for resource' do
+ subject
+
+ expect(build).to be_waiting_for_resource
+ end
+ end
+
+ context 'when the build has already retained a resource' do
+ before do
+ resource_group.assign_resource_to(build)
+ build.update_column(:status, :pending)
+ end
+
+ it 'has a pending build' do
+ subject
+
+ expect(build).to be_pending
+ end
+ end
+ end
+
+ context 'when there are no available resources' do
+ before do
+ resource_group.assign_resource_to(create(:ci_build))
+ end
+
+ it 'does not request resource' do
+ expect_any_instance_of(Ci::Build).not_to receive(:enqueue_waiting_for_resource)
+
+ subject
+ end
+ end
+ end
+end
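
The new spec above drives Ci::ResourceGroups::AssignResourceFromResourceGroupService: a resource group holds a single resource, a build that obtains it moves from waiting_for_resource to pending, and any other build keeps waiting until the resource is released. A standalone toy version of that mutual exclusion, using made-up ToyResourceGroup and Job classes rather than GitLab's models:

# Toy resource group: one slot, FIFO hand-off to waiting builds.
class ToyResourceGroup
  def initialize
    @holder  = nil
    @waiting = []
  end

  def request(job)
    if @holder.nil?
      @holder = job
      job.status = :pending            # resource obtained, job can be picked up
    else
      @waiting << job
      job.status = :waiting_for_resource
    end
  end

  def release(job)
    return unless @holder == job

    @holder = nil
    request(@waiting.shift) unless @waiting.empty? # hand the slot to the next waiter
  end
end

Job = Struct.new(:name, :status)

group = ToyResourceGroup.new
a = Job.new('deploy:1', :created)
b = Job.new('deploy:2', :created)

group.request(a)
group.request(b)
p [a.status, b.status] # => [:pending, :waiting_for_resource]

group.release(a)
p b.status             # => :pending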
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index b1368f7776b..b3189974440 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -3,9 +3,12 @@
require 'spec_helper'
describe Ci::RetryBuildService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
+ end
let(:stage) do
create(:ci_stage_entity, project: project,
@@ -29,9 +32,11 @@ describe Ci::RetryBuildService do
job_artifacts_metadata job_artifacts_trace job_artifacts_junit
job_artifacts_sast job_artifacts_dependency_scanning
job_artifacts_container_scanning job_artifacts_dast
- job_artifacts_license_management job_artifacts_performance
+ job_artifacts_license_management job_artifacts_license_scanning
+ job_artifacts_performance
job_artifacts_codequality job_artifacts_metrics scheduled_at
- job_variables].freeze
+ job_variables waiting_for_resource_at job_artifacts_metrics_referee
+ job_artifacts_network_referee].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
@@ -40,14 +45,15 @@ describe Ci::RetryBuildService do
user_id auto_canceled_by_id retried failure_reason
sourced_pipelines artifacts_file_store artifacts_metadata_store
metadata runner_session trace_chunks upstream_pipeline_id
- artifacts_file artifacts_metadata artifacts_size commands].freeze
+ artifacts_file artifacts_metadata artifacts_size commands
+ resource resource_group_id processed].freeze
shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
let(:build) do
create(:ci_build, :failed, :expired, :erased, :queued, :coverage, :tags,
- :allowed_to_fail, :on_tag, :triggered, :teardown_environment,
+ :allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group,
description: 'my-job', stage: 'test', stage_id: stage.id,
pipeline: pipeline, auto_canceled_by: another_pipeline,
scheduled_at: 10.seconds.since)
@@ -197,17 +203,19 @@ describe Ci::RetryBuildService do
it 'does not enqueue the new build' do
expect(new_build).to be_created
+ expect(new_build).not_to be_processed
end
- it 'does mark old build as retried in the database and on the instance' do
+ it 'does mark old build as retried' do
expect(new_build).to be_latest
expect(build).to be_retried
- expect(build.reload).to be_retried
+ expect(build).to be_processed
end
context 'when build with deployment is retried' do
let!(:build) do
- create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline, stage_id: stage.id)
+ create(:ci_build, :with_deployment, :deploy_to_production,
+ pipeline: pipeline, stage_id: stage.id, project: project)
end
it 'creates a new deployment' do
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 4b949761b8f..e7a241ed335 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -330,7 +330,7 @@ describe Ci::RetryPipelineService, '#execute' do
stage: "stage_#{stage_num}",
stage_idx: stage_num,
pipeline: pipeline, **opts) do |build|
- pipeline.update_status
+ pipeline.update_legacy_status
end
end
end
diff --git a/spec/services/ci/run_scheduled_build_service_spec.rb b/spec/services/ci/run_scheduled_build_service_spec.rb
index ab8d9f4ba2e..43d110cbc8f 100644
--- a/spec/services/ci/run_scheduled_build_service_spec.rb
+++ b/spec/services/ci/run_scheduled_build_service_spec.rb
@@ -26,6 +26,18 @@ describe Ci::RunScheduledBuildService do
expect(build).to be_pending
end
+
+ context 'when build requires resource' do
+ let(:resource_group) { create(:ci_resource_group, project: project) }
+
+ before do
+ build.update!(resource_group: resource_group)
+ end
+
+ it 'transits to waiting for resource status' do
+ expect { subject }.to change { build.status }.from('scheduled').to('waiting_for_resource')
+ end
+ end
end
context 'when scheduled_at is not expired' do
diff --git a/spec/services/clusters/applications/check_installation_progress_service_spec.rb b/spec/services/clusters/applications/check_installation_progress_service_spec.rb
index 7b37eb97800..2f224d40920 100644
--- a/spec/services/clusters/applications/check_installation_progress_service_spec.rb
+++ b/spec/services/clusters/applications/check_installation_progress_service_spec.rb
@@ -160,6 +160,12 @@ describe Clusters::Applications::CheckInstallationProgressService, '#execute' do
expect(application).to be_installed
expect(application.status_reason).to be_nil
end
+
+ it 'tracks application install' do
+ expect(Gitlab::Tracking).to receive(:event).with('cluster:applications', "cluster_application_helm_installed")
+
+ service.execute
+ end
end
context 'when installation POD failed' do
diff --git a/spec/services/clusters/applications/create_service_spec.rb b/spec/services/clusters/applications/create_service_spec.rb
index bdacb9ce071..f62af86f1bf 100644
--- a/spec/services/clusters/applications/create_service_spec.rb
+++ b/spec/services/clusters/applications/create_service_spec.rb
@@ -47,6 +47,33 @@ describe Clusters::Applications::CreateService do
create(:clusters_applications_helm, :installed, cluster: cluster)
end
+ context 'ingress application' do
+ let(:params) do
+ {
+ application: 'ingress',
+ modsecurity_enabled: true
+ }
+ end
+
+ before do
+ expect_any_instance_of(Clusters::Applications::Ingress)
+ .to receive(:make_scheduled!)
+ .and_call_original
+ end
+
+ it 'creates the application' do
+ expect do
+ subject
+
+ cluster.reload
+ end.to change(cluster, :application_ingress)
+ end
+
+ it 'sets modsecurity_enabled' do
+ expect(subject.modsecurity_enabled).to eq(true)
+ end
+ end
+
context 'cert manager application' do
let(:params) do
{
@@ -136,8 +163,7 @@ describe Clusters::Applications::CreateService do
context 'elastic stack application' do
let(:params) do
{
- application: 'elastic_stack',
- kibana_hostname: 'example.com'
+ application: 'elastic_stack'
}
end
@@ -155,10 +181,6 @@ describe Clusters::Applications::CreateService do
cluster.reload
end.to change(cluster, :application_elastic_stack)
end
-
- it 'sets the kibana_hostname' do
- expect(subject.kibana_hostname).to eq('example.com')
- end
end
end
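
The Clusters::Applications::CreateService changes above adjust which attributes each application accepts: ingress now takes modsecurity_enabled, while elastic_stack no longer accepts kibana_hostname. A small sketch of that kind of per-application attribute filtering; PERMITTED_APPLICATION_PARAMS and application_attributes are made up here, with their contents read off the spec assertions rather than the real service:

# Toy param filter: each application type only keeps its own attributes.
PERMITTED_APPLICATION_PARAMS = {
  'ingress'       => [:modsecurity_enabled],
  'elastic_stack' => []
}.freeze

def application_attributes(application, params)
  allowed = PERMITTED_APPLICATION_PARAMS.fetch(application, [])
  params.select { |key, _| allowed.include?(key) }
end

p application_attributes('ingress', modsecurity_enabled: true)
# => {:modsecurity_enabled=>true}
p application_attributes('elastic_stack', kibana_hostname: 'example.com')
# => {}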
diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
index bd1a90996a8..3982d2310d8 100644
--- a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
@@ -57,11 +57,21 @@ describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute' do
end.to change(Clusters::KubernetesNamespace, :count).by(1)
end
- it 'creates project service account' do
- expect_next_instance_of(Clusters::Kubernetes::CreateOrUpdateServiceAccountService) do |instance|
- expect(instance).to receive(:execute).once
- end
-
+ it 'creates project service account and namespace' do
+ account_service = double(Clusters::Kubernetes::CreateOrUpdateServiceAccountService)
+ expect(Clusters::Kubernetes::CreateOrUpdateServiceAccountService).to(
+ receive(:namespace_creator).with(
+ cluster.platform.kubeclient,
+ service_account_name: "#{namespace}-service-account",
+ service_account_namespace: namespace,
+ service_account_namespace_labels: {
+ 'app.gitlab.com/app' => project.full_path_slug,
+ 'app.gitlab.com/env' => environment.slug
+ },
+ rbac: true
+ ).and_return(account_service)
+ )
+ expect(account_service).to receive(:execute).once
subject
end
@@ -73,6 +83,29 @@ describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute' do
expect(kubernetes_namespace.service_account_name).to eq("#{namespace}-service-account")
expect(kubernetes_namespace.encrypted_service_account_token).to be_present
end
+
+ context 'without environment' do
+ before do
+ kubernetes_namespace.environment = nil
+ end
+
+ it 'creates project service account and namespace' do
+ account_service = double(Clusters::Kubernetes::CreateOrUpdateServiceAccountService)
+ expect(Clusters::Kubernetes::CreateOrUpdateServiceAccountService).to(
+ receive(:namespace_creator).with(
+ cluster.platform.kubeclient,
+ service_account_name: "#{namespace}-service-account",
+ service_account_namespace: namespace,
+ service_account_namespace_labels: {
+ 'app.gitlab.com/app' => project.full_path_slug
+ },
+ rbac: true
+ ).and_return(account_service)
+ )
+ expect(account_service).to receive(:execute).once
+ subject
+ end
+ end
end
context 'group clusters' do
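
The namespace service hunk above asserts that CreateOrUpdateServiceAccountService.namespace_creator now receives service_account_namespace_labels, and that the app.gitlab.com/env label is only present when the Kubernetes namespace has an associated environment. A tiny sketch of building that labels hash (namespace_labels is a hypothetical helper name, not from this diff):

# Builds the labels hash the spec above expects: the env label is only added
# when an environment is associated with the namespace.
def namespace_labels(project_slug:, environment_slug: nil)
  labels = { 'app.gitlab.com/app' => project_slug }
  labels['app.gitlab.com/env'] = environment_slug if environment_slug
  labels
end

p namespace_labels(project_slug: 'group-project')
# => {"app.gitlab.com/app"=>"group-project"}
p namespace_labels(project_slug: 'group-project', environment_slug: 'production')
# => {"app.gitlab.com/app"=>"group-project", "app.gitlab.com/env"=>"production"}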
diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
index 4df73fcc2ae..8fa22422074 100644
--- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
@@ -116,6 +116,7 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
describe '.namespace_creator' do
let(:namespace) { "#{project.path}-#{project.id}" }
+ let(:namespace_labels) { { app: project.full_path_slug, env: "staging" } }
let(:service_account_name) { "#{namespace}-service-account" }
let(:token_name) { "#{namespace}-token" }
@@ -124,6 +125,7 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
kubeclient,
service_account_name: service_account_name,
service_account_namespace: namespace,
+ service_account_namespace_labels: namespace_labels,
rbac: rbac
).execute
end
@@ -149,6 +151,16 @@ describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace)
end
+ it 'creates a namespace object' do
+ kubernetes_namespace = double(Gitlab::Kubernetes::Namespace)
+ expect(Gitlab::Kubernetes::Namespace).to(
+ receive(:new).with(namespace, kubeclient, labels: namespace_labels).and_return(kubernetes_namespace)
+ )
+ expect(kubernetes_namespace).to receive(:ensure_exists!)
+
+ subject
+ end
+
it_behaves_like 'creates service account and token'
it 'creates a namespaced role binding with edit access' do
diff --git a/spec/services/container_expiration_policy_service_spec.rb b/spec/services/container_expiration_policy_service_spec.rb
new file mode 100644
index 00000000000..1e4899c627f
--- /dev/null
+++ b/spec/services/container_expiration_policy_service_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ContainerExpirationPolicyService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
+ let(:project) { container_expiration_policy.project }
+ let(:container_repository) { create(:container_repository, project: project) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe '#execute' do
+ subject { described_class.new(project, user).execute(container_expiration_policy) }
+
+ it 'kicks off a cleanup worker for the container repository' do
+ expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
+ .with(user.id, container_repository.id, anything)
+
+ subject
+ end
+
+ it 'sets next_run_at on the container_expiration_policy' do
+ subject
+
+ expect(container_expiration_policy.next_run_at).to be > Time.zone.now
+ end
+ end
+end
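
The new ContainerExpirationPolicyService spec checks two effects: a CleanupContainerRepositoryWorker job is enqueued for the project's container repository, and the policy's next_run_at moves into the future. A rough standalone sketch of that shape, with Policy and FakeCleanupWorker standing in for the real model and worker:

# Stand-ins for the real policy and worker; names are illustrative only.
Policy = Struct.new(:cadence_seconds, :next_run_at, :repositories, keyword_init: true)

class FakeCleanupWorker
  def self.enqueued
    @enqueued ||= []
  end

  def self.perform_async(user_id, repository_id, params)
    enqueued << [user_id, repository_id, params]
  end
end

def run_expiration_policy(policy, user_id:)
  policy.repositories.each do |repo_id|
    # One cleanup job per container repository covered by the policy.
    FakeCleanupWorker.perform_async(user_id, repo_id, 'container_expiration_policy' => true)
  end

  # Schedule the next run after the policy's cadence.
  policy.next_run_at = Time.now + policy.cadence_seconds
end

policy = Policy.new(cadence_seconds: 24 * 60 * 60, repositories: [1, 2])
run_expiration_policy(policy, user_id: 42)

p FakeCleanupWorker.enqueued.size  # => 2
p policy.next_run_at > Time.now    # => true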
diff --git a/spec/services/create_snippet_service_spec.rb b/spec/services/create_snippet_service_spec.rb
deleted file mode 100644
index 1751029a78c..00000000000
--- a/spec/services/create_snippet_service_spec.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe CreateSnippetService do
- let(:user) { create(:user) }
- let(:admin) { create(:user, :admin) }
- let(:opts) { base_opts.merge(extra_opts) }
- let(:base_opts) do
- {
- title: 'Test snippet',
- file_name: 'snippet.rb',
- content: 'puts "hello world"',
- visibility_level: Gitlab::VisibilityLevel::PRIVATE
- }
- end
- let(:extra_opts) { {} }
-
- context 'When public visibility is restricted' do
- let(:extra_opts) { { visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
-
- before do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
- end
-
- it 'non-admins are not able to create a public snippet' do
- snippet = create_snippet(nil, user, opts)
- expect(snippet.errors.messages).to have_key(:visibility_level)
- expect(snippet.errors.messages[:visibility_level].first).to(
- match('has been restricted')
- )
- end
-
- it 'admins are able to create a public snippet' do
- snippet = create_snippet(nil, admin, opts)
- expect(snippet.errors.any?).to be_falsey
- expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
- end
-
- describe "when visibility level is passed as a string" do
- let(:extra_opts) { { visibility: 'internal' } }
-
- before do
- base_opts.delete(:visibility_level)
- end
-
- it "assigns the correct visibility level" do
- snippet = create_snippet(nil, user, opts)
- expect(snippet.errors.any?).to be_falsey
- expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
- end
- end
-
- context 'checking spam' do
- shared_examples 'marked as spam' do
- let(:snippet) { create_snippet(nil, admin, opts) }
-
- it 'marks a snippet as spam' do
- expect(snippet).to be_spam
- end
-
- it 'invalidates the snippet' do
- expect(snippet).to be_invalid
- end
-
- it 'creates a new spam_log' do
- expect { snippet }
- .to log_spam(title: snippet.title, noteable_type: 'PersonalSnippet')
- end
-
- it 'assigns a spam_log to an issue' do
- expect(snippet.spam_log).to eq(SpamLog.last)
- end
- end
-
- let(:extra_opts) do
- { visibility_level: Gitlab::VisibilityLevel::PUBLIC, request: double(:request, env: {}) }
- end
-
- before do
- expect_next_instance_of(AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
- end
- end
-
- [true, false, nil].each do |allow_possible_spam|
- context "when recaptcha_disabled flag is #{allow_possible_spam.inspect}" do
- before do
- stub_feature_flags(allow_possible_spam: allow_possible_spam) unless allow_possible_spam.nil?
- end
-
- it_behaves_like 'marked as spam'
- end
- end
- end
-
- describe 'usage counter' do
- let(:counter) { Gitlab::UsageDataCounters::SnippetCounter }
-
- it 'increments count' do
- expect do
- create_snippet(nil, admin, opts)
- end.to change { counter.read(:create) }.by 1
- end
-
- it 'does not increment count if create fails' do
- expect do
- create_snippet(nil, admin, {})
- end.not_to change { counter.read(:create) }
- end
- end
-
- def create_snippet(project, user, opts)
- CreateSnippetService.new(project, user, opts).execute
- end
-end
diff --git a/spec/services/deployments/after_create_service_spec.rb b/spec/services/deployments/after_create_service_spec.rb
index 4ca96658db0..51c6de2c0b9 100644
--- a/spec/services/deployments/after_create_service_spec.rb
+++ b/spec/services/deployments/after_create_service_spec.rb
@@ -6,10 +6,18 @@ describe Deployments::AfterCreateService do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:options) { { name: 'production' } }
+ let(:pipeline) do
+ create(
+ :ci_pipeline,
+ sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
+ project: project
+ )
+ end
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
ref: 'master',
tag: false,
environment: 'production',
@@ -53,14 +61,6 @@ describe Deployments::AfterCreateService do
service.execute
end
- it 'links merge requests to deployment' do
- expect_next_instance_of(Deployments::LinkMergeRequestsService, deployment) do |link_mr_service|
- expect(link_mr_service).to receive(:execute)
- end
-
- service.execute
- end
-
it 'returns the deployment' do
expect(subject.execute).to eq(deployment)
end
@@ -139,6 +139,7 @@ describe Deployments::AfterCreateService do
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
ref: 'master',
environment: 'production',
project: project,
@@ -152,6 +153,7 @@ describe Deployments::AfterCreateService do
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
ref: 'master',
environment: 'prod-slug',
project: project,
@@ -165,6 +167,7 @@ describe Deployments::AfterCreateService do
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
yaml_variables: [{ key: :APP_HOST, value: 'host' }],
environment: 'production',
project: project,
@@ -175,7 +178,7 @@ describe Deployments::AfterCreateService do
end
context 'when yaml environment does not have url' do
- let(:job) { create(:ci_build, :with_deployment, environment: 'staging', project: project) }
+ let(:job) { create(:ci_build, :with_deployment, pipeline: pipeline, environment: 'staging', project: project) }
it 'returns the external_url from persisted environment' do
is_expected.to be_nil
@@ -202,6 +205,7 @@ describe Deployments::AfterCreateService do
let(:job) do
create(:ci_build,
:with_deployment,
+ pipeline: pipeline,
ref: 'master',
tag: false,
environment: 'staging',
@@ -260,30 +264,4 @@ describe Deployments::AfterCreateService do
end
end
end
-
- describe '#update_environment' do
- it 'links the merge requests' do
- double = instance_double(Deployments::LinkMergeRequestsService)
-
- allow(Deployments::LinkMergeRequestsService)
- .to receive(:new)
- .with(deployment)
- .and_return(double)
-
- expect(double).to receive(:execute)
-
- service.update_environment(deployment)
- end
-
- context 'when the tracking of merge requests is disabled' do
- it 'does nothing' do
- stub_feature_flags(deployment_merge_requests: false)
-
- expect(Deployments::LinkMergeRequestsService)
- .not_to receive(:new)
-
- service.update_environment(deployment)
- end
- end
- end
end
diff --git a/spec/services/deployments/link_merge_requests_service_spec.rb b/spec/services/deployments/link_merge_requests_service_spec.rb
index ba069658dfd..307fe22a192 100644
--- a/spec/services/deployments/link_merge_requests_service_spec.rb
+++ b/spec/services/deployments/link_merge_requests_service_spec.rb
@@ -3,10 +3,15 @@
require 'spec_helper'
describe Deployments::LinkMergeRequestsService do
+ let(:project) { create(:project, :repository) }
+
describe '#execute' do
- context 'when the deployment did not succeed' do
+ context 'when the deployment is for a review environment' do
it 'does nothing' do
- deploy = create(:deployment, :failed)
+ environment =
+ create(:environment, environment_type: 'review', name: 'review/foo')
+
+ deploy = create(:deployment, :success, environment: environment)
expect(deploy).not_to receive(:link_merge_requests)
@@ -16,20 +21,29 @@ describe Deployments::LinkMergeRequestsService do
context 'when there is a previous deployment' do
it 'links all merge requests merged since the previous deployment' do
- deploy1 = create(:deployment, :success, sha: 'foo')
+ deploy1 = create(
+ :deployment,
+ :success,
+ project: project,
+ sha: '7975be0116940bf2ad4321f79d02a55c5f7779aa'
+ )
+
deploy2 = create(
:deployment,
:success,
- sha: 'bar',
project: deploy1.project,
- environment: deploy1.environment
+ environment: deploy1.environment,
+ sha: 'ddd0f15ae83993f5cb66a927a28673882e99100b'
)
service = described_class.new(deploy2)
expect(service)
.to receive(:link_merge_requests_for_range)
- .with('foo', 'bar')
+ .with(
+ '7975be0116940bf2ad4321f79d02a55c5f7779aa',
+ 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+ )
service.execute
end
@@ -37,7 +51,7 @@ describe Deployments::LinkMergeRequestsService do
context 'when there are no previous deployments' do
it 'links all merged merge requests' do
- deploy = create(:deployment, :success)
+ deploy = create(:deployment, :success, project: project)
service = described_class.new(deploy)
expect(service).to receive(:link_all_merged_merge_requests)
@@ -49,7 +63,6 @@ describe Deployments::LinkMergeRequestsService do
describe '#link_merge_requests_for_range' do
it 'links merge requests' do
- project = create(:project, :repository)
environment = create(:environment, project: project)
deploy =
create(:deployment, :success, project: project, environment: environment)
@@ -81,7 +94,6 @@ describe Deployments::LinkMergeRequestsService do
describe '#link_all_merged_merge_requests' do
it 'links all merged merge requests targeting the deployed branch' do
- project = create(:project, :repository)
environment = create(:environment, project: project)
deploy =
create(:deployment, :success, project: project, environment: environment)
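
The LinkMergeRequestsService examples above pin down the dispatch rules: deployments to review environments are skipped, a deployment with a previous successful deployment in the same environment links merge requests for the SHA range between the two, and a first deployment links all merged merge requests. A toy dispatcher expressing those rules; Deployment here is a made-up struct and link_plan_for an illustrative helper, not the service's API:

# Mirrors the behaviour asserted above: review environments are skipped;
# otherwise the range is previous-successful-deploy SHA .. current SHA.
Deployment = Struct.new(:sha, :environment_type, :previous_success_sha, keyword_init: true)

def link_plan_for(deployment)
  return :skip if deployment.environment_type == 'review'

  if deployment.previous_success_sha
    [:range, deployment.previous_success_sha, deployment.sha]
  else
    [:all_merged]
  end
end

review = Deployment.new(sha: 'abc', environment_type: 'review')
first  = Deployment.new(sha: 'abc', environment_type: 'production')
second = Deployment.new(sha: 'def', environment_type: 'production', previous_success_sha: 'abc')

p link_plan_for(review)  # => :skip
p link_plan_for(first)   # => [:all_merged]
p link_plan_for(second)  # => [:range, "abc", "def"]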
diff --git a/spec/services/error_tracking/list_issues_service_spec.rb b/spec/services/error_tracking/list_issues_service_spec.rb
index e0e280591cd..ecb6bcc541b 100644
--- a/spec/services/error_tracking/list_issues_service_spec.rb
+++ b/spec/services/error_tracking/list_issues_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe ErrorTracking::ListIssuesService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
let(:params) { { search_term: 'something', sort: 'last_seen', cursor: 'some-cursor' } }
let(:list_sentry_issues_args) do
{
diff --git a/spec/services/error_tracking/list_projects_service_spec.rb b/spec/services/error_tracking/list_projects_service_spec.rb
index cd4b835e097..ddd369d45f2 100644
--- a/spec/services/error_tracking/list_projects_service_spec.rb
+++ b/spec/services/error_tracking/list_projects_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe ErrorTracking::ListProjectsService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
let(:token) { 'test-token' }
diff --git a/spec/services/external_pull_requests/create_pipeline_service_spec.rb b/spec/services/external_pull_requests/create_pipeline_service_spec.rb
index a4da5b38b97..d1893960960 100644
--- a/spec/services/external_pull_requests/create_pipeline_service_spec.rb
+++ b/spec/services/external_pull_requests/create_pipeline_service_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
describe ExternalPullRequests::CreatePipelineService do
describe '#execute' do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :auto_devops, :repository) }
+ let_it_be(:user) { create(:user) }
let(:pull_request) { create(:external_pull_request, project: project) }
before do
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 19d7b84a3ce..4d7ec7ac1d8 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
describe Git::BranchPushService, services: true do
include RepoHelpers
- set(:user) { create(:user) }
- set(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
let(:blankrev) { Gitlab::Git::BLANK_SHA }
let(:oldrev) { sample_commit.parent_id }
let(:newrev) { sample_commit.id }
@@ -108,7 +108,7 @@ describe Git::BranchPushService, services: true do
end
it 'reports an error' do
- allow(Sidekiq).to receive(:server?).and_return(true)
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
expect(Sidekiq.logger).to receive(:warn)
expect { subject }.not_to change { Ci::Pipeline.count }
diff --git a/spec/services/groups/auto_devops_service_spec.rb b/spec/services/groups/auto_devops_service_spec.rb
index 7591b2f6f12..63fbdc70c1b 100644
--- a/spec/services/groups/auto_devops_service_spec.rb
+++ b/spec/services/groups/auto_devops_service_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
describe Groups::AutoDevopsService, '#execute' do
- set(:group) { create(:group) }
- set(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
let(:group_params) { { auto_devops_enabled: '0' } }
let(:service) { described_class.new(group, user, group_params) }
diff --git a/spec/services/issues/referenced_merge_requests_service_spec.rb b/spec/services/issues/referenced_merge_requests_service_spec.rb
index 61d1612829f..2c5af11d2e6 100644
--- a/spec/services/issues/referenced_merge_requests_service_spec.rb
+++ b/spec/services/issues/referenced_merge_requests_service_spec.rb
@@ -15,16 +15,16 @@ describe Issues::ReferencedMergeRequestsService do
end
end
- set(:user) { create(:user) }
- set(:project) { create(:project, :public, :repository) }
- set(:other_project) { create(:project, :public, :repository) }
- set(:issue) { create(:issue, author: user, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:other_project) { create(:project, :public, :repository) }
+ let_it_be(:issue) { create(:issue, author: user, project: project) }
- set(:closing_mr) { create_closing_mr(source_project: project) }
- set(:closing_mr_other_project) { create_closing_mr(source_project: other_project) }
+ let_it_be(:closing_mr) { create_closing_mr(source_project: project) }
+ let_it_be(:closing_mr_other_project) { create_closing_mr(source_project: other_project) }
- set(:referencing_mr) { create_referencing_mr(source_project: project, source_branch: 'csv') }
- set(:referencing_mr_other_project) { create_referencing_mr(source_project: other_project, source_branch: 'csv') }
+ let_it_be(:referencing_mr) { create_referencing_mr(source_project: project, source_branch: 'csv') }
+ let_it_be(:referencing_mr_other_project) { create_referencing_mr(source_project: other_project, source_branch: 'csv') }
let(:service) { described_class.new(project, user) }
diff --git a/spec/services/issues/reorder_service_spec.rb b/spec/services/issues/reorder_service_spec.rb
index b147cdf4e64..6d72d698b1d 100644
--- a/spec/services/issues/reorder_service_spec.rb
+++ b/spec/services/issues/reorder_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Issues::ReorderService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
shared_examples 'issues reorder service' do
context 'when reordering issues' do
diff --git a/spec/services/issues/zoom_link_service_spec.rb b/spec/services/issues/zoom_link_service_spec.rb
index f34d2a18552..3fb1eae361a 100644
--- a/spec/services/issues/zoom_link_service_spec.rb
+++ b/spec/services/issues/zoom_link_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Issues::ZoomLinkService do
- set(:user) { create(:user) }
- set(:issue) { create(:issue) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
let(:project) { issue.project }
let(:service) { described_class.new(issue, user) }
diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb
index 68a9c0a8b86..13d69307084 100644
--- a/spec/services/merge_requests/conflicts/list_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/list_service_spec.rb
@@ -74,7 +74,9 @@ describe MergeRequests::Conflicts::ListService do
it 'returns a falsey value when the MR has a missing ref after a force push' do
merge_request = create_merge_request('conflict-resolvable')
service = conflicts_service(merge_request)
- allow_any_instance_of(Gitlab::GitalyClient::ConflictsService).to receive(:list_conflict_files).and_raise(GRPC::Unknown)
+ allow_next_instance_of(Gitlab::GitalyClient::ConflictsService) do |instance|
+ allow(instance).to receive(:list_conflict_files).and_raise(GRPC::Unknown)
+ end
expect(service.can_be_resolved_in_ui?).to be_falsey
end
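
Several hunks in this section replace allow_any_instance_of/expect_any_instance_of with GitLab's allow_next_instance_of/expect_next_instance_of helpers, which stub only the next object the class constructs rather than every instance. The following shows one way such a helper could be built on RSpec's and_wrap_original; it is an illustration of the idea, not GitLab's actual implementation, and Greeter is a throwaway example class:

require 'rspec/autorun'

class Greeter
  def hello
    'hi'
  end
end

# Illustrative helper in the spirit of allow_next_instance_of: wrap `.new`,
# hand only the first instance built after this call to the caller's block,
# and leave later instances untouched.
def allow_next_instance_of(klass)
  applied = false

  allow(klass).to receive(:new).and_wrap_original do |original, *args, &block|
    instance = original.call(*args, &block)
    unless applied
      applied = true
      yield(instance) if block_given?
    end
    instance
  end
end

RSpec.describe 'allow_next_instance_of (sketch)' do
  it 'stubs only the next constructed instance' do
    allow_next_instance_of(Greeter) do |instance|
      allow(instance).to receive(:hello).and_return('stubbed')
    end

    expect(Greeter.new.hello).to eq('stubbed')
    expect(Greeter.new.hello).to eq('hi')
  end
end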
diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb
index 7145cfe7897..3d58ecdd8cd 100644
--- a/spec/services/merge_requests/create_from_issue_service_spec.rb
+++ b/spec/services/merge_requests/create_from_issue_service_spec.rb
@@ -55,7 +55,9 @@ describe MergeRequests::CreateFromIssueService do
end
it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created', :sidekiq_might_not_need_inline do
- expect_any_instance_of(MergeRequest).to receive(:valid?).at_least(:once).and_return(false)
+ expect_next_instance_of(MergeRequest) do |instance|
+ expect(instance).to receive(:valid?).at_least(:once).and_return(false)
+ end
expect(SystemNoteService).to receive(:new_issue_branch).with(issue, project, user, issue.to_branch_name, branch_project: target_project)
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index 25f5c54a413..9eb28759061 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe MergeRequests::CreatePipelineService do
- set(:project) { create(:project, :repository) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user, params) }
let(:params) { {} }
diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb
index dcb8c8080a1..bb8a1873dac 100644
--- a/spec/services/merge_requests/get_urls_service_spec.rb
+++ b/spec/services/merge_requests/get_urls_service_spec.rb
@@ -45,6 +45,13 @@ describe MergeRequests::GetUrlsService do
end
end
+ context 'when project is nil' do
+ let(:project) { nil }
+ let(:changes) { default_branch_changes }
+
+ it_behaves_like 'no_merge_request_url'
+ end
+
context 'pushing to default branch' do
let(:changes) { default_branch_changes }
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 61c8103353c..fa1a8f60256 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe MergeRequests::MergeService do
- set(:user) { create(:user) }
- set(:user2) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
let(:project) { merge_request.project }
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 77e38f1eb4c..5c26e32bb22 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -67,7 +67,7 @@ describe MergeRequests::MergeToRefService do
end
end
- set(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:merge_request) { create(:merge_request, :simple) }
let(:project) { merge_request.project }
@@ -214,7 +214,7 @@ describe MergeRequests::MergeToRefService do
end
describe 'cascading merge refs' do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:params) { { commit_message: 'Cascading merge', first_parent_ref: first_parent_ref, target_ref: target_ref, sha: merge_request.diff_head_sha } }
context 'when first merge happens' do
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 7f9c47d8670..420c8513c72 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -714,9 +714,9 @@ describe MergeRequests::PushOptionsHandlerService do
let(:exception) { StandardError.new('My standard error') }
def run_service_with_exception
- allow_any_instance_of(
- MergeRequests::BuildService
- ).to receive(:execute).and_raise(exception)
+ allow_next_instance_of(MergeRequests::BuildService) do |instance|
+ allow(instance).to receive(:execute).and_raise(exception)
+ end
service.execute
end
@@ -766,9 +766,9 @@ describe MergeRequests::PushOptionsHandlerService do
invalid_merge_request = MergeRequest.new
invalid_merge_request.errors.add(:base, 'my error')
- expect_any_instance_of(
- MergeRequests::CreateService
- ).to receive(:execute).and_return(invalid_merge_request)
+ expect_next_instance_of(MergeRequests::CreateService) do |instance|
+ expect(instance).to receive(:execute).and_return(invalid_merge_request)
+ end
service.execute
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 9c535664c26..184f3f37339 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -15,6 +15,7 @@ describe MergeRequests::RebaseService do
end
let(:project) { merge_request.project }
let(:repository) { project.repository.raw }
+ let(:skip_ci) { false }
subject(:service) { described_class.new(project, user, {}) }
@@ -115,7 +116,7 @@ describe MergeRequests::RebaseService do
context 'valid params' do
shared_examples_for 'a service that can execute a successful rebase' do
before do
- service.execute(merge_request)
+ service.execute(merge_request, skip_ci: skip_ci)
end
it 'rebases source branch' do
@@ -155,6 +156,12 @@ describe MergeRequests::RebaseService do
it_behaves_like 'a service that can execute a successful rebase'
end
+ context 'when skip_ci flag is set' do
+ let(:skip_ci) { true }
+
+ it_behaves_like 'a service that can execute a successful rebase'
+ end
+
context 'fork' do
describe 'successful fork rebase' do
let(:forked_project) do
diff --git a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
new file mode 100644
index 00000000000..274d594fd68
--- /dev/null
+++ b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
@@ -0,0 +1,197 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memory_store_caching do
+ include MetricsDashboardHelpers
+
+ set(:user) { create(:user) }
+ set(:project) { create(:project, :repository) }
+ set(:environment) { create(:environment, project: project) }
+
+ describe '#execute' do
+ subject(:service_call) { described_class.new(project, user, params).execute }
+
+ let(:commit_message) { 'test' }
+ let(:branch) { "dashboard_new_branch" }
+ let(:dashboard) { 'config/prometheus/common_metrics.yml' }
+ let(:file_name) { 'custom_dashboard.yml' }
+ let(:params) do
+ {
+ dashboard: dashboard,
+ file_name: file_name,
+ commit_message: commit_message,
+ branch: branch
+ }
+ end
+
+ let(:dashboard_attrs) do
+ {
+ commit_message: commit_message,
+ branch_name: branch,
+ start_branch: project.default_branch,
+ encoding: 'text',
+ file_path: ".gitlab/dashboards/#{file_name}",
+ file_content: File.read(dashboard)
+ }
+ end
+
+ context 'user does not have push rights to the repository' do
+ it_behaves_like 'misconfigured dashboard service response', :forbidden, %q(You can't commit to this project)
+ end
+
+ context 'with rights to push to the repository' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'wrong target file extension' do
+ let(:file_name) { 'custom_dashboard.txt' }
+
+ it_behaves_like 'misconfigured dashboard service response', :bad_request, 'The file name should have a .yml extension'
+ end
+
+ context 'wrong source dashboard file' do
+ let(:dashboard) { 'config/prometheus/common_metrics_123.yml' }
+
+ it_behaves_like 'misconfigured dashboard service response', :not_found, 'Not found.'
+ end
+
+ context 'path traversal attack attempt' do
+ let(:dashboard) { 'config/prometheus/../database.yml' }
+
+ it_behaves_like 'misconfigured dashboard service response', :not_found, 'Not found.'
+ end
+
+ context 'path traversal attack attempt on target file' do
+ let(:file_name) { '../../custom_dashboard.yml' }
+ let(:dashboard_attrs) do
+ {
+ commit_message: commit_message,
+ branch_name: branch,
+ start_branch: project.default_branch,
+ encoding: 'text',
+ file_path: ".gitlab/dashboards/custom_dashboard.yml",
+ file_content: File.read(dashboard)
+ }
+ end
+
+ it 'strips target file name to safe value', :aggregate_failures do
+ service_instance = instance_double(::Files::CreateService)
+ expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
+ expect(service_instance).to receive(:execute).and_return(status: :success)
+
+ service_call
+ end
+ end
+
+ context 'valid parameters' do
+ it 'delegates commit creation to Files::CreateService', :aggregate_failures do
+ service_instance = instance_double(::Files::CreateService)
+ expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
+ expect(service_instance).to receive(:execute).and_return(status: :success)
+
+ service_call
+ end
+
+ context 'selected branch already exists' do
+ let(:branch) { 'existing_branch' }
+
+ before do
+ project.repository.add_branch(user, branch, 'master')
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :bad_request, "There was an error creating the dashboard, branch named: existing_branch already exists."
+
+ # Temporarily unavailable in the first iteration; follow-up issue
+ # https://gitlab.com/gitlab-org/gitlab/issues/196237 tracks
+ # adding this feature.
+ # it 'passes correct params to Files::CreateService', :aggregate_failures do
+ # project.repository.add_branch(user, branch, 'master')
+ #
+ # service_instance = instance_double(::Files::CreateService)
+ # expect(::Files::CreateService).to receive(:new).with(project, user, dashboard_attrs).and_return(service_instance)
+ # expect(service_instance).to receive(:execute).and_return(status: :success)
+ #
+ # service_call
+ # end
+ end
+
+ context 'blank branch name' do
+ let(:branch) { '' }
+
+ it_behaves_like 'misconfigured dashboard service response', :bad_request, 'There was an error creating the dashboard, branch name is invalid.'
+ end
+
+ context 'dashboard file already exists' do
+ let(:branch) { 'custom_dashboard' }
+
+ before do
+ Files::CreateService.new(
+ project,
+ user,
+ commit_message: 'Create custom dashboard custom_dashboard.yml',
+ branch_name: 'master',
+ start_branch: 'master',
+ file_path: ".gitlab/dashboards/custom_dashboard.yml",
+ file_content: File.read('config/prometheus/common_metrics.yml')
+ ).execute
+ end
+
+ it_behaves_like 'misconfigured dashboard service response', :bad_request, "A file with 'custom_dashboard.yml' already exists in custom_dashboard branch"
+ end
+
+ it 'expands the dashboard template path to an absolute path' do
+ allow(::Files::CreateService).to receive(:new).and_return(double(execute: { status: :success }))
+
+ expect(File).to receive(:read).with(Rails.root.join('config/prometheus/common_metrics.yml')).and_return('')
+
+ service_call
+ end
+
+ context 'Files::CreateService success' do
+ before do
+ allow(::Files::CreateService).to receive(:new).and_return(double(execute: { status: :success }))
+ end
+
+ it 'clears dashboards cache' do
+ expect(project.repository).to receive(:refresh_method_caches).with([:metrics_dashboard])
+
+ service_call
+ end
+
+ it 'returns success', :aggregate_failures do
+ result = service_call
+ dashboard_details = {
+ path: '.gitlab/dashboards/custom_dashboard.yml',
+ display_name: 'custom_dashboard.yml',
+ default: false,
+ system_dashboard: false
+ }
+
+ expect(result[:status]).to be :success
+ expect(result[:http_status]).to be :created
+ expect(result[:dashboard]).to match dashboard_details
+ end
+ end
+
+ context 'Files::CreateService fails' do
+ before do
+ allow(::Files::CreateService).to receive(:new).and_return(double(execute: { status: :error }))
+ end
+
+ it 'does NOT clear dashboards cache' do
+ expect(project.repository).not_to receive(:refresh_method_caches)
+
+ service_call
+ end
+
+ it 'returns error' do
+ result = service_call
+ expect(result[:status]).to be :error
+ end
+ end
+ end
+ end
+ end
+end
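Taken together, the expectations above outline the service's contract: the target file name is reduced to its basename, the source dashboard is read from an absolute path under Rails.root, the commit is delegated to Files::CreateService, and the metrics_dashboard method cache is refreshed on success. An inferred sketch of that flow (hypothetical and simplified; validation of the extension, source path and branch is omitted):

# Hypothetical sketch inferred from the spec above, not the actual service code.
def execute
  file_name    = File.basename(params[:file_name])              # defuses '../' traversal
  file_content = File.read(Rails.root.join(params[:dashboard])) # absolute template path

  result = ::Files::CreateService.new(project, current_user,
    commit_message: params[:commit_message],
    branch_name:    params[:branch],
    start_branch:   project.default_branch,
    encoding:       'text',
    file_path:      ".gitlab/dashboards/#{file_name}",
    file_content:   file_content
  ).execute

  if result[:status] == :success
    project.repository.refresh_method_caches([:metrics_dashboard])
  end

  result
end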
diff --git a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
index 53b7497ae21..744693dad15 100644
--- a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::CustomMetricEmbedService do
include MetricsDashboardHelpers
- set(:project) { build(:project) }
- set(:user) { create(:user) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:project, reload: true) { build(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/dashboard/default_embed_service_spec.rb b/spec/services/metrics/dashboard/default_embed_service_spec.rb
index 803b9a93be7..741a9644905 100644
--- a/spec/services/metrics/dashboard/default_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/default_embed_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::DefaultEmbedService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:project) { build(:project) }
- set(:user) { create(:user) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb b/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
index a0f7315f750..c1ce9818f21 100644
--- a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::DynamicEmbedService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:project) { build(:project) }
- set(:user) { create(:user) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/dashboard/project_dashboard_service_spec.rb b/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
index ab7a7b97861..cba8ef2ec98 100644
--- a/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/project_dashboard_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::ProjectDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb b/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
index 95c5a1479a4..cc9f711c611 100644
--- a/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/system_dashboard_service_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
describe Metrics::Dashboard::SystemDashboardService, :use_clean_rails_memory_store_caching do
include MetricsDashboardHelpers
- set(:user) { create(:user) }
- set(:project) { create(:project) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
diff --git a/spec/services/metrics/sample_metrics_service_spec.rb b/spec/services/metrics/sample_metrics_service_spec.rb
index 8574674ebc4..3b4f7cb8062 100644
--- a/spec/services/metrics/sample_metrics_service_spec.rb
+++ b/spec/services/metrics/sample_metrics_service_spec.rb
@@ -4,7 +4,10 @@ require 'spec_helper'
describe Metrics::SampleMetricsService do
describe 'query' do
- subject { described_class.new(identifier).query }
+ let(:range_start) { '2019-12-02T23:31:45.000Z' }
+ let(:range_end) { '2019-12-03T00:01:45.000Z' }
+
+ subject { described_class.new(identifier, range_start: range_start, range_end: range_end).query }
context 'when the file is not found' do
let(:identifier) { nil }
@@ -26,10 +29,10 @@ describe Metrics::SampleMetricsService do
FileUtils.rm(destination)
end
- subject { described_class.new(identifier).query }
+ subject { described_class.new(identifier, range_start: range_start, range_end: range_end).query }
it 'loads data from the sample file correctly' do
- expect(subject).to eq(YAML.load_file(source))
+ expect(subject).to eq(YAML.load_file(source)[30])
end
end
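The new range_start/range_end parameters span exactly 30 minutes, and the assertion now reads the `30` entry of the sample file, which suggests the fixture is keyed by the requested range length in minutes. A hedged illustration of that lookup (the fixture path and variable names here are made up):

require 'time'
require 'yaml'

range_start = '2019-12-02T23:31:45.000Z'
range_end   = '2019-12-03T00:01:45.000Z'

# 30-minute window => key 30 in the keyed sample fixture (illustrative only).
range_minutes = ((Time.parse(range_end) - Time.parse(range_start)) / 60).to_i # => 30
samples = YAML.load_file('sample_metrics/sample_metric_query_result.yml')[range_minutes]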
diff --git a/spec/services/milestones/promote_service_spec.rb b/spec/services/milestones/promote_service_spec.rb
index 22c7e9dde30..fa893b86cdb 100644
--- a/spec/services/milestones/promote_service_spec.rb
+++ b/spec/services/milestones/promote_service_spec.rb
@@ -31,7 +31,9 @@ describe Milestones::PromoteService do
it 'does not promote milestone and update issuables if promoted milestone is not valid' do
issue = create(:issue, milestone: milestone, project: project)
merge_request = create(:merge_request, milestone: milestone, source_project: project)
- allow_any_instance_of(Milestone).to receive(:valid?).and_return(false)
+ allow_next_instance_of(Milestone) do |instance|
+ allow(instance).to receive(:valid?).and_return(false)
+ end
expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError)
diff --git a/spec/services/milestones/transfer_service_spec.rb b/spec/services/milestones/transfer_service_spec.rb
index b3d41eb0763..711969ce504 100644
--- a/spec/services/milestones/transfer_service_spec.rb
+++ b/spec/services/milestones/transfer_service_spec.rb
@@ -71,7 +71,9 @@ describe Milestones::TransferService do
context 'when find_or_create_milestone returns nil' do
before do
- allow_any_instance_of(Milestones::FindOrCreateService).to receive(:execute).and_return(nil)
+ allow_next_instance_of(Milestones::FindOrCreateService) do |instance|
+ allow(instance).to receive(:execute).and_return(nil)
+ end
end
it 'removes issues group milestone' do
diff --git a/spec/services/namespaces/statistics_refresher_service_spec.rb b/spec/services/namespaces/statistics_refresher_service_spec.rb
index 9d42e917efe..1fa0a794edd 100644
--- a/spec/services/namespaces/statistics_refresher_service_spec.rb
+++ b/spec/services/namespaces/statistics_refresher_service_spec.rb
@@ -17,7 +17,9 @@ describe Namespaces::StatisticsRefresherService, '#execute' do
end
it 'recalculates the namespace statistics' do
- expect_any_instance_of(Namespace::RootStorageStatistics).to receive(:recalculate!).once
+ expect_next_instance_of(Namespace::RootStorageStatistics) do |instance|
+ expect(instance).to receive(:recalculate!).once
+ end
service.execute(group)
end
@@ -45,8 +47,9 @@ describe Namespaces::StatisticsRefresherService, '#execute' do
context 'when something goes wrong' do
before do
- allow_any_instance_of(Namespace::RootStorageStatistics)
- .to receive(:recalculate!).and_raise(ActiveRecord::ActiveRecordError)
+ allow_next_instance_of(Namespace::RootStorageStatistics) do |instance|
+ allow(instance).to receive(:recalculate!).and_raise(ActiveRecord::ActiveRecordError)
+ end
end
it 'raises RefreshError' do
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 8fd03428eb2..c5e2fe8de12 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Notes::CreateService do
- set(:project) { create(:project, :repository) }
- set(:issue) { create(:issue, project: project) }
- set(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:user) { create(:user) }
let(:opts) do
{ note: 'Awesome comment', noteable_type: 'Issue', noteable_id: issue.id }
end
@@ -216,12 +216,12 @@ describe Notes::CreateService do
context 'note with commands' do
context 'all quick actions' do
- set(:milestone) { create(:milestone, project: project, title: "sprint") }
- set(:bug_label) { create(:label, project: project, title: 'bug') }
- set(:to_be_copied_label) { create(:label, project: project, title: 'to be copied') }
- set(:feature_label) { create(:label, project: project, title: 'feature') }
- set(:issue) { create(:issue, project: project, labels: [bug_label], due_date: '2019-01-01') }
- set(:issue_2) { create(:issue, project: project, labels: [bug_label, to_be_copied_label]) }
+ let_it_be(:milestone) { create(:milestone, project: project, title: "sprint") }
+ let_it_be(:bug_label) { create(:label, project: project, title: 'bug') }
+ let_it_be(:to_be_copied_label) { create(:label, project: project, title: 'to be copied') }
+ let_it_be(:feature_label) { create(:label, project: project, title: 'feature') }
+ let_it_be(:issue, reload: true) { create(:issue, project: project, labels: [bug_label], due_date: '2019-01-01') }
+ let_it_be(:issue_2) { create(:issue, project: project, labels: [bug_label, to_be_copied_label]) }
context 'for issues' do
let(:issuable) { issue }
@@ -272,7 +272,7 @@ describe Notes::CreateService do
end
context 'for merge requests' do
- set(:merge_request) { create(:merge_request, source_project: project, labels: [bug_label]) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, labels: [bug_label]) }
let(:issuable) { merge_request }
let(:note_params) { opts.merge(noteable_type: 'MergeRequest', noteable_id: merge_request.id) }
let(:merge_request_quick_actions) do
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index 9faf1299ef2..258e5c68265 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Notes::DestroyService do
- set(:project) { create(:project, :public) }
- set(:issue) { create(:issue, project: project) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
let(:user) { issue.author }
describe '#execute' do
diff --git a/spec/services/notes/resolve_service_spec.rb b/spec/services/notes/resolve_service_spec.rb
index 3f82e1dbdc0..c98384c226e 100644
--- a/spec/services/notes/resolve_service_spec.rb
+++ b/spec/services/notes/resolve_service_spec.rb
@@ -17,7 +17,9 @@ describe Notes::ResolveService do
end
it "sends notifications if all discussions are resolved" do
- expect_any_instance_of(MergeRequests::ResolvedDiscussionNotificationService).to receive(:execute).with(merge_request)
+ expect_next_instance_of(MergeRequests::ResolvedDiscussionNotificationService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request)
+ end
described_class.new(merge_request.project, user).execute(note)
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index b80f75c70e6..80b8d36aa07 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -155,7 +155,7 @@ describe NotificationService, :mailer do
describe '#async' do
let(:async) { notification.async }
- set(:key) { create(:personal_key) }
+ let_it_be(:key) { create(:personal_key) }
it 'returns an Async object with the correct parent' do
expect(async).to be_a(described_class::Async)
@@ -2508,14 +2508,14 @@ describe NotificationService, :mailer do
end
describe 'Pages domains' do
- set(:project) { create(:project) }
- set(:domain) { create(:pages_domain, project: project) }
- set(:u_blocked) { create(:user, :blocked) }
- set(:u_silence) { create_user_with_notification(:disabled, 'silent', project) }
- set(:u_owner) { project.owner }
- set(:u_maintainer1) { create(:user) }
- set(:u_maintainer2) { create(:user) }
- set(:u_developer) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:domain, reload: true) { create(:pages_domain, project: project) }
+ let_it_be(:u_blocked) { create(:user, :blocked) }
+ let_it_be(:u_silence) { create_user_with_notification(:disabled, 'silent', project) }
+ let_it_be(:u_owner) { project.owner }
+ let_it_be(:u_maintainer1) { create(:user) }
+ let_it_be(:u_maintainer2) { create(:user) }
+ let_it_be(:u_developer) { create(:user) }
before do
project.add_maintainer(u_blocked)
@@ -2707,7 +2707,7 @@ describe NotificationService, :mailer do
# User to be participant by default
# This user does not contain any record in notification settings table
# It should be treated with a :participating notification_level
- @u_lazy_participant = create(:user, username: 'lazy-participant')
+ @u_lazy_participant = create(:user, username: 'lazy-participant')
@u_guest_watcher = create_user_with_notification(:watch, 'guest_watching')
@u_guest_custom = create_user_with_notification(:custom, 'guest_custom')
diff --git a/spec/services/pages_domains/create_acme_order_service_spec.rb b/spec/services/pages_domains/create_acme_order_service_spec.rb
index d59aa9b979e..154b3fd5600 100644
--- a/spec/services/pages_domains/create_acme_order_service_spec.rb
+++ b/spec/services/pages_domains/create_acme_order_service_spec.rb
@@ -45,12 +45,34 @@ describe PagesDomains::CreateAcmeOrderService do
expect { OpenSSL::PKey::RSA.new(saved_order.private_key) }.not_to raise_error
end
- it 'properly saves order attributes' do
+ it 'properly saves order url' do
service.execute
saved_order = PagesDomainAcmeOrder.last
expect(saved_order.url).to eq(order_double.url)
- expect(saved_order.expires_at).to be_like_time(order_double.expires)
+ end
+
+ context 'when order expires in 2 days' do
+ it 'sets the expiration time to 2 hours from now' do
+ Timecop.freeze do
+ service.execute
+
+ saved_order = PagesDomainAcmeOrder.last
+ expect(saved_order.expires_at).to be_like_time(2.hours.from_now)
+ end
+ end
+ end
+
+ context 'when order expires in an hour' do
+ it 'sets the expiration time according to the order' do
+ Timecop.freeze do
+ allow(order_double).to receive(:expires).and_return(1.hour.from_now)
+ service.execute
+
+ saved_order = PagesDomainAcmeOrder.last
+ expect(saved_order.expires_at).to be_like_time(1.hour.from_now)
+ end
+ end
end
it 'properly saves challenge attributes' do
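The two new contexts pin down the expiry rule: the saved expires_at follows the ACME order's own expiry but is capped at two hours from the time of saving. A one-line hedged sketch of that rule (illustrative, not the service's code; relies on ActiveSupport durations):

# Cap the stored expiry at two hours from now (illustrative).
expires_at = [order.expires, 2.hours.from_now].min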
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index af79a42b611..9832ba95524 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -32,9 +32,9 @@ describe PagesDomains::ObtainLetsEncryptCertificateService do
def stub_lets_encrypt_order(url, status)
order = ::Gitlab::LetsEncrypt::Order.new(acme_order_double(status: status))
- allow_any_instance_of(::Gitlab::LetsEncrypt::Client).to(
- receive(:load_order).with(url).and_return(order)
- )
+ allow_next_instance_of(::Gitlab::LetsEncrypt::Client) do |instance|
+ allow(instance).to receive(:load_order).with(url).and_return(order)
+ end
order
end
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 14772d172e8..78b969c8a0e 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Projects::ContainerRepository::CleanupTagsService do
- set(:user) { create(:user) }
- set(:project) { create(:project, :private) }
- set(:repository) { create(:container_repository, :root, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:repository) { create(:container_repository, :root, project: project) }
let(:service) { described_class.new(project, user, params) }
diff --git a/spec/services/projects/container_repository/delete_tags_service_spec.rb b/spec/services/projects/container_repository/delete_tags_service_spec.rb
index 7ceb02c9cf8..decbbb7597f 100644
--- a/spec/services/projects/container_repository/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/delete_tags_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe Projects::ContainerRepository::DeleteTagsService do
- set(:user) { create(:user) }
- set(:project) { create(:project, :private) }
- set(:repository) { create(:container_repository, :root, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:repository) { create(:container_repository, :root, project: project) }
let(:params) { { tags: tags } }
let(:service) { described_class.new(project, user, params) }
diff --git a/spec/services/projects/container_repository/destroy_service_spec.rb b/spec/services/projects/container_repository/destroy_service_spec.rb
index affcc66d2bb..cc8fd2716e1 100644
--- a/spec/services/projects/container_repository/destroy_service_spec.rb
+++ b/spec/services/projects/container_repository/destroy_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Projects::ContainerRepository::DestroyService do
- set(:user) { create(:user) }
- set(:project) { create(:project, :private) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
subject { described_class.new(project, user) }
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 814bf912c8c..bce3f72a287 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -247,7 +247,9 @@ describe Projects::CreateService, '#execute' do
context 'repository creation' do
it 'synchronously creates the repository' do
- expect_any_instance_of(Project).to receive(:create_repository)
+ expect_next_instance_of(Project) do |instance|
+ expect(instance).to receive(:create_repository)
+ end
project = create_project(user, opts)
expect(project).to be_valid
diff --git a/spec/services/projects/detect_repository_languages_service_spec.rb b/spec/services/projects/detect_repository_languages_service_spec.rb
index df5eed18ac0..76600b0e77c 100644
--- a/spec/services/projects/detect_repository_languages_service_spec.rb
+++ b/spec/services/projects/detect_repository_languages_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::DetectRepositoryLanguagesService, :clean_gitlab_redis_shared_state do
- set(:project) { create(:project, :repository) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
subject { described_class.new(project) }
@@ -51,7 +51,7 @@ describe Projects::DetectRepositoryLanguagesService, :clean_gitlab_redis_shared_
end
context 'when no repository exists' do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
it 'has no languages' do
expect(subject.execute).to be_empty
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index fc01c93b5cf..e7b904fcd60 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -224,6 +224,19 @@ describe Projects::ForkService do
end
end
end
+
+ context 'when forking is disabled' do
+ before do
+ @from_project.project_feature.update_attribute(
+ :forking_access_level, ProjectFeature::DISABLED)
+ end
+
+ it 'fails' do
+ to_project = fork_project(@from_project, @to_user, namespace: @to_user.namespace)
+
+ expect(to_project.errors[:forked_from_project_id]).to eq(['is forbidden'])
+ end
+ end
end
describe 'fork to namespace' do
diff --git a/spec/services/projects/gitlab_projects_import_service_spec.rb b/spec/services/projects/gitlab_projects_import_service_spec.rb
index 78580bfa604..1662d4577aa 100644
--- a/spec/services/projects/gitlab_projects_import_service_spec.rb
+++ b/spec/services/projects/gitlab_projects_import_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::GitlabProjectsImportService do
- set(:namespace) { create(:namespace) }
+ let_it_be(:namespace) { create(:namespace) }
let(:path) { 'test-path' }
let(:file) { fixture_file_upload('spec/fixtures/project_export.tar.gz') }
let(:overwrite) { false }
diff --git a/spec/services/projects/housekeeping_service_spec.rb b/spec/services/projects/housekeeping_service_spec.rb
index c99054d9fd5..60804a8dba6 100644
--- a/spec/services/projects/housekeeping_service_spec.rb
+++ b/spec/services/projects/housekeeping_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Projects::HousekeepingService do
subject { described_class.new(project) }
- set(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
before do
project.reset_pushes_since_gc
diff --git a/spec/services/projects/import_export/export_service_spec.rb b/spec/services/projects/import_export/export_service_spec.rb
index a557e61da78..c7ac07fc524 100644
--- a/spec/services/projects/import_export/export_service_spec.rb
+++ b/spec/services/projects/import_export/export_service_spec.rb
@@ -94,7 +94,9 @@ describe Projects::ImportExport::ExportService do
end
it 'notifies the user' do
- expect_any_instance_of(NotificationService).to receive(:project_not_exported)
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:project_not_exported)
+ end
end
it 'notifies logger' do
@@ -122,7 +124,9 @@ describe Projects::ImportExport::ExportService do
end
it 'notifies the user' do
- expect_any_instance_of(NotificationService).to receive(:project_not_exported)
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:project_not_exported)
+ end
end
it 'notifies logger' do
diff --git a/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
index 7ca20a6d751..016028a96bf 100644
--- a/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
@@ -16,7 +16,9 @@ describe Projects::LfsPointers::LfsImportService do
it 'downloads lfs objects' do
service = double
- expect_any_instance_of(Projects::LfsPointers::LfsObjectDownloadListService).to receive(:execute).and_return(oid_download_links)
+ expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
+ expect(instance).to receive(:execute).and_return(oid_download_links)
+ end
expect(Projects::LfsPointers::LfsDownloadService).to receive(:new).and_return(service).twice
expect(service).to receive(:execute).twice
@@ -27,7 +29,9 @@ describe Projects::LfsPointers::LfsImportService do
context 'when no downloadable lfs object links' do
it 'does not call LfsDownloadService' do
- expect_any_instance_of(Projects::LfsPointers::LfsObjectDownloadListService).to receive(:execute).and_return({})
+ expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
+ expect(instance).to receive(:execute).and_return({})
+ end
expect(Projects::LfsPointers::LfsDownloadService).not_to receive(:new)
result = subject.execute
@@ -39,7 +43,9 @@ describe Projects::LfsPointers::LfsImportService do
context 'when an exception is raised' do
it 'returns error' do
error_message = "error message"
- expect_any_instance_of(Projects::LfsPointers::LfsObjectDownloadListService).to receive(:execute).and_raise(StandardError, error_message)
+ expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
+ expect(instance).to receive(:execute).and_raise(StandardError, error_message)
+ end
result = subject.execute
diff --git a/spec/services/projects/open_merge_requests_count_service_spec.rb b/spec/services/projects/open_merge_requests_count_service_spec.rb
index f9fff4cbd4c..7d848f9f2c3 100644
--- a/spec/services/projects/open_merge_requests_count_service_spec.rb
+++ b/spec/services/projects/open_merge_requests_count_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Projects::OpenMergeRequestsCountService, :use_clean_rails_memory_store_caching do
- set(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
subject { described_class.new(project) }
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 81d59a98b9b..93cd5c43e86 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe Projects::Operations::UpdateService do
- set(:user) { create(:user) }
- set(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
let(:result) { subject.execute }
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index 239d28557ee..6eaf7a71b23 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
describe Projects::ParticipantsService do
describe '#groups' do
- set(:user) { create(:user) }
- set(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
let(:service) { described_class.new(project, user) }
it 'avoids N+1 queries' do
@@ -62,10 +62,10 @@ describe Projects::ParticipantsService do
subject(:usernames) { service.project_members.map { |member| member[:username] } }
context 'when there is a project in group namespace' do
- set(:public_group) { create(:group, :public) }
- set(:public_project) { create(:project, :public, namespace: public_group)}
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- set(:public_group_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
let(:service) { described_class.new(public_project, create(:user)) }
@@ -79,18 +79,18 @@ describe Projects::ParticipantsService do
end
context 'when there is a private group and a public project' do
- set(:public_group) { create(:group, :public) }
- set(:private_group) { create(:group, :private, :nested) }
- set(:public_project) { create(:project, :public, namespace: public_group)}
+ let_it_be(:public_group) { create(:group, :public) }
+ let_it_be(:private_group) { create(:group, :private, :nested) }
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
- set(:project_issue) { create(:issue, project: public_project)}
+ let_it_be(:project_issue) { create(:issue, project: public_project)}
- set(:public_group_owner) { create(:user) }
- set(:private_group_member) { create(:user) }
- set(:public_project_maintainer) { create(:user) }
- set(:private_group_owner) { create(:user) }
+ let_it_be(:public_group_owner) { create(:user) }
+ let_it_be(:private_group_member) { create(:user) }
+ let_it_be(:public_project_maintainer) { create(:user) }
+ let_it_be(:private_group_owner) { create(:user) }
- set(:group_ancestor_owner) { create(:user) }
+ let_it_be(:group_ancestor_owner) { create(:user) }
before(:context) do
public_group.add_owner public_group_owner
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index fe92b53cd91..714256d9b08 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -3,9 +3,9 @@
require "spec_helper"
describe Projects::UpdatePagesService do
- set(:project) { create(:project, :repository) }
- set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
- set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
+ let_it_be(:project, refind: true) { create(:project, :repository) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) }
+ let_it_be(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') }
let(:invalid_file) { fixture_file_upload('spec/fixtures/dk.png') }
let(:file) { fixture_file_upload("spec/fixtures/pages.zip") }
@@ -110,8 +110,9 @@ describe Projects::UpdatePagesService do
context 'when timeout happens by DNS error' do
before do
- allow_any_instance_of(described_class)
- .to receive(:extract_zip_archive!).and_raise(SocketError)
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:extract_zip_archive!).and_raise(SocketError)
+ end
end
it 'raises an error' do
@@ -125,9 +126,10 @@ describe Projects::UpdatePagesService do
context 'when failed to extract zip artifacts' do
before do
- expect_any_instance_of(described_class)
- .to receive(:extract_zip_archive!)
- .and_raise(Projects::UpdatePagesService::FailedToExtractError)
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:extract_zip_archive!)
+ .and_raise(Projects::UpdatePagesService::FailedToExtractError)
+ end
end
it 'raises an error' do
@@ -185,60 +187,20 @@ describe Projects::UpdatePagesService do
.and_return(metadata)
end
- shared_examples 'pages size limit exceeded' do
- it 'limits the maximum size of gitlab pages' do
- subject.execute
-
- expect(deploy_status.description)
- .to match(/artifacts for pages are too large/)
- expect(deploy_status).to be_script_failure
- expect(project.pages_metadatum).not_to be_deployed
- end
- end
-
context 'when maximum pages size is set to zero' do
before do
stub_application_setting(max_pages_size: 0)
end
- context 'when page size does not exceed internal maximum' do
- before do
- allow(metadata).to receive(:total_size).and_return(200.megabytes)
- end
-
- it 'updates pages correctly' do
- subject.execute
-
- expect(deploy_status.description).not_to be_present
- expect(project.pages_metadatum).to be_deployed
- end
- end
-
- context 'when pages size does exceed internal maximum' do
- before do
- allow(metadata).to receive(:total_size).and_return(2.terabytes)
- end
-
- it_behaves_like 'pages size limit exceeded'
- end
- end
-
- context 'when pages size is greater than max size setting' do
- before do
- stub_application_setting(max_pages_size: 200)
- allow(metadata).to receive(:total_size).and_return(201.megabytes)
- end
-
- it_behaves_like 'pages size limit exceeded'
+ it_behaves_like 'pages size limit is', ::Gitlab::Pages::MAX_SIZE
end
- context 'when max size setting is greater than internal max size' do
+ context 'when size is limited on the instance level' do
before do
- stub_application_setting(max_pages_size: 3.terabytes / 1.megabyte)
- allow(metadata).to receive(:total_size).and_return(2.terabytes)
+ stub_application_setting(max_pages_size: 100)
end
- it_behaves_like 'pages size limit exceeded'
+ it_behaves_like 'pages size limit is', 100.megabytes
end
end
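The bespoke 'pages size limit exceeded' examples are replaced by a parameterised 'pages size limit is' shared example: with max_pages_size set to 0 the limit falls back to ::Gitlab::Pages::MAX_SIZE, otherwise the instance setting applies. A hedged sketch of that resolution (simplified; the real code may also take project-level limits into account):

# Illustrative resolution of the effective GitLab Pages size limit.
def effective_max_pages_size
  setting = Gitlab::CurrentSettings.max_pages_size.megabytes
  setting.zero? ? ::Gitlab::Pages::MAX_SIZE : setting
end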
diff --git a/spec/services/prometheus/adapter_service_spec.rb b/spec/services/prometheus/adapter_service_spec.rb
deleted file mode 100644
index 52e035e1f70..00000000000
--- a/spec/services/prometheus/adapter_service_spec.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Prometheus::AdapterService do
- let(:project) { create(:project) }
-
- subject { described_class.new(project) }
-
- describe '#prometheus_adapter' do
- let(:cluster) { create(:cluster, :provided_by_user, environment_scope: '*', projects: [project]) }
-
- context 'prometheus service can execute queries' do
- let(:prometheus_service) { double(:prometheus_service, can_query?: true) }
-
- before do
- allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
- end
-
- it 'return prometheus service as prometheus adapter' do
- expect(subject.prometheus_adapter).to eq(prometheus_service)
- end
- end
-
- context "prometheus service can't execute queries" do
- let(:prometheus_service) { double(:prometheus_service, can_query?: false) }
-
- context 'with cluster with prometheus not available' do
- let!(:prometheus) { create(:clusters_applications_prometheus, :installable, cluster: cluster) }
-
- it 'returns nil' do
- expect(subject.prometheus_adapter).to be_nil
- end
- end
-
- context 'with cluster with prometheus available' do
- let!(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
-
- it 'returns application handling all environments' do
- expect(subject.prometheus_adapter).to eq(prometheus)
- end
- end
-
- context 'with cluster without prometheus installed' do
- it 'returns nil' do
- expect(subject.prometheus_adapter).to be_nil
- end
- end
- end
- end
-end
diff --git a/spec/services/prometheus/proxy_service_spec.rb b/spec/services/prometheus/proxy_service_spec.rb
index 03bda94e9c6..5a036194d01 100644
--- a/spec/services/prometheus/proxy_service_spec.rb
+++ b/spec/services/prometheus/proxy_service_spec.rb
@@ -5,8 +5,14 @@ require 'spec_helper'
describe Prometheus::ProxyService do
include ReactiveCachingHelpers
- set(:project) { create(:project) }
- set(:environment) { create(:environment, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+
+ describe 'configuration' do
+ it 'does not need a ReactiveCaching refresh' do
+ expect(described_class.reactive_cache_refresh_interval).to be > described_class.reactive_cache_lifetime
+ end
+ end
describe '#initialize' do
let(:params) { ActionController::Parameters.new(query: '1').permit! }
diff --git a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb b/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
index b1cdb8fd3ae..9978c631366 100644
--- a/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
+++ b/spec/services/prometheus/proxy_variable_substitution_service_spec.rb
@@ -39,8 +39,12 @@ describe Prometheus::ProxyVariableSubstitutionService do
end
context 'with predefined variables' do
+ let(:params_keys) { { query: 'up{%{environment_filter}}' } }
+
it_behaves_like 'success' do
- let(:expected_query) { %Q[up{environment="#{environment.slug}"}] }
+ let(:expected_query) do
+ %Q[up{container_name!="POD",environment="#{environment.slug}"}]
+ end
end
context 'with nil query' do
@@ -50,6 +54,133 @@ describe Prometheus::ProxyVariableSubstitutionService do
let(:expected_query) { nil }
end
end
+
+ context 'with liquid format' do
+ let(:params_keys) do
+ { query: 'up{environment="{{ci_environment_slug}}"}' }
+ end
+
+ it_behaves_like 'success' do
+ let(:expected_query) { %Q[up{environment="#{environment.slug}"}] }
+ end
+ end
+
+ context 'with ruby and liquid formats' do
+ let(:params_keys) do
+ { query: 'up{%{environment_filter},env2="{{ci_environment_slug}}"}' }
+ end
+
+ it_behaves_like 'success' do
+ let(:expected_query) do
+ %Q[up{container_name!="POD",environment="#{environment.slug}",env2="#{environment.slug}"}]
+ end
+ end
+ end
+ end
+
+ context 'with custom variables' do
+ let(:pod_name) { "pod1" }
+
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="{{pod_name}}"}',
+ variables: ['pod_name', pod_name]
+ }
+ end
+
+ it_behaves_like 'success' do
+ let(:expected_query) { %q[up{pod_name="pod1"}] }
+ end
+
+ context 'with ruby variable interpolation format' do
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="%{pod_name}"}',
+ variables: ['pod_name', pod_name]
+ }
+ end
+
+ it_behaves_like 'success' do
+ # Custom variables cannot be used with the Ruby interpolation format.
+ let(:expected_query) { "up{pod_name=\"%{pod_name}\"}" }
+ end
+ end
+
+ context 'with predefined variables in variables parameter' do
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="{{pod_name}}",env="{{ci_environment_slug}}"}',
+ variables: ['pod_name', pod_name, 'ci_environment_slug', 'custom_value']
+ }
+ end
+
+ it_behaves_like 'success' do
+ # Predefined variable values should not be overwritten by custom variable
+ # values.
+ let(:expected_query) { "up{pod_name=\"#{pod_name}\",env=\"#{environment.slug}\"}" }
+ end
+ end
+
+ context 'with invalid variables parameter' do
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="{{pod_name}}"}',
+ variables: ['a']
+ }
+ end
+
+ it_behaves_like 'error', 'Optional parameter "variables" must be an ' \
+ 'array of keys and values. Ex: [key1, value1, key2, value2]'
+ end
+
+ context 'with nil variables' do
+ let(:params_keys) do
+ {
+ query: 'up{pod_name="{{pod_name}}"}',
+ variables: nil
+ }
+ end
+
+ it_behaves_like 'success' do
+ let(:expected_query) { 'up{pod_name=""}' }
+ end
+ end
+
+ context 'with ruby and liquid variables' do
+ let(:params_keys) do
+ {
+ query: 'up{env1="%{ruby_variable}",env2="{{ liquid_variable }}"}',
+ variables: %w(ruby_variable value liquid_variable env_slug)
+ }
+ end
+
+ it_behaves_like 'success' do
+ # It should replace only liquid variables with their values
+ let(:expected_query) { %q[up{env1="%{ruby_variable}",env2="env_slug"}] }
+ end
+ end
+ end
+
+ context 'with liquid tags and ruby format variables' do
+ let(:params_keys) do
+ {
+ query: 'up{ {% if true %}env1="%{ci_environment_slug}",' \
+ 'env2="{{ci_environment_slug}}"{% endif %} }'
+ }
+ end
+
+ # The following spec will fail and should be changed to a 'success' spec
+ # once we remove support for the Ruby interpolation format.
+ # https://gitlab.com/gitlab-org/gitlab/issues/37990
+ #
+ # Liquid tags `{% %}` cannot be used currently because the Ruby `%`
+ # operator raises an error when it encounters a Liquid `{% %}` tag in the
+ # string.
+ #
+ # Once we remove support for the Ruby format, users can start using
+ # Liquid tags.
+
+ it_behaves_like 'error', 'Malformed string'
end
context 'ruby template rendering' do
@@ -139,5 +270,18 @@ describe Prometheus::ProxyVariableSubstitutionService do
end
end
end
+
+ context 'when liquid template rendering raises error' do
+ before do
+ liquid_service = instance_double(TemplateEngines::LiquidService)
+
+ allow(TemplateEngines::LiquidService).to receive(:new).and_return(liquid_service)
+ allow(liquid_service).to receive(:render).and_raise(
+ TemplateEngines::LiquidService::RenderError, 'error message'
+ )
+ end
+
+ it_behaves_like 'error', 'error message'
+ end
end
end
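These contexts exercise the two substitution syntaxes side by side: Ruby's %-style interpolation for predefined variables and Liquid templates (rendered through TemplateEngines::LiquidService) for predefined and custom variables. An isolated illustration of the two formats using plain Ruby and the liquid gem (not the service itself):

require 'liquid'

# Ruby interpolation format: %{name} placeholders filled from a symbol-keyed hash.
format('up{environment="%{slug}"}', slug: 'production')
# => 'up{environment="production"}'

# Liquid format: {{ name }} placeholders rendered from string-keyed parameters.
Liquid::Template.parse('up{environment="{{ slug }}"}').render('slug' => 'production')
# => 'up{environment="production"}'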
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index b105e1e40ce..b2576cae575 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -856,9 +856,10 @@ describe QuickActions::InterpretService do
end
context 'only group milestones available' do
- let(:group) { create(:group) }
+ let(:ancestor_group) { create(:group) }
+ let(:group) { create(:group, parent: ancestor_group) }
let(:project) { create(:project, :public, namespace: group) }
- let(:milestone) { create(:milestone, group: group, title: '10.0') }
+ let(:milestone) { create(:milestone, group: ancestor_group, title: '10.0') }
it_behaves_like 'milestone command' do
let(:content) { "/milestone %#{milestone.title}" }
diff --git a/spec/services/releases/update_service_spec.rb b/spec/services/releases/update_service_spec.rb
index 178bac3574f..f6c70873540 100644
--- a/spec/services/releases/update_service_spec.rb
+++ b/spec/services/releases/update_service_spec.rb
@@ -21,6 +21,7 @@ describe Releases::UpdateService do
it 'raises an error' do
result = service.execute
expect(result[:status]).to eq(:error)
+ expect(result[:milestones_updated]).to be_falsy
end
end
@@ -50,21 +51,33 @@ describe Releases::UpdateService do
end
context 'when a milestone is passed in' do
- let(:new_title) { 'v2.0' }
let(:milestone) { create(:milestone, project: project, title: 'v1.0') }
- let(:new_milestone) { create(:milestone, project: project, title: new_title) }
let(:params_with_milestone) { params.merge!({ milestones: [new_title] }) }
+ let(:new_milestone) { create(:milestone, project: project, title: new_title) }
let(:service) { described_class.new(new_milestone.project, user, params_with_milestone) }
before do
release.milestones << milestone
+ end
- service.execute
- release.reload
+ context 'a different milestone' do
+ let(:new_title) { 'v2.0' }
+
+ it 'updates the related milestone accordingly' do
+ result = service.execute
+ release.reload
+
+ expect(release.milestones.first.title).to eq(new_title)
+ expect(result[:milestones_updated]).to be_truthy
+ end
end
- it 'updates the related milestone accordingly' do
- expect(release.milestones.first.title).to eq(new_title)
+ context 'an identical milestone' do
+ let(:new_title) { 'v1.0' }
+
+ it "raises an error" do
+ expect { service.execute }.to raise_error(ActiveRecord::RecordInvalid)
+ end
end
end
@@ -76,12 +89,14 @@ describe Releases::UpdateService do
release.milestones << milestone
service.params = params_with_empty_milestone
- service.execute
- release.reload
end
it 'removes the old milestone and does not associate any new milestone' do
+ result = service.execute
+ release.reload
+
expect(release.milestones).not_to be_present
+ expect(result[:milestones_updated]).to be_truthy
end
end
@@ -96,14 +111,15 @@ describe Releases::UpdateService do
create(:milestone, project: project, title: new_title_1)
create(:milestone, project: project, title: new_title_2)
release.milestones << milestone
-
- service.execute
- release.reload
end
it 'removes the old milestone and updates the release with the new ones' do
+ result = service.execute
+ release.reload
+
milestone_titles = release.milestones.map(&:title)
expect(milestone_titles).to match_array([new_title_1, new_title_2])
+ expect(result[:milestones_updated]).to be_truthy
end
end
end
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index 070964eb1ec..2b987b7fec9 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe ResourceEvents::ChangeLabelsService do
- set(:project) { create(:project) }
- set(:author) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:author) { create(:user) }
let(:resource) { create(:issue, project: project) }
describe '.change_labels' do
diff --git a/spec/services/resource_events/merge_into_notes_service_spec.rb b/spec/services/resource_events/merge_into_notes_service_spec.rb
index 72467091791..6bad1b86fca 100644
--- a/spec/services/resource_events/merge_into_notes_service_spec.rb
+++ b/spec/services/resource_events/merge_into_notes_service_spec.rb
@@ -16,11 +16,11 @@ describe ResourceEvents::MergeIntoNotesService do
create(:note_on_issue, opts.merge(params))
end
- set(:project) { create(:project) }
- set(:user) { create(:user) }
- set(:resource) { create(:issue, project: project) }
- set(:label) { create(:label, project: project) }
- set(:label2) { create(:label, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:resource) { create(:issue, project: project) }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project) }
let(:time) { Time.now }
describe '#execute' do
diff --git a/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb b/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb
new file mode 100644
index 00000000000..41902bc1da1
--- /dev/null
+++ b/spec/services/resource_events/synthetic_label_notes_builder_service_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ResourceEvents::SyntheticLabelNotesBuilderService do
+ describe '#execute' do
+ let!(:user) { create(:user) }
+
+ let!(:issue) { create(:issue, author: user) }
+
+ let!(:event1) { create(:resource_label_event, issue: issue) }
+ let!(:event2) { create(:resource_label_event, issue: issue) }
+ let!(:event3) { create(:resource_label_event, issue: issue) }
+
+ it 'returns the expected synthetic notes' do
+ notes = ResourceEvents::SyntheticLabelNotesBuilderService.new(issue, user).execute
+
+ expect(notes.size).to eq(3)
+ end
+ end
+end
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
new file mode 100644
index 00000000000..6f7ce7959ff
--- /dev/null
+++ b/spec/services/snippets/create_service_spec.rb
@@ -0,0 +1,170 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Snippets::CreateService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let(:opts) { base_opts.merge(extra_opts) }
+ let(:base_opts) do
+ {
+ title: 'Test snippet',
+ file_name: 'snippet.rb',
+ content: 'puts "hello world"',
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE
+ }
+ end
+ let(:extra_opts) { {} }
+ let(:creator) { admin }
+
+ subject { Snippets::CreateService.new(project, creator, opts).execute }
+
+ let(:snippet) { subject.payload[:snippet] }
+
+ shared_examples 'a service that creates a snippet' do
+ it 'creates a snippet with the provided attributes' do
+ expect(snippet.title).to eq(opts[:title])
+ expect(snippet.file_name).to eq(opts[:file_name])
+ expect(snippet.content).to eq(opts[:content])
+ expect(snippet.visibility_level).to eq(opts[:visibility_level])
+ end
+ end
+
+ shared_examples 'public visibility level restrictions apply' do
+ let(:extra_opts) { { visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
+
+ before do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
+ end
+
+ context 'when user is not an admin' do
+ let(:creator) { user }
+
+ it 'responds with an error' do
+ expect(subject).to be_error
+ end
+
+ it 'does not create a public snippet' do
+ expect(subject.message).to match('has been restricted')
+ end
+ end
+
+ context 'when user is an admin' do
+ it 'responds with success' do
+ expect(subject).to be_success
+ end
+
+ it 'creates a public snippet' do
+ expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ end
+ end
+
+ describe 'when visibility level is passed as a string' do
+ let(:extra_opts) { { visibility: 'internal' } }
+
+ before do
+ base_opts.delete(:visibility_level)
+ end
+
+ it 'assigns the correct visibility level' do
+ expect(subject).to be_success
+ expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
+ end
+
+ shared_examples 'spam check is performed' do
+ shared_examples 'marked as spam' do
+ it 'marks a snippet as spam' do
+ expect(snippet).to be_spam
+ end
+
+ it 'invalidates the snippet' do
+ expect(snippet).to be_invalid
+ end
+
+ it 'creates a new spam_log' do
+ expect { snippet }
+ .to log_spam(title: snippet.title, noteable_type: snippet.class.name)
+ end
+
+ it 'assigns a spam_log to the snippet' do
+ expect(snippet.spam_log).to eq(SpamLog.last)
+ end
+ end
+
+ let(:extra_opts) do
+ { visibility_level: Gitlab::VisibilityLevel::PUBLIC, request: double(:request, env: {}) }
+ end
+
+ before do
+ expect_next_instance_of(AkismetService) do |akismet_service|
+ expect(akismet_service).to receive_messages(spam?: true)
+ end
+ end
+
+ [true, false, nil].each do |allow_possible_spam|
+ context "when allow_possible_spam flag is #{allow_possible_spam.inspect}" do
+ before do
+ stub_feature_flags(allow_possible_spam: allow_possible_spam) unless allow_possible_spam.nil?
+ end
+
+ it_behaves_like 'marked as spam'
+ end
+ end
+ end
+
+ shared_examples 'snippet create data is tracked' do
+ let(:counter) { Gitlab::UsageDataCounters::SnippetCounter }
+
+ it 'increments count when create succeeds' do
+ expect { subject }.to change { counter.read(:create) }.by 1
+ end
+
+ context 'when create fails' do
+ let(:opts) { {} }
+
+ it 'does not increment count' do
+ expect { subject }.not_to change { counter.read(:create) }
+ end
+ end
+ end
+
+ shared_examples 'an error service response when save fails' do
+ let(:extra_opts) { { content: nil } }
+
+ it 'responds with an error' do
+ expect(subject).to be_error
+ end
+
+ it 'does not create the snippet' do
+ expect { subject }.not_to change { Snippet.count }
+ end
+ end
+
+ context 'when Project Snippet' do
+ let_it_be(:project) { create(:project) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'a service that creates a snippet'
+ it_behaves_like 'public visibility level restrictions apply'
+ it_behaves_like 'spam check is performed'
+ it_behaves_like 'snippet create data is tracked'
+ it_behaves_like 'an error service response when save fails'
+ end
+
+ context 'when PersonalSnippet' do
+ let(:project) { nil }
+
+ it_behaves_like 'a service that creates a snippet'
+ it_behaves_like 'public visibility level restrictions apply'
+ it_behaves_like 'spam check is performed'
+ it_behaves_like 'snippet create data is tracked'
+ it_behaves_like 'an error service response when save fails'
+ end
+ end
+end
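The assertions in the new create spec (`be_success`, `be_error`, `subject.message`, `subject.payload[:snippet]`) all target a ServiceResponse-style return object rather than the snippet itself. A rough sketch of how a caller might consume such a response; the exact class shape is an assumption inferred from the spec, not a definitive API reference:

    # Hypothetical caller; Snippets::CreateService is assumed to behave as the
    # spec above implies and to return a success/error response object.
    response = Snippets::CreateService.new(project, current_user, params).execute

    if response.success?
      snippet = response.payload[:snippet] # the newly created record
    else
      Rails.logger.warn("Snippet creation failed: #{response.message}")
    end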
diff --git a/spec/services/snippets/destroy_service_spec.rb b/spec/services/snippets/destroy_service_spec.rb
new file mode 100644
index 00000000000..bb035d275ab
--- /dev/null
+++ b/spec/services/snippets/destroy_service_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Snippets::DestroyService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
+ describe '#execute' do
+ subject { Snippets::DestroyService.new(user, snippet).execute }
+
+ context 'when snippet is nil' do
+ let(:snippet) { nil }
+
+ it 'returns a ServiceResponse error' do
+ expect(subject).to be_error
+ end
+ end
+
+ shared_examples 'a successful destroy' do
+ it 'deletes the snippet' do
+ expect { subject }.to change { Snippet.count }.by(-1)
+ end
+
+ it 'returns ServiceResponse success' do
+ expect(subject).to be_success
+ end
+ end
+
+ shared_examples 'an unsuccessful destroy' do
+ it 'does not delete the snippet' do
+ expect { subject }.to change { Snippet.count }.by(0)
+ end
+
+ it 'returns ServiceResponse error' do
+ expect(subject).to be_error
+ end
+ end
+
+ context 'when ProjectSnippet' do
+ let!(:snippet) { create(:project_snippet, project: project, author: author) }
+
+ context 'when user is able to admin_project_snippet' do
+ let(:author) { user }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'a successful destroy'
+ end
+
+ context 'when user is not able to admin_project_snippet' do
+ let(:author) { other_user }
+
+ it_behaves_like 'an unsuccessful destroy'
+ end
+ end
+
+ context 'when PersonalSnippet' do
+ let!(:snippet) { create(:personal_snippet, author: author) }
+
+ context 'when user is able to admin_personal_snippet' do
+ let(:author) { user }
+
+ it_behaves_like 'a successful destroy'
+ end
+
+ context 'when user is not able to admin_personal_snippet' do
+ let(:author) { other_user }
+
+ it_behaves_like 'an unsuccessful destroy'
+ end
+ end
+ end
+end
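Note that the outer `let!(:snippet)` in the destroy spec refers to `author`, which is only defined inside the nested contexts; RSpec resolves `let` definitions against the innermost context at evaluation time, so each context supplies its own author. A small generic sketch of that override mechanism:

    RSpec.describe 'let overrides' do
      let(:greeting) { "hello, #{name}" } # `name` comes from whichever context runs

      context 'for Alice' do
        let(:name) { 'Alice' }

        it { expect(greeting).to eq('hello, Alice') }
      end

      context 'for Bob' do
        let(:name) { 'Bob' }

        it { expect(greeting).to eq('hello, Bob') }
      end
    end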
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
new file mode 100644
index 00000000000..b8215f9779d
--- /dev/null
+++ b/spec/services/snippets/update_service_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Snippets::UpdateService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create :user, admin: true }
+ let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
+ let(:options) do
+ {
+ title: 'Test snippet',
+ file_name: 'snippet.rb',
+ content: 'puts "hello world"',
+ visibility_level: visibility_level
+ }
+ end
+ let(:updater) { user }
+
+ subject do
+ Snippets::UpdateService.new(
+ project,
+ updater,
+ options
+ ).execute(snippet)
+ end
+
+ shared_examples 'a service that updates a snippet' do
+ it 'updates a snippet with the provided attributes' do
+ expect { subject }.to change { snippet.title }.from(snippet.title).to(options[:title])
+ .and change { snippet.file_name }.from(snippet.file_name).to(options[:file_name])
+ .and change { snippet.content }.from(snippet.content).to(options[:content])
+ end
+ end
+
+ shared_examples 'public visibility level restrictions apply' do
+ let(:visibility_level) { Gitlab::VisibilityLevel::PUBLIC }
+
+ before do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
+ end
+
+ context 'when user is not an admin' do
+ it 'responds with an error' do
+ expect(subject).to be_error
+ end
+
+ it 'does not update snippet to public visibility' do
+ original_visibility = snippet.visibility_level
+
+ expect(subject.message).to match('has been restricted')
+ expect(snippet.visibility_level).to eq(original_visibility)
+ end
+ end
+
+ context 'when user is an admin' do
+ let(:updater) { admin }
+
+ it 'responds with success' do
+ expect(subject).to be_success
+ end
+
+ it 'updates the snippet to public visibility' do
+ old_visibility = snippet.visibility_level
+
+ expect(subject.payload[:snippet]).not_to be_nil
+ expect(snippet.visibility_level).not_to eq(old_visibility)
+ expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ end
+ end
+
+ context 'when visibility level is passed as a string' do
+ before do
+ options[:visibility] = 'internal'
+ options.delete(:visibility_level)
+ end
+
+ it 'assigns the correct visibility level' do
+ expect(subject).to be_success
+ expect(snippet.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
+ end
+
+ shared_examples 'snippet update data is tracked' do
+ let(:counter) { Gitlab::UsageDataCounters::SnippetCounter }
+
+ it 'increments count when update succeeds' do
+ expect { subject }.to change { counter.read(:update) }.by 1
+ end
+
+ context 'when update fails' do
+ let(:options) { { title: '' } }
+
+ it 'does not increment count' do
+ expect { subject }.not_to change { counter.read(:update) }
+ end
+ end
+ end
+
+ context 'when Project Snippet' do
+ let_it_be(:project) { create(:project) }
+ let!(:snippet) { create(:project_snippet, author: user, project: project) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'a service that updates a snippet'
+ it_behaves_like 'public visibility level restrictions apply'
+ it_behaves_like 'snippet update data is tracked'
+ end
+
+ context 'when PersonalSnippet' do
+ let(:project) { nil }
+ let!(:snippet) { create(:personal_snippet, author: user) }
+
+ it_behaves_like 'a service that updates a snippet'
+ it_behaves_like 'public visibility level restrictions apply'
+ it_behaves_like 'snippet update data is tracked'
+ end
+ end
+end
diff --git a/spec/services/spam/mark_as_spam_service_spec.rb b/spec/services/spam/mark_as_spam_service_spec.rb
new file mode 100644
index 00000000000..cba9d6a39cb
--- /dev/null
+++ b/spec/services/spam/mark_as_spam_service_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Spam::MarkAsSpamService do
+ let(:user_agent_detail) { build(:user_agent_detail) }
+ let(:spammable) { build(:issue, user_agent_detail: user_agent_detail) }
+ let(:fake_akismet_service) { double(:akismet_service, submit_spam: true) }
+
+ subject { described_class.new(spammable: spammable) }
+
+ describe '#execute' do
+ before do
+ allow(subject).to receive(:akismet).and_return(fake_akismet_service)
+ end
+
+ context 'when the spammable object is not submittable' do
+ before do
+ allow(spammable).to receive(:submittable_as_spam?).and_return false
+ end
+
+ it 'does not submit as spam' do
+ expect(subject.execute).to be_falsey
+ end
+ end
+
+ context 'when spam is submitted successfully' do
+ before do
+ allow(spammable).to receive(:submittable_as_spam?).and_return true
+ allow(fake_akismet_service).to receive(:submit_spam).and_return true
+ end
+
+ it 'submits as spam' do
+ expect(subject.execute).to be_truthy
+ end
+
+ it "updates the spammable object's user agent detail as being submitted as spam" do
+ expect(user_agent_detail).to receive(:update_attribute)
+
+ subject.execute
+ end
+
+ context 'when Akismet does not consider it spam' do
+ it 'does not update the spammable object as spam' do
+ allow(fake_akismet_service).to receive(:submit_spam).and_return false
+
+ expect(subject.execute).to be_falsey
+ end
+ end
+ end
+ end
+end
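From the expectations above, the service under test appears to guard on `submittable_as_spam?`, submit the content to Akismet, and then flag the user agent detail. A speculative outline of that flow, reconstructed only from the spec; the Akismet client is injected here purely to avoid guessing the real constructor, and the actual implementation may differ:

    # Speculative outline, not GitLab's actual class.
    module Spam
      class MarkAsSpamServiceSketch
        def initialize(spammable:, akismet:)
          @spammable = spammable
          @akismet = akismet
        end

        def execute
          return false unless @spammable.submittable_as_spam?
          return false unless @akismet.submit_spam

          # Record that this content has been manually flagged as spam.
          @spammable.user_agent_detail.update_attribute(:submitted, true)
        end
      end
    end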
diff --git a/spec/services/spam_service_spec.rb b/spec/services/spam_service_spec.rb
index 76f77583612..c8ebe87e4d2 100644
--- a/spec/services/spam_service_spec.rb
+++ b/spec/services/spam_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe SpamService do
describe '#when_recaptcha_verified' do
def check_spam(issue, request, recaptcha_verified)
- described_class.new(issue, request).when_recaptcha_verified(recaptcha_verified) do
+ described_class.new(spammable: issue, request: request).when_recaptcha_verified(recaptcha_verified) do
'yielded'
end
end
@@ -45,7 +45,7 @@ describe SpamService do
context 'when indicated as spam by akismet' do
shared_examples 'akismet spam' do
- it 'doesnt check as spam when request is missing' do
+ it "doesn't check as spam when request is missing" do
check_spam(issue, nil, false)
expect(issue).not_to be_spam
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index bdbcb0fdb07..84529af7187 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -48,10 +48,34 @@ describe Suggestions::ApplyService do
expect(commit.committer_email).to eq(user.commit_email)
expect(commit.author_name).to eq(user.name)
end
+
+ context 'when a custom suggestion commit message' do
+ before do
+ project.update!(suggestion_commit_message: message)
+
+ apply(suggestion)
+ end
+
+ context 'is not specified' do
+ let(:message) { nil }
+
+ it 'sets default commit message' do
+ expect(project.repository.commit.message).to eq("Apply suggestion to files/ruby/popen.rb")
+ end
+ end
+
+ context 'is specified' do
+ let(:message) { 'refactor: %{project_path} %{project_name} %{file_path} %{branch_name} %{username} %{user_full_name}' }
+
+ it 'sets custom commit message' do
+ expect(project.repository.commit.message).to eq("refactor: project-1 Project_1 files/ruby/popen.rb master test.user Test User")
+ end
+ end
+ end
end
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user, :commit_email) }
+ let(:project) { create(:project, :repository, path: 'project-1', name: 'Project_1') }
+ let(:user) { create(:user, :commit_email, name: 'Test User', username: 'test.user') }
let(:position) { build_position }
@@ -113,7 +137,8 @@ describe Suggestions::ApplyService do
context 'non-fork project' do
let(:merge_request) do
create(:merge_request, source_project: project,
- target_project: project)
+ target_project: project,
+ source_branch: 'master')
end
before do
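The custom suggestion commit message above is driven by `%{...}` placeholders. Ruby's built-in `format` (and `String#%`) understands the same named-placeholder syntax; whether the service uses it internally is an assumption, but the sketch below illustrates the substitution the spec expects:

    template = 'refactor: %{project_path} %{file_path} by %{username}'

    message = format(template,
                     project_path: 'project-1',
                     file_path: 'files/ruby/popen.rb',
                     username: 'test.user')

    puts message # => "refactor: project-1 files/ruby/popen.rb by test.user"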
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index a952e26e338..4ba22af85f0 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -7,9 +7,9 @@ describe SystemNoteService do
include RepoHelpers
include AssetsHelpers
- set(:group) { create(:group) }
- set(:project) { create(:project, :repository, group: group) }
- set(:author) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:author) { create(:user) }
let(:noteable) { create(:issue, project: project) }
let(:issue) { noteable }
@@ -76,28 +76,14 @@ describe SystemNoteService do
end
describe '.change_due_date' do
- subject { described_class.change_due_date(noteable, project, author, due_date) }
+ let(:due_date) { double }
- let(:due_date) { Date.today }
-
- it_behaves_like 'a note with overridable created_at'
-
- it_behaves_like 'a system note' do
- let(:action) { 'due_date' }
- end
-
- context 'when due date added' do
- it 'sets the note text' do
- expect(subject.note).to eq "changed due date to #{Date.today.to_s(:long)}"
+ it 'calls TimeTrackingService' do
+ expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
+ expect(service).to receive(:change_due_date).with(due_date)
end
- end
- context 'when due date removed' do
- let(:due_date) { nil }
-
- it 'sets the note text' do
- expect(subject.note).to eq 'removed due date'
- end
+ described_class.change_due_date(noteable, project, author, due_date)
end
end
@@ -488,36 +474,12 @@ describe SystemNoteService do
end
describe '.change_time_estimate' do
- subject { described_class.change_time_estimate(noteable, project, author) }
-
- it_behaves_like 'a system note' do
- let(:action) { 'time_tracking' }
- end
-
- context 'with a time estimate' do
- it 'sets the note text' do
- noteable.update_attribute(:time_estimate, 277200)
-
- expect(subject.note).to eq "changed time estimate to 1w 4d 5h"
- end
-
- context 'when time_tracking_limit_to_hours setting is true' do
- before do
- stub_application_setting(time_tracking_limit_to_hours: true)
- end
-
- it 'sets the note text' do
- noteable.update_attribute(:time_estimate, 277200)
-
- expect(subject.note).to eq "changed time estimate to 77h"
- end
+ it 'calls TimeTrackingService' do
+ expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
+ expect(service).to receive(:change_time_estimate)
end
- end
- context 'without a time estimate' do
- it 'sets the note text' do
- expect(subject.note).to eq "removed time estimate"
- end
+ described_class.change_time_estimate(noteable, project, author)
end
end
@@ -548,61 +510,12 @@ describe SystemNoteService do
end
describe '.change_time_spent' do
- # We need a custom noteable in order to the shared examples to be green.
- let(:noteable) do
- mr = create(:merge_request, source_project: project)
- mr.spend_time(duration: 360000, user_id: author.id)
- mr.save!
- mr
- end
-
- subject do
- described_class.change_time_spent(noteable, project, author)
- end
-
- it_behaves_like 'a system note' do
- let(:action) { 'time_tracking' }
- end
-
- context 'when time was added' do
- it 'sets the note text' do
- spend_time!(277200)
-
- expect(subject.note).to eq "added 1w 4d 5h of time spent"
+ it 'calls TimeTrackingService' do
+ expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
+ expect(service).to receive(:change_time_spent)
end
- end
-
- context 'when time was subtracted' do
- it 'sets the note text' do
- spend_time!(-277200)
-
- expect(subject.note).to eq "subtracted 1w 4d 5h of time spent"
- end
- end
-
- context 'when time was removed' do
- it 'sets the note text' do
- spend_time!(:reset)
- expect(subject.note).to eq "removed time spent"
- end
- end
-
- context 'when time_tracking_limit_to_hours setting is true' do
- before do
- stub_application_setting(time_tracking_limit_to_hours: true)
- end
-
- it 'sets the note text' do
- spend_time!(277200)
-
- expect(subject.note).to eq "added 77h of time spent"
- end
- end
-
- def spend_time!(seconds)
- noteable.spend_time(duration: seconds, user_id: author.id)
- noteable.save!
+ described_class.change_time_spent(noteable, project, author)
end
end
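The rewritten time tracking examples no longer assert on note text; they only verify that SystemNoteService delegates to ::SystemNotes::TimeTrackingService, using `expect_next_instance_of` instead of stubbing arbitrary instances. A hedged sketch of the delegating facade this implies, inferred from the spec rather than copied from the actual source:

    # Inferred delegation shape; the real module may forward arguments differently.
    module SystemNoteServiceSketch
      def self.change_due_date(noteable, project, author, due_date)
        ::SystemNotes::TimeTrackingService
          .new(noteable: noteable, project: project, author: author)
          .change_due_date(due_date)
      end
    end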
diff --git a/spec/services/system_notes/commit_service_spec.rb b/spec/services/system_notes/commit_service_spec.rb
index 4d4403be59a..5839a17e4a0 100644
--- a/spec/services/system_notes/commit_service_spec.rb
+++ b/spec/services/system_notes/commit_service_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
describe SystemNotes::CommitService do
- set(:group) { create(:group) }
- set(:project) { create(:project, :repository, group: group) }
- set(:author) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:author) { create(:user) }
let(:commit_service) { described_class.new(noteable: noteable, project: project, author: author) }
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index c2f627c681b..56ef0039b63 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -265,7 +265,9 @@ describe ::SystemNotes::IssuablesService do
context 'when cross-reference disallowed' do
before do
- expect_any_instance_of(described_class).to receive(:cross_reference_disallowed?).and_return(true)
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:cross_reference_disallowed?).and_return(true)
+ end
end
it 'returns nil' do
@@ -279,7 +281,9 @@ describe ::SystemNotes::IssuablesService do
context 'when cross-reference allowed' do
before do
- expect_any_instance_of(described_class).to receive(:cross_reference_disallowed?).and_return(false)
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:cross_reference_disallowed?).and_return(false)
+ end
end
it_behaves_like 'a system note' do
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
new file mode 100644
index 00000000000..7e3e6a75cdf
--- /dev/null
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -0,0 +1,129 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::SystemNotes::TimeTrackingService do
+ let_it_be(:author) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:noteable) { create(:issue, project: project) }
+
+ describe '#change_due_date' do
+ subject { described_class.new(noteable: noteable, project: project, author: author).change_due_date(due_date) }
+
+ let(:due_date) { Date.today }
+
+ it_behaves_like 'a note with overridable created_at'
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'due_date' }
+ end
+
+ context 'when due date added' do
+ it 'sets the note text' do
+ expect(subject.note).to eq "changed due date to #{due_date.to_s(:long)}"
+ end
+ end
+
+ context 'when due date removed' do
+ let(:due_date) { nil }
+
+ it 'sets the note text' do
+ expect(subject.note).to eq 'removed due date'
+ end
+ end
+ end
+
+ describe '#change_time_estimate' do
+ subject { described_class.new(noteable: noteable, project: project, author: author).change_time_estimate }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'time_tracking' }
+ end
+
+ context 'with a time estimate' do
+ it 'sets the note text' do
+ noteable.update_attribute(:time_estimate, 277200)
+
+ expect(subject.note).to eq "changed time estimate to 1w 4d 5h"
+ end
+
+ context 'when time_tracking_limit_to_hours setting is true' do
+ before do
+ stub_application_setting(time_tracking_limit_to_hours: true)
+ end
+
+ it 'sets the note text' do
+ noteable.update_attribute(:time_estimate, 277200)
+
+ expect(subject.note).to eq "changed time estimate to 77h"
+ end
+ end
+ end
+
+ context 'without a time estimate' do
+ it 'sets the note text' do
+ expect(subject.note).to eq "removed time estimate"
+ end
+ end
+ end
+
+ describe '#change_time_spent' do
+ # We need a custom noteable in order for the shared examples to pass.
+ let(:noteable) do
+ mr = create(:merge_request, source_project: project)
+ mr.spend_time(duration: 360000, user_id: author.id)
+ mr.save!
+ mr
+ end
+
+ subject do
+ described_class.new(noteable: noteable, project: project, author: author).change_time_spent
+ end
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'time_tracking' }
+ end
+
+ context 'when time was added' do
+ it 'sets the note text' do
+ spend_time!(277200)
+
+ expect(subject.note).to eq "added 1w 4d 5h of time spent"
+ end
+ end
+
+ context 'when time was subtracted' do
+ it 'sets the note text' do
+ spend_time!(-277200)
+
+ expect(subject.note).to eq "subtracted 1w 4d 5h of time spent"
+ end
+ end
+
+ context 'when time was removed' do
+ it 'sets the note text' do
+ spend_time!(:reset)
+
+ expect(subject.note).to eq "removed time spent"
+ end
+ end
+
+ context 'when time_tracking_limit_to_hours setting is true' do
+ before do
+ stub_application_setting(time_tracking_limit_to_hours: true)
+ end
+
+ it 'sets the note text' do
+ spend_time!(277200)
+
+ expect(subject.note).to eq "added 77h of time spent"
+ end
+ end
+
+ def spend_time!(seconds)
+ noteable.spend_time(duration: seconds, user_id: author.id)
+ noteable.save!
+ end
+ end
+end
diff --git a/spec/services/task_list_toggle_service_spec.rb b/spec/services/task_list_toggle_service_spec.rb
index a309951bbcb..82a5446dcb8 100644
--- a/spec/services/task_list_toggle_service_spec.rb
+++ b/spec/services/task_list_toggle_service_spec.rb
@@ -121,7 +121,7 @@ describe TaskListToggleService do
> * [x] Task 2
EOT
- markdown_html = Banzai::Pipeline::FullPipeline.call(markdown, project: nil)[:output].to_html
+ markdown_html = parse_markdown(markdown)
toggler = described_class.new(markdown, markdown_html,
toggle_as_checked: true,
line_source: '> > * [ ] Task 1', line_number: 1)
@@ -142,7 +142,7 @@ describe TaskListToggleService do
* [x] Task 2
EOT
- markdown_html = Banzai::Pipeline::FullPipeline.call(markdown, project: nil)[:output].to_html
+ markdown_html = parse_markdown(markdown)
toggler = described_class.new(markdown, markdown_html,
toggle_as_checked: true,
line_source: '* [ ] Task 1', line_number: 5)
@@ -151,4 +151,44 @@ describe TaskListToggleService do
expect(toggler.updated_markdown.lines[4]).to eq "* [x] Task 1\n"
expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
end
+
+ context 'when clicking an embedded subtask' do
+ it 'properly handles it inside an unordered list' do
+ markdown =
+ <<-EOT.strip_heredoc
+ - - [ ] Task 1
+ - [x] Task 2
+ EOT
+
+ markdown_html = parse_markdown(markdown)
+ toggler = described_class.new(markdown, markdown_html,
+ toggle_as_checked: true,
+ line_source: '- - [ ] Task 1', line_number: 1)
+
+ expect(toggler.execute).to be_truthy
+ expect(toggler.updated_markdown.lines[0]).to eq "- - [x] Task 1\n"
+ expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
+ end
+
+ it 'properly handles it inside an ordered list' do
+ markdown =
+ <<-EOT.strip_heredoc
+ 1. - [ ] Task 1
+ - [x] Task 2
+ EOT
+
+ markdown_html = parse_markdown(markdown)
+ toggler = described_class.new(markdown, markdown_html,
+ toggle_as_checked: true,
+ line_source: '1. - [ ] Task 1', line_number: 1)
+
+ expect(toggler.execute).to be_truthy
+ expect(toggler.updated_markdown.lines[0]).to eq "1. - [x] Task 1\n"
+ expect(toggler.updated_markdown_html).to include('disabled checked> Task 1')
+ end
+ end
+
+ def parse_markdown(markdown)
+ Banzai::Pipeline::FullPipeline.call(markdown, project: nil)[:output].to_html
+ end
end
diff --git a/spec/services/template_engines/liquid_service_spec.rb b/spec/services/template_engines/liquid_service_spec.rb
new file mode 100644
index 00000000000..7c5262bc264
--- /dev/null
+++ b/spec/services/template_engines/liquid_service_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe TemplateEngines::LiquidService do
+ describe '#render' do
+ let(:template) { 'up{env={{ci_environment_slug}}}' }
+ let(:result) { subject }
+
+ let_it_be(:slug) { 'env_slug' }
+
+ let_it_be(:context) do
+ {
+ ci_environment_slug: slug,
+ environment_filter: "container_name!=\"POD\",environment=\"#{slug}\""
+ }
+ end
+
+ subject { described_class.new(template).render(context) }
+
+ it 'substitutes variables when the context has symbol keys' do
+ expect(result).to include("up{env=#{slug}")
+ end
+
+ context 'with multiple occurrences of variable in template' do
+ let(:template) do
+ 'up{env1={{ci_environment_slug}},env2={{ci_environment_slug}}}'
+ end
+
+ it 'substitutes variables' do
+ expect(result).to eq("up{env1=#{slug},env2=#{slug}}")
+ end
+ end
+
+ context 'with multiple variables in template' do
+ let(:template) do
+ 'up{env={{ci_environment_slug}},' \
+ '{{environment_filter}}}'
+ end
+
+ it 'substitutes all variables' do
+ expect(result).to eq(
+ "up{env=#{slug}," \
+ "container_name!=\"POD\",environment=\"#{slug}\"}"
+ )
+ end
+ end
+
+ context 'with unknown variables in template' do
+ let(:template) { 'up{env={{env_slug}}}' }
+
+ it 'does not substitute unknown variables' do
+ expect(result).to eq("up{env=}")
+ end
+ end
+
+ context 'with extra variables in context' do
+ let(:template) { 'up{env={{ci_environment_slug}}}' }
+
+ it 'substitutes variables' do
+ # If context has only 1 key, there is no need for this spec.
+ expect(context.count).to be > 1
+ expect(result).to eq("up{env=#{slug}}")
+ end
+ end
+
+ context 'with unknown and known variables in template' do
+ let(:template) { 'up{env={{ci_environment_slug}},other_env={{env_slug}}}' }
+
+ it 'substitutes known variables' do
+ expect(result).to eq("up{env=#{slug},other_env=}")
+ end
+ end
+
+ context 'Liquid errors' do
+ shared_examples 'raises RenderError' do |message|
+ it do
+ expect { result }.to raise_error(described_class::RenderError, message)
+ end
+ end
+
+ context 'when Liquid raises an error' do
+ let(:template) { 'up{env={{ci_environment_slug}}' }
+ let(:liquid_template) { Liquid::Template.new }
+
+ before do
+ allow(Liquid::Template).to receive(:parse).with(template).and_return(liquid_template)
+ allow(liquid_template).to receive(:render!).and_raise(exception, message)
+ end
+
+ context 'raises Liquid::MemoryError' do
+ let(:exception) { Liquid::MemoryError }
+ let(:message) { 'Liquid error: Memory limits exceeded' }
+
+ it_behaves_like 'raises RenderError', 'Memory limit exceeded while rendering template'
+ end
+
+ context 'raises Liquid::Error' do
+ let(:exception) { Liquid::Error }
+ let(:message) { 'Liquid error: Generic error message' }
+
+ it_behaves_like 'raises RenderError', 'Error rendering query'
+ end
+ end
+
+ context 'with template that is expensive to render' do
+ let(:template) do
+ '{% assign loop_count = 1000 %}'\
+ '{% assign padStr = "0" %}'\
+ '{% assign number_to_pad = "1" %}'\
+ '{% assign strLength = number_to_pad | size %}'\
+ '{% assign padLength = loop_count | minus: strLength %}'\
+ '{% if padLength > 0 %}'\
+ ' {% assign padded = number_to_pad %}'\
+ ' {% for position in (1..padLength) %}'\
+ ' {% assign padded = padded | prepend: padStr %}'\
+ ' {% endfor %}'\
+ ' {{ padded }}'\
+ '{% endif %}'
+ end
+
+ it_behaves_like 'raises RenderError', 'Memory limit exceeded while rendering template'
+ end
+ end
+ end
+end
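The template engine spec exercises Liquid variable substitution plus its memory and generic error classes. With the liquid gem itself, a minimal render looks like the sketch below; Liquid assigns are keyed by strings, so how TemplateEngines::LiquidService maps its symbol-keyed context is an assumption:

    require 'liquid'

    template = Liquid::Template.parse('up{env={{ci_environment_slug}}}')

    # Symbol-keyed contexts would need to be converted first, e.g. with
    # Hash#transform_keys(&:to_s).
    puts template.render('ci_environment_slug' => 'env_slug')
    # => "up{env=env_slug}"
    # Unknown variables render as empty strings, matching the spec above.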
diff --git a/spec/services/update_snippet_service_spec.rb b/spec/services/update_snippet_service_spec.rb
deleted file mode 100644
index 19b35dca6a7..00000000000
--- a/spec/services/update_snippet_service_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe UpdateSnippetService do
- before do
- @user = create :user
- @admin = create :user, admin: true
- @opts = {
- title: 'Test snippet',
- file_name: 'snippet.rb',
- content: 'puts "hello world"',
- visibility_level: Gitlab::VisibilityLevel::PRIVATE
- }
- end
-
- context 'When public visibility is restricted' do
- before do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
-
- @snippet = create_snippet(@project, @user, @opts)
- @opts.merge!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- end
-
- it 'non-admins should not be able to update to public visibility' do
- old_visibility = @snippet.visibility_level
- update_snippet(@project, @user, @snippet, @opts)
- expect(@snippet.errors.messages).to have_key(:visibility_level)
- expect(@snippet.errors.messages[:visibility_level].first).to(
- match('has been restricted')
- )
- expect(@snippet.visibility_level).to eq(old_visibility)
- end
-
- it 'admins should be able to update to public visibility' do
- old_visibility = @snippet.visibility_level
- update_snippet(@project, @admin, @snippet, @opts)
- expect(@snippet.visibility_level).not_to eq(old_visibility)
- expect(@snippet.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
- end
-
- describe "when visibility level is passed as a string" do
- before do
- @opts[:visibility] = 'internal'
- @opts.delete(:visibility_level)
- end
-
- it "assigns the correct visibility level" do
- update_snippet(@project, @user, @snippet, @opts)
- expect(@snippet.errors.any?).to be_falsey
- expect(@snippet.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
- end
- end
-
- describe 'usage counter' do
- let(:counter) { Gitlab::UsageDataCounters::SnippetCounter }
- let(:snippet) { create_snippet(nil, @user, @opts) }
-
- it 'increments count' do
- expect do
- update_snippet(nil, @admin, snippet, @opts)
- end.to change { counter.read(:update) }.by 1
- end
-
- it 'does not increment count if create fails' do
- expect do
- update_snippet(nil, @admin, snippet, { title: '' })
- end.not_to change { counter.read(:update) }
- end
- end
-
- def create_snippet(project, user, opts)
- CreateSnippetService.new(project, user, opts).execute
- end
-
- def update_snippet(project, user, snippet, opts)
- UpdateSnippetService.new(project, user, snippet, opts).execute
- end
-end
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index d8d2be87fd3..f477eee1dd6 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -7,7 +7,7 @@ describe Users::ActivityService do
let(:user) { create(:user, last_activity_on: last_activity_on) }
- subject { described_class.new(user, 'type') }
+ subject { described_class.new(user) }
describe '#execute', :clean_gitlab_redis_shared_state do
context 'when last activity is nil' do
@@ -40,7 +40,7 @@ describe Users::ActivityService do
let(:fake_object) { double(username: 'hello') }
it 'does not record activity' do
- service = described_class.new(fake_object, 'pull')
+ service = described_class.new(fake_object)
expect(service).not_to receive(:record_activity)
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index 23a0c71175e..d9335cef5cc 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -20,6 +20,22 @@ describe Users::DestroyService do
expect { Namespace.find(namespace.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
+ it 'deletes user associations in batches' do
+ expect(user).to receive(:destroy_dependent_associations_in_batches)
+
+ service.execute(user)
+ end
+
+ context 'when :destroy_user_associations_in_batches flag is disabled' do
+ it 'does not delete user associations in batches' do
+ stub_feature_flags(destroy_user_associations_in_batches: false)
+
+ expect(user).not_to receive(:destroy_dependent_associations_in_batches)
+
+ service.execute(user)
+ end
+ end
+
it 'will delete the project' do
expect_next_instance_of(Projects::DestroyService) do |destroy_service|
expect(destroy_service).to receive(:execute).once.and_return(true)
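The new destroy examples toggle the `destroy_user_associations_in_batches` feature flag with `stub_feature_flags`. The production check presumably resembles the sketch below; `Feature.enabled?` is GitLab's usual flag check, but the exact call site is an assumption:

    # Hypothetical call site inside the user destroy path.
    user.destroy_dependent_associations_in_batches if Feature.enabled?(:destroy_user_associations_in_batches)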
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index 9384287f98a..50bbb16e368 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -6,13 +6,6 @@ describe Users::UpdateService do
let(:user) { create(:user) }
describe '#execute' do
- it 'updates the name' do
- result = update_user(user, name: 'New Name')
-
- expect(result).to eq(status: :success)
- expect(user.name).to eq('New Name')
- end
-
it 'updates time preferences' do
result = update_user(user, timezone: 'Europe/Warsaw', time_display_relative: true, time_format_in_24h: false)