summaryrefslogtreecommitdiff
path: root/spec/services
diff options
context:
space:
mode:
Diffstat (limited to 'spec/services')
-rw-r--r--spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb12
-rw-r--r--spec/services/bulk_imports/tree_export_service_spec.rb23
-rw-r--r--spec/services/bulk_imports/uploads_export_service_spec.rb24
-rw-r--r--spec/services/ci/create_pipeline_service/logger_spec.rb139
-rw-r--r--spec/services/ci/create_pipeline_service/tags_spec.rb173
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb110
-rw-r--r--spec/services/ci/expire_pipeline_cache_service_spec.rb54
-rw-r--r--spec/services/ci/generate_terraform_reports_service_spec.rb2
-rw-r--r--spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb40
-rw-r--r--spec/services/ci/job_artifacts/destroy_batch_service_spec.rb85
-rw-r--r--spec/services/ci/parse_dotenv_artifact_service_spec.rb40
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb1102
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service.rb1040
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb61
-rw-r--r--spec/services/ci/play_build_service_spec.rb17
-rw-r--r--spec/services/ci/process_sync_events_service_spec.rb129
-rw-r--r--spec/services/ci/register_job_service_spec.rb106
-rw-r--r--spec/services/ci/retry_build_service_spec.rb27
-rw-r--r--spec/services/ci/stuck_builds/drop_pending_service_spec.rb46
-rw-r--r--spec/services/ci/update_pending_build_service_spec.rb8
-rw-r--r--spec/services/clusters/agent_tokens/create_service_spec.rb19
-rw-r--r--spec/services/clusters/cleanup/project_namespace_service_spec.rb26
-rw-r--r--spec/services/clusters/cleanup/service_account_service_spec.rb14
-rw-r--r--spec/services/clusters/integrations/prometheus_health_check_service_spec.rb2
-rw-r--r--spec/services/concerns/audit_event_save_type_spec.rb28
-rw-r--r--spec/services/dependency_proxy/find_cached_manifest_service_spec.rb (renamed from spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb)40
-rw-r--r--spec/services/dependency_proxy/pull_manifest_service_spec.rb77
-rw-r--r--spec/services/deployments/older_deployments_drop_service_spec.rb2
-rw-r--r--spec/services/events/destroy_service_spec.rb50
-rw-r--r--spec/services/feature_flags/create_service_spec.rb20
-rw-r--r--spec/services/feature_flags/update_service_spec.rb20
-rw-r--r--spec/services/git/branch_push_service_spec.rb38
-rw-r--r--spec/services/git/process_ref_changes_service_spec.rb44
-rw-r--r--spec/services/google_cloud/service_accounts_service_spec.rb69
-rw-r--r--spec/services/groups/create_service_spec.rb10
-rw-r--r--spec/services/groups/transfer_service_spec.rb2
-rw-r--r--spec/services/groups/update_shared_runners_service_spec.rb6
-rw-r--r--spec/services/import/github_service_spec.rb33
-rw-r--r--spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb30
-rw-r--r--spec/services/integrations/propagate_service_spec.rb (renamed from spec/services/admin/propagate_integration_service_spec.rb)2
-rw-r--r--spec/services/issues/create_service_spec.rb24
-rw-r--r--spec/services/issues/set_crm_contacts_service_spec.rb88
-rw-r--r--spec/services/issues/update_service_spec.rb14
-rw-r--r--spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb59
-rw-r--r--spec/services/loose_foreign_keys/cleaner_service_spec.rb25
-rw-r--r--spec/services/members/create_service_spec.rb74
-rw-r--r--spec/services/merge_requests/after_create_service_spec.rb56
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb10
-rw-r--r--spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb45
-rw-r--r--spec/services/merge_requests/close_service_spec.rb4
-rw-r--r--spec/services/merge_requests/handle_assignees_change_service_spec.rb8
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb2
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb5
-rw-r--r--spec/services/merge_requests/rebase_service_spec.rb21
-rw-r--r--spec/services/merge_requests/remove_attention_requested_service_spec.rb85
-rw-r--r--spec/services/merge_requests/resolved_discussion_notification_service_spec.rb12
-rw-r--r--spec/services/merge_requests/squash_service_spec.rb32
-rw-r--r--spec/services/merge_requests/toggle_attention_requested_service_spec.rb39
-rw-r--r--spec/services/notification_service_spec.rb39
-rw-r--r--spec/services/packages/debian/update_distribution_service_spec.rb4
-rw-r--r--spec/services/packages/npm/create_package_service_spec.rb11
-rw-r--r--spec/services/projects/destroy_service_spec.rb129
-rw-r--r--spec/services/projects/prometheus/alerts/create_service_spec.rb2
-rw-r--r--spec/services/projects/prometheus/alerts/destroy_service_spec.rb2
-rw-r--r--spec/services/projects/prometheus/alerts/update_service_spec.rb2
-rw-r--r--spec/services/projects/transfer_service_spec.rb4
-rw-r--r--spec/services/protected_tags/create_service_spec.rb39
-rw-r--r--spec/services/protected_tags/update_service_spec.rb39
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb48
-rw-r--r--spec/services/repositories/changelog_service_spec.rb31
-rw-r--r--spec/services/search_service_spec.rb104
-rw-r--r--spec/services/service_ping/submit_service_ping_service_spec.rb19
-rw-r--r--spec/services/system_note_service_spec.rb42
-rw-r--r--spec/services/system_notes/commit_service_spec.rb2
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb78
-rw-r--r--spec/services/todos/destroy/unauthorized_features_service_spec.rb (renamed from spec/services/todos/destroy/private_features_service_spec.rb)22
-rw-r--r--spec/services/users/dismiss_callout_service_spec.rb (renamed from spec/services/users/dismiss_user_callout_service_spec.rb)6
-rw-r--r--spec/services/users/refresh_authorized_projects_service_spec.rb39
-rw-r--r--spec/services/verify_pages_domain_service_spec.rb3
79 files changed, 3207 insertions, 1831 deletions
diff --git a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
index 8a53d9fbf7c..c6b184bd003 100644
--- a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
+++ b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
@@ -59,7 +59,9 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
.create!(project: project2, access_level: Gitlab::Access::MAINTAINER)
to_be_removed = [project2.id]
- to_be_added = [[user.id, project.id, Gitlab::Access::MAINTAINER]]
+ to_be_added = [
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
expect(service.execute).to eq([to_be_removed, to_be_added])
end
@@ -70,7 +72,9 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
end
to_be_removed = [project.id]
- to_be_added = [[user.id, project.id, Gitlab::Access::MAINTAINER]]
+ to_be_added = [
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
expect(service.execute).to eq([to_be_removed, to_be_added])
end
@@ -80,7 +84,9 @@ RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
.create!(project: project, access_level: Gitlab::Access::DEVELOPER)
to_be_removed = [project.id]
- to_be_added = [[user.id, project.id, Gitlab::Access::MAINTAINER]]
+ to_be_added = [
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
expect(service.execute).to eq([to_be_removed, to_be_added])
end
diff --git a/spec/services/bulk_imports/tree_export_service_spec.rb b/spec/services/bulk_imports/tree_export_service_spec.rb
index f2ed747b64e..ffb81fe2b5f 100644
--- a/spec/services/bulk_imports/tree_export_service_spec.rb
+++ b/spec/services/bulk_imports/tree_export_service_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe BulkImports::TreeExportService do
let_it_be(:project) { create(:project) }
let_it_be(:export_path) { Dir.mktmpdir }
- let_it_be(:relation) { 'issues' }
+
+ let(:relation) { 'issues' }
subject(:service) { described_class.new(project, export_path, relation) }
@@ -25,11 +26,31 @@ RSpec.describe BulkImports::TreeExportService do
expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
end
end
+
+ context 'when relation is self' do
+ let(:relation) { 'self' }
+
+ it 'executes export on portable itself' do
+ expect_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer|
+ expect(serializer).to receive(:serialize_root)
+ end
+
+ subject.execute
+ end
+ end
end
describe '#exported_filename' do
it 'returns filename of the exported file' do
expect(subject.exported_filename).to eq('issues.ndjson')
end
+
+ context 'when relation is self' do
+ let(:relation) { 'self' }
+
+ it 'returns filename of the exported file' do
+ expect(subject.exported_filename).to eq('self.json')
+ end
+ end
end
end
diff --git a/spec/services/bulk_imports/uploads_export_service_spec.rb b/spec/services/bulk_imports/uploads_export_service_spec.rb
new file mode 100644
index 00000000000..39bcacfdc5e
--- /dev/null
+++ b/spec/services/bulk_imports/uploads_export_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::UploadsExportService do
+ let_it_be(:project) { create(:project, avatar: fixture_file_upload('spec/fixtures/rails_sample.png', 'image/png')) }
+ let_it_be(:upload) { create(:upload, :with_file, :issuable_upload, uploader: FileUploader, model: project) }
+ let_it_be(:export_path) { Dir.mktmpdir }
+
+ subject(:service) { described_class.new(project, export_path) }
+
+ after do
+ FileUtils.remove_entry(export_path) if Dir.exist?(export_path)
+ end
+
+ describe '#execute' do
+ it 'exports project uploads and avatar' do
+ subject.execute
+
+ expect(File.exist?(File.join(export_path, 'avatar', 'rails_sample.png'))).to eq(true)
+ expect(File.exist?(File.join(export_path, upload.secret, upload.retrieve_uploader.filename))).to eq(true)
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb
new file mode 100644
index 00000000000..dfe0859015d
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/logger_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService do
+ context 'pipeline logger' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
+
+ let(:ref) { 'refs/heads/master' }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:pipeline) { service.execute(:push).payload }
+ let(:file_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
+
+ before do
+ stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
+ end
+
+ let(:counters) do
+ {
+ 'count' => a_kind_of(Numeric),
+ 'avg' => a_kind_of(Numeric),
+ 'max' => a_kind_of(Numeric),
+ 'min' => a_kind_of(Numeric)
+ }
+ end
+
+ let(:loggable_data) do
+ {
+ 'pipeline_creation_caller' => 'Ci::CreatePipelineService',
+ 'pipeline_source' => 'push',
+ 'pipeline_id' => a_kind_of(Numeric),
+ 'pipeline_persisted' => true,
+ 'project_id' => project.id,
+ 'pipeline_creation_service_duration_s' => a_kind_of(Numeric),
+ 'pipeline_creation_duration_s' => counters,
+ 'pipeline_size_count' => counters,
+ 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
+ }
+ end
+
+ context 'when the duration is under the threshold' do
+ it 'does not create a log entry but it collects the data' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ expect(pipeline).to be_created_successfully
+
+ expect(service.logger.observations_hash)
+ .to match(
+ a_hash_including(
+ 'pipeline_creation_duration_s' => counters,
+ 'pipeline_size_count' => counters,
+ 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
+ )
+ )
+ end
+ end
+
+ context 'when the durations exceeds the threshold' do
+ let(:timer) do
+ proc do
+ @timer = @timer.to_i + 30
+ end
+ end
+
+ before do
+ allow(Gitlab::Ci::Pipeline::Logger)
+ .to receive(:current_monotonic_time) { timer.call }
+ end
+
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect(pipeline).to be_created_successfully
+ end
+
+ context 'when the pipeline is not persisted' do
+ let(:loggable_data) do
+ {
+ 'pipeline_creation_caller' => 'Ci::CreatePipelineService',
+ 'pipeline_source' => 'push',
+ 'pipeline_id' => nil,
+ 'pipeline_persisted' => false,
+ 'project_id' => project.id,
+ 'pipeline_creation_service_duration_s' => a_kind_of(Numeric),
+ 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
+ }
+ end
+
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ expect(pipeline).to receive(:save!).and_raise { RuntimeError }
+ end
+ end
+
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect { pipeline }.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_pipeline_creation_logger: false)
+ end
+
+ it 'does not create a log entry' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+
+ expect(pipeline).to be_created_successfully
+ expect(service.logger.observations_hash).to eq({})
+ end
+ end
+ end
+
+ context 'when the size exceeds the threshold' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:total_size) { 5000 }
+ end
+ end
+
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect(pipeline).to be_created_successfully
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/tags_spec.rb b/spec/services/ci/create_pipeline_service/tags_spec.rb
index 335d35010c8..cbbeb870c5f 100644
--- a/spec/services/ci/create_pipeline_service/tags_spec.rb
+++ b/spec/services/ci/create_pipeline_service/tags_spec.rb
@@ -7,16 +7,15 @@ RSpec.describe Ci::CreatePipelineService do
let_it_be(:user) { project.owner }
let(:ref) { 'refs/heads/master' }
- let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source).payload }
+ let(:pipeline) { create_pipeline }
before do
- stub_ci_pipeline_yaml_file(config)
+ stub_yaml_config(config)
end
context 'with valid config' do
- let(:config) { YAML.dump({ test: { script: 'ls', tags: %w[tag1 tag2] } }) }
+ let(:config) { { test: { script: 'ls', tags: %w[tag1 tag2] } } }
it 'creates a pipeline', :aggregate_failures do
expect(pipeline).to be_created_successfully
@@ -25,8 +24,8 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'with too many tags' do
- let(:tags) { Array.new(50) {|i| "tag-#{i}" } }
- let(:config) { YAML.dump({ test: { script: 'ls', tags: tags } }) }
+ let(:tags) { build_tag_list(label: 'custom', size: 50) }
+ let(:config) { { test: { script: 'ls', tags: tags } } }
it 'creates a pipeline without builds', :aggregate_failures do
expect(pipeline).not_to be_created_successfully
@@ -34,5 +33,167 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline.yaml_errors).to eq("jobs:test:tags config must be less than the limit of #{Gitlab::Ci::Config::Entry::Tags::TAGS_LIMIT} tags")
end
end
+
+ context 'tags persistence' do
+ let(:config) do
+ {
+ build: {
+ script: 'ls',
+ stage: 'build',
+ tags: build_tag_list(label: 'build')
+ },
+ test: {
+ script: 'ls',
+ stage: 'test',
+ tags: build_tag_list(label: 'test')
+ }
+ }
+ end
+
+ let(:config_without_tags) do
+ config.transform_values { |job| job.except(:tags) }
+ end
+
+ context 'with multiple tags' do
+ context 'when the tags do not exist' do
+ it 'does not execute N+1 queries' do
+ stub_yaml_config(config_without_tags)
+
+ # warm up the cached objects so we get a more accurate count
+ create_pipeline
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ create_pipeline
+ end
+
+ stub_yaml_config(config)
+
+ # 2 select tags.*
+ # 1 insert tags
+ # 1 insert taggings
+ tags_queries_size = 4
+
+ expect { pipeline }
+ .not_to exceed_all_query_limit(control)
+ .with_threshold(tags_queries_size)
+
+ expect(pipeline).to be_created_successfully
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_bulk_insert_tags: false)
+ end
+
+ it 'executes N+1s queries' do
+ stub_yaml_config(config_without_tags)
+
+ # warm up the cached objects so we get a more accurate count
+ create_pipeline
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ create_pipeline
+ end
+
+ stub_yaml_config(config)
+
+ expect { pipeline }
+ .to exceed_all_query_limit(control)
+ .with_threshold(4)
+
+ expect(pipeline).to be_created_successfully
+ end
+ end
+
+ context 'when tags are already persisted' do
+ it 'does not execute N+1 queries' do
+ # warm up the cached objects so we get a more accurate count
+ # and insert the tags
+ create_pipeline
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ create_pipeline
+ end
+
+ # 1 select tags.*
+ # 1 insert taggings
+ tags_queries_size = 2
+
+ expect { pipeline }
+ .not_to exceed_all_query_limit(control)
+ .with_threshold(tags_queries_size)
+
+ expect(pipeline).to be_created_successfully
+ end
+ end
+ end
+
+ context 'with bridge jobs' do
+ let(:config) do
+ {
+ test_1: {
+ script: 'ls',
+ stage: 'test',
+ tags: build_tag_list(label: 'test_1')
+ },
+ test_2: {
+ script: 'ls',
+ stage: 'test',
+ tags: build_tag_list(label: '$CI_JOB_NAME')
+ },
+ test_3: {
+ script: 'ls',
+ stage: 'test',
+ tags: build_tag_list(label: 'test_1') + build_tag_list(label: 'test_2')
+ },
+ test_4: {
+ script: 'ls',
+ stage: 'test'
+ },
+ deploy: {
+ stage: 'deploy',
+ trigger: 'my/project'
+ }
+ }
+ end
+
+ it do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.bridges.size).to eq(1)
+ expect(pipeline.builds.size).to eq(4)
+
+ expect(tags_for('test_1'))
+ .to have_attributes(count: 5)
+ .and all(match(/test_1-tag-\d+/))
+
+ expect(tags_for('test_2'))
+ .to have_attributes(count: 5)
+ .and all(match(/test_2-tag-\d+/))
+
+ expect(tags_for('test_3'))
+ .to have_attributes(count: 10)
+ .and all(match(/test_[1,2]-tag-\d+/))
+
+ expect(tags_for('test_4')).to be_empty
+ end
+ end
+ end
+ end
+
+ def tags_for(build_name)
+ pipeline.builds.find_by_name(build_name).tag_list
+ end
+
+ def stub_yaml_config(config)
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ def create_pipeline
+ service.execute(:push).payload
+ end
+
+ def build_tag_list(label:, size: 5)
+ Array.new(size) { |index| "#{label}-tag-#{index}" }
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index c78e19ea62d..ef879d536c3 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -46,6 +46,47 @@ RSpec.describe Ci::CreatePipelineService do
end
# rubocop:enable Metrics/ParameterLists
+ context 'performance' do
+ it_behaves_like 'pipelines are created without N+1 SQL queries' do
+ let(:config1) do
+ <<~YAML
+ job1:
+ stage: build
+ script: exit 0
+
+ job2:
+ stage: test
+ script: exit 0
+ YAML
+ end
+
+ let(:config2) do
+ <<~YAML
+ job1:
+ stage: build
+ script: exit 0
+
+ job2:
+ stage: test
+ script: exit 0
+
+ job3:
+ stage: deploy
+ script: exit 0
+ YAML
+ end
+
+ let(:accepted_n_plus_ones) do
+ 1 + # SELECT "ci_instance_variables"
+ 1 + # INSERT INTO "ci_stages"
+ 1 + # SELECT "ci_builds".* FROM "ci_builds"
+ 1 + # INSERT INTO "ci_builds"
+ 1 + # INSERT INTO "ci_builds_metadata"
+ 1 # SELECT "taggings".* FROM "taggings"
+ end
+ end
+ end
+
context 'valid params' do
let(:pipeline) { execute_service.payload }
@@ -1951,6 +1992,75 @@ RSpec.describe Ci::CreatePipelineService do
let(:rules_job) { find_job('rules-job') }
let(:delayed_job) { find_job('delayed-job') }
+ context 'with when:manual' do
+ let(:config) do
+ <<-EOY
+ job-with-rules:
+ script: 'echo hey'
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+
+ job-when-with-rules:
+ script: 'echo hey'
+ when: manual
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+
+ job-when-with-rules-when:
+ script: 'echo hey'
+ when: manual
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: on_success
+
+ job-with-rules-when:
+ script: 'echo hey'
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ job-without-rules:
+ script: 'echo this is a job with NO rules'
+ EOY
+ end
+
+ let(:job_with_rules) { find_job('job-with-rules') }
+ let(:job_when_with_rules) { find_job('job-when-with-rules') }
+ let(:job_when_with_rules_when) { find_job('job-when-with-rules-when') }
+ let(:job_with_rules_when) { find_job('job-with-rules-when') }
+ let(:job_without_rules) { find_job('job-without-rules') }
+
+ context 'when matching the rules' do
+ let(:ref_name) { 'refs/heads/master' }
+
+ it 'adds the job-with-rules with a when:manual' do
+ expect(job_with_rules).to be_persisted
+ expect(job_when_with_rules).to be_persisted
+ expect(job_when_with_rules_when).to be_persisted
+ expect(job_with_rules_when).to be_persisted
+ expect(job_without_rules).to be_persisted
+
+ expect(job_with_rules.when).to eq('on_success')
+ expect(job_when_with_rules.when).to eq('manual')
+ expect(job_when_with_rules_when.when).to eq('on_success')
+ expect(job_with_rules_when.when).to eq('manual')
+ expect(job_without_rules.when).to eq('on_success')
+ end
+ end
+
+ context 'when there is no match to the rule' do
+ let(:ref_name) { 'refs/heads/wip' }
+
+ it 'does not add job_with_rules' do
+ expect(job_with_rules).to be_nil
+ expect(job_when_with_rules).to be_nil
+ expect(job_when_with_rules_when).to be_nil
+ expect(job_with_rules_when).to be_nil
+ expect(job_without_rules).to be_persisted
+ end
+ end
+ end
+
shared_examples 'rules jobs are excluded' do
it 'only persists the job without rules' do
expect(pipeline).to be_persisted
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index 613bbe45e68..8cfe756faf3 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -16,14 +16,16 @@ RSpec.describe Ci::ExpirePipelineCacheService do
pipeline_path = "/#{project.full_path}/-/pipelines/#{pipeline.id}.json"
graphql_pipeline_path = "/api/graphql:pipelines/id/#{pipeline.id}"
graphql_pipeline_sha_path = "/api/graphql:pipelines/sha/#{pipeline.sha}"
+ graphql_project_on_demand_scan_counts_path = "/api/graphql:on_demand_scan/counts/#{project.full_path}"
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
- expect(store).to receive(:touch).with(pipelines_path)
- expect(store).to receive(:touch).with(new_mr_pipelines_path)
- expect(store).to receive(:touch).with(pipeline_path)
- expect(store).to receive(:touch).with(graphql_pipeline_path)
- expect(store).to receive(:touch).with(graphql_pipeline_sha_path)
- end
+ expect_touched_etag_caching_paths(
+ pipelines_path,
+ new_mr_pipelines_path,
+ pipeline_path,
+ graphql_pipeline_path,
+ graphql_pipeline_sha_path,
+ graphql_project_on_demand_scan_counts_path
+ )
subject.execute(pipeline)
end
@@ -35,9 +37,10 @@ RSpec.describe Ci::ExpirePipelineCacheService do
merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json"
- allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch)
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path)
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_widget_path)
+ expect_touched_etag_caching_paths(
+ merge_request_pipelines_path,
+ merge_request_widget_path
+ )
subject.execute(merge_request.all_pipelines.last)
end
@@ -76,10 +79,7 @@ RSpec.describe Ci::ExpirePipelineCacheService do
it 'updates the cache of dependent pipeline' do
dependent_pipeline_path = "/#{source.source_project.full_path}/-/pipelines/#{source.source_pipeline.id}.json"
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
- allow(store).to receive(:touch)
- expect(store).to receive(:touch).with(dependent_pipeline_path)
- end
+ expect_touched_etag_caching_paths(dependent_pipeline_path)
subject.execute(pipeline)
end
@@ -92,13 +92,31 @@ RSpec.describe Ci::ExpirePipelineCacheService do
it 'updates the cache of dependent pipeline' do
dependent_pipeline_path = "/#{source.project.full_path}/-/pipelines/#{source.pipeline.id}.json"
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
- allow(store).to receive(:touch)
- expect(store).to receive(:touch).with(dependent_pipeline_path)
- end
+ expect_touched_etag_caching_paths(dependent_pipeline_path)
subject.execute(pipeline)
end
end
+
+ it 'does not do N+1 queries' do
+ subject.execute(pipeline)
+
+ control = ActiveRecord::QueryRecorder.new { subject.execute(pipeline) }
+
+ create(:ci_sources_pipeline, pipeline: pipeline)
+ create(:ci_sources_pipeline, source_job: create(:ci_build, pipeline: pipeline))
+
+ expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count)
+ end
+ end
+
+ def expect_touched_etag_caching_paths(*paths)
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
+ expect(store).to receive(:touch).and_wrap_original do |m, *args|
+ expect(args).to include(*paths)
+
+ m.call(*args)
+ end
+ end
end
end
diff --git a/spec/services/ci/generate_terraform_reports_service_spec.rb b/spec/services/ci/generate_terraform_reports_service_spec.rb
index c9ac74e050c..c32e8bcaeb8 100644
--- a/spec/services/ci/generate_terraform_reports_service_spec.rb
+++ b/spec/services/ci/generate_terraform_reports_service_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Ci::GenerateTerraformReportsService do
'create' => 0,
'delete' => 0,
'update' => 1,
- 'job_name' => build.options.dig(:artifacts, :name).to_s
+ 'job_name' => build.name
))
),
key: an_instance_of(Array)
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index 6761f052e18..e71f1a4266a 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -53,6 +53,46 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
log = ActiveRecord::QueryRecorder.new { subject }
expect(log.count).to be_within(1).of(8)
end
+
+ context 'with several locked-unknown artifact records' do
+ before do
+ stub_const("#{described_class}::LOOP_LIMIT", 10)
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ end
+
+ let!(:lockable_artifact_records) do
+ [
+ create(:ci_job_artifact, :metadata, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: locked_job),
+ create(:ci_job_artifact, :junit, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: locked_job),
+ create(:ci_job_artifact, :sast, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: locked_job),
+ create(:ci_job_artifact, :cobertura, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: locked_job),
+ create(:ci_job_artifact, :trace, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: locked_job)
+ ]
+ end
+
+ let!(:unlockable_artifact_records) do
+ [
+ create(:ci_job_artifact, :metadata, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job),
+ create(:ci_job_artifact, :junit, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job),
+ create(:ci_job_artifact, :sast, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job),
+ create(:ci_job_artifact, :cobertura, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job),
+ create(:ci_job_artifact, :trace, :expired, locked: ::Ci::JobArtifact.lockeds[:unknown], job: job),
+ artifact
+ ]
+ end
+
+ it 'updates the locked status of job artifacts from artifacts-locked pipelines' do
+ subject
+
+ expect(lockable_artifact_records).to be_all(&:persisted?)
+ expect(lockable_artifact_records).to be_all { |artifact| artifact.reload.artifact_artifacts_locked? }
+ end
+
+ it 'unlocks and then destroys job artifacts from artifacts-unlocked pipelines' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-6)
+ expect(Ci::JobArtifact.where(id: unlockable_artifact_records.map(&:id))).to be_empty
+ end
+ end
end
end
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 1cc856734fc..0e7230c042e 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -3,59 +3,74 @@
require 'spec_helper'
RSpec.describe Ci::JobArtifacts::DestroyBatchService do
- let(:artifacts) { Ci::JobArtifact.all }
+ let(:artifacts) { Ci::JobArtifact.where(id: [artifact_with_file.id, artifact_without_file.id]) }
let(:service) { described_class.new(artifacts, pick_up_at: Time.current) }
+ let_it_be(:artifact_with_file, refind: true) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let_it_be(:artifact_without_file, refind: true) do
+ create(:ci_job_artifact)
+ end
+
+ let_it_be(:undeleted_artifact, refind: true) do
+ create(:ci_job_artifact)
+ end
+
describe '.execute' do
subject(:execute) { service.execute }
- let_it_be(:artifact, refind: true) do
- create(:ci_job_artifact)
+ it 'creates a deleted object for artifact with attached file' do
+ expect { subject }.to change { Ci::DeletedObject.count }.by(1)
end
- context 'when the artifact has a file attached to it' do
- before do
- artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
- artifact.save!
- end
+ it 'does not remove the attached file' do
+ expect { execute }.not_to change { artifact_with_file.file.exists? }
+ end
- it 'creates a deleted object' do
- expect { subject }.to change { Ci::DeletedObject.count }.by(1)
- end
+ it 'deletes the artifact records' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
+ end
- it 'does not remove the files' do
- expect { execute }.not_to change { artifact.file.exists? }
+ it 'reports metrics for destroyed artifacts' do
+ expect_next_instance_of(Gitlab::Ci::Artifacts::Metrics) do |metrics|
+ expect(metrics).to receive(:increment_destroyed_artifacts_count).with(2).and_call_original
+ expect(metrics).to receive(:increment_destroyed_artifacts_bytes).with(107464).and_call_original
end
- it 'reports metrics for destroyed artifacts' do
- expect_next_instance_of(Gitlab::Ci::Artifacts::Metrics) do |metrics|
- expect(metrics).to receive(:increment_destroyed_artifacts_count).with(1).and_call_original
- expect(metrics).to receive(:increment_destroyed_artifacts_bytes).with(107464).and_call_original
- end
+ execute
+ end
+
+ context 'ProjectStatistics' do
+ it 'resets project statistics' do
+ expect(ProjectStatistics).to receive(:increment_statistic).once
+ .with(artifact_with_file.project, :build_artifacts_size, -artifact_with_file.file.size)
+ .and_call_original
+ expect(ProjectStatistics).to receive(:increment_statistic).once
+ .with(artifact_without_file.project, :build_artifacts_size, 0)
+ .and_call_original
execute
end
- context 'ProjectStatistics' do
- it 'resets project statistics' do
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact.project, :build_artifacts_size, -artifact.file.size)
- .and_call_original
+ context 'with update_stats: false' do
+ it 'does not update project statistics' do
+ expect(ProjectStatistics).not_to receive(:increment_statistic)
- execute
+ service.execute(update_stats: false)
end
- context 'with update_stats: false' do
- it 'does not update project statistics' do
- expect(ProjectStatistics).not_to receive(:increment_statistic)
-
- service.execute(update_stats: false)
- end
+ it 'returns size statistics' do
+ expected_updates = {
+ statistics_updates: {
+ artifact_with_file.project => -artifact_with_file.file.size,
+ artifact_without_file.project => 0
+ }
+ }
- it 'returns size statistics' do
- expect(service.execute(update_stats: false)).to match(
- a_hash_including(statistics_updates: { artifact.project => -artifact.file.size }))
- end
+ expect(service.execute(update_stats: false)).to match(
+ a_hash_including(expected_updates))
end
end
end
@@ -71,7 +86,7 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
it 'raises an exception and stop destroying' do
expect { execute }.to raise_error(ActiveRecord::RecordNotDestroyed)
- .and not_change { Ci::JobArtifact.count }.from(1)
+ .and not_change { Ci::JobArtifact.count }
end
end
end
diff --git a/spec/services/ci/parse_dotenv_artifact_service_spec.rb b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
index c4040a426f2..6bf22b7c8b2 100644
--- a/spec/services/ci/parse_dotenv_artifact_service_spec.rb
+++ b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
@@ -23,6 +23,46 @@ RSpec.describe Ci::ParseDotenvArtifactService do
hash_including('key' => 'KEY2', 'value' => 'VAR2'))
end
+ context 'when dotenv variables are conflicting against manual variables' do
+ before do
+ create(:ci_job_variable, job: build, key: 'KEY1')
+ end
+
+ it 'returns an error message that there is a duplicate variable' do
+ subject
+
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include("Key (key, job_id)=(KEY1, #{build.id}) already exists.")
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+ end
+
+ context 'when dotenv variables have duplicate variables' do
+ let!(:artifact) { create(:ci_job_artifact, :dotenv, job: build) }
+ let(:blob) do
+ <<~EOS
+ KEY1=VAR1
+ KEY2=VAR2
+ KEY2=VAR3
+ KEY1=VAR4
+ EOS
+ end
+
+ before do
+ allow(artifact).to receive(:each_blob).and_yield(blob)
+ end
+
+ it 'latest values get used' do
+ subject
+
+ expect(subject[:status]).to eq(:success)
+
+ expect(build.job_variables.as_json).to contain_exactly(
+ hash_including('key' => 'KEY1', 'value' => 'VAR4'),
+ hash_including('key' => 'KEY2', 'value' => 'VAR3'))
+ end
+ end
+
context 'when parse error happens' do
before do
allow(service).to receive(:scan_line!) { raise described_class::ParserError, 'Invalid Format' }
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index a66d3898c5c..02f8f2dd99f 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -1,12 +1,1106 @@
# frozen_string_literal: true
require 'spec_helper'
-require_relative 'shared_processing_service'
-require_relative 'shared_processing_service_tests_with_yaml'
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
- it_behaves_like 'Pipeline Processing Service'
- it_behaves_like 'Pipeline Processing Service Tests With Yaml'
+ describe 'Pipeline Processing Service Tests With Yaml' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
+
+ where(:test_file_path) do
+ Dir.glob(Rails.root.join('spec/services/ci/pipeline_processing/test_cases/*.yml'))
+ end
+
+ with_them do
+ let(:test_file) { YAML.load_file(test_file_path) }
+ let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline).payload }
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(test_file['config']))
+ end
+
+ it 'follows transitions' do
+ expect(pipeline).to be_persisted
+ Sidekiq::Worker.drain_all # ensure that all async jobs are executed
+ check_expectation(test_file.dig('init', 'expect'), "init")
+
+ test_file['transitions'].each_with_index do |transition, idx|
+ event_on_jobs(transition['event'], transition['jobs'])
+ Sidekiq::Worker.drain_all # ensure that all async jobs are executed
+ check_expectation(transition['expect'], "transition:#{idx}")
+ end
+ end
+
+ private
+
+ def check_expectation(expectation, message)
+ expect(current_state.deep_stringify_keys).to eq(expectation), message
+ end
+
+ def current_state
+ # reload pipeline and all relations
+ pipeline.reload
+
+ {
+ pipeline: pipeline.status,
+ stages: pipeline.stages.pluck(:name, :status).to_h,
+ jobs: pipeline.latest_statuses.pluck(:name, :status).to_h
+ }
+ end
+
+ def event_on_jobs(event, job_names)
+ statuses = pipeline.latest_statuses.by_name(job_names).to_a
+ expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
+
+ statuses.each do |status|
+ if event == 'play'
+ status.play(user)
+ else
+ status.public_send("#{event}!")
+ end
+ end
+ end
+ end
+ end
+
+ describe 'Pipeline Processing Service' do
+ let(:project) { create(:project, :repository) }
+ let(:user) { project.owner }
+
+ let(:pipeline) do
+ create(:ci_empty_pipeline, ref: 'master', project: project)
+ end
+
+ context 'when simple pipeline is defined' do
+ before do
+ create_build('linux', stage_idx: 0)
+ create_build('mac', stage_idx: 0)
+ create_build('rspec', stage_idx: 1)
+ create_build('rubocop', stage_idx: 1)
+ create_build('deploy', stage_idx: 2)
+ end
+
+ it 'processes a pipeline', :sidekiq_inline do
+ expect(process_pipeline).to be_truthy
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(2)
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(4)
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(5)
+ end
+
+ it 'does not process pipeline if existing stage is running' do
+ expect(process_pipeline).to be_truthy
+ expect(builds.pending.count).to eq(2)
+
+ expect(process_pipeline).to be_falsey
+ expect(builds.pending.count).to eq(2)
+ end
+ end
+
+ context 'custom stage with first job allowed to fail' do
+ before do
+ create_build('clean_job', stage_idx: 0, allow_failure: true)
+ create_build('test_job', stage_idx: 1, allow_failure: true)
+ end
+
+ it 'automatically triggers a next stage when build finishes', :sidekiq_inline do
+ expect(process_pipeline).to be_truthy
+ expect(builds_statuses).to eq ['pending']
+
+ fail_running_or_pending
+
+ expect(builds_statuses).to eq %w(failed pending)
+
+ fail_running_or_pending
+
+ expect(pipeline.reload).to be_success
+ end
+ end
+
+ context 'when optional manual actions are defined', :sidekiq_inline do
+ before do
+ create_build('build', stage_idx: 0)
+ create_build('test', stage_idx: 1)
+ create_build('test_failure', stage_idx: 2, when: 'on_failure')
+ create_build('deploy', stage_idx: 3)
+ create_build('production', stage_idx: 3, when: 'manual', allow_failure: true)
+ create_build('cleanup', stage_idx: 4, when: 'always')
+ create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true)
+ end
+
+ context 'when builds are successful' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production)
+ expect(builds_statuses).to eq %w(success success pending manual)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production cleanup clear:cache)
+ expect(builds_statuses).to eq %w(success success success manual pending manual)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success success success manual success manual)
+ expect(pipeline.reload.status).to eq 'success'
+ end
+ end
+
+ context 'when test job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure)
+ expect(builds_statuses).to eq %w(success failed pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success failed success success)
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+ end
+
+ context 'when test and test_failure jobs fail' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure)
+ expect(builds_statuses).to eq %w(success failed pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed failed pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed failed success)
+ expect(pipeline.reload.status).to eq('failed')
+ end
+ end
+
+ context 'when deploy job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production)
+ expect(builds_statuses).to eq %w(success success pending manual)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production cleanup)
+ expect(builds_statuses).to eq %w(success success failed manual pending)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success success failed manual success)
+ expect(pipeline.reload).to be_failed
+ end
+ end
+
+ context 'when build is canceled in the second stage' do
+ it 'does not schedule builds after build has been canceled' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds.running_or_pending).not_to be_empty
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ cancel_running_or_pending
+
+ expect(builds.running_or_pending).to be_empty
+ expect(builds_names).to eq %w[build test]
+ expect(builds_statuses).to eq %w[success canceled]
+ expect(pipeline.reload).to be_canceled
+ end
+ end
+
+ context 'when listing optional manual actions' do
+ it 'returns only for skipped builds' do
+ # currently all builds are created
+ expect(process_pipeline).to be_truthy
+ expect(manual_actions).to be_empty
+
+ # succeed stage build
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_empty
+
+ # succeed stage test
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_one # production
+
+ # succeed stage deploy
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_many # production and clear cache
+ end
+ end
+ end
+
+ context 'when delayed jobs are defined', :sidekiq_inline do
+ context 'when the scene is timed incremental rollout' do
+ before do
+ create_build('build', stage_idx: 0)
+ create_build('rollout10%', **delayed_options, stage_idx: 1)
+ create_build('rollout100%', **delayed_options, stage_idx: 2)
+ create_build('cleanup', stage_idx: 3)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ context 'when builds are successful' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('rollout10%')
+ end
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('rollout100%')
+ end
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' })
+ expect(pipeline.reload.status).to eq 'success'
+ end
+ end
+
+ context 'when build job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'failed' })
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+ end
+
+ context 'when rollout 10% is unscheduled' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ unschedule
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' })
+ expect(pipeline.reload.status).to eq 'manual'
+ end
+
+ context 'when user plays rollout 10%' do
+ it 'schedules rollout100%' do
+ process_pipeline
+ succeed_pending
+ unschedule
+ play_manual_action('rollout10%')
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
+ expect(pipeline.reload.status).to eq 'scheduled'
+ end
+ end
+ end
+
+ context 'when rollout 10% fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('rollout10%')
+ end
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' })
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+
+ context 'when user retries rollout 10%' do
+ it 'does not schedule rollout10% again' do
+ process_pipeline
+ succeed_pending
+ enqueue_scheduled('rollout10%')
+ fail_running_or_pending
+ retry_build('rollout10%')
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+ end
+
+ context 'when rollout 10% is played immediately' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ play_manual_action('rollout10%')
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+ end
+
+ context 'when only one scheduled job exists in a pipeline' do
+ before do
+ create_build('delayed', **delayed_options, stage_idx: 0)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
+
+ expect(pipeline.reload.status).to eq 'scheduled'
+ end
+ end
+
+ context 'when there are two delayed jobs in a stage' do
+ before do
+ create_build('delayed1', **delayed_options, stage_idx: 0)
+ create_build('delayed2', **delayed_options, stage_idx: 0)
+ create_build('job', stage_idx: 1)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'blocks the stage until all scheduled jobs finished' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('delayed1')
+ end
+
+ expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+
+ context 'when a delayed job is allowed to fail' do
+ before do
+ create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0)
+ create_build('job', stage_idx: 1)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'blocks the stage and continues after it failed' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('delayed')
+ end
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' })
+ expect(pipeline.reload.status).to eq 'pending'
+ end
+ end
+ end
+
+ context 'when an exception is raised during a persistent ref creation' do
+ before do
+ successful_build('test', stage_idx: 0)
+
+ allow_next_instance_of(Ci::PersistentRef) do |instance|
+ allow(instance).to receive(:delete_refs) { raise ArgumentError }
+ end
+ end
+
+ it 'process the pipeline' do
+ expect { process_pipeline }.not_to raise_error
+ end
+ end
+
+ context 'when there are manual action in earlier stages' do
+ context 'when first stage has only optional manual actions' do
+ before do
+ create_build('build', stage_idx: 0, when: 'manual', allow_failure: true)
+ create_build('check', stage_idx: 1)
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'starts from the second stage' do
+ expect(all_builds_statuses).to eq %w[manual pending created]
+ end
+ end
+
+ context 'when second stage has only optional manual actions' do
+ before do
+ create_build('check', stage_idx: 0)
+ create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'skips second stage and continues on third stage', :sidekiq_inline do
+ expect(all_builds_statuses).to eq(%w[pending created created])
+
+ builds.first.success
+
+ expect(all_builds_statuses).to eq(%w[success manual pending])
+ end
+ end
+ end
+
+ context 'when there are only manual actions in stages' do
+ before do
+ create_build('image', stage_idx: 0, when: 'manual', allow_failure: true)
+ create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
+ create_build('deploy', stage_idx: 2, when: 'manual')
+ create_build('check', stage_idx: 3)
+
+ process_pipeline
+ end
+
+ it 'processes all jobs until blocking actions encountered' do
+ expect(all_builds_statuses).to eq(%w[manual manual manual created])
+ expect(all_builds_names).to eq(%w[image build deploy check])
+
+ expect(pipeline.reload).to be_blocked
+ end
+ end
+
+ context 'when there is only one manual action' do
+ before do
+ create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true)
+
+ process_pipeline
+ end
+
+ it 'skips the pipeline' do
+ expect(pipeline.reload).to be_skipped
+ end
+
+ context 'when the action was played' do
+ before do
+ play_manual_action('deploy')
+ end
+
+ it 'queues the action and pipeline', :sidekiq_inline do
+ expect(all_builds_statuses).to eq(%w[pending])
+
+ expect(pipeline.reload).to be_pending
+ end
+ end
+ end
+
+ context 'when blocking manual actions are defined', :sidekiq_inline do
+ before do
+ create_build('code:test', stage_idx: 0)
+ create_build('staging:deploy', stage_idx: 1, when: 'manual')
+ create_build('staging:test', stage_idx: 2, when: 'on_success')
+ create_build('production:deploy', stage_idx: 3, when: 'manual')
+ create_build('production:test', stage_idx: 4, when: 'always')
+ end
+
+ context 'when first stage succeeds' do
+ it 'blocks pipeline on stage with first manual action' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+ expect(pipeline.reload.status).to eq 'pending'
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy]
+ expect(builds_statuses).to eq %w[success manual]
+ expect(pipeline.reload).to be_manual
+ end
+ end
+
+ context 'when first stage fails' do
+ it 'does not take blocking action into account' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+ expect(pipeline.reload.status).to eq 'pending'
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w[code:test production:test]
+ expect(builds_statuses).to eq %w[failed pending]
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w[failed success]
+ expect(pipeline.reload).to be_failed
+ end
+ end
+
+ context 'when pipeline is promoted sequentially up to the end' do
+ before do
+ # Users need ability to merge into a branch in order to trigger
+ # protected manual actions.
+ #
+ create(:protected_branch, :developers_can_merge,
+ name: 'master', project: project)
+ end
+
+ it 'properly processes entire pipeline' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy]
+ expect(builds_statuses).to eq %w[success manual]
+ expect(pipeline.reload).to be_manual
+
+ play_manual_action('staging:deploy')
+
+ expect(builds_statuses).to eq %w[success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test]
+ expect(builds_statuses).to eq %w[success success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy]
+ expect(builds_statuses).to eq %w[success success success manual]
+
+ expect(pipeline.reload).to be_manual
+ expect(pipeline.reload).to be_blocked
+ expect(pipeline.reload).not_to be_active
+ expect(pipeline.reload).not_to be_complete
+
+ play_manual_action('production:deploy')
+
+ expect(builds_statuses).to eq %w[success success success pending]
+ expect(pipeline.reload).to be_running
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy production:test]
+ expect(builds_statuses).to eq %w[success success success success pending]
+ expect(pipeline.reload).to be_running
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy production:test]
+ expect(builds_statuses).to eq %w[success success success success success]
+ expect(pipeline.reload).to be_success
+ end
+ end
+ end
+
+ context 'when second stage has only on_failure jobs', :sidekiq_inline do
+ before do
+ create_build('check', stage_idx: 0)
+ create_build('build', stage_idx: 1, when: 'on_failure')
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'skips second stage and continues on third stage' do
+ expect(all_builds_statuses).to eq(%w[pending created created])
+
+ builds.first.success
+
+ expect(all_builds_statuses).to eq(%w[success skipped pending])
+ end
+ end
+
+ context 'when failed build in the middle stage is retried', :sidekiq_inline do
+ context 'when failed build is the only unsuccessful build in the stage' do
+ before do
+ create_build('build:1', stage_idx: 0)
+ create_build('build:2', stage_idx: 0)
+ create_build('test:1', stage_idx: 1)
+ create_build('test:2', stage_idx: 1)
+ create_build('deploy:1', stage_idx: 2)
+ create_build('deploy:2', stage_idx: 2)
+ end
+
+ it 'does trigger builds in the next stage' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build:1', 'build:2']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
+
+ pipeline.builds.find_by(name: 'test:1').success!
+ pipeline.builds.find_by(name: 'test:2').drop!
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
+
+ Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).reset.success!
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2',
+ 'test:2', 'deploy:1', 'deploy:2']
+ end
+ end
+ end
+
+ context 'when builds with auto-retries are configured', :sidekiq_inline do
+ before do
+ create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 })
+ create_build('test:1', stage_idx: 1, user: user, when: :on_failure)
+ create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 })
+ end
+
+ it 'automatically retries builds in a valid order' do
+ expect(process_pipeline).to be_truthy
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1]
+ expect(builds_statuses).to eq %w[failed pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1 test:2]
+ expect(builds_statuses).to eq %w[failed success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1 test:2]
+ expect(builds_statuses).to eq %w[failed success success]
+
+ expect(pipeline.reload).to be_success
+ end
+ end
+
+ context 'when pipeline with needs is created', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
+ let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
+
+ let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
+ let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
+
+ let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
+ let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
+
+ it 'when linux:* finishes first it runs it out of order' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # we follow the single path of linux
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running pending created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
+
+ linux_rspec.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
+
+ linux_rubocop.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+
+ context 'when one of the jobs is run on a failure' do
+ let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure', scheduling_type: :dag) }
+
+ let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
+
+ context 'when another job in build phase fails first' do
+ it 'does skip linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ mac_build.reset.drop!
+ linux_build.reset.success!
+
+ expect(linux_notify.reset).to be_skipped
+ end
+ end
+
+ context 'when linux:build job fails first' do
+ it 'does run linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ linux_build.reset.drop!
+
+ expect(linux_notify.reset).to be_pending
+ end
+ end
+ end
+
+ context 'when there is a job scheduled with dag but no need (needs: [])' do
+ let!(:deploy_pages) { create_build('deploy_pages', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
+
+ it 'runs deploy_pages without waiting prior stages' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created pending))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build, deploy_pages)
+
+ linux_build.reset.success!
+ deploy_pages.reset.success!
+
+ expect(stages).to eq(%w(running pending running))
+ expect(builds.success).to contain_exactly(linux_build, deploy_pages)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
+
+ linux_rspec.reset.success!
+ linux_rubocop.reset.success!
+ mac_build.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success running))
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+ end
+ end
+
+ context 'when a needed job is skipped', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
+
+ before do
+ create(:ci_build_need, build: deploy, name: 'linux:build')
+ end
+
+ it 'skips the jobs depending on it' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(all_builds.pending).to contain_exactly(linux_build)
+
+ linux_build.reset.drop!
+
+ expect(stages).to eq(%w(failed skipped skipped))
+ expect(all_builds.failed).to contain_exactly(linux_build)
+ expect(all_builds.skipped).to contain_exactly(linux_rspec, deploy)
+ end
+ end
+
+ context 'when a needed job is manual', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0, when: 'manual', allow_failure: true) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 1, scheduling_type: :dag) }
+
+ before do
+ create(:ci_build_need, build: deploy, name: 'linux:build')
+ end
+
+ it 'makes deploy DAG to be skipped' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(skipped skipped))
+ expect(all_builds.manual).to contain_exactly(linux_build)
+ expect(all_builds.skipped).to contain_exactly(deploy)
+ end
+ end
+
+ context 'when a bridge job has parallel:matrix config', :sidekiq_inline do
+ let(:parent_config) do
+ <<-EOY
+ test:
+ stage: test
+ script: echo test
+
+ deploy:
+ stage: deploy
+ trigger:
+ include: .child.yml
+ parallel:
+ matrix:
+ - PROVIDER: ovh
+ STACK: [monitoring, app]
+ EOY
+ end
+
+ let(:child_config) do
+ <<-EOY
+ test:
+ stage: test
+ script: echo test
+ EOY
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository)
+ .to receive(:blob_data_at)
+ .with(an_instance_of(String), '.gitlab-ci.yml')
+ .and_return(parent_config)
+
+ allow(repository)
+ .to receive(:blob_data_at)
+ .with(an_instance_of(String), '.child.yml')
+ .and_return(child_config)
+ end
+ end
+
+ it 'creates pipeline with bridges, then passes the matrix variables to downstream jobs' do
+ expect(all_builds_names).to contain_exactly('test', 'deploy: [ovh, monitoring]', 'deploy: [ovh, app]')
+ expect(all_builds_statuses).to contain_exactly('pending', 'created', 'created')
+
+ succeed_pending
+
+ # bridge jobs directly transition to success
+ expect(all_builds_statuses).to contain_exactly('success', 'success', 'success')
+
+ bridge1 = all_builds.find_by(name: 'deploy: [ovh, monitoring]')
+ bridge2 = all_builds.find_by(name: 'deploy: [ovh, app]')
+
+ downstream_job1 = bridge1.downstream_pipeline.processables.first
+ downstream_job2 = bridge2.downstream_pipeline.processables.first
+
+ expect(downstream_job1.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'monitoring')
+ expect(downstream_job2.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'app')
+ end
+ end
+
+ context 'when a bridge job has invalid downstream project', :sidekiq_inline do
+ let(:config) do
+ <<-EOY
+ test:
+ stage: test
+ script: echo test
+
+ deploy:
+ stage: deploy
+ trigger:
+ project: invalid-project
+ EOY
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'creates a pipeline, then fails the bridge job' do
+ expect(all_builds_names).to contain_exactly('test', 'deploy')
+ expect(all_builds_statuses).to contain_exactly('pending', 'created')
+
+ succeed_pending
+
+ expect(all_builds_names).to contain_exactly('test', 'deploy')
+ expect(all_builds_statuses).to contain_exactly('success', 'failed')
+ end
+ end
+
+ private
+
+ def all_builds
+ pipeline.processables.order(:stage_idx, :id)
+ end
+
+ def builds
+ all_builds.where.not(status: [:created, :skipped])
+ end
+
+ def stages
+ pipeline.reset.stages.map(&:status)
+ end
+
+ def builds_names
+ builds.pluck(:name)
+ end
+
+ def builds_names_and_statuses
+ builds.each_with_object({}) do |b, h|
+ h[b.name.to_sym] = b.status
+ h
+ end
+ end
+
+ def all_builds_names
+ all_builds.pluck(:name)
+ end
+
+ def builds_statuses
+ builds.pluck(:status)
+ end
+
+ def all_builds_statuses
+ all_builds.pluck(:status)
+ end
+
+ def succeed_pending
+ builds.pending.each do |build|
+ build.reset.success
+ end
+ end
+
+ def succeed_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.success
+ end
+ end
+
+ def fail_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.drop
+ end
+ end
+
+ def cancel_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.cancel
+ end
+ end
+
+ def play_manual_action(name)
+ builds.find_by(name: name).play(user)
+ end
+
+ def enqueue_scheduled(name)
+ builds.scheduled.find_by(name: name).enqueue_scheduled
+ end
+
+ def retry_build(name)
+ Ci::Build.retry(builds.find_by(name: name), user)
+ end
+
+ def manual_actions
+ pipeline.manual_actions.reload
+ end
+
+ def create_build(name, **opts)
+ create(:ci_build, :created, pipeline: pipeline, name: name, **with_stage_opts(opts))
+ end
+
+ def successful_build(name, **opts)
+ create(:ci_build, :success, pipeline: pipeline, name: name, **with_stage_opts(opts))
+ end
+
+ def with_stage_opts(opts)
+ { stage: "stage-#{opts[:stage_idx].to_i}" }.merge(opts)
+ end
+
+ def delayed_options
+ { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } }
+ end
+
+ def unschedule
+ pipeline.builds.scheduled.map(&:unschedule)
+ end
+ end
private
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
deleted file mode 100644
index 8de9b308429..00000000000
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ /dev/null
@@ -1,1040 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'Pipeline Processing Service' do
- let(:project) { create(:project, :repository) }
- let(:user) { project.owner }
-
- let(:pipeline) do
- create(:ci_empty_pipeline, ref: 'master', project: project)
- end
-
- context 'when simple pipeline is defined' do
- before do
- create_build('linux', stage_idx: 0)
- create_build('mac', stage_idx: 0)
- create_build('rspec', stage_idx: 1)
- create_build('rubocop', stage_idx: 1)
- create_build('deploy', stage_idx: 2)
- end
-
- it 'processes a pipeline', :sidekiq_inline do
- expect(process_pipeline).to be_truthy
-
- succeed_pending
-
- expect(builds.success.count).to eq(2)
-
- succeed_pending
-
- expect(builds.success.count).to eq(4)
-
- succeed_pending
-
- expect(builds.success.count).to eq(5)
- end
-
- it 'does not process pipeline if existing stage is running' do
- expect(process_pipeline).to be_truthy
- expect(builds.pending.count).to eq(2)
-
- expect(process_pipeline).to be_falsey
- expect(builds.pending.count).to eq(2)
- end
- end
-
- context 'custom stage with first job allowed to fail' do
- before do
- create_build('clean_job', stage_idx: 0, allow_failure: true)
- create_build('test_job', stage_idx: 1, allow_failure: true)
- end
-
- it 'automatically triggers a next stage when build finishes', :sidekiq_inline do
- expect(process_pipeline).to be_truthy
- expect(builds_statuses).to eq ['pending']
-
- fail_running_or_pending
-
- expect(builds_statuses).to eq %w(failed pending)
-
- fail_running_or_pending
-
- expect(pipeline.reload).to be_success
- end
- end
-
- context 'when optional manual actions are defined', :sidekiq_inline do
- before do
- create_build('build', stage_idx: 0)
- create_build('test', stage_idx: 1)
- create_build('test_failure', stage_idx: 2, when: 'on_failure')
- create_build('deploy', stage_idx: 3)
- create_build('production', stage_idx: 3, when: 'manual', allow_failure: true)
- create_build('cleanup', stage_idx: 4, when: 'always')
- create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true)
- end
-
- context 'when builds are successful' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production)
- expect(builds_statuses).to eq %w(success success pending manual)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production cleanup clear:cache)
- expect(builds_statuses).to eq %w(success success success manual pending manual)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success success success manual success manual)
- expect(pipeline.reload.status).to eq 'success'
- end
- end
-
- context 'when test job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure)
- expect(builds_statuses).to eq %w(success failed pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed success pending)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success failed success success)
- expect(pipeline.reload.status).to eq 'failed'
- end
- end
-
- context 'when test and test_failure jobs fail' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure)
- expect(builds_statuses).to eq %w(success failed pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed failed pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed failed success)
- expect(pipeline.reload.status).to eq('failed')
- end
- end
-
- context 'when deploy job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production)
- expect(builds_statuses).to eq %w(success success pending manual)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production cleanup)
- expect(builds_statuses).to eq %w(success success failed manual pending)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success success failed manual success)
- expect(pipeline.reload).to be_failed
- end
- end
-
- context 'when build is canceled in the second stage' do
- it 'does not schedule builds after build has been canceled' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds.running_or_pending).not_to be_empty
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- cancel_running_or_pending
-
- expect(builds.running_or_pending).to be_empty
- expect(builds_names).to eq %w[build test]
- expect(builds_statuses).to eq %w[success canceled]
- expect(pipeline.reload).to be_canceled
- end
- end
-
- context 'when listing optional manual actions' do
- it 'returns only for skipped builds' do
- # currently all builds are created
- expect(process_pipeline).to be_truthy
- expect(manual_actions).to be_empty
-
- # succeed stage build
- succeed_running_or_pending
-
- expect(manual_actions).to be_empty
-
- # succeed stage test
- succeed_running_or_pending
-
- expect(manual_actions).to be_one # production
-
- # succeed stage deploy
- succeed_running_or_pending
-
- expect(manual_actions).to be_many # production and clear cache
- end
- end
- end
-
- context 'when delayed jobs are defined', :sidekiq_inline do
- context 'when the scene is timed incremental rollout' do
- before do
- create_build('build', stage_idx: 0)
- create_build('rollout10%', **delayed_options, stage_idx: 1)
- create_build('rollout100%', **delayed_options, stage_idx: 2)
- create_build('cleanup', stage_idx: 3)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- context 'when builds are successful' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('rollout10%')
- end
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('rollout100%')
- end
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' })
- expect(pipeline.reload.status).to eq 'success'
- end
- end
-
- context 'when build job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'failed' })
- expect(pipeline.reload.status).to eq 'failed'
- end
- end
-
- context 'when rollout 10% is unscheduled' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- unschedule
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' })
- expect(pipeline.reload.status).to eq 'manual'
- end
-
- context 'when user plays rollout 10%' do
- it 'schedules rollout100%' do
- process_pipeline
- succeed_pending
- unschedule
- play_manual_action('rollout10%')
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
- expect(pipeline.reload.status).to eq 'scheduled'
- end
- end
- end
-
- context 'when rollout 10% fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('rollout10%')
- end
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' })
- expect(pipeline.reload.status).to eq 'failed'
- end
-
- context 'when user retries rollout 10%' do
- it 'does not schedule rollout10% again' do
- process_pipeline
- succeed_pending
- enqueue_scheduled('rollout10%')
- fail_running_or_pending
- retry_build('rollout10%')
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
- end
-
- context 'when rollout 10% is played immidiately' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- play_manual_action('rollout10%')
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
- end
-
- context 'when only one scheduled job exists in a pipeline' do
- before do
- create_build('delayed', **delayed_options, stage_idx: 0)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
-
- expect(pipeline.reload.status).to eq 'scheduled'
- end
- end
-
- context 'when there are two delayed jobs in a stage' do
- before do
- create_build('delayed1', **delayed_options, stage_idx: 0)
- create_build('delayed2', **delayed_options, stage_idx: 0)
- create_build('job', stage_idx: 1)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'blocks the stage until all scheduled jobs finished' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('delayed1')
- end
-
- expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
-
- context 'when a delayed job is allowed to fail' do
- before do
- create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0)
- create_build('job', stage_idx: 1)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'blocks the stage and continues after it failed' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('delayed')
- end
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' })
- expect(pipeline.reload.status).to eq 'pending'
- end
- end
- end
-
- context 'when an exception is raised during a persistent ref creation' do
- before do
- successful_build('test', stage_idx: 0)
-
- allow_next_instance_of(Ci::PersistentRef) do |instance|
- allow(instance).to receive(:delete_refs) { raise ArgumentError }
- end
- end
-
- it 'process the pipeline' do
- expect { process_pipeline }.not_to raise_error
- end
- end
-
- context 'when there are manual action in earlier stages' do
- context 'when first stage has only optional manual actions' do
- before do
- create_build('build', stage_idx: 0, when: 'manual', allow_failure: true)
- create_build('check', stage_idx: 1)
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'starts from the second stage' do
- expect(all_builds_statuses).to eq %w[manual pending created]
- end
- end
-
- context 'when second stage has only optional manual actions' do
- before do
- create_build('check', stage_idx: 0)
- create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'skips second stage and continues on third stage', :sidekiq_inline do
- expect(all_builds_statuses).to eq(%w[pending created created])
-
- builds.first.success
-
- expect(all_builds_statuses).to eq(%w[success manual pending])
- end
- end
- end
-
- context 'when there are only manual actions in stages' do
- before do
- create_build('image', stage_idx: 0, when: 'manual', allow_failure: true)
- create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
- create_build('deploy', stage_idx: 2, when: 'manual')
- create_build('check', stage_idx: 3)
-
- process_pipeline
- end
-
- it 'processes all jobs until blocking actions encountered' do
- expect(all_builds_statuses).to eq(%w[manual manual manual created])
- expect(all_builds_names).to eq(%w[image build deploy check])
-
- expect(pipeline.reload).to be_blocked
- end
- end
-
- context 'when there is only one manual action' do
- before do
- create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true)
-
- process_pipeline
- end
-
- it 'skips the pipeline' do
- expect(pipeline.reload).to be_skipped
- end
-
- context 'when the action was played' do
- before do
- play_manual_action('deploy')
- end
-
- it 'queues the action and pipeline', :sidekiq_inline do
- expect(all_builds_statuses).to eq(%w[pending])
-
- expect(pipeline.reload).to be_pending
- end
- end
- end
-
- context 'when blocking manual actions are defined', :sidekiq_inline do
- before do
- create_build('code:test', stage_idx: 0)
- create_build('staging:deploy', stage_idx: 1, when: 'manual')
- create_build('staging:test', stage_idx: 2, when: 'on_success')
- create_build('production:deploy', stage_idx: 3, when: 'manual')
- create_build('production:test', stage_idx: 4, when: 'always')
- end
-
- context 'when first stage succeeds' do
- it 'blocks pipeline on stage with first manual action' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
- expect(pipeline.reload.status).to eq 'pending'
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy]
- expect(builds_statuses).to eq %w[success manual]
- expect(pipeline.reload).to be_manual
- end
- end
-
- context 'when first stage fails' do
- it 'does not take blocking action into account' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
- expect(pipeline.reload.status).to eq 'pending'
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w[code:test production:test]
- expect(builds_statuses).to eq %w[failed pending]
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w[failed success]
- expect(pipeline.reload).to be_failed
- end
- end
-
- context 'when pipeline is promoted sequentially up to the end' do
- before do
- # Users need ability to merge into a branch in order to trigger
- # protected manual actions.
- #
- create(:protected_branch, :developers_can_merge,
- name: 'master', project: project)
- end
-
- it 'properly processes entire pipeline' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy]
- expect(builds_statuses).to eq %w[success manual]
- expect(pipeline.reload).to be_manual
-
- play_manual_action('staging:deploy')
-
- expect(builds_statuses).to eq %w[success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test]
- expect(builds_statuses).to eq %w[success success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy]
- expect(builds_statuses).to eq %w[success success success manual]
-
- expect(pipeline.reload).to be_manual
- expect(pipeline.reload).to be_blocked
- expect(pipeline.reload).not_to be_active
- expect(pipeline.reload).not_to be_complete
-
- play_manual_action('production:deploy')
-
- expect(builds_statuses).to eq %w[success success success pending]
- expect(pipeline.reload).to be_running
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy production:test]
- expect(builds_statuses).to eq %w[success success success success pending]
- expect(pipeline.reload).to be_running
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy production:test]
- expect(builds_statuses).to eq %w[success success success success success]
- expect(pipeline.reload).to be_success
- end
- end
- end
-
- context 'when second stage has only on_failure jobs', :sidekiq_inline do
- before do
- create_build('check', stage_idx: 0)
- create_build('build', stage_idx: 1, when: 'on_failure')
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'skips second stage and continues on third stage' do
- expect(all_builds_statuses).to eq(%w[pending created created])
-
- builds.first.success
-
- expect(all_builds_statuses).to eq(%w[success skipped pending])
- end
- end
-
- context 'when failed build in the middle stage is retried', :sidekiq_inline do
- context 'when failed build is the only unsuccessful build in the stage' do
- before do
- create_build('build:1', stage_idx: 0)
- create_build('build:2', stage_idx: 0)
- create_build('test:1', stage_idx: 1)
- create_build('test:2', stage_idx: 1)
- create_build('deploy:1', stage_idx: 2)
- create_build('deploy:2', stage_idx: 2)
- end
-
- it 'does trigger builds in the next stage' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build:1', 'build:2']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
-
- pipeline.builds.find_by(name: 'test:1').success!
- pipeline.builds.find_by(name: 'test:2').drop!
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
-
- Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).reset.success!
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2',
- 'test:2', 'deploy:1', 'deploy:2']
- end
- end
- end
-
- context 'when builds with auto-retries are configured', :sidekiq_inline do
- before do
- create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 })
- create_build('test:1', stage_idx: 1, user: user, when: :on_failure)
- create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 })
- end
-
- it 'automatically retries builds in a valid order' do
- expect(process_pipeline).to be_truthy
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1]
- expect(builds_statuses).to eq %w[failed pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1 test:2]
- expect(builds_statuses).to eq %w[failed success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1 test:2]
- expect(builds_statuses).to eq %w[failed success success]
-
- expect(pipeline.reload).to be_success
- end
- end
-
- context 'when pipeline with needs is created', :sidekiq_inline do
- let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
- let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
- let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
- let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
- let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
- let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
- let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
-
- let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
- let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
-
- let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
- let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
-
- it 'when linux:* finishes first it runs it out of order' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created created))
- expect(builds.pending).to contain_exactly(linux_build, mac_build)
-
- # we follow the single path of linux
- linux_build.reset.success!
-
- expect(stages).to eq(%w(running pending created))
- expect(builds.success).to contain_exactly(linux_build)
- expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
-
- linux_rspec.reset.success!
-
- expect(stages).to eq(%w(running running created))
- expect(builds.success).to contain_exactly(linux_build, linux_rspec)
- expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
-
- linux_rubocop.reset.success!
-
- expect(stages).to eq(%w(running running created))
- expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
- expect(builds.pending).to contain_exactly(mac_build)
-
- mac_build.reset.success!
- mac_rspec.reset.success!
- mac_rubocop.reset.success!
-
- expect(stages).to eq(%w(success success pending))
- expect(builds.success).to contain_exactly(
- linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
- expect(builds.pending).to contain_exactly(deploy)
- end
-
- context 'when one of the jobs is run on a failure' do
- let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure', scheduling_type: :dag) }
-
- let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
-
- context 'when another job in build phase fails first' do
- it 'does skip linux:notify' do
- expect(process_pipeline).to be_truthy
-
- mac_build.reset.drop!
- linux_build.reset.success!
-
- expect(linux_notify.reset).to be_skipped
- end
- end
-
- context 'when linux:build job fails first' do
- it 'does run linux:notify' do
- expect(process_pipeline).to be_truthy
-
- linux_build.reset.drop!
-
- expect(linux_notify.reset).to be_pending
- end
- end
- end
-
- context 'when there is a job scheduled with dag but no need (needs: [])' do
- let!(:deploy_pages) { create_build('deploy_pages', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
-
- it 'runs deploy_pages without waiting prior stages' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created pending))
- expect(builds.pending).to contain_exactly(linux_build, mac_build, deploy_pages)
-
- linux_build.reset.success!
- deploy_pages.reset.success!
-
- expect(stages).to eq(%w(running pending running))
- expect(builds.success).to contain_exactly(linux_build, deploy_pages)
- expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
-
- linux_rspec.reset.success!
- linux_rubocop.reset.success!
- mac_build.reset.success!
- mac_rspec.reset.success!
- mac_rubocop.reset.success!
-
- expect(stages).to eq(%w(success success running))
- expect(builds.pending).to contain_exactly(deploy)
- end
- end
- end
-
- context 'when a needed job is skipped', :sidekiq_inline do
- let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
- let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
- let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
-
- before do
- create(:ci_build_need, build: deploy, name: 'linux:build')
- end
-
- it 'skips the jobs depending on it' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created created))
- expect(all_builds.pending).to contain_exactly(linux_build)
-
- linux_build.reset.drop!
-
- expect(stages).to eq(%w(failed skipped skipped))
- expect(all_builds.failed).to contain_exactly(linux_build)
- expect(all_builds.skipped).to contain_exactly(linux_rspec, deploy)
- end
- end
-
- context 'when a needed job is manual', :sidekiq_inline do
- let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0, when: 'manual', allow_failure: true) }
- let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 1, scheduling_type: :dag) }
-
- before do
- create(:ci_build_need, build: deploy, name: 'linux:build')
- end
-
- it 'makes deploy DAG to be skipped' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(skipped skipped))
- expect(all_builds.manual).to contain_exactly(linux_build)
- expect(all_builds.skipped).to contain_exactly(deploy)
- end
- end
-
- context 'when a bridge job has parallel:matrix config', :sidekiq_inline do
- let(:parent_config) do
- <<-EOY
- test:
- stage: test
- script: echo test
-
- deploy:
- stage: deploy
- trigger:
- include: .child.yml
- parallel:
- matrix:
- - PROVIDER: ovh
- STACK: [monitoring, app]
- EOY
- end
-
- let(:child_config) do
- <<-EOY
- test:
- stage: test
- script: echo test
- EOY
- end
-
- let(:pipeline) do
- Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
- end
-
- before do
- allow_next_instance_of(Repository) do |repository|
- allow(repository)
- .to receive(:blob_data_at)
- .with(an_instance_of(String), '.gitlab-ci.yml')
- .and_return(parent_config)
-
- allow(repository)
- .to receive(:blob_data_at)
- .with(an_instance_of(String), '.child.yml')
- .and_return(child_config)
- end
- end
-
- it 'creates pipeline with bridges, then passes the matrix variables to downstream jobs' do
- expect(all_builds_names).to contain_exactly('test', 'deploy: [ovh, monitoring]', 'deploy: [ovh, app]')
- expect(all_builds_statuses).to contain_exactly('pending', 'created', 'created')
-
- succeed_pending
-
- # bridge jobs directly transition to success
- expect(all_builds_statuses).to contain_exactly('success', 'success', 'success')
-
- bridge1 = all_builds.find_by(name: 'deploy: [ovh, monitoring]')
- bridge2 = all_builds.find_by(name: 'deploy: [ovh, app]')
-
- downstream_job1 = bridge1.downstream_pipeline.processables.first
- downstream_job2 = bridge2.downstream_pipeline.processables.first
-
- expect(downstream_job1.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'monitoring')
- expect(downstream_job2.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'app')
- end
- end
-
- context 'when a bridge job has invalid downstream project', :sidekiq_inline do
- let(:config) do
- <<-EOY
- test:
- stage: test
- script: echo test
-
- deploy:
- stage: deploy
- trigger:
- project: invalid-project
- EOY
- end
-
- let(:pipeline) do
- Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
- end
-
- before do
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'creates a pipeline, then fails the bridge job' do
- expect(all_builds_names).to contain_exactly('test', 'deploy')
- expect(all_builds_statuses).to contain_exactly('pending', 'created')
-
- succeed_pending
-
- expect(all_builds_names).to contain_exactly('test', 'deploy')
- expect(all_builds_statuses).to contain_exactly('success', 'failed')
- end
- end
-
- private
-
- def all_builds
- pipeline.processables.order(:stage_idx, :id)
- end
-
- def builds
- all_builds.where.not(status: [:created, :skipped])
- end
-
- def stages
- pipeline.reset.stages.map(&:status)
- end
-
- def builds_names
- builds.pluck(:name)
- end
-
- def builds_names_and_statuses
- builds.each_with_object({}) do |b, h|
- h[b.name.to_sym] = b.status
- h
- end
- end
-
- def all_builds_names
- all_builds.pluck(:name)
- end
-
- def builds_statuses
- builds.pluck(:status)
- end
-
- def all_builds_statuses
- all_builds.pluck(:status)
- end
-
- def succeed_pending
- builds.pending.each do |build|
- build.reset.success
- end
- end
-
- def succeed_running_or_pending
- pipeline.builds.running_or_pending.each do |build|
- build.reset.success
- end
- end
-
- def fail_running_or_pending
- pipeline.builds.running_or_pending.each do |build|
- build.reset.drop
- end
- end
-
- def cancel_running_or_pending
- pipeline.builds.running_or_pending.each do |build|
- build.reset.cancel
- end
- end
-
- def play_manual_action(name)
- builds.find_by(name: name).play(user)
- end
-
- def enqueue_scheduled(name)
- builds.scheduled.find_by(name: name).enqueue_scheduled
- end
-
- def retry_build(name)
- Ci::Build.retry(builds.find_by(name: name), user)
- end
-
- def manual_actions
- pipeline.manual_actions.reload
- end
-
- def create_build(name, **opts)
- create(:ci_build, :created, pipeline: pipeline, name: name, **with_stage_opts(opts))
- end
-
- def successful_build(name, **opts)
- create(:ci_build, :success, pipeline: pipeline, name: name, **with_stage_opts(opts))
- end
-
- def with_stage_opts(opts)
- { stage: "stage-#{opts[:stage_idx].to_i}" }.merge(opts)
- end
-
- def delayed_options
- { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } }
- end
-
- def unschedule
- pipeline.builds.scheduled.map(&:unschedule)
- end
-end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
deleted file mode 100644
index b4ad2512593..00000000000
--- a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { project.owner }
-
- where(:test_file_path) do
- Dir.glob(Rails.root.join('spec/services/ci/pipeline_processing/test_cases/*.yml'))
- end
-
- with_them do
- let(:test_file) { YAML.load_file(test_file_path) }
- let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline).payload }
-
- before do
- stub_ci_pipeline_yaml_file(YAML.dump(test_file['config']))
- end
-
- it 'follows transitions' do
- expect(pipeline).to be_persisted
- Sidekiq::Worker.drain_all # ensure that all async jobs are executed
- check_expectation(test_file.dig('init', 'expect'), "init")
-
- test_file['transitions'].each_with_index do |transition, idx|
- event_on_jobs(transition['event'], transition['jobs'])
- Sidekiq::Worker.drain_all # ensure that all async jobs are executed
- check_expectation(transition['expect'], "transition:#{idx}")
- end
- end
-
- private
-
- def check_expectation(expectation, message)
- expect(current_state.deep_stringify_keys).to eq(expectation), message
- end
-
- def current_state
- # reload pipeline and all relations
- pipeline.reload
-
- {
- pipeline: pipeline.status,
- stages: pipeline.stages.pluck(:name, :status).to_h,
- jobs: pipeline.latest_statuses.pluck(:name, :status).to_h
- }
- end
-
- def event_on_jobs(event, job_names)
- statuses = pipeline.latest_statuses.by_name(job_names).to_a
- expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
-
- statuses.each do |status|
- if event == 'play'
- status.play(user)
- else
- status.public_send("#{event}!")
- end
- end
- end
- end
-end
diff --git a/spec/services/ci/play_build_service_spec.rb b/spec/services/ci/play_build_service_spec.rb
index babd601e0cf..34f77260334 100644
--- a/spec/services/ci/play_build_service_spec.rb
+++ b/spec/services/ci/play_build_service_spec.rb
@@ -79,12 +79,22 @@ RSpec.describe Ci::PlayBuildService, '#execute' do
{ key: 'second', secret_value: 'second' }]
end
+ subject { service.execute(build, job_variables) }
+
it 'assigns the variables to the build' do
- service.execute(build, job_variables)
+ subject
expect(build.reload.job_variables.map(&:key)).to contain_exactly('first', 'second')
end
+ context 'when variables are invalid' do
+ let(:job_variables) { [{}] }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
context 'when user defined variables are restricted' do
before do
project.update!(restrict_user_defined_variables: true)
@@ -96,7 +106,7 @@ RSpec.describe Ci::PlayBuildService, '#execute' do
end
it 'assigns the variables to the build' do
- service.execute(build, job_variables)
+ subject
expect(build.reload.job_variables.map(&:key)).to contain_exactly('first', 'second')
end
@@ -104,8 +114,7 @@ RSpec.describe Ci::PlayBuildService, '#execute' do
context 'when user is developer' do
it 'raises an error' do
- expect { service.execute(build, job_variables) }
- .to raise_error Gitlab::Access::AccessDeniedError
+ expect { subject }.to raise_error Gitlab::Access::AccessDeniedError
end
end
end
diff --git a/spec/services/ci/process_sync_events_service_spec.rb b/spec/services/ci/process_sync_events_service_spec.rb
new file mode 100644
index 00000000000..00b670ff54f
--- /dev/null
+++ b/spec/services/ci/process_sync_events_service_spec.rb
@@ -0,0 +1,129 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ProcessSyncEventsService do
+ let!(:group) { create(:group) }
+ let!(:project1) { create(:project, group: group) }
+ let!(:project2) { create(:project, group: group) }
+ let!(:parent_group_1) { create(:group) }
+ let!(:parent_group_2) { create(:group) }
+
+ subject(:service) { described_class.new(sync_event_class, hierarchy_class) }
+
+ describe '#perform' do
+ subject(:execute) { service.execute }
+
+ context 'for Projects::SyncEvent' do
+ let(:sync_event_class) { Projects::SyncEvent }
+ let(:hierarchy_class) { ::Ci::ProjectMirror }
+
+ before do
+ Projects::SyncEvent.delete_all
+
+ project1.update!(group: parent_group_1)
+ project2.update!(group: parent_group_2)
+ end
+
+ it 'consumes events' do
+ expect { execute }.to change(Projects::SyncEvent, :count).from(2).to(0)
+
+ expect(project1.ci_project_mirror).to have_attributes(
+ namespace_id: parent_group_1.id
+ )
+ expect(project2.ci_project_mirror).to have_attributes(
+ namespace_id: parent_group_2.id
+ )
+ end
+
+ it 'enqueues Projects::ProcessSyncEventsWorker if any left' do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ expect(Projects::ProcessSyncEventsWorker).to receive(:perform_async)
+
+ execute
+ end
+
+ it 'does not enqueue Projects::ProcessSyncEventsWorker if no left' do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ expect(Projects::ProcessSyncEventsWorker).not_to receive(:perform_async)
+
+ execute
+ end
+
+ context 'when there is no event' do
+ before do
+ Projects::SyncEvent.delete_all
+ end
+
+ it 'does nothing' do
+ expect { execute }.not_to change(Projects::SyncEvent, :count)
+ end
+ end
+
+ context 'when the FF ci_namespace_project_mirrors is disabled' do
+ before do
+ stub_feature_flags(ci_namespace_project_mirrors: false)
+ end
+
+ it 'does nothing' do
+ expect { execute }.not_to change(Projects::SyncEvent, :count)
+ end
+ end
+ end
+
+ context 'for Namespaces::SyncEvent' do
+ let(:sync_event_class) { Namespaces::SyncEvent }
+ let(:hierarchy_class) { ::Ci::NamespaceMirror }
+
+ before do
+ Namespaces::SyncEvent.delete_all
+
+ group.update!(parent: parent_group_2)
+ parent_group_2.update!(parent: parent_group_1)
+ end
+
+ shared_examples 'event consuming' do
+ it 'consumes events' do
+ expect { execute }.to change(Namespaces::SyncEvent, :count).from(2).to(0)
+
+ expect(group.ci_namespace_mirror).to have_attributes(
+ traversal_ids: [parent_group_1.id, parent_group_2.id, group.id]
+ )
+ expect(parent_group_2.ci_namespace_mirror).to have_attributes(
+ traversal_ids: [parent_group_1.id, parent_group_2.id]
+ )
+ end
+ end
+
+ context 'when the FFs sync_traversal_ids, use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do
+ before do
+ stub_feature_flags(sync_traversal_ids: false,
+ use_traversal_ids: false,
+ use_traversal_ids_for_ancestors: false)
+ end
+
+ it_behaves_like 'event consuming'
+ end
+
+ it_behaves_like 'event consuming'
+
+ it 'enqueues Namespaces::ProcessSyncEventsWorker if any left' do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ expect(Namespaces::ProcessSyncEventsWorker).to receive(:perform_async)
+
+ execute
+ end
+
+ it 'does not enqueue Namespaces::ProcessSyncEventsWorker if no left' do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ expect(Namespaces::ProcessSyncEventsWorker).not_to receive(:perform_async)
+
+ execute
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 650353eb751..866015aa523 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -87,36 +87,10 @@ module Ci
end
context 'for specific runner' do
- context 'with tables decoupling disabled' do
- before do
- stub_feature_flags(
- ci_pending_builds_project_runners_decoupling: false,
- ci_queueing_builds_enabled_checks: false)
- end
-
- around do |example|
- allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
- example.run
- end
- end
-
- it 'does not pick a build' do
- expect(execute(specific_runner)).to be_nil
- end
- end
-
- context 'with tables decoupling enabled' do
- before do
- stub_feature_flags(
- ci_pending_builds_project_runners_decoupling: true,
- ci_queueing_builds_enabled_checks: true)
- end
-
- it 'does not pick a build' do
- expect(execute(specific_runner)).to be_nil
- expect(pending_job.reload).to be_failed
- expect(pending_job.queuing_entry).to be_nil
- end
+ it 'does not pick a build' do
+ expect(execute(specific_runner)).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job.queuing_entry).to be_nil
end
end
end
@@ -272,34 +246,10 @@ module Ci
context 'and uses project runner' do
let(:build) { execute(specific_runner) }
- context 'with tables decoupling disabled' do
- before do
- stub_feature_flags(
- ci_pending_builds_project_runners_decoupling: false,
- ci_queueing_builds_enabled_checks: false)
- end
-
- around do |example|
- allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
- example.run
- end
- end
-
- it { expect(build).to be_nil }
- end
-
- context 'with tables decoupling enabled' do
- before do
- stub_feature_flags(
- ci_pending_builds_project_runners_decoupling: true,
- ci_queueing_builds_enabled_checks: true)
- end
-
- it 'does not pick a build' do
- expect(build).to be_nil
- expect(pending_job.reload).to be_failed
- expect(pending_job.queuing_entry).to be_nil
- end
+ it 'does not pick a build' do
+ expect(build).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job.queuing_entry).to be_nil
end
end
end
@@ -790,17 +740,17 @@ module Ci
stub_feature_flags(ci_pending_builds_queue_source: true)
end
- context 'with ci_queueing_denormalize_shared_runners_information enabled' do
+ context 'with ci_queuing_use_denormalized_data_strategy enabled' do
before do
- stub_feature_flags(ci_queueing_denormalize_shared_runners_information: true)
+ stub_feature_flags(ci_queuing_use_denormalized_data_strategy: true)
end
include_examples 'handles runner assignment'
end
- context 'with ci_queueing_denormalize_shared_runners_information disabled' do
+ context 'with ci_queuing_use_denormalized_data_strategy disabled' do
before do
- stub_feature_flags(ci_queueing_denormalize_shared_runners_information: false)
+ stub_feature_flags(ci_queuing_use_denormalized_data_strategy: false)
end
around do |example|
@@ -812,37 +762,9 @@ module Ci
include_examples 'handles runner assignment'
end
- context 'with ci_queueing_denormalize_tags_information enabled' do
- before do
- stub_feature_flags(ci_queueing_denormalize_tags_information: true)
- end
-
- include_examples 'handles runner assignment'
- end
-
- context 'with ci_queueing_denormalize_tags_information disabled' do
+ context 'with ci_queuing_use_denormalized_data_strategy enabled' do
before do
- stub_feature_flags(ci_queueing_denormalize_tags_information: false)
- end
-
- around do |example|
- allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
- example.run
- end
- end
-
- include_examples 'handles runner assignment'
- end
-
- context 'with ci_queueing_denormalize_namespace_traversal_ids disabled' do
- before do
- stub_feature_flags(ci_queueing_denormalize_namespace_traversal_ids: false)
- end
-
- around do |example|
- allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/332952') do
- example.run
- end
+ stub_feature_flags(ci_queuing_use_denormalized_data_strategy: true)
end
include_examples 'handles runner assignment'
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 16635c64434..5d56084faa8 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -73,6 +73,8 @@ RSpec.describe Ci::RetryBuildService do
scheduled_at: 10.seconds.since)
end
+ let_it_be(:internal_job_variable) { create(:ci_job_variable, job: build) }
+
before_all do
# Make sure that build has both `stage_id` and `stage` because FactoryBot
# can reset one of the fields when assigning another. We plan to deprecate
@@ -86,7 +88,7 @@ RSpec.describe Ci::RetryBuildService do
file_type: file_type, job: build, expire_at: build.artifacts_expire_at)
end
- create(:ci_job_variable, job: build)
+ create(:ci_job_variable, :dotenv_source, job: build)
create(:ci_build_need, build: build)
create(:terraform_state_version, build: build)
end
@@ -125,6 +127,11 @@ RSpec.describe Ci::RetryBuildService do
expect(new_build.needs_attributes).to match(build.needs_attributes)
expect(new_build.needs).not_to match(build.needs)
end
+
+ it 'clones only internal job variables' do
+ expect(new_build.job_variables.count).to eq(1)
+ expect(new_build.job_variables).to contain_exactly(having_attributes(key: internal_job_variable.key, value: internal_job_variable.value))
+ end
end
describe 'reject accessors' do
@@ -147,7 +154,7 @@ RSpec.describe Ci::RetryBuildService do
Ci::Build.attribute_names.map(&:to_sym) +
Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
- [:tag_list, :needs_attributes] -
+ [:tag_list, :needs_attributes, :job_variables_attributes] -
# ee-specific accessors should be tested in ee/spec/services/ci/retry_build_service_spec.rb instead
described_class.extra_accessors -
[:dast_site_profiles_build, :dast_scanner_profiles_build] # join tables
@@ -310,7 +317,7 @@ RSpec.describe Ci::RetryBuildService do
expect(build).to be_processed
end
- context 'when build with deployment is retried' do
+ shared_examples_for 'when build with deployment is retried' do
let!(:build) do
create(:ci_build, :with_deployment, :deploy_to_production,
pipeline: pipeline, stage_id: stage.id, project: project)
@@ -329,7 +336,7 @@ RSpec.describe Ci::RetryBuildService do
end
end
- context 'when build with dynamic environment is retried' do
+ shared_examples_for 'when build with dynamic environment is retried' do
let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(other_developer) } }
let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
@@ -356,6 +363,18 @@ RSpec.describe Ci::RetryBuildService do
end
end
+ it_behaves_like 'when build with deployment is retried'
+ it_behaves_like 'when build with dynamic environment is retried'
+
+ context 'when create_deployment_in_separate_transaction feature flag is disabled' do
+ before do
+ stub_feature_flags(create_deployment_in_separate_transaction: false)
+ end
+
+ it_behaves_like 'when build with deployment is retried'
+ it_behaves_like 'when build with dynamic environment is retried'
+ end
+
context 'when build has needs' do
before do
create(:ci_build_need, build: build, name: 'build1')
diff --git a/spec/services/ci/stuck_builds/drop_pending_service_spec.rb b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
index aa0526edf57..ebc57af77a0 100644
--- a/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
+++ b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
@@ -3,8 +3,12 @@
require 'spec_helper'
RSpec.describe Ci::StuckBuilds::DropPendingService do
- let!(:runner) { create :ci_runner }
- let!(:job) { create :ci_build, runner: runner }
+ let_it_be(:runner) { create(:ci_runner) }
+ let_it_be(:pipeline) { create(:ci_empty_pipeline) }
+ let_it_be_with_reload(:job) do
+ create(:ci_build, pipeline: pipeline, runner: runner)
+ end
+
let(:created_at) { }
let(:updated_at) { }
@@ -14,6 +18,8 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do
job_attributes = { status: status }
job_attributes[:created_at] = created_at if created_at
job_attributes[:updated_at] = updated_at if updated_at
+ job_attributes.compact!
+
job.update!(job_attributes)
end
@@ -41,12 +47,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do
it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
-
- context 'when created_at is outside lookback window' do
- let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
-
- it_behaves_like 'job is unchanged'
- end
end
context 'when job was updated less than 1 day ago' do
@@ -63,12 +63,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do
it_behaves_like 'job is unchanged'
end
-
- context 'when created_at is outside lookback window' do
- let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
-
- it_behaves_like 'job is unchanged'
- end
end
context 'when job was updated more than 1 hour ago' do
@@ -85,12 +79,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do
it_behaves_like 'job is unchanged'
end
-
- context 'when created_at is outside lookback window' do
- let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
-
- it_behaves_like 'job is unchanged'
- end
end
end
@@ -115,12 +103,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do
it_behaves_like 'job is dropped with failure reason', 'stuck_or_timeout_failure'
end
-
- context 'when created_at is outside lookback window' do
- let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
-
- it_behaves_like 'job is unchanged'
- end
end
context 'when job was updated in less than 1 hour ago' do
@@ -137,12 +119,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do
it_behaves_like 'job is unchanged'
end
-
- context 'when created_at is outside lookback window' do
- let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
-
- it_behaves_like 'job is unchanged'
- end
end
end
end
@@ -179,12 +155,6 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do
it_behaves_like 'job is unchanged'
end
-
- context 'when created_at is outside lookback window' do
- let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
-
- it_behaves_like 'job is unchanged'
- end
end
end
diff --git a/spec/services/ci/update_pending_build_service_spec.rb b/spec/services/ci/update_pending_build_service_spec.rb
index d36564938c8..2bb0aded24a 100644
--- a/spec/services/ci/update_pending_build_service_spec.rb
+++ b/spec/services/ci/update_pending_build_service_spec.rb
@@ -43,9 +43,9 @@ RSpec.describe Ci::UpdatePendingBuildService do
expect(pending_build_2.instance_runners_enabled).to be_truthy
end
- context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
+ context 'when ci_pending_builds_maintain_denormalized_data is disabled' do
before do
- stub_feature_flags(ci_pending_builds_maintain_shared_runners_data: false)
+ stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false)
end
it 'does not update all pending builds', :aggregate_failures do
@@ -67,9 +67,9 @@ RSpec.describe Ci::UpdatePendingBuildService do
expect(pending_build_2.instance_runners_enabled).to be_truthy
end
- context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
+ context 'when ci_pending_builds_maintain_denormalized_data is disabled' do
before do
- stub_feature_flags(ci_pending_builds_maintain_shared_runners_data: false)
+ stub_feature_flags(ci_pending_builds_maintain_denormalized_data: false)
end
it 'does not update all pending builds', :aggregate_failures do
diff --git a/spec/services/clusters/agent_tokens/create_service_spec.rb b/spec/services/clusters/agent_tokens/create_service_spec.rb
index 92629af06c8..dc7abd1504b 100644
--- a/spec/services/clusters/agent_tokens/create_service_spec.rb
+++ b/spec/services/clusters/agent_tokens/create_service_spec.rb
@@ -47,6 +47,21 @@ RSpec.describe Clusters::AgentTokens::CreateService do
expect(token.name).to eq(params[:name])
end
+ it 'creates an activity event' do
+ expect { subject }.to change { ::Clusters::Agents::ActivityEvent.count }.by(1)
+
+ token = subject.payload[:token].reload
+ event = cluster_agent.activity_events.last
+
+ expect(event).to have_attributes(
+ kind: 'token_created',
+ level: 'info',
+ recorded_at: token.created_at,
+ user: token.created_by_user,
+ agent_token: token
+ )
+ end
+
context 'when params are invalid' do
let(:params) { { agent_id: 'bad_id' } }
@@ -54,6 +69,10 @@ RSpec.describe Clusters::AgentTokens::CreateService do
expect { subject }.not_to change(::Clusters::AgentToken, :count)
end
+ it 'does not create an activity event' do
+ expect { subject }.not_to change { ::Clusters::Agents::ActivityEvent.count }
+ end
+
it 'returns validation errors', :aggregate_failures do
expect(subject.status).to eq(:error)
expect(subject.message).to eq(["Agent must exist", "Name can't be blank"])
diff --git a/spec/services/clusters/cleanup/project_namespace_service_spec.rb b/spec/services/clusters/cleanup/project_namespace_service_spec.rb
index ec510b2e3c5..8d3ae217a9f 100644
--- a/spec/services/clusters/cleanup/project_namespace_service_spec.rb
+++ b/spec/services/clusters/cleanup/project_namespace_service_spec.rb
@@ -95,5 +95,31 @@ RSpec.describe Clusters::Cleanup::ProjectNamespaceService do
subject
end
end
+
+ context 'when there is a Kubeclient::HttpError' do
+ let(:kubeclient_instance_double) do
+ instance_double(Gitlab::Kubernetes::KubeClient)
+ end
+
+ ['Unauthorized', 'forbidden', 'Certificate verify Failed'].each do |message|
+ it 'schedules ::ServiceAccountWorker with accepted errors' do
+ allow(kubeclient_instance_double)
+ .to receive(:delete_namespace)
+ .and_raise(Kubeclient::HttpError.new(401, message, nil))
+
+ expect(Clusters::Cleanup::ServiceAccountWorker).to receive(:perform_async).with(cluster.id)
+
+ subject
+ end
+ end
+
+ it 'raises error with unaccepted errors' do
+ allow(kubeclient_instance_double)
+ .to receive(:delete_namespace)
+ .and_raise(Kubeclient::HttpError.new(401, 'unexpected message', nil))
+
+ expect { subject }.to raise_error(Kubeclient::HttpError)
+ end
+ end
end
end
diff --git a/spec/services/clusters/cleanup/service_account_service_spec.rb b/spec/services/clusters/cleanup/service_account_service_spec.rb
index adcdbd84da0..769762237f9 100644
--- a/spec/services/clusters/cleanup/service_account_service_spec.rb
+++ b/spec/services/clusters/cleanup/service_account_service_spec.rb
@@ -52,5 +52,19 @@ RSpec.describe Clusters::Cleanup::ServiceAccountService do
expect { subject }.to change { Clusters::Cluster.where(id: cluster.id).exists? }.from(true).to(false)
end
end
+
+ context 'when there is a Kubeclient::HttpError' do
+ ['Unauthorized', 'forbidden', 'Certificate verify Failed'].each do |message|
+ before do
+ allow(kubeclient_instance_double)
+ .to receive(:delete_service_account)
+ .and_raise(Kubeclient::HttpError.new(401, message, nil))
+ end
+
+ it 'destroys cluster' do
+ expect { subject }.to change { Clusters::Cluster.where(id: cluster.id).exists? }.from(true).to(false)
+ end
+ end
+ end
end
end
diff --git a/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb
index 9db3b9d2417..7147f1b9b28 100644
--- a/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb
+++ b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe Clusters::Integrations::PrometheusHealthCheckService, '#execute'
let(:prometheus_enabled) { true }
before do
- client = instance_double('PrometheusClient', healthy?: client_healthy)
+ client = instance_double('Gitlab::PrometheusClient', healthy?: client_healthy)
expect(prometheus).to receive(:prometheus_client).and_return(client)
end
diff --git a/spec/services/concerns/audit_event_save_type_spec.rb b/spec/services/concerns/audit_event_save_type_spec.rb
new file mode 100644
index 00000000000..fbaebd9f85c
--- /dev/null
+++ b/spec/services/concerns/audit_event_save_type_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuditEventSaveType do
+ subject(:target) { Object.new.extend(described_class) }
+
+ describe '#should_save_database? and #should_save_stream?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:query_method, :query_param, :result) do
+ :should_save_stream? | :stream | true
+ :should_save_stream? | :database_and_stream | true
+ :should_save_database? | :database | true
+ :should_save_database? | :database_and_stream | true
+ :should_save_stream? | :database | false
+ :should_save_stream? | nil | false
+ :should_save_database? | :stream | false
+ :should_save_database? | nil | false
+ end
+
+ with_them do
+ it 'returns corresponding results according to the query_method and query_param' do
+ expect(target.send(query_method, query_param)).to eq result
+ end
+ end
+ end
+end
diff --git a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb b/spec/services/dependency_proxy/find_cached_manifest_service_spec.rb
index ef608c9b113..29bdf1f11c3 100644
--- a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/find_cached_manifest_service_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe DependencyProxy::FindOrCreateManifestService do
+RSpec.describe DependencyProxy::FindCachedManifestService do
include DependencyProxyHelpers
let_it_be(:image) { 'alpine' }
@@ -49,14 +49,6 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
end
it_behaves_like 'returning no manifest'
-
- context 'with dependency_proxy_manifest_workhorse feature disabled' do
- before do
- stub_feature_flags(dependency_proxy_manifest_workhorse: false)
- end
-
- it_behaves_like 'downloading the manifest'
- end
end
context 'failed head request' do
@@ -66,14 +58,6 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
end
it_behaves_like 'returning no manifest'
-
- context 'with dependency_proxy_manifest_workhorse feature disabled' do
- before do
- stub_feature_flags(dependency_proxy_manifest_workhorse: false)
- end
-
- it_behaves_like 'downloading the manifest'
- end
end
end
@@ -105,20 +89,6 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
end
it_behaves_like 'returning no manifest'
-
- context 'with dependency_proxy_manifest_workhorse feature disabled' do
- before do
- stub_feature_flags(dependency_proxy_manifest_workhorse: false)
- end
-
- it 'downloads the new manifest and updates the existing record', :aggregate_failures do
- expect(subject[:status]).to eq(:success)
- expect(subject[:manifest]).to eq(dependency_proxy_manifest)
- expect(subject[:manifest].content_type).to eq(content_type)
- expect(subject[:manifest].digest).to eq(digest)
- expect(subject[:from_cache]).to eq false
- end
- end
end
context 'when the cached manifest is expired' do
@@ -129,14 +99,6 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
end
it_behaves_like 'returning no manifest'
-
- context 'with dependency_proxy_manifest_workhorse feature disabled' do
- before do
- stub_feature_flags(dependency_proxy_manifest_workhorse: false)
- end
-
- it_behaves_like 'downloading the manifest'
- end
end
context 'failed connection' do
diff --git a/spec/services/dependency_proxy/pull_manifest_service_spec.rb b/spec/services/dependency_proxy/pull_manifest_service_spec.rb
deleted file mode 100644
index 6018a3229fb..00000000000
--- a/spec/services/dependency_proxy/pull_manifest_service_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe DependencyProxy::PullManifestService do
- include DependencyProxyHelpers
-
- let(:image) { 'alpine' }
- let(:tag) { '3.9' }
- let(:token) { Digest::SHA256.hexdigest('123') }
- let(:manifest) { { foo: 'bar' }.to_json }
- let(:digest) { '12345' }
- let(:content_type) { 'foo' }
- let(:headers) do
- { DependencyProxy::Manifest::DIGEST_HEADER => digest, 'content-type' => content_type }
- end
-
- subject { described_class.new(image, tag, token).execute_with_manifest(&method(:check_response)) }
-
- context 'remote request is successful' do
- before do
- stub_manifest_download(image, tag, headers: headers)
- end
-
- it 'successfully returns the manifest' do
- def check_response(response)
- response[:file].rewind
-
- expect(response[:status]).to eq(:success)
- expect(response[:file].read).to eq(manifest)
- expect(response[:digest]).to eq(digest)
- expect(response[:content_type]).to eq(content_type)
- end
-
- subject
- end
- end
-
- context 'remote request is not found' do
- before do
- stub_manifest_download(image, tag, status: 404, body: 'Not found')
- end
-
- it 'returns a 404 not found error' do
- def check_response(response)
- expect(response[:status]).to eq(:error)
- expect(response[:http_status]).to eq(404)
- expect(response[:message]).to eq('Not found')
- end
-
- subject
- end
- end
-
- context 'net timeout exception' do
- before do
- manifest_link = DependencyProxy::Registry.manifest_url(image, tag)
-
- stub_full_request(manifest_link).to_timeout
- end
-
- it 'returns a 599 error' do
- def check_response(response)
- expect(response[:status]).to eq(:error)
- expect(response[:http_status]).to eq(599)
- expect(response[:message]).to eq('execution expired')
- end
-
- subject
- end
- end
-
- context 'no block is given' do
- subject { described_class.new(image, tag, token).execute_with_manifest }
-
- it { expect { subject }.to raise_error(ArgumentError, 'Block must be provided') }
- end
-end
diff --git a/spec/services/deployments/older_deployments_drop_service_spec.rb b/spec/services/deployments/older_deployments_drop_service_spec.rb
index e6fd6725d7d..d9a512a5dd2 100644
--- a/spec/services/deployments/older_deployments_drop_service_spec.rb
+++ b/spec/services/deployments/older_deployments_drop_service_spec.rb
@@ -70,6 +70,8 @@ RSpec.describe Deployments::OlderDeploymentsDropService do
let(:older_deployment) { create(:deployment, :created, environment: environment, deployable: build) }
let(:build) { create(:ci_build, :manual) }
+ # Manual jobs should not be accounted as outdated deployment jobs.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/255978 for more information.
it 'does not drop any builds nor track the exception' do
expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
diff --git a/spec/services/events/destroy_service_spec.rb b/spec/services/events/destroy_service_spec.rb
new file mode 100644
index 00000000000..8dcbb83eb1d
--- /dev/null
+++ b/spec/services/events/destroy_service_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Events::DestroyService do
+ subject(:service) { described_class.new(project) }
+
+ let_it_be(:project, reload: true) { create(:project, :repository) }
+ let_it_be(:another_project) { create(:project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:user) { create(:user) }
+
+ let!(:unrelated_event) { create(:event, :merged, project: another_project, target: another_project, author: user) }
+
+ before do
+ create(:event, :created, project: project, target: project, author: user)
+ create(:event, :created, project: project, target: merge_request, author: user)
+ create(:event, :merged, project: project, target: merge_request, author: user)
+ end
+
+ let(:events) { project.events }
+
+ describe '#execute', :aggregate_failures do
+ it 'deletes the events' do
+ response = nil
+
+ expect { response = subject.execute }.to change(Event, :count).by(-3)
+
+ expect(response).to be_success
+ expect(unrelated_event.reload).to be_present
+ end
+
+ context 'when an error is raised while deleting the records' do
+ before do
+ allow(project).to receive_message_chain(:events, :all, :delete_all).and_raise(ActiveRecord::ActiveRecordError)
+ end
+
+ it 'returns error' do
+ response = subject.execute
+
+ expect(response).to be_error
+ expect(response.message).to eq 'Failed to remove events.'
+ end
+
+ it 'does not delete events' do
+ expect { subject.execute }.not_to change(Event, :count)
+ end
+ end
+ end
+end
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
index 5a517ce6a64..e37d41562f9 100644
--- a/spec/services/feature_flags/create_service_spec.rb
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -62,10 +62,24 @@ RSpec.describe FeatureFlags::CreateService do
expect { subject }.to change { Operations::FeatureFlag.count }.by(1)
end
- it 'syncs the feature flag to Jira' do
- expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
+ context 'when Jira Connect subscription does not exist' do
+ it 'does not sync the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
- subject
+ subject
+ end
+ end
+
+ context 'when Jira Connect subscription exists' do
+ before do
+ create(:jira_connect_subscription, namespace: project.namespace)
+ end
+
+ it 'syncs the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
+
+ subject
+ end
end
it 'creates audit event' do
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
index 4858139d60a..abe0112b27e 100644
--- a/spec/services/feature_flags/update_service_spec.rb
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -27,10 +27,24 @@ RSpec.describe FeatureFlags::UpdateService do
expect(subject[:status]).to eq(:success)
end
- it 'syncs the feature flag to Jira' do
- expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
+ context 'when Jira Connect subscription does not exist' do
+ it 'does not sync the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
- subject
+ context 'when Jira Connect subscription exists' do
+ before do
+ create(:jira_connect_subscription, namespace: project.namespace)
+ end
+
+ it 'syncs the feature flag to Jira' do
+ expect(::JiraConnect::SyncFeatureFlagsWorker).to receive(:perform_async).with(Integer, Integer)
+
+ subject
+ end
end
it 'creates audit event with correct message' do
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index d70e458ba5e..5a637b0956b 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -554,44 +554,6 @@ RSpec.describe Git::BranchPushService, services: true do
end
end
- describe "housekeeping", :clean_gitlab_redis_cache, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
- let(:housekeeping) { Repositories::HousekeepingService.new(project) }
-
- before do
- allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping)
- end
-
- it 'does not perform housekeeping when not needed' do
- expect(housekeeping).not_to receive(:execute)
-
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
- end
-
- context 'when housekeeping is needed' do
- before do
- allow(housekeeping).to receive(:needed?).and_return(true)
- end
-
- it 'performs housekeeping' do
- expect(housekeeping).to receive(:execute)
-
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
- end
-
- it 'does not raise an exception' do
- allow(housekeeping).to receive(:try_obtain_lease).and_return(false)
-
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
- end
- end
-
- it 'increments the push counter' do
- expect(housekeeping).to receive(:increment!)
-
- execute_service(project, user, oldrev: oldrev, newrev: newrev, ref: ref)
- end
- end
-
describe "CI environments" do
context 'create branch' do
let(:oldrev) { blankrev }
diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb
index 2a223091d0c..f52df9b0073 100644
--- a/spec/services/git/process_ref_changes_service_spec.rb
+++ b/spec/services/git/process_ref_changes_service_spec.rb
@@ -161,6 +161,50 @@ RSpec.describe Git::ProcessRefChangesService do
end
end
end
+
+ describe "housekeeping", :clean_gitlab_redis_cache, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
+ let(:housekeeping) { Repositories::HousekeepingService.new(project) }
+
+ before do
+ allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping)
+
+ allow(push_service_class)
+ .to receive(:new)
+ .with(project, project.owner, hash_including(execute_project_hooks: true, create_push_event: true))
+ .exactly(changes.count).times
+ .and_return(service)
+ end
+
+ it 'does not perform housekeeping when not needed' do
+ expect(housekeeping).not_to receive(:execute)
+
+ subject.execute
+ end
+
+ context 'when housekeeping is needed' do
+ before do
+ allow(housekeeping).to receive(:needed?).and_return(true)
+ end
+
+ it 'performs housekeeping' do
+ expect(housekeeping).to receive(:execute)
+
+ subject.execute
+ end
+
+ it 'does not raise an exception' do
+ allow(housekeeping).to receive(:try_obtain_lease).and_return(false)
+
+ subject.execute
+ end
+ end
+
+ it 'increments the push counter' do
+ expect(housekeeping).to receive(:increment!)
+
+ subject.execute
+ end
+ end
end
context 'branch changes' do
diff --git a/spec/services/google_cloud/service_accounts_service_spec.rb b/spec/services/google_cloud/service_accounts_service_spec.rb
index a0d09affa72..505c623c02a 100644
--- a/spec/services/google_cloud/service_accounts_service_spec.rb
+++ b/spec/services/google_cloud/service_accounts_service_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe GoogleCloud::ServiceAccountsService do
- let_it_be(:project) { create(:project) }
-
let(:service) { described_class.new(project) }
describe 'find_for_project' do
+ let_it_be(:project) { create(:project) }
+
context 'when a project does not have GCP service account vars' do
before do
project.variables.build(key: 'blah', value: 'foo', environment_scope: 'world')
@@ -21,13 +21,13 @@ RSpec.describe GoogleCloud::ServiceAccountsService do
context 'when a project has GCP service account ci vars' do
before do
- project.variables.build(environment_scope: '*', key: 'GCP_PROJECT_ID', value: 'prj1')
- project.variables.build(environment_scope: '*', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock')
- project.variables.build(environment_scope: 'staging', key: 'GCP_PROJECT_ID', value: 'prj2')
- project.variables.build(environment_scope: 'staging', key: 'GCP_SERVICE_ACCOUNT', value: 'mock')
- project.variables.build(environment_scope: 'production', key: 'GCP_PROJECT_ID', value: 'prj3')
- project.variables.build(environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT', value: 'mock')
- project.variables.build(environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock')
+ project.variables.build(protected: true, environment_scope: '*', key: 'GCP_PROJECT_ID', value: 'prj1')
+ project.variables.build(protected: true, environment_scope: '*', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock')
+ project.variables.build(protected: true, environment_scope: 'staging', key: 'GCP_PROJECT_ID', value: 'prj2')
+ project.variables.build(protected: true, environment_scope: 'staging', key: 'GCP_SERVICE_ACCOUNT', value: 'mock')
+ project.variables.build(protected: true, environment_scope: 'production', key: 'GCP_PROJECT_ID', value: 'prj3')
+ project.variables.build(protected: true, environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT', value: 'mock')
+ project.variables.build(protected: true, environment_scope: 'production', key: 'GCP_SERVICE_ACCOUNT_KEY', value: 'mock')
project.save!
end
@@ -55,4 +55,55 @@ RSpec.describe GoogleCloud::ServiceAccountsService do
end
end
end
+
+ describe 'add_for_project' do
+ let_it_be(:project) { create(:project) }
+
+ it 'saves GCP creds as project CI vars' do
+ service.add_for_project('env_1', 'gcp_prj_id_1', 'srv_acc_1', 'srv_acc_key_1')
+ service.add_for_project('env_2', 'gcp_prj_id_2', 'srv_acc_2', 'srv_acc_key_2')
+
+ list = service.find_for_project
+
+ aggregate_failures 'testing list of service accounts' do
+ expect(list.length).to eq(2)
+
+ expect(list.first[:environment]).to eq('env_1')
+ expect(list.first[:gcp_project]).to eq('gcp_prj_id_1')
+ expect(list.first[:service_account_exists]).to eq(true)
+ expect(list.first[:service_account_key_exists]).to eq(true)
+
+ expect(list.second[:environment]).to eq('env_2')
+ expect(list.second[:gcp_project]).to eq('gcp_prj_id_2')
+ expect(list.second[:service_account_exists]).to eq(true)
+ expect(list.second[:service_account_key_exists]).to eq(true)
+ end
+ end
+
+ it 'replaces previously stored CI vars with new CI vars' do
+ service.add_for_project('env_1', 'new_project', 'srv_acc_1', 'srv_acc_key_1')
+
+ list = service.find_for_project
+
+ aggregate_failures 'testing list of service accounts' do
+ expect(list.length).to eq(2)
+
+ # asserting that the first service account is replaced
+ expect(list.first[:environment]).to eq('env_1')
+ expect(list.first[:gcp_project]).to eq('new_project')
+ expect(list.first[:service_account_exists]).to eq(true)
+ expect(list.first[:service_account_key_exists]).to eq(true)
+
+ expect(list.second[:environment]).to eq('env_2')
+ expect(list.second[:gcp_project]).to eq('gcp_prj_id_2')
+ expect(list.second[:service_account_exists]).to eq(true)
+ expect(list.second[:service_account_key_exists]).to eq(true)
+ end
+ end
+
+ it 'underlying project CI vars must be protected' do
+ expect(project.variables.first.protected).to eq(true)
+ expect(project.variables.second.protected).to eq(true)
+ end
+ end
end
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index 7ea08131419..81cab973b30 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -24,6 +24,16 @@ RSpec.describe Groups::CreateService, '#execute' do
end
end
+ context 'when `setup_for_company:true` is passed' do
+ let(:params) { group_params.merge(setup_for_company: true) }
+ let(:service) { described_class.new(user, params) }
+ let(:created_group) { service.execute }
+
+ it 'creates group with the specified setup_for_company' do
+ expect(created_group.setup_for_company).to eq(true)
+ end
+ end
+
context 'creating a group with `default_branch_protection` attribute' do
let(:params) { group_params.merge(default_branch_protection: Gitlab::Access::PROTECTION_NONE) }
let(:service) { described_class.new(user, params) }
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 35d46884f4d..3a696228382 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -792,7 +792,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
end
end
- context 'when group has pending builds' do
+ context 'when group has pending builds', :sidekiq_inline do
let_it_be(:project) { create(:project, :public, namespace: group.reload) }
let_it_be(:other_project) { create(:project) }
let_it_be(:pending_build) { create(:ci_pending_build, project: project) }
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index 53870e810b1..6e938984052 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -63,6 +63,8 @@ RSpec.describe Groups::UpdateSharedRunnersService do
let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }
it 'updates pending builds for the group' do
+ expect(::Ci::UpdatePendingBuildService).to receive(:new).and_call_original
+
subject
expect(pending_build_1.reload.instance_runners_enabled).to be_truthy
@@ -73,6 +75,8 @@ RSpec.describe Groups::UpdateSharedRunnersService do
let(:params) { { shared_runners_setting: 'invalid_enabled' } }
it 'does not update pending builds for the group' do
+ expect(::Ci::UpdatePendingBuildService).not_to receive(:new).and_call_original
+
subject
expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
@@ -99,6 +103,8 @@ RSpec.describe Groups::UpdateSharedRunnersService do
let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
it 'updates pending builds for the group' do
+ expect(::Ci::UpdatePendingBuildService).to receive(:new).and_call_original
+
subject
expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 776df01d399..04a94d96f67 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Import::GithubService do
let_it_be(:access_params) { { github_access_token: 'github-complex-token' } }
let_it_be(:params) { { repo_id: 123, new_name: 'new_repo', target_namespace: 'root' } }
- let(:subject) { described_class.new(client, user, params) }
+ subject(:github_importer) { described_class.new(client, user, params) }
before do
allow(subject).to receive(:authorized?).and_return(true)
@@ -110,6 +110,29 @@ RSpec.describe Import::GithubService do
end
end
end
+
+ context 'when a blocked/local URL is used as github_hostname' do
+ let(:message) { 'Error while attempting to import from GitHub' }
+ let(:error) { "Invalid URL: #{url}" }
+
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+ end
+
+ where(url: %w[https://localhost https://10.0.0.1])
+
+ with_them do
+ it 'returns and logs an error' do
+ allow(github_importer).to receive(:url).and_return(url)
+
+ expect(Gitlab::Import::Logger).to receive(:error).with({
+ message: message,
+ error: error
+ }).and_call_original
+ expect(github_importer.execute(access_params, :github)).to include(blocked_url_error(url))
+ end
+ end
+ end
end
context 'when remove_legacy_github_client feature flag is enabled' do
@@ -135,4 +158,12 @@ RSpec.describe Import::GithubService do
message: '"repository" size (101 Bytes) is larger than the limit of 100 Bytes.'
}
end
+
+ def blocked_url_error(url)
+ {
+ status: :error,
+ http_status: :bad_request,
+ message: "Invalid URL: #{url}"
+ }
+ end
end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb
new file mode 100644
index 00000000000..8fbab361ec4
--- /dev/null
+++ b/spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::IssuableEscalationStatuses::CreateService do
+ let_it_be(:project) { create(:project) }
+
+ let(:incident) { create(:incident, project: project) }
+ let(:service) { described_class.new(incident) }
+
+ subject(:execute) { service.execute}
+
+ it 'creates an escalation status for the incident with no policy set' do
+ expect { execute }.to change { incident.reload.incident_management_issuable_escalation_status }.from(nil)
+
+ status = incident.incident_management_issuable_escalation_status
+
+ expect(status.policy_id).to eq(nil)
+ expect(status.escalations_started_at).to eq(nil)
+ expect(status.status_name).to eq(:triggered)
+ end
+
+ context 'existing escalation status' do
+ let!(:existing_status) { create(:incident_management_issuable_escalation_status, issue: incident) }
+
+ it 'exits without changing anything' do
+ expect { execute }.not_to change { incident.reload.incident_management_issuable_escalation_status }
+ end
+ end
+end
diff --git a/spec/services/admin/propagate_integration_service_spec.rb b/spec/services/integrations/propagate_service_spec.rb
index b379286ba4f..7ae843f6aeb 100644
--- a/spec/services/admin/propagate_integration_service_spec.rb
+++ b/spec/services/integrations/propagate_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Admin::PropagateIntegrationService do
+RSpec.describe Integrations::PropagateService do
describe '.propagate' do
include JiraServiceHelper
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 18e03db11dc..8496bd31e00 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe Issues::CreateService do
include AfterNextHelpers
- let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be_with_reload(:project) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
let(:spam_params) { double }
@@ -107,6 +108,13 @@ RSpec.describe Issues::CreateService do
.to change { Label.where(incident_label_attributes).count }.by(1)
end
+ it 'calls IncidentManagement::Incidents::CreateEscalationStatusService' do
+ expect_next(::IncidentManagement::IssuableEscalationStatuses::CreateService, a_kind_of(Issue))
+ .to receive(:execute)
+
+ issue
+ end
+
context 'when invalid' do
before do
opts.merge!(title: '')
@@ -154,7 +162,7 @@ RSpec.describe Issues::CreateService do
end
it 'moves the issue to the end, in an asynchronous worker' do
- expect(IssuePlacementWorker).to receive(:perform_async).with(be_nil, Integer)
+ expect(Issues::PlacementWorker).to receive(:perform_async).with(be_nil, Integer)
described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
end
@@ -430,25 +438,29 @@ RSpec.describe Issues::CreateService do
end
context 'Quick actions' do
- context 'with assignee and milestone in params and command' do
+ context 'with assignee, milestone, and contact in params and command' do
+ let_it_be(:contact) { create(:contact, group: group) }
+
let(:opts) do
{
assignee_ids: [create(:user).id],
milestone_id: 1,
title: 'Title',
- description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}")
+ description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}"),
+ add_contacts: [contact.email]
}
end
before_all do
- project.add_maintainer(user)
+ group.add_maintainer(user)
project.add_maintainer(assignee)
end
- it 'assigns and sets milestone to issuable from command' do
+ it 'assigns, sets milestone, and sets contact to issuable from command' do
expect(issue).to be_persisted
expect(issue.assignees).to eq([assignee])
expect(issue.milestone).to eq(milestone)
+ expect(issue.issue_customer_relations_contacts.last.contact).to eq(contact)
end
end
end
diff --git a/spec/services/issues/set_crm_contacts_service_spec.rb b/spec/services/issues/set_crm_contacts_service_spec.rb
index 65b22fe3b35..628f70efad6 100644
--- a/spec/services/issues/set_crm_contacts_service_spec.rb
+++ b/spec/services/issues/set_crm_contacts_service_spec.rb
@@ -22,13 +22,13 @@ RSpec.describe Issues::SetCrmContactsService do
describe '#execute' do
context 'when the user has no permission' do
- let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } }
+ let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } }
it 'returns expected error response' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array(['You have insufficient permissions to set customer relations contacts for this issue'])
+ expect(response.message).to eq('You have insufficient permissions to set customer relations contacts for this issue')
end
end
@@ -38,20 +38,20 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'when the contact does not exist' do
- let(:params) { { crm_contact_ids: [non_existing_record_id] } }
+ let(:params) { { replace_ids: [non_existing_record_id] } }
it 'returns expected error response' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array(["Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}"])
+ expect(response.message).to eq("Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}")
end
end
context 'when the contact belongs to a different group' do
let(:group2) { create(:group) }
let(:contact) { create(:contact, group: group2) }
- let(:params) { { crm_contact_ids: [contact.id] } }
+ let(:params) { { replace_ids: [contact.id] } }
before do
group2.add_reporter(user)
@@ -61,12 +61,12 @@ RSpec.describe Issues::SetCrmContactsService do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array(["Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}"])
+ expect(response.message).to eq("Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}")
end
end
context 'replace' do
- let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } }
+ let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } }
it 'updates the issue with correct contacts' do
response = set_crm_contacts
@@ -77,7 +77,18 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'add' do
- let(:params) { { add_crm_contact_ids: [contacts[3].id] } }
+ let(:params) { { add_ids: [contacts[3].id] } }
+
+ it 'updates the issue with correct contacts' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[1], contacts[3]])
+ end
+ end
+
+ context 'add by email' do
+ let(:params) { { add_emails: [contacts[3].email] } }
it 'updates the issue with correct contacts' do
response = set_crm_contacts
@@ -88,7 +99,18 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'remove' do
- let(:params) { { remove_crm_contact_ids: [contacts[0].id] } }
+ let(:params) { { remove_ids: [contacts[0].id] } }
+
+ it 'updates the issue with correct contacts' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ expect(issue.customer_relations_contacts).to match_array([contacts[1]])
+ end
+ end
+
+ context 'remove by email' do
+ let(:params) { { remove_emails: [contacts[0].email] } }
it 'updates the issue with correct contacts' do
response = set_crm_contacts
@@ -100,18 +122,18 @@ RSpec.describe Issues::SetCrmContactsService do
context 'when attempting to add more than 6' do
let(:id) { contacts[0].id }
- let(:params) { { add_crm_contact_ids: [id, id, id, id, id, id, id] } }
+ let(:params) { { add_ids: [id, id, id, id, id, id, id] } }
it 'returns expected error message' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array(['You can only add up to 6 contacts at one time'])
+ expect(response.message).to eq('You can only add up to 6 contacts at one time')
end
end
context 'when trying to remove non-existent contact' do
- let(:params) { { remove_crm_contact_ids: [non_existing_record_id] } }
+ let(:params) { { remove_ids: [non_existing_record_id] } }
it 'returns expected error message' do
response = set_crm_contacts
@@ -122,10 +144,10 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'when combining params' do
- let(:error_invalid_params) { 'You cannot combine crm_contact_ids with add_crm_contact_ids or remove_crm_contact_ids' }
+ let(:error_invalid_params) { 'You cannot combine replace_ids with add_ids or remove_ids' }
context 'add and remove' do
- let(:params) { { remove_crm_contact_ids: [contacts[1].id], add_crm_contact_ids: [contacts[3].id] } }
+ let(:params) { { remove_ids: [contacts[1].id], add_ids: [contacts[3].id] } }
it 'updates the issue with correct contacts' do
response = set_crm_contacts
@@ -136,27 +158,57 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'replace and remove' do
- let(:params) { { crm_contact_ids: [contacts[3].id], remove_crm_contact_ids: [contacts[0].id] } }
+ let(:params) { { replace_ids: [contacts[3].id], remove_ids: [contacts[0].id] } }
it 'returns expected error response' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array([error_invalid_params])
+ expect(response.message).to eq(error_invalid_params)
end
end
context 'replace and add' do
- let(:params) { { crm_contact_ids: [contacts[3].id], add_crm_contact_ids: [contacts[1].id] } }
+ let(:params) { { replace_ids: [contacts[3].id], add_ids: [contacts[1].id] } }
it 'returns expected error response' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array([error_invalid_params])
+ expect(response.message).to eq(error_invalid_params)
end
end
end
+
+ context 'when trying to add an existing issue contact' do
+ let(:params) { { add_ids: [contacts[0].id] } }
+
+ it 'does not return an error' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ end
+ end
+
+ context 'when trying to add the same contact twice' do
+ let(:params) { { add_ids: [contacts[3].id, contacts[3].id] } }
+
+ it 'does not return an error' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ end
+ end
+
+ context 'when trying to remove a contact not attached to the issue' do
+ let(:params) { { remove_ids: [contacts[3].id] } }
+
+ it 'does not return an error' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ end
+ end
end
end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 85b8fef685e..4739b7e0f28 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -29,6 +29,8 @@ RSpec.describe Issues::UpdateService, :mailer do
end
describe 'execute' do
+ let_it_be(:contact) { create(:contact, group: group) }
+
def find_note(starting_with)
issue.notes.find do |note|
note && note.note.start_with?(starting_with)
@@ -57,7 +59,8 @@ RSpec.describe Issues::UpdateService, :mailer do
due_date: Date.tomorrow,
discussion_locked: true,
severity: 'low',
- milestone_id: milestone.id
+ milestone_id: milestone.id,
+ add_contacts: [contact.email]
}
end
@@ -76,6 +79,7 @@ RSpec.describe Issues::UpdateService, :mailer do
expect(issue.discussion_locked).to be_truthy
expect(issue.confidential).to be_falsey
expect(issue.milestone).to eq milestone
+ expect(issue.issue_customer_relations_contacts.last.contact).to eq contact
end
it 'updates issue milestone when passing `milestone` param' do
@@ -319,7 +323,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts[:move_between_ids] = [issue1.id, issue2.id]
- expect(IssueRebalancingWorker).not_to receive(:perform_async)
+ expect(Issues::RebalancingWorker).not_to receive(:perform_async)
update_issue(opts)
expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
@@ -335,7 +339,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts[:move_between_ids] = [issue1.id, issue2.id]
- expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
+ expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
update_issue(opts)
expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
@@ -349,7 +353,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts[:move_between_ids] = [issue1.id, issue2.id]
- expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
+ expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
update_issue(opts)
expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
@@ -363,7 +367,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts[:move_between_ids] = [issue1.id, issue2.id]
- expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
+ expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
update_issue(opts)
expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
diff --git a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
index bdb3d0f6700..d3d57ea2444 100644
--- a/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
+++ b/spec/services/loose_foreign_keys/batch_cleaner_service_spec.rb
@@ -21,33 +21,34 @@ RSpec.describe LooseForeignKeys::BatchCleanerService do
migration.track_record_deletions(:_test_loose_fk_parent_table)
end
- let(:parent_model) do
- Class.new(ApplicationRecord) do
- self.table_name = '_test_loose_fk_parent_table'
-
- include LooseForeignKey
-
- loose_foreign_key :_test_loose_fk_child_table_1, :parent_id, on_delete: :async_delete
- loose_foreign_key :_test_loose_fk_child_table_2, :parent_id_with_different_column, on_delete: :async_nullify
- end
- end
-
- let(:child_model_1) do
- Class.new(ApplicationRecord) do
- self.table_name = '_test_loose_fk_child_table_1'
- end
- end
-
- let(:child_model_2) do
- Class.new(ApplicationRecord) do
- self.table_name = '_test_loose_fk_child_table_2'
- end
+ let(:loose_foreign_key_definitions) do
+ [
+ ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
+ '_test_loose_fk_child_table_1',
+ '_test_loose_fk_parent_table',
+ {
+ column: 'parent_id',
+ on_delete: :async_delete,
+ gitlab_schema: :gitlab_main
+ }
+ ),
+ ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
+ '_test_loose_fk_child_table_2',
+ '_test_loose_fk_parent_table',
+ {
+ column: 'parent_id_with_different_column',
+ on_delete: :async_nullify,
+ gitlab_schema: :gitlab_main
+ }
+ )
+ ]
end
+ let(:loose_fk_parent_table) { table(:_test_loose_fk_parent_table) }
let(:loose_fk_child_table_1) { table(:_test_loose_fk_child_table_1) }
let(:loose_fk_child_table_2) { table(:_test_loose_fk_child_table_2) }
- let(:parent_record_1) { parent_model.create! }
- let(:other_parent_record) { parent_model.create! }
+ let(:parent_record_1) { loose_fk_parent_table.create! }
+ let(:other_parent_record) { loose_fk_parent_table.create! }
before(:all) do
create_table_structure
@@ -87,12 +88,10 @@ RSpec.describe LooseForeignKeys::BatchCleanerService do
expect(loose_fk_child_table_1.count).to eq(4)
expect(loose_fk_child_table_2.count).to eq(4)
- described_class.new(parent_klass: parent_model,
- deleted_parent_records: LooseForeignKeys::DeletedRecord.status_pending.all,
- models_by_table_name: {
- '_test_loose_fk_child_table_1' => child_model_1,
- '_test_loose_fk_child_table_2' => child_model_2
- }).execute
+ described_class.new(parent_table: '_test_loose_fk_parent_table',
+ loose_foreign_key_definitions: loose_foreign_key_definitions,
+ deleted_parent_records: LooseForeignKeys::DeletedRecord.load_batch_for_table('public._test_loose_fk_parent_table', 100)
+ ).execute
end
it 'cleans up the child records' do
@@ -108,7 +107,7 @@ RSpec.describe LooseForeignKeys::BatchCleanerService do
it 'records the DeletedRecord status updates', :prometheus do
counter = Gitlab::Metrics.registry.get(:loose_foreign_key_processed_deleted_records)
- expect(counter.get(table: parent_model.table_name, db_config_name: 'main')).to eq(1)
+ expect(counter.get(table: loose_fk_parent_table.table_name, db_config_name: 'main')).to eq(1)
end
it 'does not delete unrelated records' do
diff --git a/spec/services/loose_foreign_keys/cleaner_service_spec.rb b/spec/services/loose_foreign_keys/cleaner_service_spec.rb
index 6f37ac49435..2cfd8385953 100644
--- a/spec/services/loose_foreign_keys/cleaner_service_spec.rb
+++ b/spec/services/loose_foreign_keys/cleaner_service_spec.rb
@@ -13,21 +13,21 @@ RSpec.describe LooseForeignKeys::CleanerService do
let(:loose_fk_definition) do
ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
- 'projects',
'issues',
+ 'projects',
{
column: 'project_id',
- on_delete: :async_nullify
+ on_delete: :async_nullify,
+ gitlab_schema: :gitlab_main
}
)
end
subject(:cleaner_service) do
described_class.new(
- model: Issue,
- foreign_key_definition: loose_fk_definition,
- deleted_parent_records: deleted_records
- )
+ loose_foreign_key_definition: loose_fk_definition,
+ connection: ApplicationRecord.connection,
+ deleted_parent_records: deleted_records)
end
context 'when invalid foreign key definition is passed' do
@@ -80,11 +80,12 @@ RSpec.describe LooseForeignKeys::CleanerService do
let(:loose_fk_definition) do
ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
- 'users',
'project_authorizations',
+ 'users',
{
column: 'user_id',
- on_delete: :async_delete
+ on_delete: :async_delete,
+ gitlab_schema: :gitlab_main
}
)
end
@@ -97,8 +98,8 @@ RSpec.describe LooseForeignKeys::CleanerService do
subject(:cleaner_service) do
described_class.new(
- model: ProjectAuthorization,
- foreign_key_definition: loose_fk_definition,
+ loose_foreign_key_definition: loose_fk_definition,
+ connection: ApplicationRecord.connection,
deleted_parent_records: deleted_records
)
end
@@ -130,8 +131,8 @@ RSpec.describe LooseForeignKeys::CleanerService do
context 'when with_skip_locked parameter is true' do
subject(:cleaner_service) do
described_class.new(
- model: Issue,
- foreign_key_definition: loose_fk_definition,
+ loose_foreign_key_definition: loose_fk_definition,
+ connection: ApplicationRecord.connection,
deleted_parent_records: deleted_records,
with_skip_locked: true
)
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index fe866d73215..13f56fe7458 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -127,85 +127,11 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
end
end
- context 'when tracking the areas of focus', :snowplow do
- context 'when areas_of_focus is not passed' do
- it 'does not track' do
- execute_service
-
- expect_no_snowplow_event(category: described_class.name, action: 'area_of_focus')
- end
- end
-
- context 'when 1 areas_of_focus is passed' do
- let(:additional_params) { { invite_source: '_invite_source_', areas_of_focus: ['no_selection'] } }
-
- it 'tracks the areas_of_focus from params' do
- execute_service
-
- expect_snowplow_event(
- category: described_class.name,
- action: 'area_of_focus',
- label: 'no_selection',
- property: source.members.last.id.to_s
- )
- end
-
- context 'when passing many user ids' do
- let(:another_user) { create(:user) }
- let(:user_ids) { [member.id, another_user.id].join(',') }
-
- it 'tracks the areas_of_focus from params' do
- execute_service
-
- members = source.members.last(2)
-
- expect_snowplow_event(
- category: described_class.name,
- action: 'area_of_focus',
- label: 'no_selection',
- property: members.first.id.to_s
- )
- expect_snowplow_event(
- category: described_class.name,
- action: 'area_of_focus',
- label: 'no_selection',
- property: members.last.id.to_s
- )
- end
- end
- end
-
- context 'when multiple areas_of_focus are passed' do
- let(:additional_params) { { invite_source: '_invite_source_', areas_of_focus: %w[no_selection Other] } }
-
- it 'tracks the areas_of_focus from params' do
- execute_service
-
- expect_snowplow_event(
- category: described_class.name,
- action: 'area_of_focus',
- label: 'no_selection',
- property: source.members.last.id.to_s
- )
- expect_snowplow_event(
- category: described_class.name,
- action: 'area_of_focus',
- label: 'Other',
- property: source.members.last.id.to_s
- )
- end
- end
- end
-
context 'when assigning tasks to be done' do
let(:additional_params) do
{ invite_source: '_invite_source_', tasks_to_be_done: %w(ci code), tasks_project_id: source.id }
end
- before do
- stub_experiments(invite_members_for_task: true)
- end
-
it 'creates 2 task issues', :aggregate_failures do
expect(TasksToBeDone::CreateWorker)
.to receive(:perform_async)
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
index cbbd193a411..781be57d709 100644
--- a/spec/services/merge_requests/after_create_service_spec.rb
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -85,13 +85,67 @@ RSpec.describe MergeRequests::AfterCreateService do
context 'when merge request is in preparing state' do
before do
+ merge_request.mark_as_unchecked! unless merge_request.unchecked?
merge_request.mark_as_preparing!
- execute_service
end
it 'marks the merge request as unchecked' do
+ execute_service
+
expect(merge_request.reload).to be_unchecked
end
+
+ context 'when preparing for mergeability fails' do
+ before do
+ # This is only one of the possible cases that can fail. This is to
+ # simulate a failure that happens during the service call.
+ allow(merge_request)
+ .to receive(:update_head_pipeline)
+ .and_raise(StandardError)
+ end
+
+ it 'does not mark the merge request as unchecked' do
+ expect { execute_service }.to raise_error(StandardError)
+ expect(merge_request.reload).to be_preparing
+ end
+
+ context 'when early_prepare_for_mergeability feature flag is disabled' do
+ before do
+ stub_feature_flags(early_prepare_for_mergeability: false)
+ end
+
+ it 'does not mark the merge request as unchecked' do
+ expect { execute_service }.to raise_error(StandardError)
+ expect(merge_request.reload).to be_preparing
+ end
+ end
+ end
+
+ context 'when preparing merge request fails' do
+ before do
+ # This is only one of the possible cases that can fail. This is to
+ # simulate a failure that happens during the service call.
+ allow(merge_request)
+ .to receive_message_chain(:diffs, :write_cache)
+ .and_raise(StandardError)
+ end
+
+ it 'still marks the merge request as unchecked' do
+ expect { execute_service }.to raise_error(StandardError)
+ expect(merge_request.reload).to be_unchecked
+ end
+
+ context 'when early_prepare_for_mergeability feature flag is disabled' do
+ before do
+ stub_feature_flags(early_prepare_for_mergeability: false)
+ end
+
+ it 'does not mark the merge request as unchecked' do
+ expect { execute_service }.to raise_error(StandardError)
+ expect(merge_request.reload).to be_preparing
+ end
+ end
+ end
end
it 'increments the usage data counter of create event' do
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index d30b2721a36..4d20d62b864 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe MergeRequests::ApprovalService do
describe '#execute' do
let(:user) { create(:user) }
- let(:merge_request) { create(:merge_request) }
+ let(:merge_request) { create(:merge_request, reviewers: [user]) }
let(:project) { merge_request.project }
let!(:todo) { create(:todo, user: user, project: project, target: merge_request) }
@@ -59,6 +59,14 @@ RSpec.describe MergeRequests::ApprovalService do
service.execute(merge_request)
end
+ it 'removes attention requested state' do
+ expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
+ .with(project: project, current_user: user, merge_request: merge_request, user: user)
+ .and_call_original
+
+ service.execute(merge_request)
+ end
+
context 'with remaining approvals' do
it 'fires an approval webhook' do
expect(service).to receive(:execute_hooks).with(merge_request, 'approved')
diff --git a/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb b/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb
new file mode 100644
index 00000000000..fe4ce0dab5e
--- /dev/null
+++ b/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::BulkRemoveAttentionRequestedService do
+ let(:current_user) { create(:user) }
+ let(:user) { create(:user) }
+ let(:assignee_user) { create(:user) }
+ let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) }
+ let(:reviewer) { merge_request.find_reviewer(user) }
+ let(:assignee) { merge_request.find_assignee(assignee_user) }
+ let(:project) { merge_request.project }
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request) }
+ let(:result) { service.execute }
+
+ before do
+ project.add_developer(current_user)
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ context 'invalid permissions' do
+ let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request) }
+
+ it 'returns an error' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ context 'updates reviewers and assignees' do
+ it 'returns success' do
+ expect(result[:status]).to eq :success
+ end
+
+ it 'updates reviewers state' do
+ service.execute
+ reviewer.reload
+ assignee.reload
+
+ expect(reviewer.state).to eq 'reviewed'
+ expect(assignee.state).to eq 'reviewed'
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index 86d972bc516..d36a2f75cfe 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -54,6 +54,10 @@ RSpec.describe MergeRequests::CloseService do
expect(todo.reload).to be_done
end
+ it 'removes attention requested state' do
+ expect(merge_request.find_assignee(user2).attention_requested?).to eq(false)
+ end
+
context 'when auto merge is enabled' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
diff --git a/spec/services/merge_requests/handle_assignees_change_service_spec.rb b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
index c43f5db6059..fa3b1614e21 100644
--- a/spec/services/merge_requests/handle_assignees_change_service_spec.rb
+++ b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
@@ -87,6 +87,14 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
expect(todo).to be_pending
end
+ it 'removes attention requested state' do
+ expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
+ .with(project: project, current_user: user, merge_request: merge_request, user: user)
+ .and_call_original
+
+ execute
+ end
+
it 'tracks users assigned event' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.to receive(:track_users_assigned_to_mr).once.with(users: [assignee])
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index e3f33304aab..127c94763d9 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -151,7 +151,7 @@ RSpec.describe MergeRequests::MergeService do
it 'closes GitLab issue tracker issues' do
issue = create :issue, project: project
- commit = instance_double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
+ commit = double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
allow(merge_request).to receive(:commits).and_return([commit])
merge_request.cache_merge_request_closes_issues!
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 0a781aee704..19fac3b5095 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -150,7 +150,10 @@ RSpec.describe MergeRequests::MergeToRefService do
merge_request.update!(squash: true)
end
- it_behaves_like 'MergeService for target ref'
+ it_behaves_like 'successfully merges to ref with merge method' do
+ let(:first_parent_ref) { 'refs/heads/master' }
+ let(:target_ref) { merge_request.merge_ref_path }
+ end
it 'does not squash before merging' do
expect(MergeRequests::SquashService).not_to receive(:new)
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index ca561376581..e671bbf2cd6 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -80,6 +80,27 @@ RSpec.describe MergeRequests::RebaseService do
end
end
+ context 'with a pre-receive failure' do
+ let(:pre_receive_error) { "Commit message does not follow the pattern 'ACME'" }
+ let(:merge_error) { "Something went wrong during the rebase pre-receive hook: #{pre_receive_error}." }
+
+ before do
+ allow(repository).to receive(:gitaly_operation_client).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{pre_receive_error}")
+ end
+
+ it 'saves a specific message' do
+ subject.execute(merge_request)
+
+ expect(merge_request.reload.merge_error).to eq merge_error
+ end
+
+ it 'returns an error' do
+ expect(service.execute(merge_request)).to match(
+ status: :error,
+ message: merge_error)
+ end
+ end
+
context 'with git command failure' do
before do
allow(repository).to receive(:gitaly_operation_client).and_raise(Gitlab::Git::Repository::GitError, 'Something went wrong')
diff --git a/spec/services/merge_requests/remove_attention_requested_service_spec.rb b/spec/services/merge_requests/remove_attention_requested_service_spec.rb
new file mode 100644
index 00000000000..875afc2dc7e
--- /dev/null
+++ b/spec/services/merge_requests/remove_attention_requested_service_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::RemoveAttentionRequestedService do
+ let(:current_user) { create(:user) }
+ let(:user) { create(:user) }
+ let(:assignee_user) { create(:user) }
+ let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) }
+ let(:reviewer) { merge_request.find_reviewer(user) }
+ let(:assignee) { merge_request.find_assignee(assignee_user) }
+ let(:project) { merge_request.project }
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) }
+ let(:result) { service.execute }
+
+ before do
+ project.add_developer(current_user)
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ context 'invalid permissions' do
+ let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request, user: user) }
+
+ it 'returns an error' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ context 'reviewer does not exist' do
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: create(:user)) }
+
+ it 'returns an error' do
+ expect(result[:status]).to eq :error
+ end
+ end
+
+ context 'reviewer exists' do
+ it 'returns success' do
+ expect(result[:status]).to eq :success
+ end
+
+ it 'updates reviewers state' do
+ service.execute
+ reviewer.reload
+
+ expect(reviewer.state).to eq 'reviewed'
+ end
+ end
+
+ context 'assignee exists' do
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: assignee_user) }
+
+ before do
+ assignee.update!(state: :reviewed)
+ end
+
+ it 'returns success' do
+ expect(result[:status]).to eq :success
+ end
+
+ it 'updates assignees state' do
+ service.execute
+ assignee.reload
+
+ expect(assignee.state).to eq 'reviewed'
+ end
+ end
+
+ context 'assignee is the same as reviewer' do
+ let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [user]) }
+ let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) }
+ let(:assignee) { merge_request.find_assignee(user) }
+
+ it 'updates reviewers and assignees state' do
+ service.execute
+ reviewer.reload
+ assignee.reload
+
+ expect(reviewer.state).to eq 'reviewed'
+ expect(assignee.state).to eq 'reviewed'
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb
index 74f3a1b06fc..2f191f2ee44 100644
--- a/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb
+++ b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb
@@ -26,6 +26,12 @@ RSpec.describe MergeRequests::ResolvedDiscussionNotificationService do
subject.execute(merge_request)
end
+
+ it "doesn't send a webhook" do
+ expect_any_instance_of(MergeRequests::BaseService).not_to receive(:execute_hooks)
+
+ subject.execute(merge_request)
+ end
end
context "when all discussions are resolved" do
@@ -44,6 +50,12 @@ RSpec.describe MergeRequests::ResolvedDiscussionNotificationService do
subject.execute(merge_request)
end
+
+ it "sends a webhook" do
+ expect_any_instance_of(MergeRequests::BaseService).to receive(:execute_hooks).with(merge_request, 'update')
+
+ subject.execute(merge_request)
+ end
end
end
end
diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb
index 09f83624e05..af48e8f5dae 100644
--- a/spec/services/merge_requests/squash_service_spec.rb
+++ b/spec/services/merge_requests/squash_service_spec.rb
@@ -55,18 +55,26 @@ RSpec.describe MergeRequests::SquashService do
expect(merge_request).to receive(:commits_count).at_least(:once).and_return(1)
end
- it 'will skip performing the squash, as the outcome would be the same' do
- expect(merge_request.target_project.repository).not_to receive(:squash)
+ it 'will still perform the squash' do
+ expect(merge_request.target_project.repository).to receive(:squash).and_return('sha')
service.execute
end
- it 'will still perform the squash when a custom squash commit message has been provided' do
- service = described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: 'A custom commit message' })
+ context 'when squash message matches commit message' do
+ let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: merge_request.first_commit.safe_message }) }
- expect(merge_request.target_project.repository).to receive(:squash).and_return('sha')
+ it 'returns that commit SHA' do
+ result = service.execute
- service.execute
+ expect(result).to match(status: :success, squash_sha: merge_request.diff_head_sha)
+ end
+
+ it 'does not perform any git actions' do
+ expect(repository).not_to receive(:squash)
+
+ service.execute
+ end
end
end
@@ -113,17 +121,7 @@ RSpec.describe MergeRequests::SquashService do
context 'when there is only one commit in the merge request' do
let(:merge_request) { merge_request_with_one_commit }
- it 'returns that commit SHA' do
- result = service.execute
-
- expect(result).to match(status: :success, squash_sha: merge_request.diff_head_sha)
- end
-
- it 'does not perform any git actions' do
- expect(repository).not_to receive(:popen)
-
- service.execute
- end
+ include_examples 'the squash succeeds'
end
context 'when squashing only new files' do
diff --git a/spec/services/merge_requests/toggle_attention_requested_service_spec.rb b/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
index a26b1be529e..63fa61b8097 100644
--- a/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
+++ b/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
@@ -13,9 +13,14 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do
let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) }
let(:result) { service.execute }
let(:todo_service) { spy('todo service') }
+ let(:notification_service) { spy('notification service') }
before do
+ allow(NotificationService).to receive(:new) { notification_service }
allow(service).to receive(:todo_service).and_return(todo_service)
+ allow(service).to receive(:notification_service).and_return(notification_service)
+ allow(SystemNoteService).to receive(:request_attention)
+ allow(SystemNoteService).to receive(:remove_attention_request)
project.add_developer(current_user)
project.add_developer(user)
@@ -59,6 +64,20 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do
service.execute
end
+
+ it 'sends email to reviewer' do
+ expect(notification_service).to receive_message_chain(:async, :attention_requested_of_merge_request).with(merge_request, current_user, user)
+
+ service.execute
+ end
+
+ it 'removes attention requested state' do
+ expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
+ .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user)
+ .and_call_original
+
+ service.execute
+ end
end
context 'assignee exists' do
@@ -84,6 +103,20 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do
service.execute
end
+
+ it 'creates a request attention system note' do
+ expect(SystemNoteService).to receive(:request_attention).with(merge_request, merge_request.project, current_user, assignee_user)
+
+ service.execute
+ end
+
+ it 'removes attention requested state' do
+ expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
+ .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user)
+ .and_call_original
+
+ service.execute
+ end
end
context 'assignee is the same as reviewer' do
@@ -123,6 +156,12 @@ RSpec.describe MergeRequests::ToggleAttentionRequestedService do
service.execute
end
+
+ it 'creates a remove attention request system note' do
+ expect(SystemNoteService).to receive(:remove_attention_request).with(merge_request, merge_request.project, current_user, user)
+
+ service.execute
+ end
end
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index fbf5b183365..24775ce06a4 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2425,6 +2425,45 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { notification.review_requested_of_merge_request(merge_request, current_user, reviewer) }
end
end
+
+ describe '#attention_requested_of_merge_request' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:reviewer) { create(:user) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, reviewers: [reviewer]) }
+
+ it 'sends email to reviewer', :aggregate_failures do
+ notification.attention_requested_of_merge_request(merge_request, current_user, reviewer)
+
+ merge_request.reviewers.each { |reviewer| should_email(reviewer) }
+ should_not_email(merge_request.author)
+ should_not_email(@u_watcher)
+ should_not_email(@u_participant_mentioned)
+ should_not_email(@subscriber)
+ should_not_email(@watcher_and_subscriber)
+ should_not_email(@u_guest_watcher)
+ should_not_email(@u_guest_custom)
+ should_not_email(@u_custom_global)
+ should_not_email(@unsubscriber)
+ should_not_email(@u_participating)
+ should_not_email(@u_disabled)
+ should_not_email(@u_lazy_participant)
+ end
+
+ it 'adds "attention requested" reason' do
+ notification.attention_requested_of_merge_request(merge_request, current_user, [reviewer])
+
+ merge_request.reviewers.each do |reviewer|
+ email = find_email_for(reviewer)
+
+ expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::ATTENTION_REQUESTED)
+ end
+ end
+
+ it_behaves_like 'project emails are disabled' do
+ let(:notification_target) { merge_request }
+ let(:notification_trigger) { notification.attention_requested_of_merge_request(merge_request, current_user, reviewer) }
+ end
+ end
end
describe 'Projects', :deliver_mails_inline do
diff --git a/spec/services/packages/debian/update_distribution_service_spec.rb b/spec/services/packages/debian/update_distribution_service_spec.rb
index 2aa34a62111..3dff2754cec 100644
--- a/spec/services/packages/debian/update_distribution_service_spec.rb
+++ b/spec/services/packages/debian/update_distribution_service_spec.rb
@@ -61,9 +61,9 @@ RSpec.describe Packages::Debian::UpdateDistributionService do
let_it_be(:architecture0) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'all') }
let_it_be(:architecture1) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'architecture1') }
let_it_be(:architecture2) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'architecture2') }
- let_it_be(:component_file1) { create("debian_#{container_type}_component_file", :source, component: component1) }
+ let_it_be(:component_file1) { create("debian_#{container_type}_component_file", :sources, component: component1) }
let_it_be(:component_file2) { create("debian_#{container_type}_component_file", component: component1, architecture: architecture1) }
- let_it_be(:component_file3) { create("debian_#{container_type}_component_file", :source, component: component2) }
+ let_it_be(:component_file3) { create("debian_#{container_type}_component_file", :sources, component: component2) }
let_it_be(:component_file4) { create("debian_#{container_type}_component_file", component: component2, architecture: architecture2) }
let(:original_params) do
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index b1beb2adb3b..3bb675058df 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -89,17 +89,6 @@ RSpec.describe Packages::Npm::CreatePackageService do
end
end
end
-
- context 'with packages_npm_abbreviated_metadata disabled' do
- before do
- stub_feature_flags(packages_npm_abbreviated_metadata: false)
- end
-
- it 'creates a package without metadatum' do
- expect { subject }
- .not_to change { Packages::Npm::Metadatum.count }
- end
- end
end
describe '#execute' do
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index ac84614121a..b22f276ee1f 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -55,48 +55,16 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
.and change { Ci::Pipeline.count }.by(-1)
end
- context 'with abort_deleted_project_pipelines disabled' do
- stub_feature_flags(abort_deleted_project_pipelines: false)
+ it 'avoids N+1 queries' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
- it 'avoids N+1 queries' do
- recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create(:ci_pipeline_artifact, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
- project = create(:project, :repository, namespace: user.namespace)
- pipeline = create(:ci_pipeline, project: project)
- builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
- create(:ci_pipeline_artifact, pipeline: pipeline)
- create_list(:ci_build_trace_chunk, 3, build: builds[0])
-
- expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
- end
- end
-
- context 'with ci_optimize_project_records_destruction disabled' do
- stub_feature_flags(ci_optimize_project_records_destruction: false)
-
- it 'avoids N+1 queries' do
- recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
-
- project = create(:project, :repository, namespace: user.namespace)
- pipeline = create(:ci_pipeline, project: project)
- builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
- create_list(:ci_build_trace_chunk, 3, build: builds[0])
-
- expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
- end
- end
-
- context 'with ci_optimize_project_records_destruction and abort_deleted_project_pipelines enabled' do
- it 'avoids N+1 queries' do
- recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
-
- project = create(:project, :repository, namespace: user.namespace)
- pipeline = create(:ci_pipeline, project: project)
- builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
- create_list(:ci_build_trace_chunk, 3, build: builds[0])
-
- expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
- end
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
end
it_behaves_like 'deleting the project'
@@ -132,64 +100,22 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
destroy_project(project, user, {})
end
- context 'with abort_deleted_project_pipelines feature disabled' do
- before do
- stub_feature_flags(abort_deleted_project_pipelines: false)
- end
-
- it 'does not bulk-fail project ci pipelines' do
- expect(::Ci::AbortPipelinesService).not_to receive(:new)
-
- destroy_project(project, user, {})
- end
-
- it 'does not destroy CI records via DestroyPipelineService' do
- expect(::Ci::DestroyPipelineService).not_to receive(:new)
-
- destroy_project(project, user, {})
- end
- end
-
- context 'with abort_deleted_project_pipelines feature enabled' do
+ context 'with running pipelines' do
let!(:pipelines) { create_list(:ci_pipeline, 3, :running, project: project) }
let(:destroy_pipeline_service) { double('DestroyPipelineService', execute: nil) }
- context 'with ci_optimize_project_records_destruction disabled' do
- before do
- stub_feature_flags(ci_optimize_project_records_destruction: false)
- end
-
- it 'bulk-fails project ci pipelines' do
- expect(::Ci::AbortPipelinesService)
- .to receive_message_chain(:new, :execute)
- .with(project.all_pipelines, :project_deleted)
-
- destroy_project(project, user, {})
- end
+ it 'bulks-fails with AbortPipelineService and then executes DestroyPipelineService for each pipelines' do
+ allow(::Ci::DestroyPipelineService).to receive(:new).and_return(destroy_pipeline_service)
- it 'does not destroy CI records via DestroyPipelineService' do
- expect(::Ci::DestroyPipelineService).not_to receive(:new)
+ expect(::Ci::AbortPipelinesService)
+ .to receive_message_chain(:new, :execute)
+ .with(project.all_pipelines, :project_deleted)
- destroy_project(project, user, {})
+ pipelines.each do |pipeline|
+ expect(destroy_pipeline_service).to receive(:execute).with(pipeline)
end
- end
-
- context 'with ci_optimize_project_records_destruction enabled' do
- it 'executes DestroyPipelineService for project ci pipelines' do
- allow(::Ci::DestroyPipelineService).to receive(:new).and_return(destroy_pipeline_service)
- expect(::Ci::AbortPipelinesService)
- .to receive_message_chain(:new, :execute)
- .with(project.all_pipelines, :project_deleted)
-
- pipelines.each do |pipeline|
- expect(destroy_pipeline_service)
- .to receive(:execute)
- .with(pipeline)
- end
-
- destroy_project(project, user, {})
- end
+ destroy_project(project, user, {})
end
end
@@ -545,6 +471,27 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
end
+ context 'when project has events' do
+ let!(:event) { create(:event, :created, project: project, target: project, author: user) }
+
+ it 'deletes events from the project' do
+ expect do
+ destroy_project(project, user)
+ end.to change(Event, :count).by(-1)
+ end
+
+ context 'when an error is returned while deleting events' do
+ it 'does not delete project' do
+ allow_next_instance_of(Events::DestroyService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
+ end
+
+ expect(destroy_project(project, user)).to be_falsey
+ expect(project.delete_error).to include('Failed to remove events')
+ end
+ end
+ end
+
context 'error while destroying', :sidekiq_inline do
let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:builds) { create_list(:ci_build, 2, :artifacts, pipeline: pipeline) }
diff --git a/spec/services/projects/prometheus/alerts/create_service_spec.rb b/spec/services/projects/prometheus/alerts/create_service_spec.rb
index c0bc9336558..6b9d43e4e81 100644
--- a/spec/services/projects/prometheus/alerts/create_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/create_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Projects::Prometheus::Alerts::CreateService do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
- let(:service) { described_class.new(project, user, params) }
+ let(:service) { described_class.new(project: project, current_user: user, params: params) }
subject { service.execute }
diff --git a/spec/services/projects/prometheus/alerts/destroy_service_spec.rb b/spec/services/projects/prometheus/alerts/destroy_service_spec.rb
index 573711051b7..a3e9c3516c2 100644
--- a/spec/services/projects/prometheus/alerts/destroy_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/destroy_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Projects::Prometheus::Alerts::DestroyService do
let_it_be(:user) { create(:user) }
let_it_be(:alert) { create(:prometheus_alert, project: project) }
- let(:service) { described_class.new(project, user, nil) }
+ let(:service) { described_class.new(project: project, current_user: user, params: nil) }
describe '#execute' do
subject { service.execute(alert) }
diff --git a/spec/services/projects/prometheus/alerts/update_service_spec.rb b/spec/services/projects/prometheus/alerts/update_service_spec.rb
index e831d001838..ec6766221f6 100644
--- a/spec/services/projects/prometheus/alerts/update_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/update_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Projects::Prometheus::Alerts::UpdateService do
create(:prometheus_alert, project: project, environment: environment)
end
- let(:service) { described_class.new(project, user, params) }
+ let(:service) { described_class.new(project: project, current_user: user, params: params) }
let(:params) do
{
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index c47d44002cc..ddd16100b40 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -169,7 +169,7 @@ RSpec.describe Projects::TransferService do
end
end
- context 'when project has pending builds' do
+ context 'when project has pending builds', :sidekiq_inline do
let!(:other_project) { create(:project) }
let!(:pending_build) { create(:ci_pending_build, project: project.reload) }
let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
@@ -251,7 +251,7 @@ RSpec.describe Projects::TransferService do
)
end
- context 'when project has pending builds' do
+ context 'when project has pending builds', :sidekiq_inline do
let!(:other_project) { create(:project) }
let!(:pending_build) { create(:ci_pending_build, project: project.reload) }
let!(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
diff --git a/spec/services/protected_tags/create_service_spec.rb b/spec/services/protected_tags/create_service_spec.rb
index e85a43eb51c..3d06cc9fb6c 100644
--- a/spec/services/protected_tags/create_service_spec.rb
+++ b/spec/services/protected_tags/create_service_spec.rb
@@ -7,17 +7,54 @@ RSpec.describe ProtectedTags::CreateService do
let(:user) { project.owner }
let(:params) do
{
- name: 'master',
+ name: name,
create_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }]
}
end
describe '#execute' do
+ let(:name) { 'tag' }
+
subject(:service) { described_class.new(project, user, params) }
it 'creates a new protected tag' do
expect { service.execute }.to change(ProtectedTag, :count).by(1)
expect(project.protected_tags.last.create_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
end
+
+ context 'when name has escaped HTML' do
+ let(:name) { 'tag-&gt;test' }
+
+ it 'creates the new protected tag matching the unescaped version' do
+ expect { service.execute }.to change(ProtectedTag, :count).by(1)
+ expect(project.protected_tags.last.name).to eq('tag->test')
+ end
+
+ context 'and name contains HTML tags' do
+ let(:name) { '&lt;b&gt;tag&lt;/b&gt;' }
+
+ it 'creates the new protected tag with sanitized name' do
+ expect { service.execute }.to change(ProtectedTag, :count).by(1)
+ expect(project.protected_tags.last.name).to eq('tag')
+ end
+
+ context 'and contains unsafe HTML' do
+ let(:name) { '&lt;script&gt;alert(&#39;foo&#39;);&lt;/script&gt;' }
+
+ it 'does not create the new protected tag' do
+ expect { service.execute }.not_to change(ProtectedTag, :count)
+ end
+ end
+ end
+
+ context 'when name contains unescaped HTML tags' do
+ let(:name) { '<b>tag</b>' }
+
+ it 'creates the new protected tag with sanitized name' do
+ expect { service.execute }.to change(ProtectedTag, :count).by(1)
+ expect(project.protected_tags.last.name).to eq('tag')
+ end
+ end
+ end
end
end
diff --git a/spec/services/protected_tags/update_service_spec.rb b/spec/services/protected_tags/update_service_spec.rb
index ed151ca2347..22005bb9b89 100644
--- a/spec/services/protected_tags/update_service_spec.rb
+++ b/spec/services/protected_tags/update_service_spec.rb
@@ -6,17 +6,50 @@ RSpec.describe ProtectedTags::UpdateService do
let(:protected_tag) { create(:protected_tag) }
let(:project) { protected_tag.project }
let(:user) { project.owner }
- let(:params) { { name: 'new protected tag name' } }
+ let(:params) { { name: new_name } }
describe '#execute' do
+ let(:new_name) { 'new protected tag name' }
+ let(:result) { service.execute(protected_tag) }
+
subject(:service) { described_class.new(project, user, params) }
it 'updates a protected tag' do
- result = service.execute(protected_tag)
-
expect(result.reload.name).to eq(params[:name])
end
+ context 'when name has escaped HTML' do
+ let(:new_name) { 'tag-&gt;test' }
+
+ it 'updates protected tag name with unescaped HTML' do
+ expect(result.reload.name).to eq('tag->test')
+ end
+
+ context 'and name contains HTML tags' do
+ let(:new_name) { '&lt;b&gt;tag&lt;/b&gt;' }
+
+ it 'updates protected tag name with sanitized name' do
+ expect(result.reload.name).to eq('tag')
+ end
+
+ context 'and contains unsafe HTML' do
+ let(:new_name) { '&lt;script&gt;alert(&#39;foo&#39;);&lt;/script&gt;' }
+
+ it 'does not update the protected tag' do
+ expect(result.reload.name).to eq(protected_tag.name)
+ end
+ end
+ end
+ end
+
+ context 'when name contains unescaped HTML tags' do
+ let(:new_name) { '<b>tag</b>' }
+
+ it 'updates protected tag name with sanitized name' do
+ expect(result.reload.name).to eq('tag')
+ end
+ end
+
context 'without admin_project permissions' do
let(:user) { create(:user) }
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 611261cd92c..77d263f4b70 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe QuickActions::InterpretService do
- let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:public_project) { create(:project, :public, group: group) }
let_it_be(:repository_project) { create(:project, :repository) }
let_it_be(:project) { public_project }
let_it_be(:developer) { create(:user) }
@@ -2233,6 +2234,51 @@ RSpec.describe QuickActions::InterpretService do
end
end
end
+
+ context 'crm_contact commands' do
+ let_it_be(:new_contact) { create(:contact, group: group) }
+ let_it_be(:existing_contact) { create(:contact, group: group) }
+
+ let(:add_command) { service.execute("/add_contacts #{new_contact.email}", issue) }
+ let(:remove_command) { service.execute("/remove_contacts #{existing_contact.email}", issue) }
+
+ before do
+ issue.project.group.add_developer(developer)
+ create(:issue_customer_relations_contact, issue: issue, contact: existing_contact)
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(customer_relations: false)
+ end
+
+ it 'add_contacts command does not add the contact' do
+ _, updates, _ = add_command
+
+ expect(updates).to be_empty
+ end
+
+ it 'remove_contacts command does not remove the contact' do
+ _, updates, _ = remove_command
+
+ expect(updates).to be_empty
+ end
+ end
+
+ it 'add_contacts command adds the contact' do
+ _, updates, message = add_command
+
+ expect(updates).to eq(add_contacts: [new_contact.email])
+ expect(message).to eq('One or more contacts were successfully added.')
+ end
+
+ it 'remove_contacts command removes the contact' do
+ _, updates, message = remove_command
+
+ expect(updates).to eq(remove_contacts: [existing_contact.email])
+ expect(message).to eq('One or more contacts were successfully removed.')
+ end
+ end
end
describe '#explain' do
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index b547ae17317..ddb8e7e1182 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -61,6 +61,8 @@ RSpec.describe Repositories::ChangelogService do
let!(:commit2) { project.commit(sha3) }
let!(:commit3) { project.commit(sha4) }
+ let(:commit_to_changelog) { true }
+
it 'generates and commits a changelog section' do
allow(MergeRequestDiffCommit)
.to receive(:oldest_merge_request_id_per_commit)
@@ -73,7 +75,7 @@ RSpec.describe Repositories::ChangelogService do
service = described_class
.new(project, creator, version: '1.0.0', from: sha1, to: sha3)
- recorder = ActiveRecord::QueryRecorder.new { service.execute }
+ recorder = ActiveRecord::QueryRecorder.new { service.execute(commit_to_changelog: commit_to_changelog) }
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
expect(recorder.count).to eq(9)
@@ -90,7 +92,7 @@ RSpec.describe Repositories::ChangelogService do
described_class
.new(project, creator, version: '1.0.0', from: sha1)
- .execute
+ .execute(commit_to_changelog: commit_to_changelog)
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
@@ -108,7 +110,7 @@ RSpec.describe Repositories::ChangelogService do
described_class
.new(project, creator, version: '1.0.0', from: sha1)
- .execute
+ .execute(commit_to_changelog: commit_to_changelog)
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
@@ -119,12 +121,33 @@ RSpec.describe Repositories::ChangelogService do
it 'uses the target branch when "to" is unspecified' do
described_class
.new(project, creator, version: '1.0.0', from: sha1)
- .execute
+ .execute(commit_to_changelog: commit_to_changelog)
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
expect(changelog).to include('Title 1', 'Title 2', 'Title 3')
end
+
+ describe 'with commit_to_changelog: false' do
+ let(:commit_to_changelog) { false }
+
+ it 'generates changelog section' do
+ allow(MergeRequestDiffCommit)
+ .to receive(:oldest_merge_request_id_per_commit)
+ .with(project.id, [commit2.id, commit1.id])
+ .and_return([
+ { sha: sha2, merge_request_id: mr1.id },
+ { sha: sha3, merge_request_id: mr2.id }
+ ])
+
+ service = described_class
+ .new(project, creator, version: '1.0.0', from: sha1, to: sha3)
+
+ changelog = service.execute(commit_to_changelog: commit_to_changelog)
+
+ expect(changelog).to include('Title 1', 'Title 2')
+ end
+ end
end
describe '#start_of_commit_range' do
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index 40fb257b23e..d7a36ff370e 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe SearchService do
let(:page) { 1 }
let(:per_page) { described_class::DEFAULT_PER_PAGE }
+ let(:valid_search) { "what is love?" }
subject(:search_service) { described_class.new(user, search: search, scope: scope, page: page, per_page: per_page) }
@@ -30,7 +31,7 @@ RSpec.describe SearchService do
describe '#project' do
context 'when the project is accessible' do
it 'returns the project' do
- project = described_class.new(user, project_id: accessible_project.id).project
+ project = described_class.new(user, project_id: accessible_project.id, search: valid_search).project
expect(project).to eq accessible_project
end
@@ -39,7 +40,7 @@ RSpec.describe SearchService do
search_project = create :project
search_project.add_guest(user)
- project = described_class.new(user, project_id: search_project.id).project
+ project = described_class.new(user, project_id: search_project.id, search: valid_search).project
expect(project).to eq search_project
end
@@ -47,7 +48,7 @@ RSpec.describe SearchService do
context 'when the project is not accessible' do
it 'returns nil' do
- project = described_class.new(user, project_id: inaccessible_project.id).project
+ project = described_class.new(user, project_id: inaccessible_project.id, search: valid_search).project
expect(project).to be_nil
end
@@ -55,7 +56,7 @@ RSpec.describe SearchService do
context 'when there is no project_id' do
it 'returns nil' do
- project = described_class.new(user).project
+ project = described_class.new(user, search: valid_search).project
expect(project).to be_nil
end
@@ -65,7 +66,7 @@ RSpec.describe SearchService do
describe '#group' do
context 'when the group is accessible' do
it 'returns the group' do
- group = described_class.new(user, group_id: accessible_group.id).group
+ group = described_class.new(user, group_id: accessible_group.id, search: valid_search).group
expect(group).to eq accessible_group
end
@@ -73,7 +74,7 @@ RSpec.describe SearchService do
context 'when the group is not accessible' do
it 'returns nil' do
- group = described_class.new(user, group_id: inaccessible_group.id).group
+ group = described_class.new(user, group_id: inaccessible_group.id, search: valid_search).group
expect(group).to be_nil
end
@@ -81,7 +82,7 @@ RSpec.describe SearchService do
context 'when there is no group_id' do
it 'returns nil' do
- group = described_class.new(user).group
+ group = described_class.new(user, search: valid_search).group
expect(group).to be_nil
end
@@ -118,7 +119,7 @@ RSpec.describe SearchService do
context 'with accessible project_id' do
context 'and allowed scope' do
it 'returns the specified scope' do
- scope = described_class.new(user, project_id: accessible_project.id, scope: 'notes').scope
+ scope = described_class.new(user, project_id: accessible_project.id, scope: 'notes', search: valid_search).scope
expect(scope).to eq 'notes'
end
@@ -126,7 +127,7 @@ RSpec.describe SearchService do
context 'and disallowed scope' do
it 'returns the default scope' do
- scope = described_class.new(user, project_id: accessible_project.id, scope: 'projects').scope
+ scope = described_class.new(user, project_id: accessible_project.id, scope: 'projects', search: valid_search).scope
expect(scope).to eq 'blobs'
end
@@ -134,7 +135,7 @@ RSpec.describe SearchService do
context 'and no scope' do
it 'returns the default scope' do
- scope = described_class.new(user, project_id: accessible_project.id).scope
+ scope = described_class.new(user, project_id: accessible_project.id, search: valid_search).scope
expect(scope).to eq 'blobs'
end
@@ -552,4 +553,87 @@ RSpec.describe SearchService do
end
end
end
+
+ describe '#valid_request?' do
+ let(:scope) { 'issues' }
+ let(:search) { 'foobar' }
+ let(:params) { instance_double(Gitlab::Search::Params) }
+
+ before do
+ allow(Gitlab::Search::Params).to receive(:new).and_return(params)
+ allow(params).to receive(:valid?).and_return double(:valid?)
+ end
+
+ it 'is the return value of params.valid?' do
+ expect(subject.valid_request?).to eq(params.valid?)
+ end
+ end
+
+ describe '#abuse_messages' do
+ let(:scope) { 'issues' }
+ let(:search) { 'foobar' }
+ let(:params) { instance_double(Gitlab::Search::Params) }
+
+ before do
+ allow(Gitlab::Search::Params).to receive(:new).and_return(params)
+ end
+
+ it 'returns an empty array when not abusive' do
+ allow(params).to receive(:abusive?).and_return false
+ expect(subject.abuse_messages).to match_array([])
+ end
+
+ it 'calls on abuse_detection.errors.full_messages when abusive' do
+ allow(params).to receive(:abusive?).and_return true
+ expect(params).to receive_message_chain(:abuse_detection, :errors, :full_messages)
+ subject.abuse_messages
+ end
+ end
+
+ describe 'abusive search handling' do
+ subject { described_class.new(user, raw_params) }
+
+ let(:raw_params) { { search: search, scope: scope } }
+ let(:search) { 'foobar' }
+
+ let(:search_service) { double(:search_service) }
+
+ before do
+ stub_feature_flags(prevent_abusive_searches: should_detect_abuse)
+ expect(Gitlab::Search::Params).to receive(:new)
+ .with(raw_params, detect_abuse: should_detect_abuse).and_call_original
+
+ allow(subject).to receive(:search_service).and_return search_service
+ end
+
+ context 'when abusive search but prevent_abusive_searches FF is disabled' do
+ let(:should_detect_abuse) { false }
+ let(:scope) { '1;drop%20table' }
+
+ it 'executes search even if params are abusive' do
+ expect(search_service).to receive(:execute)
+ subject.search_results
+ end
+ end
+
+ context 'a search is abusive' do
+ let(:should_detect_abuse) { true }
+ let(:scope) { '1;drop%20table' }
+
+ it 'does NOT execute search service' do
+ expect(search_service).not_to receive(:execute)
+ subject.search_results
+ end
+ end
+
+ context 'a search is NOT abusive' do
+ let(:should_detect_abuse) { true }
+ let(:scope) { 'issues' }
+
+ it 'executes search service' do
+ expect(search_service).to receive(:execute)
+ subject.search_results
+ end
+ end
+ end
end
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index d8672eec682..ca387690e83 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -322,6 +322,25 @@ RSpec.describe ServicePing::SubmitService do
expect { subject.execute }.to raise_error(described_class::SubmissionError)
end
end
+
+ context 'when skip_db_write passed to service' do
+ let(:subject) { ServicePing::SubmitService.new(skip_db_write: true) }
+
+ before do
+ stub_response(body: with_dev_ops_score_params)
+ end
+
+ it 'does not save RawUsageData' do
+ expect { subject.execute }
+ .not_to change { RawUsageData.count }
+ end
+
+ it 'does not call DevOpsReport service' do
+ expect(ServicePing::DevopsReportService).not_to receive(:new)
+
+ subject.execute
+ end
+ end
end
describe '#url' do
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index ce0122ae301..3ec2c71b20c 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -146,6 +146,30 @@ RSpec.describe SystemNoteService do
end
end
+ describe '.request_attention' do
+ let(:user) { double }
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:request_attention).with(user)
+ end
+
+ described_class.request_attention(noteable, project, author, user)
+ end
+ end
+
+ describe '.remove_attention_request' do
+ let(:user) { double }
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:remove_attention_request).with(user)
+ end
+
+ described_class.remove_attention_request(noteable, project, author, user)
+ end
+ end
+
describe '.merge_when_pipeline_succeeds' do
it 'calls MergeRequestsService' do
sha = double
@@ -287,38 +311,38 @@ RSpec.describe SystemNoteService do
end
describe '.cross_reference' do
- let(:mentioner) { double }
+ let(:mentioned_in) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
- expect(service).to receive(:cross_reference).with(mentioner)
+ expect(service).to receive(:cross_reference).with(mentioned_in)
end
- described_class.cross_reference(double, mentioner, double)
+ described_class.cross_reference(double, mentioned_in, double)
end
end
describe '.cross_reference_disallowed?' do
- let(:mentioner) { double }
+ let(:mentioned_in) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
- expect(service).to receive(:cross_reference_disallowed?).with(mentioner)
+ expect(service).to receive(:cross_reference_disallowed?).with(mentioned_in)
end
- described_class.cross_reference_disallowed?(double, mentioner)
+ described_class.cross_reference_disallowed?(double, mentioned_in)
end
end
describe '.cross_reference_exists?' do
- let(:mentioner) { double }
+ let(:mentioned_in) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
- expect(service).to receive(:cross_reference_exists?).with(mentioner)
+ expect(service).to receive(:cross_reference_exists?).with(mentioned_in)
end
- described_class.cross_reference_exists?(double, mentioner)
+ described_class.cross_reference_exists?(double, mentioned_in)
end
end
diff --git a/spec/services/system_notes/commit_service_spec.rb b/spec/services/system_notes/commit_service_spec.rb
index bd6b3ec953a..0399603980d 100644
--- a/spec/services/system_notes/commit_service_spec.rb
+++ b/spec/services/system_notes/commit_service_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe SystemNotes::CommitService do
end
context 'with multiple existing commits' do
- let(:old_commits) { noteable.commits[3..-1] }
+ let(:old_commits) { noteable.commits[3..] }
context 'with oldrev' do
let(:oldrev) { noteable.commits[2].id }
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index fd481aa6ddb..7e53e66303b 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -199,6 +199,42 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
end
+ describe '#request_attention' do
+ subject { service.request_attention(user) }
+
+ let(:user) { create(:user) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'attention_requested' }
+ end
+
+ context 'when attention requested' do
+ it_behaves_like 'a note with overridable created_at'
+
+ it 'sets the note text' do
+ expect(subject.note).to eq "requested attention from @#{user.username}"
+ end
+ end
+ end
+
+ describe '#remove_attention_request' do
+ subject { service.remove_attention_request(user) }
+
+ let(:user) { create(:user) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'attention_request_removed' }
+ end
+
+ context 'when attention request is removed' do
+ it_behaves_like 'a note with overridable created_at'
+
+ it 'sets the note text' do
+ expect(subject.note).to eq "removed attention request from @#{user.username}"
+ end
+ end
+ end
+
describe '#change_title' do
let(:noteable) { create(:issue, project: project, title: 'Lorem ipsum') }
@@ -274,9 +310,9 @@ RSpec.describe ::SystemNotes::IssuablesService do
describe '#cross_reference' do
let(:service) { described_class.new(noteable: noteable, author: author) }
- let(:mentioner) { create(:issue, project: project) }
+ let(:mentioned_in) { create(:issue, project: project) }
- subject { service.cross_reference(mentioner) }
+ subject { service.cross_reference(mentioned_in) }
it_behaves_like 'a system note' do
let(:action) { 'cross_reference' }
@@ -314,35 +350,35 @@ RSpec.describe ::SystemNotes::IssuablesService do
describe 'note_body' do
context 'cross-project' do
let(:project2) { create(:project, :repository) }
- let(:mentioner) { create(:issue, project: project2) }
+ let(:mentioned_in) { create(:issue, project: project2) }
context 'from Commit' do
- let(:mentioner) { project2.repository.commit }
+ let(:mentioned_in) { project2.repository.commit }
it 'references the mentioning commit' do
- expect(subject.note).to eq "mentioned in commit #{mentioner.to_reference(project)}"
+ expect(subject.note).to eq "mentioned in commit #{mentioned_in.to_reference(project)}"
end
end
context 'from non-Commit' do
it 'references the mentioning object' do
- expect(subject.note).to eq "mentioned in issue #{mentioner.to_reference(project)}"
+ expect(subject.note).to eq "mentioned in issue #{mentioned_in.to_reference(project)}"
end
end
end
context 'within the same project' do
context 'from Commit' do
- let(:mentioner) { project.repository.commit }
+ let(:mentioned_in) { project.repository.commit }
it 'references the mentioning commit' do
- expect(subject.note).to eq "mentioned in commit #{mentioner.to_reference}"
+ expect(subject.note).to eq "mentioned in commit #{mentioned_in.to_reference}"
end
end
context 'from non-Commit' do
it 'references the mentioning object' do
- expect(subject.note).to eq "mentioned in issue #{mentioner.to_reference}"
+ expect(subject.note).to eq "mentioned in issue #{mentioned_in.to_reference}"
end
end
end
@@ -350,14 +386,14 @@ RSpec.describe ::SystemNotes::IssuablesService do
context 'with external issue' do
let(:noteable) { ExternalIssue.new('JIRA-123', project) }
- let(:mentioner) { project.commit }
+ let(:mentioned_in) { project.commit }
it 'queues a background worker' do
expect(Integrations::CreateExternalCrossReferenceWorker).to receive(:perform_async).with(
project.id,
'JIRA-123',
'Commit',
- mentioner.id,
+ mentioned_in.id,
author.id
)
@@ -716,28 +752,28 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
describe '#cross_reference_disallowed?' do
- context 'when mentioner is not a MergeRequest' do
+ context 'when mentioned_in is not a MergeRequest' do
it 'is falsey' do
- mentioner = noteable.dup
+ mentioned_in = noteable.dup
- expect(service.cross_reference_disallowed?(mentioner)).to be_falsey
+ expect(service.cross_reference_disallowed?(mentioned_in)).to be_falsey
end
end
- context 'when mentioner is a MergeRequest' do
- let(:mentioner) { create(:merge_request, :simple, source_project: project) }
- let(:noteable) { project.commit }
+ context 'when mentioned_in is a MergeRequest' do
+ let(:mentioned_in) { create(:merge_request, :simple, source_project: project) }
+ let(:noteable) { project.commit }
it 'is truthy when noteable is in commits' do
- expect(mentioner).to receive(:commits).and_return([noteable])
+ expect(mentioned_in).to receive(:commits).and_return([noteable])
- expect(service.cross_reference_disallowed?(mentioner)).to be_truthy
+ expect(service.cross_reference_disallowed?(mentioned_in)).to be_truthy
end
it 'is falsey when noteable is not in commits' do
- expect(mentioner).to receive(:commits).and_return([])
+ expect(mentioned_in).to receive(:commits).and_return([])
- expect(service.cross_reference_disallowed?(mentioner)).to be_falsey
+ expect(service.cross_reference_disallowed?(mentioned_in)).to be_falsey
end
end
diff --git a/spec/services/todos/destroy/private_features_service_spec.rb b/spec/services/todos/destroy/unauthorized_features_service_spec.rb
index 6dbd7574b80..5f6c9b0cdf0 100644
--- a/spec/services/todos/destroy/private_features_service_spec.rb
+++ b/spec/services/todos/destroy/unauthorized_features_service_spec.rb
@@ -2,13 +2,17 @@
require 'spec_helper'
-RSpec.describe Todos::Destroy::PrivateFeaturesService do
- let(:project) { create(:project, :public) }
- let(:user) { create(:user) }
- let(:another_user) { create(:user) }
- let(:project_member) { create(:user) }
- let(:issue) { create(:issue, project: project) }
- let(:mr) { create(:merge_request, source_project: project) }
+RSpec.describe Todos::Destroy::UnauthorizedFeaturesService do
+ let_it_be(:project, reload: true) { create(:project, :public, :repository) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:mr) { create(:merge_request, source_project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:another_user) { create(:user) }
+ let_it_be(:project_member) do
+ create(:user).tap do |user|
+ project.add_developer(user)
+ end
+ end
let!(:todo_mr_non_member) { create(:todo, user: user, target: mr, project: project) }
let!(:todo_mr_non_member2) { create(:todo, user: another_user, target: mr, project: project) }
@@ -20,10 +24,6 @@ RSpec.describe Todos::Destroy::PrivateFeaturesService do
let!(:commit_todo_non_member2) { create(:on_commit_todo, user: another_user, project: project) }
let!(:commit_todo_member) { create(:on_commit_todo, user: project_member, project: project) }
- before do
- project.add_developer(project_member)
- end
-
context 'when user_id is provided' do
subject { described_class.new(project.id, user.id).execute }
diff --git a/spec/services/users/dismiss_user_callout_service_spec.rb b/spec/services/users/dismiss_callout_service_spec.rb
index 6bf9961eb74..6ba9f180444 100644
--- a/spec/services/users/dismiss_user_callout_service_spec.rb
+++ b/spec/services/users/dismiss_callout_service_spec.rb
@@ -2,12 +2,12 @@
require 'spec_helper'
-RSpec.describe Users::DismissUserCalloutService do
+RSpec.describe Users::DismissCalloutService do
describe '#execute' do
let_it_be(:user) { create(:user) }
let(:params) { { feature_name: feature_name } }
- let(:feature_name) { UserCallout.feature_names.each_key.first }
+ let(:feature_name) { Users::Callout.feature_names.each_key.first }
subject(:execute) do
described_class.new(
@@ -15,6 +15,6 @@ RSpec.describe Users::DismissUserCalloutService do
).execute
end
- it_behaves_like 'dismissing user callout', UserCallout
+ it_behaves_like 'dismissing user callout', Users::Callout
end
end
diff --git a/spec/services/users/refresh_authorized_projects_service_spec.rb b/spec/services/users/refresh_authorized_projects_service_spec.rb
index a8ad0d02f60..aa4df93a241 100644
--- a/spec/services/users/refresh_authorized_projects_service_spec.rb
+++ b/spec/services/users/refresh_authorized_projects_service_spec.rb
@@ -67,11 +67,17 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
it 'updates the authorized projects of the user' do
project2 = create(:project)
- to_remove = user.project_authorizations
+ project_authorization = user.project_authorizations
.create!(project: project2, access_level: Gitlab::Access::MAINTAINER)
+ to_be_removed = [project_authorization.project_id]
+
+ to_be_added = [
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
+
expect(service).to receive(:update_authorizations)
- .with([to_remove.project_id], [[user.id, project.id, Gitlab::Access::MAINTAINER]])
+ .with(to_be_removed, to_be_added)
service.execute_without_lease
end
@@ -81,9 +87,14 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
user.project_authorizations.create!(project: project, access_level: access_level)
end
+ to_be_removed = [project.id]
+
+ to_be_added = [
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
expect(service).to(
receive(:update_authorizations)
- .with([project.id], [[user.id, project.id, Gitlab::Access::MAINTAINER]])
+ .with(to_be_removed, to_be_added)
.and_call_original)
service.execute_without_lease
@@ -99,11 +110,17 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
it 'sets the access level of a project to the highest available level' do
user.project_authorizations.delete_all
- to_remove = user.project_authorizations
+ project_authorization = user.project_authorizations
.create!(project: project, access_level: Gitlab::Access::DEVELOPER)
+ to_be_removed = [project_authorization.project_id]
+
+ to_be_added = [
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
+
expect(service).to receive(:update_authorizations)
- .with([to_remove.project_id], [[user.id, project.id, Gitlab::Access::MAINTAINER]])
+ .with(to_be_removed, to_be_added)
service.execute_without_lease
end
@@ -134,7 +151,11 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
it 'inserts authorizations that should be added' do
user.project_authorizations.delete_all
- service.update_authorizations([], [[user.id, project.id, Gitlab::Access::MAINTAINER]])
+ to_be_added = [
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
+
+ service.update_authorizations([], to_be_added)
authorizations = user.project_authorizations
@@ -160,7 +181,11 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
'authorized_projects_refresh.rows_added_slice': [[user.id, project.id, Gitlab::Access::MAINTAINER]])
)
- service.update_authorizations([], [[user.id, project.id, Gitlab::Access::MAINTAINER]])
+ to_be_added = [
+ { user_id: user.id, project_id: project.id, access_level: Gitlab::Access::MAINTAINER }
+ ]
+
+ service.update_authorizations([], to_be_added)
end
end
end
diff --git a/spec/services/verify_pages_domain_service_spec.rb b/spec/services/verify_pages_domain_service_spec.rb
index ae079229891..2a3b3814065 100644
--- a/spec/services/verify_pages_domain_service_spec.rb
+++ b/spec/services/verify_pages_domain_service_spec.rb
@@ -372,7 +372,8 @@ RSpec.describe VerifyPagesDomainService do
allow(resolver).to receive(:getresources) { [] }
stubbed_lookups.each do |domain, records|
records = Array(records).map { |txt| Resolv::DNS::Resource::IN::TXT.new(txt) }
- allow(resolver).to receive(:getresources).with(domain, Resolv::DNS::Resource::IN::TXT) { records }
+ # Append '.' to domain_name, indicating absolute FQDN
+ allow(resolver).to receive(:getresources).with(domain + '.', Resolv::DNS::Resource::IN::TXT) { records }
end
resolver