author     GitLab Bot <gitlab-bot@gitlab.com>  2021-09-20 13:18:24 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2021-09-20 13:18:24 +0000
commit     0653e08efd039a5905f3fa4f6e9cef9f5d2f799c (patch)
tree       4dcc884cf6d81db44adae4aa99f8ec1233a41f55 /spec/services
parent     744144d28e3e7fddc117924fef88de5d9674fe4c (diff)
download   gitlab-ce-0653e08efd039a5905f3fa4f6e9cef9f5d2f799c.tar.gz
Add latest changes from gitlab-org/gitlab@14-3-stable-ee (tag: v14.3.0-rc42)
Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/application_settings/update_service_spec.rb | 71
-rw-r--r--  spec/services/auth/container_registry_authentication_service_spec.rb | 31
-rw-r--r--  spec/services/boards/issues/list_service_spec.rb | 47
-rw-r--r--  spec/services/ci/after_requeue_job_service_spec.rb | 10
-rw-r--r--  spec/services/ci/archive_trace_service_spec.rb | 62
-rw-r--r--  spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb | 26
-rw-r--r--  spec/services/ci/create_pipeline_service/tags_spec.rb | 38
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 99
-rw-r--r--  spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb | 67
-rw-r--r--  spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb | 134
-rw-r--r--  spec/services/ci/pipeline_processing/shared_processing_service.rb | 33
-rw-r--r--  spec/services/ci/pipeline_trigger_service_spec.rb | 22
-rw-r--r--  spec/services/ci/pipelines/add_job_service_spec.rb | 12
-rw-r--r--  spec/services/ci/register_job_service_spec.rb | 84
-rw-r--r--  spec/services/ci/stuck_builds/drop_service_spec.rb | 284
-rw-r--r--  spec/services/ci/update_pending_build_service_spec.rb | 82
-rw-r--r--  spec/services/clusters/agents/refresh_authorization_service_spec.rb | 132
-rw-r--r--  spec/services/customer_relations/organizations/create_service_spec.rb | 33
-rw-r--r--  spec/services/customer_relations/organizations/update_service_spec.rb | 56
-rw-r--r--  spec/services/dependency_proxy/image_ttl_group_policies/update_service_spec.rb | 119
-rw-r--r--  spec/services/design_management/delete_designs_service_spec.rb | 12
-rw-r--r--  spec/services/draft_notes/publish_service_spec.rb | 27
-rw-r--r--  spec/services/environments/auto_stop_service_spec.rb | 11
-rw-r--r--  spec/services/environments/stop_service_spec.rb | 54
-rw-r--r--  spec/services/error_tracking/collect_error_service_spec.rb | 26
-rw-r--r--  spec/services/feature_flags/create_service_spec.rb | 16
-rw-r--r--  spec/services/git/base_hooks_service_spec.rb | 6
-rw-r--r--  spec/services/git/branch_hooks_service_spec.rb | 38
-rw-r--r--  spec/services/groups/group_links/create_service_spec.rb | 18
-rw-r--r--  spec/services/groups/open_issues_count_service_spec.rb | 64
-rw-r--r--  spec/services/groups/update_shared_runners_service_spec.rb | 38
-rw-r--r--  spec/services/issue_rebalancing_service_spec.rb | 173
-rw-r--r--  spec/services/issues/build_service_spec.rb | 52
-rw-r--r--  spec/services/issues/close_service_spec.rb | 9
-rw-r--r--  spec/services/issues/create_service_spec.rb | 15
-rw-r--r--  spec/services/issues/relative_position_rebalancing_service_spec.rb | 166
-rw-r--r--  spec/services/issues/reopen_service_spec.rb | 7
-rw-r--r--  spec/services/issues/update_service_spec.rb | 37
-rw-r--r--  spec/services/members/groups/bulk_creator_service_spec.rb | 10
-rw-r--r--  spec/services/members/mailgun/process_webhook_service_spec.rb | 42
-rw-r--r--  spec/services/members/projects/bulk_creator_service_spec.rb | 10
-rw-r--r--  spec/services/merge_requests/merge_service_spec.rb | 15
-rw-r--r--  spec/services/merge_requests/merge_to_ref_service_spec.rb | 32
-rw-r--r--  spec/services/merge_requests/mergeability_check_service_spec.rb | 9
-rw-r--r--  spec/services/merge_requests/squash_service_spec.rb | 17
-rw-r--r--  spec/services/notification_service_spec.rb | 2
-rw-r--r--  spec/services/packages/composer/version_parser_service_spec.rb | 1
-rw-r--r--  spec/services/packages/generic/create_package_file_service_spec.rb | 31
-rw-r--r--  spec/services/packages/maven/find_or_create_package_service_spec.rb | 13
-rw-r--r--  spec/services/packages/nuget/update_package_from_metadata_service_spec.rb | 341
-rw-r--r--  spec/services/pages/delete_service_spec.rb | 21
-rw-r--r--  spec/services/pages/legacy_storage_lease_spec.rb | 65
-rw-r--r--  spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb | 8
-rw-r--r--  spec/services/projects/batch_open_issues_count_service_spec.rb | 45
-rw-r--r--  spec/services/projects/create_service_spec.rb | 24
-rw-r--r--  spec/services/projects/fork_service_spec.rb | 4
-rw-r--r--  spec/services/projects/group_links/destroy_service_spec.rb | 60
-rw-r--r--  spec/services/projects/open_issues_count_service_spec.rb | 109
-rw-r--r--  spec/services/projects/operations/update_service_spec.rb | 4
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 33
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb | 158
-rw-r--r--  spec/services/projects/update_service_spec.rb | 24
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 72
-rw-r--r--  spec/services/repositories/changelog_service_spec.rb | 2
-rw-r--r--  spec/services/service_ping/submit_service_ping_service_spec.rb | 28
-rw-r--r--  spec/services/suggestions/apply_service_spec.rb | 16
-rw-r--r--  spec/services/suggestions/create_service_spec.rb | 2
-rw-r--r--  spec/services/system_note_service_spec.rb | 12
-rw-r--r--  spec/services/system_notes/issuables_service_spec.rb | 12
-rw-r--r--  spec/services/todos/destroy/design_service_spec.rb | 40
-rw-r--r--  spec/services/users/ban_service_spec.rb | 2
-rw-r--r--  spec/services/users/dismiss_group_callout_service_spec.rb | 25
-rw-r--r--  spec/services/users/dismiss_user_callout_service_spec.rb | 25
-rw-r--r--  spec/services/users/migrate_to_ghost_user_service_spec.rb | 18
-rw-r--r--  spec/services/users/reject_service_spec.rb | 4
-rw-r--r--  spec/services/users/unban_service_spec.rb | 2
-rw-r--r--  spec/services/wiki_pages/event_create_service_spec.rb | 4
77 files changed, 2450 insertions, 1103 deletions
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index 56c1284927d..a1fd89bcad7 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -336,6 +336,31 @@ RSpec.describe ApplicationSettings::UpdateService do
end
end
+ context 'when general rate limits are passed' do
+ let(:params) do
+ {
+ throttle_authenticated_api_enabled: true,
+ throttle_authenticated_api_period_in_seconds: 10,
+ throttle_authenticated_api_requests_per_period: 20,
+ throttle_authenticated_web_enabled: true,
+ throttle_authenticated_web_period_in_seconds: 30,
+ throttle_authenticated_web_requests_per_period: 40,
+ throttle_unauthenticated_api_enabled: true,
+ throttle_unauthenticated_api_period_in_seconds: 50,
+ throttle_unauthenticated_api_requests_per_period: 60,
+ throttle_unauthenticated_enabled: true,
+ throttle_unauthenticated_period_in_seconds: 50,
+ throttle_unauthenticated_requests_per_period: 60
+ }
+ end
+
+ it 'updates general throttle settings' do
+ subject.execute
+
+ expect(application_settings.reload).to have_attributes(params)
+ end
+ end
+
context 'when package registry rate limits are passed' do
let(:params) do
{
@@ -362,6 +387,52 @@ RSpec.describe ApplicationSettings::UpdateService do
end
end
+ context 'when files API rate limits are passed' do
+ let(:params) do
+ {
+ throttle_unauthenticated_files_api_enabled: 1,
+ throttle_unauthenticated_files_api_period_in_seconds: 500,
+ throttle_unauthenticated_files_api_requests_per_period: 20,
+ throttle_authenticated_files_api_enabled: 1,
+ throttle_authenticated_files_api_period_in_seconds: 600,
+ throttle_authenticated_files_api_requests_per_period: 10
+ }
+ end
+
+ it 'updates files API throttle settings' do
+ subject.execute
+
+ application_settings.reload
+
+ expect(application_settings.throttle_unauthenticated_files_api_enabled).to be_truthy
+ expect(application_settings.throttle_unauthenticated_files_api_period_in_seconds).to eq(500)
+ expect(application_settings.throttle_unauthenticated_files_api_requests_per_period).to eq(20)
+ expect(application_settings.throttle_authenticated_files_api_enabled).to be_truthy
+ expect(application_settings.throttle_authenticated_files_api_period_in_seconds).to eq(600)
+ expect(application_settings.throttle_authenticated_files_api_requests_per_period).to eq(10)
+ end
+ end
+
+ context 'when git lfs rate limits are passed' do
+ let(:params) do
+ {
+ throttle_authenticated_git_lfs_enabled: 1,
+ throttle_authenticated_git_lfs_period_in_seconds: 600,
+ throttle_authenticated_git_lfs_requests_per_period: 10
+ }
+ end
+
+ it 'updates git lfs throttle settings' do
+ subject.execute
+
+ application_settings.reload
+
+ expect(application_settings.throttle_authenticated_git_lfs_enabled).to be_truthy
+ expect(application_settings.throttle_authenticated_git_lfs_period_in_seconds).to eq(600)
+ expect(application_settings.throttle_authenticated_git_lfs_requests_per_period).to eq(10)
+ end
+ end
+
context 'when issues_create_limit is passed' do
let(:params) do
{
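
Note on the new throttle contexts above: they all drive the same update service with different rate-limit params. A minimal sketch of that call, assuming the spec's usual setup where the service wraps application_settings, a user, and the params hash (the constructor signature is an assumption, it is not shown in these hunks):

    params = {
      throttle_authenticated_git_lfs_enabled: 1,            # integer 1 is expected to coerce to true
      throttle_authenticated_git_lfs_period_in_seconds: 600,
      throttle_authenticated_git_lfs_requests_per_period: 10
    }

    ApplicationSettings::UpdateService.new(application_settings, user, params).execute
    application_settings.reload.throttle_authenticated_git_lfs_enabled # expected to be truthy
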
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index b456f7a2745..46cc027fcb3 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -84,5 +84,36 @@ RSpec.describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'a modified token'
end
+
+ describe '#access_token' do
+ let(:token) { described_class.access_token(%w[push], [project.full_path]) }
+
+ subject { { token: token } }
+
+ it_behaves_like 'a modified token'
+ end
+ end
+
+ context 'when not in migration mode' do
+ include_context 'container registry auth service context'
+
+ let_it_be(:project) { create(:project) }
+
+ before do
+ stub_feature_flags(container_registry_migration_phase1: false)
+ end
+
+ shared_examples 'an unmodified token' do
+ it_behaves_like 'a valid token'
+ it { expect(payload['access']).not_to include(have_key('migration_eligible')) }
+ end
+
+ describe '#access_token' do
+ let(:token) { described_class.access_token(%w[push], [project.full_path]) }
+
+ subject { { token: token } }
+
+ it_behaves_like 'an unmodified token'
+ end
end
end
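
Both new contexts exercise the class-level token builder shown above; the only difference is the container_registry_migration_phase1 flag. A sketch of the call the specs make and the expectation it feeds (payload decoding is assumed to come from the shared 'a valid token' examples):

    stub_feature_flags(container_registry_migration_phase1: false)

    token = Auth::ContainerRegistryAuthenticationService.access_token(%w[push], [project.full_path])
    # With the flag disabled, the decoded payload's 'access' entries are expected
    # to omit the 'migration_eligible' key (the 'an unmodified token' shared example).
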
diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb
index bbdc178b234..d1f854f72bc 100644
--- a/spec/services/boards/issues/list_service_spec.rb
+++ b/spec/services/boards/issues/list_service_spec.rb
@@ -139,4 +139,51 @@ RSpec.describe Boards::Issues::ListService do
end
# rubocop: enable RSpec/MultipleMemoizedHelpers
end
+
+ describe '.initialize_relative_positions' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:backlog) { create(:backlog_list, board: board) }
+
+ let(:issue) { create(:issue, project: project, relative_position: nil) }
+
+ context "when 'Gitlab::Database::read_write?' is true" do
+ before do
+ allow(Gitlab::Database).to receive(:read_write?).and_return(true)
+ end
+
+ context 'user cannot move issues' do
+ it 'does not initialize the relative positions of issues' do
+ described_class.initialize_relative_positions(board, user, [issue])
+
+ expect(issue.relative_position).to eq nil
+ end
+ end
+
+ context 'user can move issues' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'initializes the relative positions of issues' do
+ described_class.initialize_relative_positions(board, user, [issue])
+
+ expect(issue.relative_position).not_to eq nil
+ end
+ end
+ end
+
+ context "when 'Gitlab::Database::read_write?' is false" do
+ before do
+ allow(Gitlab::Database).to receive(:read_write?).and_return(false)
+ end
+
+ it 'does not initialize the relative positions of issues' do
+ described_class.initialize_relative_positions(board, user, [issue])
+
+ expect(issue.relative_position).to eq nil
+ end
+ end
+ end
end
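
The new .initialize_relative_positions examples pin down a three-part guard: the database must be writable, the user must be allowed to move issues, and only then are nil positions filled in. Roughly, as a sketch of the contract the spec encodes (not the implementation):

    # Positions are only touched when the database is writable and the user may move issues.
    if Gitlab::Database.read_write? && can_move_issues   # can_move_issues is a stand-in name, not a real helper
      Boards::Issues::ListService.initialize_relative_positions(board, user, [issue])
      issue.relative_position # no longer nil
    end
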
diff --git a/spec/services/ci/after_requeue_job_service_spec.rb b/spec/services/ci/after_requeue_job_service_spec.rb
index df5ddcafb37..2a5a971fdac 100644
--- a/spec/services/ci/after_requeue_job_service_spec.rb
+++ b/spec/services/ci/after_requeue_job_service_spec.rb
@@ -44,16 +44,6 @@ RSpec.describe Ci::AfterRequeueJobService do
it 'marks subsequent skipped jobs as processable' do
expect { execute_service }.to change { test4.reload.status }.from('skipped').to('created')
end
-
- context 'with ci_same_stage_job_needs FF disabled' do
- before do
- stub_feature_flags(ci_same_stage_job_needs: false)
- end
-
- it 'does nothing with the build' do
- expect { execute_service }.not_to change { test4.reload.status }
- end
- end
end
context 'when the pipeline is a downstream pipeline and the bridge is depended' do
diff --git a/spec/services/ci/archive_trace_service_spec.rb b/spec/services/ci/archive_trace_service_spec.rb
index 12804efc28c..071b5c3b2f9 100644
--- a/spec/services/ci/archive_trace_service_spec.rb
+++ b/spec/services/ci/archive_trace_service_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
expect { subject }.not_to raise_error
expect(job.reload.job_artifacts_trace).to be_exist
+ expect(job.trace_metadata.trace_artifact).to eq(job.job_artifacts_trace)
end
context 'when trace is already archived' do
@@ -27,7 +28,7 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
context 'when live trace chunks still exist' do
before do
- create(:ci_build_trace_chunk, build: job)
+ create(:ci_build_trace_chunk, build: job, chunk_index: 0)
end
it 'removes the trace chunks' do
@@ -39,8 +40,14 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
job.job_artifacts_trace.file.remove!
end
- it 'removes the trace artifact' do
- expect { subject }.to change { job.reload.job_artifacts_trace }.to(nil)
+ it 'removes the trace artifact and builds a new one' do
+ existing_trace = job.job_artifacts_trace
+ expect(existing_trace).to receive(:destroy!).and_call_original
+
+ subject
+
+ expect(job.reload.job_artifacts_trace).to be_present
+ expect(job.reload.job_artifacts_trace.file.file).to be_present
end
end
end
@@ -59,6 +66,54 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
end
end
+ context 'when the job is out of archival attempts' do
+ before do
+ create(:ci_build_trace_metadata,
+ build: job,
+ archival_attempts: Ci::BuildTraceMetadata::MAX_ATTEMPTS + 1,
+ last_archival_attempt_at: 1.week.ago)
+ end
+
+ it 'skips archiving' do
+ expect(job.trace).not_to receive(:archive!)
+
+ subject
+ end
+
+ it 'leaves a warning message in sidekiq log' do
+ expect(Sidekiq.logger).to receive(:warn).with(
+ class: Ci::ArchiveTraceWorker.name,
+ message: 'The job is out of archival attempts.',
+ job_id: job.id)
+
+ subject
+ end
+ end
+
+ context 'when the archival process is backed off' do
+ before do
+ create(:ci_build_trace_metadata,
+ build: job,
+ archival_attempts: Ci::BuildTraceMetadata::MAX_ATTEMPTS - 1,
+ last_archival_attempt_at: 1.hour.ago)
+ end
+
+ it 'skips archiving' do
+ expect(job.trace).not_to receive(:archive!)
+
+ subject
+ end
+
+ it 'leaves a warning message in sidekiq log' do
+ expect(Sidekiq.logger).to receive(:warn).with(
+ class: Ci::ArchiveTraceWorker.name,
+ message: 'The job can not be archived right now.',
+ job_id: job.id)
+
+ subject
+ end
+ end
+
context 'when job failed to archive trace but did not raise an exception' do
before do
allow_next_instance_of(Gitlab::Ci::Trace) do |instance|
@@ -98,6 +153,7 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
.and_call_original
expect { subject }.not_to raise_error
+ expect(job.trace_metadata.archival_attempts).to eq(1)
end
end
end
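
The two skip contexts above assert the exact structured warning the service is expected to emit. Reconstructed from the matchers (the call site inside the service is not part of this diff):

    Sidekiq.logger.warn(
      class: Ci::ArchiveTraceWorker.name,
      message: 'The job is out of archival attempts.', # or 'The job can not be archived right now.'
      job_id: job.id
    )
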
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index 6eb1315fff4..4326fa5533f 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -127,6 +127,32 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
end
end
end
+
+ context 'when resource group key includes a variable' do
+ let(:config) do
+ <<~YAML
+ instrumentation_test:
+ stage: test
+ resource_group: $CI_ENVIRONMENT_NAME
+ trigger:
+ include: path/to/child.yml
+ strategy: depend
+ YAML
+ end
+
+ it 'ignores the resource group keyword because it fails to expand the variable', :aggregate_failures do
+ pipeline = create_pipeline!
+ Ci::InitialPipelineProcessWorker.new.perform(pipeline.id)
+
+ test = pipeline.statuses.find_by(name: 'instrumentation_test')
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.triggered_pipelines).not_to be_exist
+ expect(project.resource_groups.count).to eq(0)
+ expect(test).to be_a Ci::Bridge
+ expect(test).to be_pending
+ expect(test.resource_group).to be_nil
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service/tags_spec.rb b/spec/services/ci/create_pipeline_service/tags_spec.rb
new file mode 100644
index 00000000000..335d35010c8
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/tags_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService do
+ describe 'tags:' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
+
+ let(:ref) { 'refs/heads/master' }
+ let(:source) { :push }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:pipeline) { service.execute(source).payload }
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ context 'with valid config' do
+ let(:config) { YAML.dump({ test: { script: 'ls', tags: %w[tag1 tag2] } }) }
+
+ it 'creates a pipeline', :aggregate_failures do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.builds.first.tag_list).to match_array(%w[tag1 tag2])
+ end
+ end
+
+ context 'with too many tags' do
+ let(:tags) { Array.new(50) {|i| "tag-#{i}" } }
+ let(:config) { YAML.dump({ test: { script: 'ls', tags: tags } }) }
+
+ it 'creates a pipeline without builds', :aggregate_failures do
+ expect(pipeline).not_to be_created_successfully
+ expect(pipeline.builds).to be_empty
+ expect(pipeline.yaml_errors).to eq("jobs:test:tags config must be less than the limit of #{Gitlab::Ci::Config::Entry::Tags::TAGS_LIMIT} tags")
+ end
+ end
+ end
+end
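
For reference, the failing case in this new file feeds stub_ci_pipeline_yaml_file a config with more tags than Gitlab::Ci::Config::Entry::Tags::TAGS_LIMIT permits; a sketch of that config, built the same way the spec builds it:

    config = YAML.dump(
      test: {
        script: 'ls',
        tags: Array.new(50) { |i| "tag-#{i}" } # enough tags to trip the limit checked above
      }
    )
    stub_ci_pipeline_yaml_file(config)
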
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 2fdb0ed3c0d..78646665539 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref_name) { 'refs/heads/master' }
before do
- stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
+ stub_ci_pipeline_to_return_yaml_file
end
describe '#execute' do
@@ -991,6 +991,58 @@ RSpec.describe Ci::CreatePipelineService do
end
end
+ context 'when resource group is defined for review app deployment' do
+ before do
+ config = YAML.dump(
+ review_app: {
+ stage: 'test',
+ script: 'deploy',
+ environment: {
+ name: 'review/$CI_COMMIT_REF_SLUG',
+ on_stop: 'stop_review_app'
+ },
+ resource_group: '$CI_ENVIRONMENT_NAME'
+ },
+ stop_review_app: {
+ stage: 'test',
+ script: 'stop',
+ when: 'manual',
+ environment: {
+ name: 'review/$CI_COMMIT_REF_SLUG',
+ action: 'stop'
+ },
+ resource_group: '$CI_ENVIRONMENT_NAME'
+ }
+ )
+
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'persists the association correctly' do
+ result = execute_service.payload
+ deploy_job = result.builds.find_by_name!(:review_app)
+ stop_job = result.builds.find_by_name!(:stop_review_app)
+
+ expect(result).to be_persisted
+ expect(deploy_job.resource_group.key).to eq('review/master')
+ expect(stop_job.resource_group.key).to eq('review/master')
+ expect(project.resource_groups.count).to eq(1)
+ end
+
+ it 'initializes scoped variables only once for each build' do
+ # Bypassing `stub_build` hack because it disturbs the expectations below.
+ allow_next_instances_of(Gitlab::Ci::Build::Context::Build, 2) do |build_context|
+ allow(build_context).to receive(:variables) { Gitlab::Ci::Variables::Collection.new }
+ end
+
+ expect_next_instances_of(::Ci::Build, 2) do |ci_build|
+ expect(ci_build).to receive(:scoped_variables).once.and_call_original
+ end
+
+ expect(execute_service.payload).to be_created_successfully
+ end
+ end
+
context 'with timeout' do
context 'when builds with custom timeouts are configured' do
before do
@@ -1248,16 +1300,47 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'when pipeline variables are specified' do
- let(:variables_attributes) do
- [{ key: 'first', secret_value: 'world' },
- { key: 'second', secret_value: 'second_world' }]
+ subject(:pipeline) { execute_service(variables_attributes: variables_attributes).payload }
+
+ context 'with valid pipeline variables' do
+ let(:variables_attributes) do
+ [{ key: 'first', secret_value: 'world' },
+ { key: 'second', secret_value: 'second_world' }]
+ end
+
+ it 'creates a pipeline with specified variables' do
+ expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
+ .to eq variables_attributes.map(&:with_indifferent_access)
+ end
end
- subject(:pipeline) { execute_service(variables_attributes: variables_attributes).payload }
+ context 'with duplicate pipeline variables' do
+ let(:variables_attributes) do
+ [{ key: 'hello', secret_value: 'world' },
+ { key: 'hello', secret_value: 'second_world' }]
+ end
- it 'creates a pipeline with specified variables' do
- expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
- .to eq variables_attributes.map(&:with_indifferent_access)
+ it 'fails to create the pipeline' do
+ expect(pipeline).to be_failed
+ expect(pipeline.variables).to be_empty
+ expect(pipeline.errors[:base]).to eq(['Duplicate variable name: hello'])
+ end
+ end
+
+ context 'with more than one duplicate pipeline variable' do
+ let(:variables_attributes) do
+ [{ key: 'hello', secret_value: 'world' },
+ { key: 'hello', secret_value: 'second_world' },
+ { key: 'single', secret_value: 'variable' },
+ { key: 'other', secret_value: 'value' },
+ { key: 'other', secret_value: 'other value' }]
+ end
+
+ it 'fails to create the pipeline' do
+ expect(pipeline).to be_failed
+ expect(pipeline.variables).to be_empty
+ expect(pipeline.errors[:base]).to eq(['Duplicate variable names: hello, other'])
+ end
end
end
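
The duplicate-variable contexts establish the error shape callers see: the pipeline persists in a failed state, carries no variables, and reports each duplicated key once. In short (values taken from the diff):

    pipeline = execute_service(variables_attributes: [
      { key: 'hello', secret_value: 'world' },
      { key: 'hello', secret_value: 'second_world' }
    ]).payload

    pipeline.failed?       # => true
    pipeline.variables     # => empty
    pipeline.errors[:base] # => ["Duplicate variable name: hello"]
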
diff --git a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
index 2b310443b37..04d75630295 100644
--- a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
+++ b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
project.add_maintainer(user)
end
- subject(:response) { described_class.new(project, user).execute(pull_request) }
+ subject(:execute) { described_class.new(project, user).execute(pull_request) }
context 'when pull request is open' do
before do
@@ -21,26 +21,43 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
context 'when source sha is the head of the source branch' do
let(:source_branch) { project.repository.branches.last }
- let(:create_pipeline_service) { instance_double(Ci::CreatePipelineService) }
before do
pull_request.update!(source_branch: source_branch.name, source_sha: source_branch.target)
end
- it 'creates a pipeline for external pull request', :aggregate_failures do
- pipeline = response.payload
-
- expect(response).to be_success
- expect(pipeline).to be_valid
- expect(pipeline).to be_persisted
- expect(pipeline).to be_external_pull_request_event
- expect(pipeline).to eq(project.ci_pipelines.last)
- expect(pipeline.external_pull_request).to eq(pull_request)
- expect(pipeline.user).to eq(user)
- expect(pipeline.status).to eq('created')
- expect(pipeline.ref).to eq(pull_request.source_branch)
- expect(pipeline.sha).to eq(pull_request.source_sha)
- expect(pipeline.source_sha).to eq(pull_request.source_sha)
+ context 'when the FF ci_create_external_pr_pipeline_async is disabled' do
+ before do
+ stub_feature_flags(ci_create_external_pr_pipeline_async: false)
+ end
+
+ it 'creates a pipeline for external pull request', :aggregate_failures do
+ pipeline = execute.payload
+
+ expect(execute).to be_success
+ expect(pipeline).to be_valid
+ expect(pipeline).to be_persisted
+ expect(pipeline).to be_external_pull_request_event
+ expect(pipeline).to eq(project.ci_pipelines.last)
+ expect(pipeline.external_pull_request).to eq(pull_request)
+ expect(pipeline.user).to eq(user)
+ expect(pipeline.status).to eq('created')
+ expect(pipeline.ref).to eq(pull_request.source_branch)
+ expect(pipeline.sha).to eq(pull_request.source_sha)
+ expect(pipeline.source_sha).to eq(pull_request.source_sha)
+ end
+ end
+
+ it 'enqueues Ci::ExternalPullRequests::CreatePipelineWorker' do
+ expect { execute }
+ .to change { ::Ci::ExternalPullRequests::CreatePipelineWorker.jobs.count }
+ .by(1)
+
+ args = ::Ci::ExternalPullRequests::CreatePipelineWorker.jobs.last['args']
+
+ expect(args[0]).to eq(project.id)
+ expect(args[1]).to eq(user.id)
+ expect(args[2]).to eq(pull_request.id)
end
end
@@ -53,11 +70,12 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
end
it 'does nothing', :aggregate_failures do
- expect(Ci::CreatePipelineService).not_to receive(:new)
+ expect { execute }
+ .not_to change { ::Ci::ExternalPullRequests::CreatePipelineWorker.jobs.count }
- expect(response).to be_error
- expect(response.message).to eq('The source sha is not the head of the source branch')
- expect(response.payload).to be_nil
+ expect(execute).to be_error
+ expect(execute.message).to eq('The source sha is not the head of the source branch')
+ expect(execute.payload).to be_nil
end
end
end
@@ -68,11 +86,12 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
end
it 'does nothing', :aggregate_failures do
- expect(Ci::CreatePipelineService).not_to receive(:new)
+ expect { execute }
+ .not_to change { ::Ci::ExternalPullRequests::CreatePipelineWorker.jobs.count }
- expect(response).to be_error
- expect(response.message).to eq('The pull request is not opened')
- expect(response.payload).to be_nil
+ expect(execute).to be_error
+ expect(execute.message).to eq('The pull request is not opened')
+ expect(execute.payload).to be_nil
end
end
end
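
The rewritten examples show the service now finishing by enqueueing a worker rather than creating the pipeline inline; the synchronous path survives only behind the disabled ci_create_external_pr_pipeline_async flag. The enqueued call, reconstructed from the argument assertions (the actual call site lives in the service, not in this diff):

    Ci::ExternalPullRequests::CreatePipelineWorker.perform_async(project.id, user.id, pull_request.id)
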
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index 04fa55068f2..7a91ad9dcc1 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -10,20 +10,16 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
describe '.execute' do
subject { service.execute }
- let_it_be(:artifact, refind: true) do
- create(:ci_job_artifact, expire_at: 1.day.ago)
- end
-
- before(:all) do
- artifact.job.pipeline.unlocked!
- end
+ let_it_be(:locked_pipeline) { create(:ci_pipeline, :artifacts_locked) }
+ let_it_be(:pipeline) { create(:ci_pipeline, :unlocked) }
+ let_it_be(:locked_job) { create(:ci_build, :success, pipeline: locked_pipeline) }
+ let_it_be(:job) { create(:ci_build, :success, pipeline: pipeline) }
context 'when artifact is expired' do
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
+
context 'with preloaded relationships' do
before do
- job = create(:ci_build, pipeline: artifact.job.pipeline)
- create(:ci_job_artifact, :archive, :expired, job: job)
-
stub_const("#{described_class}::LOOP_LIMIT", 1)
end
@@ -39,7 +35,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
# COMMIT
# SELECT next expired ci_job_artifacts
- expect(log.count).to be_within(1).of(11)
+ expect(log.count).to be_within(1).of(10)
end
end
@@ -48,7 +44,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
end
- context 'when the artifact does not a file attached to it' do
+ context 'when the artifact does not have a file attached to it' do
it 'does not create deleted objects' do
expect(artifact.exists?).to be_falsy # sanity check
@@ -57,10 +53,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when the artifact has a file attached to it' do
- before do
- artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
- artifact.save!
- end
+ let!(:artifact) { create(:ci_job_artifact, :expired, :zip, job: job) }
it 'creates a deleted object' do
expect { subject }.to change { Ci::DeletedObject.count }.by(1)
@@ -81,9 +74,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when artifact is locked' do
- before do
- artifact.job.pipeline.artifacts_locked!
- end
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job) }
it 'does not destroy job artifact' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@@ -92,9 +83,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when artifact is not expired' do
- before do
- artifact.update_column(:expire_at, 1.day.since)
- end
+ let!(:artifact) { create(:ci_job_artifact, job: job) }
it 'does not destroy expired job artifacts' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@@ -102,9 +91,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when artifact is permanent' do
- before do
- artifact.update_column(:expire_at, nil)
- end
+ let!(:artifact) { create(:ci_job_artifact, expire_at: nil, job: job) }
it 'does not destroy expired job artifacts' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@@ -112,6 +99,8 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when failed to destroy artifact' do
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
+
before do
stub_const("#{described_class}::LOOP_LIMIT", 10)
end
@@ -146,58 +135,67 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
context 'when exclusive lease has already been taken by the other instance' do
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
+
before do
stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY, timeout: described_class::LOCK_TIMEOUT)
end
it 'raises an error and does not start destroying' do
expect { subject }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ .and not_change { Ci::JobArtifact.count }.from(1)
end
end
- context 'when timeout happens' do
- let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+ context 'with a second artifact and batch size of 1' do
+ let(:second_job) { create(:ci_build, :success, pipeline: pipeline) }
+ let!(:second_artifact) { create(:ci_job_artifact, :archive, expire_at: 1.day.ago, job: second_job) }
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
before do
- stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
stub_const("#{described_class}::BATCH_SIZE", 1)
-
- second_artifact.job.pipeline.unlocked!
end
- it 'destroys one artifact' do
- expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
- end
-
- it 'reports the number of destroyed artifacts' do
- is_expected.to eq(1)
- end
- end
+ context 'when timeout happens' do
+ before do
+ stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
+ end
- context 'when loop reached loop limit' do
- before do
- stub_const("#{described_class}::LOOP_LIMIT", 1)
- stub_const("#{described_class}::BATCH_SIZE", 1)
+ it 'destroys one artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
- second_artifact.job.pipeline.unlocked!
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(1)
+ end
end
- let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+ context 'when loop reached loop limit' do
+ before do
+ stub_const("#{described_class}::LOOP_LIMIT", 1)
+ end
- it 'destroys one artifact' do
- expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ it 'destroys one artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(1)
+ end
end
- it 'reports the number of destroyed artifacts' do
- is_expected.to eq(1)
+ context 'when the number of artifacts is greater than the batch size' do
+ it 'destroys all expired artifacts' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(2)
+ end
end
end
context 'when there are no artifacts' do
- before do
- artifact.destroy!
- end
-
it 'does not raise error' do
expect { subject }.not_to raise_error
end
@@ -207,42 +205,18 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
end
end
- context 'when there are artifacts more than batch sizes' do
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
-
- second_artifact.job.pipeline.unlocked!
- end
-
- let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
-
- it 'destroys all expired artifacts' do
- expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
- end
-
- it 'reports the number of destroyed artifacts' do
- is_expected.to eq(2)
- end
- end
-
context 'when some artifacts are locked' do
- before do
- pipeline = create(:ci_pipeline, locked: :artifacts_locked)
- job = create(:ci_build, pipeline: pipeline)
- create(:ci_job_artifact, expire_at: 1.day.ago, job: job)
- end
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: job) }
+ let!(:locked_artifact) { create(:ci_job_artifact, :expired, job: locked_job) }
it 'destroys only unlocked artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ expect(locked_artifact).to be_persisted
end
end
context 'when all artifacts are locked' do
- before do
- pipeline = create(:ci_pipeline, locked: :artifacts_locked)
- job = create(:ci_build, pipeline: pipeline)
- artifact.update!(job: job)
- end
+ let!(:artifact) { create(:ci_job_artifact, :expired, job: locked_job) }
it 'destroys no artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(0)
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index a4bc8e68b2d..8de9b308429 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -908,6 +908,39 @@ RSpec.shared_examples 'Pipeline Processing Service' do
end
end
+ context 'when a bridge job has invalid downstream project', :sidekiq_inline do
+ let(:config) do
+ <<-EOY
+ test:
+ stage: test
+ script: echo test
+
+ deploy:
+ stage: deploy
+ trigger:
+ project: invalid-project
+ EOY
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'creates a pipeline, then fails the bridge job' do
+ expect(all_builds_names).to contain_exactly('test', 'deploy')
+ expect(all_builds_statuses).to contain_exactly('pending', 'created')
+
+ succeed_pending
+
+ expect(all_builds_names).to contain_exactly('test', 'deploy')
+ expect(all_builds_statuses).to contain_exactly('success', 'failed')
+ end
+ end
+
private
def all_builds
diff --git a/spec/services/ci/pipeline_trigger_service_spec.rb b/spec/services/ci/pipeline_trigger_service_spec.rb
index 2f93b1ecd3c..29d12b0dd0e 100644
--- a/spec/services/ci/pipeline_trigger_service_spec.rb
+++ b/spec/services/ci/pipeline_trigger_service_spec.rb
@@ -103,6 +103,17 @@ RSpec.describe Ci::PipelineTriggerService do
end
end
+ context 'when params have duplicate variables' do
+ let(:params) { { token: trigger.token, ref: 'master', variables: variables } }
+ let(:variables) { { 'TRIGGER_PAYLOAD' => 'duplicate value' } }
+
+ it 'creates a failed pipeline without variables' do
+ expect { result }.to change { Ci::Pipeline.count }
+ expect(result).to be_error
+ expect(result.message[:base]).to eq(['Duplicate variable name: TRIGGER_PAYLOAD'])
+ end
+ end
+
it_behaves_like 'detecting an unprocessable pipeline trigger'
end
@@ -201,6 +212,17 @@ RSpec.describe Ci::PipelineTriggerService do
end
end
+ context 'when params have duplicate variables' do
+ let(:params) { { token: job.token, ref: 'master', variables: variables } }
+ let(:variables) { { 'TRIGGER_PAYLOAD' => 'duplicate value' } }
+
+ it 'creates a failed pipeline without variables' do
+ expect { result }.to change { Ci::Pipeline.count }
+ expect(result).to be_error
+ expect(result.message[:base]).to eq(['Duplicate variable name: TRIGGER_PAYLOAD'])
+ end
+ end
+
it_behaves_like 'detecting an unprocessable pipeline trigger'
end
diff --git a/spec/services/ci/pipelines/add_job_service_spec.rb b/spec/services/ci/pipelines/add_job_service_spec.rb
index bdf7e577fa8..3a77d26dd9e 100644
--- a/spec/services/ci/pipelines/add_job_service_spec.rb
+++ b/spec/services/ci/pipelines/add_job_service_spec.rb
@@ -59,18 +59,6 @@ RSpec.describe Ci::Pipelines::AddJobService do
end
end
- context 'when the FF ci_fix_commit_status_retried is disabled' do
- before do
- stub_feature_flags(ci_fix_commit_status_retried: false)
- end
-
- it 'does not call update_older_statuses_retried!' do
- expect(job).not_to receive(:update_older_statuses_retried!)
-
- execute
- end
- end
-
context 'exclusive lock' do
let(:lock_uuid) { 'test' }
let(:lock_key) { "ci:pipelines:#{pipeline.id}:add-job" }
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 2f37d0ea42d..73ff15ec393 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -40,12 +40,16 @@ module Ci
context 'runner follow tag list' do
it "picks build with the same tag" do
pending_job.update!(tag_list: ["linux"])
+ pending_job.reload
+ pending_job.create_queuing_entry!
specific_runner.update!(tag_list: ["linux"])
expect(execute(specific_runner)).to eq(pending_job)
end
it "does not pick build with different tag" do
pending_job.update!(tag_list: ["linux"])
+ pending_job.reload
+ pending_job.create_queuing_entry!
specific_runner.update!(tag_list: ["win32"])
expect(execute(specific_runner)).to be_falsey
end
@@ -56,6 +60,8 @@ module Ci
it "does not pick build with tag" do
pending_job.update!(tag_list: ["linux"])
+ pending_job.reload
+ pending_job.create_queuing_entry!
expect(execute(specific_runner)).to be_falsey
end
@@ -81,8 +87,30 @@ module Ci
end
context 'for specific runner' do
- it 'does not pick a build' do
- expect(execute(specific_runner)).to be_nil
+ context 'with FF disabled' do
+ before do
+ stub_feature_flags(
+ ci_pending_builds_project_runners_decoupling: false,
+ ci_queueing_builds_enabled_checks: false)
+ end
+
+ it 'does not pick a build' do
+ expect(execute(specific_runner)).to be_nil
+ end
+ end
+
+ context 'with FF enabled' do
+ before do
+ stub_feature_flags(
+ ci_pending_builds_project_runners_decoupling: true,
+ ci_queueing_builds_enabled_checks: true)
+ end
+
+ it 'does not pick a build' do
+ expect(execute(specific_runner)).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job.queuing_entry).to be_nil
+ end
end
end
end
@@ -219,6 +247,8 @@ module Ci
before do
project.update!(shared_runners_enabled: true, group_runners_enabled: true)
project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
+
+ pending_job.reload.create_queuing_entry!
end
context 'and uses shared runner' do
@@ -236,7 +266,29 @@ module Ci
context 'and uses project runner' do
let(:build) { execute(specific_runner) }
- it { expect(build).to be_nil }
+ context 'with FF disabled' do
+ before do
+ stub_feature_flags(
+ ci_pending_builds_project_runners_decoupling: false,
+ ci_queueing_builds_enabled_checks: false)
+ end
+
+ it { expect(build).to be_nil }
+ end
+
+ context 'with FF enabled' do
+ before do
+ stub_feature_flags(
+ ci_pending_builds_project_runners_decoupling: true,
+ ci_queueing_builds_enabled_checks: true)
+ end
+
+ it 'does not pick a build' do
+ expect(build).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job.queuing_entry).to be_nil
+ end
+ end
end
end
@@ -304,6 +356,8 @@ module Ci
context 'disallow group runners' do
before do
project.update!(group_runners_enabled: false)
+
+ pending_job.reload.create_queuing_entry!
end
context 'group runner' do
@@ -739,6 +793,30 @@ module Ci
include_examples 'handles runner assignment'
end
+
+ context 'with ci_queueing_denormalize_tags_information enabled' do
+ before do
+ stub_feature_flags(ci_queueing_denormalize_tags_information: true)
+ end
+
+ include_examples 'handles runner assignment'
+ end
+
+ context 'with ci_queueing_denormalize_tags_information disabled' do
+ before do
+ stub_feature_flags(ci_queueing_denormalize_tags_information: false)
+ end
+
+ include_examples 'handles runner assignment'
+ end
+
+ context 'with ci_queueing_denormalize_namespace_traversal_ids disabled' do
+ before do
+ stub_feature_flags(ci_queueing_denormalize_namespace_traversal_ids: false)
+ end
+
+ include_examples 'handles runner assignment'
+ end
end
context 'when not using pending builds table' do
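
With the decoupling flags enabled, a build that a project runner is not allowed to pick is no longer left pending: the new examples expect it to be failed and removed from the pending-builds queue. A sketch of those post-conditions, using the names from the diff:

    execute(specific_runner)   # => nil, nothing is picked
    pending_job.reload.failed? # => true
    pending_job.queuing_entry  # => nil
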
diff --git a/spec/services/ci/stuck_builds/drop_service_spec.rb b/spec/services/ci/stuck_builds/drop_service_spec.rb
new file mode 100644
index 00000000000..8dfd1bc1b3d
--- /dev/null
+++ b/spec/services/ci/stuck_builds/drop_service_spec.rb
@@ -0,0 +1,284 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::StuckBuilds::DropService do
+ let!(:runner) { create :ci_runner }
+ let!(:job) { create :ci_build, runner: runner }
+ let(:created_at) { }
+ let(:updated_at) { }
+
+ subject(:service) { described_class.new }
+
+ before do
+ job_attributes = { status: status }
+ job_attributes[:created_at] = created_at if created_at
+ job_attributes[:updated_at] = updated_at if updated_at
+ job.update!(job_attributes)
+ end
+
+ shared_examples 'job is dropped' do
+ it 'changes status' do
+ expect(service).to receive(:drop).exactly(3).times.and_call_original
+ expect(service).to receive(:drop_stuck).exactly(:once).and_call_original
+
+ service.execute
+ job.reload
+
+ expect(job).to be_failed
+ expect(job).to be_stuck_or_timeout_failure
+ end
+
+ context 'when the job has a data integrity problem' do
+ it "does drop the job and logs the reason" do
+ job.update_columns(yaml_variables: '[{"key" => "value"}]')
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(anything, a_hash_including(build_id: job.id))
+ .once
+ .and_call_original
+
+ service.execute
+ job.reload
+
+ expect(job).to be_failed
+ expect(job).to be_data_integrity_failure
+ end
+ end
+ end
+
+ shared_examples 'job is unchanged' do
+ it 'does not change status' do
+ expect(service).to receive(:drop).exactly(3).times.and_call_original
+ expect(service).to receive(:drop_stuck).exactly(:once).and_call_original
+
+ service.execute
+ job.reload
+
+ expect(job.status).to eq(status)
+ end
+ end
+
+ context 'when job is pending' do
+ let(:status) { 'pending' }
+
+ context 'when job is not stuck' do
+ before do
+ allow_next_found_instance_of(Ci::Build) do |build|
+ allow(build).to receive(:stuck?).and_return(false)
+ end
+ end
+
+ context 'when job was updated_at more than 1 day ago' do
+ let(:updated_at) { 1.5.days.ago }
+
+ context 'when created_at is the same as updated_at' do
+ let(:created_at) { 1.5.days.ago }
+
+ it_behaves_like 'job is dropped'
+ end
+
+ context 'when created_at is before updated_at' do
+ let(:created_at) { 3.days.ago }
+
+ it_behaves_like 'job is dropped'
+ end
+
+ context 'when created_at is outside lookback window' do
+ let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+
+ context 'when job was updated less than 1 day ago' do
+ let(:updated_at) { 6.hours.ago }
+
+ context 'when created_at is the same as updated_at' do
+ let(:created_at) { 1.5.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is before updated_at' do
+ let(:created_at) { 3.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is outside lookback window' do
+ let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+
+ context 'when job was updated more than 1 hour ago' do
+ let(:updated_at) { 2.hours.ago }
+
+ context 'when created_at is the same as updated_at' do
+ let(:created_at) { 2.hours.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is before updated_at' do
+ let(:created_at) { 3.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is outside lookback window' do
+ let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+ end
+
+ context 'when job is stuck' do
+ before do
+ allow_next_found_instance_of(Ci::Build) do |build|
+ allow(build).to receive(:stuck?).and_return(true)
+ end
+ end
+
+ context 'when job was updated_at more than 1 hour ago' do
+ let(:updated_at) { 1.5.hours.ago }
+
+ context 'when created_at is the same as updated_at' do
+ let(:created_at) { 1.5.hours.ago }
+
+ it_behaves_like 'job is dropped'
+ end
+
+ context 'when created_at is before updated_at' do
+ let(:created_at) { 3.days.ago }
+
+ it_behaves_like 'job is dropped'
+ end
+
+ context 'when created_at is outside lookback window' do
+ let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+
+ context 'when job was updated in less than 1 hour ago' do
+ let(:updated_at) { 30.minutes.ago }
+
+ context 'when created_at is the same as updated_at' do
+ let(:created_at) { 30.minutes.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is before updated_at' do
+ let(:created_at) { 2.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is outside lookback window' do
+ let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+ end
+ end
+
+ context 'when job is running' do
+ let(:status) { 'running' }
+
+ context 'when job was updated_at more than an hour ago' do
+ let(:updated_at) { 2.hours.ago }
+
+ it_behaves_like 'job is dropped'
+ end
+
+ context 'when job was updated in less than 1 hour ago' do
+ let(:updated_at) { 30.minutes.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+
+ %w(success skipped failed canceled).each do |status|
+ context "when job is #{status}" do
+ let(:status) { status }
+ let(:updated_at) { 2.days.ago }
+
+ context 'when created_at is the same as updated_at' do
+ let(:created_at) { 2.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is before updated_at' do
+ let(:created_at) { 3.days.ago }
+
+ it_behaves_like 'job is unchanged'
+ end
+
+ context 'when created_at is outside lookback window' do
+ let(:created_at) { described_class::BUILD_LOOKBACK - 1.day }
+
+ it_behaves_like 'job is unchanged'
+ end
+ end
+ end
+
+ context 'for deleted project' do
+ let(:status) { 'running' }
+ let(:updated_at) { 2.days.ago }
+
+ before do
+ job.project.update!(pending_delete: true)
+ end
+
+ it_behaves_like 'job is dropped'
+ end
+
+ describe 'drop stale scheduled builds' do
+ let(:status) { 'scheduled' }
+ let(:updated_at) { }
+
+ context 'when scheduled at 2 hours ago but it is not executed yet' do
+ let!(:job) { create(:ci_build, :scheduled, scheduled_at: 2.hours.ago) }
+
+ it 'drops the stale scheduled build' do
+ expect(Ci::Build.scheduled.count).to eq(1)
+ expect(job).to be_scheduled
+
+ service.execute
+ job.reload
+
+ expect(Ci::Build.scheduled.count).to eq(0)
+ expect(job).to be_failed
+ expect(job).to be_stale_schedule
+ end
+ end
+
+ context 'when scheduled at 30 minutes ago but it is not executed yet' do
+ let!(:job) { create(:ci_build, :scheduled, scheduled_at: 30.minutes.ago) }
+
+ it 'does not drop the stale scheduled build yet' do
+ expect(Ci::Build.scheduled.count).to eq(1)
+ expect(job).to be_scheduled
+
+ service.execute
+
+ expect(Ci::Build.scheduled.count).to eq(1)
+ expect(job).to be_scheduled
+ end
+ end
+
+ context 'when there are no stale scheduled builds' do
+ it 'does not drop the stale scheduled build yet' do
+ expect { service.execute }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/update_pending_build_service_spec.rb b/spec/services/ci/update_pending_build_service_spec.rb
new file mode 100644
index 00000000000..d842042de40
--- /dev/null
+++ b/spec/services/ci/update_pending_build_service_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::UpdatePendingBuildService do
+ describe '#execute' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:pending_build_1) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }
+ let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
+ let_it_be(:update_params) { { instance_runners_enabled: true } }
+
+ subject(:service) { described_class.new(model, update_params).execute }
+
+ context 'validations' do
+ context 'when model is invalid' do
+ let(:model) { pending_build_1 }
+
+ it 'raises an error' do
+ expect { service }.to raise_error(described_class::InvalidModelError)
+ end
+ end
+
+ context 'when params is invalid' do
+ let(:model) { group }
+ let(:update_params) { { minutes_exceeded: true } }
+
+ it 'raises an error' do
+ expect { service }.to raise_error(described_class::InvalidParamsError)
+ end
+ end
+ end
+
+ context 'when model is a group with pending builds' do
+ let(:model) { group }
+
+ it 'updates all pending builds', :aggregate_failures do
+ service
+
+ expect(pending_build_1.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ end
+
+ context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
+ before do
+ stub_feature_flags(ci_pending_builds_maintain_shared_runners_data: false)
+ end
+
+ it 'does not update all pending builds', :aggregate_failures do
+ service
+
+ expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
+ expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ end
+ end
+ end
+
+ context 'when model is a project with pending builds' do
+ let(:model) { project }
+
+ it 'updates all pending builds', :aggregate_failures do
+ service
+
+ expect(pending_build_1.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ end
+
+ context 'when ci_pending_builds_maintain_shared_runners_data is disabled' do
+ before do
+ stub_feature_flags(ci_pending_builds_maintain_shared_runners_data: false)
+ end
+
+ it 'does not update all pending builds', :aggregate_failures do
+ service
+
+ expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
+ expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ end
+ end
+ end
+ end
+end
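
This new spec pins the service's calling convention: a group or a project plus a validated params hash. A minimal sketch (names taken from the spec):

    # Flip instance_runners_enabled on every pending build under the group.
    Ci::UpdatePendingBuildService.new(group, { instance_runners_enabled: true }).execute

    # An unsupported key (e.g. minutes_exceeded) raises Ci::UpdatePendingBuildService::InvalidParamsError;
    # a model that is neither a group nor a project raises InvalidModelError.
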
diff --git a/spec/services/clusters/agents/refresh_authorization_service_spec.rb b/spec/services/clusters/agents/refresh_authorization_service_spec.rb
new file mode 100644
index 00000000000..77ba81ea9c0
--- /dev/null
+++ b/spec/services/clusters/agents/refresh_authorization_service_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Agents::RefreshAuthorizationService do
+ describe '#execute' do
+ let_it_be(:root_ancestor) { create(:group) }
+
+ let_it_be(:removed_group) { create(:group, parent: root_ancestor) }
+ let_it_be(:modified_group) { create(:group, parent: root_ancestor) }
+ let_it_be(:added_group) { create(:group, parent: root_ancestor) }
+
+ let_it_be(:removed_project) { create(:project, namespace: root_ancestor) }
+ let_it_be(:modified_project) { create(:project, namespace: root_ancestor) }
+ let_it_be(:added_project) { create(:project, namespace: root_ancestor) }
+
+ let(:project) { create(:project, namespace: root_ancestor) }
+ let(:agent) { create(:cluster_agent, project: project) }
+
+ let(:config) do
+ {
+ ci_access: {
+ groups: [
+ { id: added_group.full_path, default_namespace: 'default' },
+ { id: modified_group.full_path, default_namespace: 'new-namespace' }
+ ],
+ projects: [
+ { id: added_project.full_path, default_namespace: 'default' },
+ { id: modified_project.full_path, default_namespace: 'new-namespace' }
+ ]
+ }
+ }.deep_stringify_keys
+ end
+
+ subject { described_class.new(agent, config: config).execute }
+
+ before do
+ default_config = { default_namespace: 'default' }
+
+ agent.group_authorizations.create!(group: removed_group, config: default_config)
+ agent.group_authorizations.create!(group: modified_group, config: default_config)
+
+ agent.project_authorizations.create!(project: removed_project, config: default_config)
+ agent.project_authorizations.create!(project: modified_project, config: default_config)
+ end
+
+ shared_examples 'removing authorization' do
+ context 'config contains no groups' do
+ let(:config) { {} }
+
+ it 'removes all authorizations' do
+ expect(subject).to be_truthy
+ expect(authorizations).to be_empty
+ end
+ end
+
+ context 'config contains groups outside of the configuration project hierarchy' do
+ let(:project) { create(:project, namespace: create(:group)) }
+
+ it 'removes all authorizations' do
+ expect(subject).to be_truthy
+ expect(authorizations).to be_empty
+ end
+ end
+
+ context 'configuration project does not belong to a group' do
+ let(:project) { create(:project) }
+
+ it 'removes all authorizations' do
+ expect(subject).to be_truthy
+ expect(authorizations).to be_empty
+ end
+ end
+ end
+
+ describe 'group authorization' do
+ it 'refreshes authorizations for the agent' do
+ expect(subject).to be_truthy
+ expect(agent.authorized_groups).to contain_exactly(added_group, modified_group)
+
+ added_authorization = agent.group_authorizations.find_by(group: added_group)
+ expect(added_authorization.config).to eq({ 'default_namespace' => 'default' })
+
+ modified_authorization = agent.group_authorizations.find_by(group: modified_group)
+ expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' })
+ end
+
+ context 'config contains too many groups' do
+ before do
+ stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 1)
+ end
+
+ it 'authorizes groups up to the limit' do
+ expect(subject).to be_truthy
+ expect(agent.authorized_groups).to contain_exactly(added_group)
+ end
+ end
+
+ include_examples 'removing authorization' do
+ let(:authorizations) { agent.authorized_groups }
+ end
+ end
+
+ describe 'project authorization' do
+ it 'refreshes authorizations for the agent' do
+ expect(subject).to be_truthy
+ expect(agent.authorized_projects).to contain_exactly(added_project, modified_project)
+
+ added_authorization = agent.project_authorizations.find_by(project: added_project)
+ expect(added_authorization.config).to eq({ 'default_namespace' => 'default' })
+
+ modified_authorization = agent.project_authorizations.find_by(project: modified_project)
+ expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' })
+ end
+
+ context 'config contains too many projects' do
+ before do
+ stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 1)
+ end
+
+ it 'authorizes projects up to the limit' do
+ expect(subject).to be_truthy
+ expect(agent.authorized_projects).to contain_exactly(added_project)
+ end
+ end
+
+ include_examples 'removing authorization' do
+ let(:authorizations) { agent.authorized_projects }
+ end
+ end
+ end
+end
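
The refresh is driven entirely by the agent's ci_access config; the hash in the spec is the Ruby form of what the agent's config file would declare. Per the 'removing authorization' shared examples, refreshing with an empty config (or from a project outside the group hierarchy) is expected to clear everything:

    Clusters::Agents::RefreshAuthorizationService.new(agent, config: {}).execute
    agent.authorized_groups   # => []
    agent.authorized_projects # => []
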
diff --git a/spec/services/customer_relations/organizations/create_service_spec.rb b/spec/services/customer_relations/organizations/create_service_spec.rb
new file mode 100644
index 00000000000..b4764f6b97a
--- /dev/null
+++ b/spec/services/customer_relations/organizations/create_service_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomerRelations::Organizations::CreateService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+
+ let(:group) { create(:group) }
+ let(:params) { attributes_for(:organization, group: group) }
+
+ subject(:response) { described_class.new(group: group, current_user: user, params: params).execute }
+
+ it 'creates an organization' do
+ group.add_reporter(user)
+
+ expect(response).to be_success
+ end
+
+ it 'returns an error when user does not have permission' do
+ expect(response).to be_error
+ expect(response.message).to eq('You have insufficient permissions to create an organization for this group')
+ end
+
+ it 'returns an error when the organization is not persisted' do
+ group.add_reporter(user)
+ params[:name] = nil
+
+ expect(response).to be_error
+ expect(response.message).to eq(["Name can't be blank"])
+ end
+ end
+end
diff --git a/spec/services/customer_relations/organizations/update_service_spec.rb b/spec/services/customer_relations/organizations/update_service_spec.rb
new file mode 100644
index 00000000000..eb253540863
--- /dev/null
+++ b/spec/services/customer_relations/organizations/update_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomerRelations::Organizations::UpdateService do
+ let_it_be(:user) { create(:user) }
+
+ let(:organization) { create(:organization, name: 'Test', group: group) }
+
+ subject(:update) { described_class.new(group: group, current_user: user, params: params).execute(organization) }
+
+ describe '#execute' do
+ context 'when the user has no permission' do
+ let_it_be(:group) { create(:group) }
+
+ let(:params) { { name: 'GitLab' } }
+
+ it 'returns an error' do
+ response = update
+
+ expect(response).to be_error
+ expect(response.message).to eq('You have insufficient permissions to update an organization for this group')
+ end
+ end
+
+ context 'when user has permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_reporter(user)
+ end
+
+ context 'when name is changed' do
+ let(:params) { { name: 'GitLab' } }
+
+ it 'updates the organization' do
+ response = update
+
+ expect(response).to be_success
+ expect(response.payload.name).to eq('GitLab')
+ end
+ end
+
+ context 'when the organization is invalid' do
+ let(:params) { { name: nil } }
+
+ it 'returns an error' do
+ response = update
+
+ expect(response).to be_error
+ expect(response.message).to eq(["Name can't be blank"])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/dependency_proxy/image_ttl_group_policies/update_service_spec.rb b/spec/services/dependency_proxy/image_ttl_group_policies/update_service_spec.rb
new file mode 100644
index 00000000000..ceac8985c8e
--- /dev/null
+++ b/spec/services/dependency_proxy/image_ttl_group_policies/update_service_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::DependencyProxy::ImageTtlGroupPolicies::UpdateService do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:params) { {} }
+
+ describe '#execute' do
+ subject { described_class.new(container: group, current_user: user, params: params).execute }
+
+ shared_examples 'returning a success' do
+ it 'returns a success' do
+ result = subject
+
+ expect(result.payload[:dependency_proxy_image_ttl_policy]).to be_present
+ expect(result).to be_success
+ end
+ end
+
+ shared_examples 'returning an error' do |message, http_status|
+ it 'returns an error' do
+ result = subject
+
+ expect(result).to have_attributes(
+ message: message,
+ status: :error,
+ http_status: http_status
+ )
+ end
+ end
+
+ shared_examples 'updating the dependency proxy image ttl policy' do
+ it_behaves_like 'updating the dependency proxy image ttl policy attributes',
+ from: { enabled: true, ttl: 90 },
+ to: { enabled: false, ttl: 2 }
+
+ it_behaves_like 'returning a success'
+
+ context 'with invalid params' do
+ let_it_be(:params) { { enabled: nil } }
+
+ it_behaves_like 'not creating the dependency proxy image ttl policy'
+
+ it "doesn't update" do
+ expect { subject }
+ .not_to change { ttl_policy.reload.enabled }
+ end
+
+ it_behaves_like 'returning an error', 'Enabled is not included in the list', 400
+ end
+ end
+
+ shared_examples 'denying access to dependency proxy image ttl policy' do
+ context 'with existing dependency proxy image ttl policy' do
+ it_behaves_like 'not creating the dependency proxy image ttl policy'
+
+ it_behaves_like 'returning an error', 'Access Denied', 403
+ end
+ end
+
+ before do
+ stub_config(dependency_proxy: { enabled: true })
+ end
+
+ context 'with existing dependency proxy image ttl policy' do
+ let_it_be(:ttl_policy) { create(:image_ttl_group_policy, group: group) }
+ let_it_be(:params) { { enabled: false, ttl: 2 } }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'updating the dependency proxy image ttl policy'
+ :developer | 'updating the dependency proxy image ttl policy'
+ :reporter | 'denying access to dependency proxy image ttl policy'
+ :guest | 'denying access to dependency proxy image ttl policy'
+ :anonymous | 'denying access to dependency proxy image ttl policy'
+ end
+
+ with_them do
+ before do
+ group.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+
+ context 'without existing dependency proxy image ttl policy' do
+ let_it_be(:ttl_policy) { group.dependency_proxy_image_ttl_policy }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'creating the dependency proxy image ttl policy'
+ :developer | 'creating the dependency proxy image ttl policy'
+ :reporter | 'denying access to dependency proxy image ttl policy'
+ :guest | 'denying access to dependency proxy image ttl policy'
+ :anonymous | 'denying access to dependency proxy image ttl policy'
+ end
+
+ with_them do
+ before do
+ group.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+
+ context 'when the policy is not found' do
+ before do
+ group.add_developer(user)
+ expect(group).to receive(:dependency_proxy_image_ttl_policy).and_return nil
+ end
+
+ it_behaves_like 'returning an error', 'Dependency proxy image TTL Policy not found', 404
+ end
+ end
+ end
+end
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index 341f71fa62c..bc7625d7c28 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -149,6 +149,12 @@ RSpec.describe DesignManagement::DeleteDesignsService do
expect { run_service }
.to change { designs.first.deleted? }.from(false).to(true)
end
+
+ it 'schedules deleting todos for that design' do
+ expect(TodosDestroyer::DestroyedDesignsWorker).to receive(:perform_async).with([designs.first.id])
+
+ run_service
+ end
end
context 'more than one design is passed' do
@@ -168,6 +174,12 @@ RSpec.describe DesignManagement::DeleteDesignsService do
.and change { Event.destroyed_action.for_design.count }.by(2)
end
+ it 'schedules deleting todos for that design' do
+ expect(TodosDestroyer::DestroyedDesignsWorker).to receive(:perform_async).with(designs.map(&:id))
+
+ run_service
+ end
+
it_behaves_like "a success"
context 'after executing the service' do
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index 4f761454516..51ef30c91c0 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -33,7 +33,8 @@ RSpec.describe DraftNotes::PublishService do
end
it 'does not skip notification', :sidekiq_might_not_need_inline do
- expect(Notes::CreateService).to receive(:new).with(project, user, drafts.first.publish_params).and_call_original
+ note_params = drafts.first.publish_params.merge(skip_keep_around_commits: false)
+ expect(Notes::CreateService).to receive(:new).with(project, user, note_params).and_call_original
expect_next_instance_of(NotificationService) do |notification_service|
expect(notification_service).to receive(:new_note)
end
@@ -127,12 +128,17 @@ RSpec.describe DraftNotes::PublishService do
publish
end
- context 'capturing diff notes positions' do
+ context 'capturing diff notes positions and keeping around commits' do
before do
# Need to execute this to ensure that we'll be able to test creation of
# DiffNotePosition records as that only happens when the `MergeRequest#merge_ref_head`
# is present. This service creates that for the specified merge request.
MergeRequests::MergeToRefService.new(project: project, current_user: user).execute(merge_request)
+
+ # Need to re-stub this and call original as we are stubbing
+ # `Gitlab::Git::KeepAround#execute` in spec_helper for performance reason.
+ # Enabling it here so we can test the Gitaly calls it makes.
+ allow(Gitlab::Git::KeepAround).to receive(:execute).and_call_original
end
it 'creates diff_note_positions for diff notes' do
@@ -143,11 +149,26 @@ RSpec.describe DraftNotes::PublishService do
expect(notes.last.diff_note_positions).to be_any
end
+ it 'keeps around the commits of each published note' do
+ publish
+
+ repository = project.repository
+ notes = merge_request.notes.order(id: :asc)
+
+ notes.first.shas.each do |sha|
+ expect(repository.ref_exists?("refs/keep-around/#{sha}")).to be_truthy
+ end
+
+ notes.last.shas.each do |sha|
+ expect(repository.ref_exists?("refs/keep-around/#{sha}")).to be_truthy
+ end
+ end
+
it 'does not requests a lot from Gitaly', :request_store do
# NOTE: This should be reduced as we work on reducing Gitaly calls.
# Gitaly requests shouldn't go above this threshold as much as possible
# as it may add more to the Gitaly N+1 issue we are experiencing.
- expect { publish }.to change { Gitlab::GitalyClient.get_request_count }.by(11)
+ expect { publish }.to change { Gitlab::GitalyClient.get_request_count }.by(21)
end
end
diff --git a/spec/services/environments/auto_stop_service_spec.rb b/spec/services/environments/auto_stop_service_spec.rb
index 93b1596586f..8dad59cbefd 100644
--- a/spec/services/environments/auto_stop_service_spec.rb
+++ b/spec/services/environments/auto_stop_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Environments::AutoStopService, :clean_gitlab_redis_shared_state do
+RSpec.describe Environments::AutoStopService, :clean_gitlab_redis_shared_state, :sidekiq_inline do
include CreateEnvironmentsHelpers
include ExclusiveLeaseHelpers
@@ -42,6 +42,15 @@ RSpec.describe Environments::AutoStopService, :clean_gitlab_redis_shared_state d
expect(Ci::Build.where(name: 'stop_review_app').map(&:status).uniq).to eq(['pending'])
end
+ it 'schedules stop processes in bulk' do
+ args = [[Environment.find_by_name('review/feature-1').id], [Environment.find_by_name('review/feature-2').id]]
+
+ expect(Environments::AutoStopWorker)
+ .to receive(:bulk_perform_async).with(args).once.and_call_original
+
+ subject
+ end
+
context 'when the other sidekiq worker has already been running' do
before do
stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY)
diff --git a/spec/services/environments/stop_service_spec.rb b/spec/services/environments/stop_service_spec.rb
index 52be512612d..acc9869002f 100644
--- a/spec/services/environments/stop_service_spec.rb
+++ b/spec/services/environments/stop_service_spec.rb
@@ -237,60 +237,6 @@ RSpec.describe Environments::StopService do
end
end
- describe '.execute_in_batch' do
- subject { described_class.execute_in_batch(environments) }
-
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
-
- let(:environments) { Environment.available }
-
- before_all do
- project.add_developer(user)
- project.repository.add_branch(user, 'review/feature-1', 'master')
- project.repository.add_branch(user, 'review/feature-2', 'master')
- end
-
- before do
- create_review_app(user, project, 'review/feature-1')
- create_review_app(user, project, 'review/feature-2')
- end
-
- it 'stops environments' do
- expect { subject }
- .to change { project.environments.all.map(&:state).uniq }
- .from(['available']).to(['stopped'])
-
- expect(project.environments.all.map(&:auto_stop_at).uniq).to eq([nil])
- end
-
- it 'plays stop actions' do
- expect { subject }
- .to change { Ci::Build.where(name: 'stop_review_app').map(&:status).uniq }
- .from(['manual']).to(['pending'])
- end
-
- context 'when user does not have a permission to play the stop action' do
- before do
- project.team.truncate
- end
-
- it 'tracks the exception' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(Gitlab::Access::AccessDeniedError, anything)
- .twice
- .and_call_original
-
- subject
- end
-
- after do
- project.add_developer(user)
- end
- end
- end
-
def expect_environment_stopped_on(branch)
expect { service.execute_for_branch(branch) }
.to change { Environment.last.state }.from('available').to('stopped')
diff --git a/spec/services/error_tracking/collect_error_service_spec.rb b/spec/services/error_tracking/collect_error_service_spec.rb
index 14cd588f40b..ee9d0813e64 100644
--- a/spec/services/error_tracking/collect_error_service_spec.rb
+++ b/spec/services/error_tracking/collect_error_service_spec.rb
@@ -34,11 +34,35 @@ RSpec.describe ErrorTracking::CollectErrorService do
expect(error.platform).to eq 'ruby'
expect(error.last_seen_at).to eq '2021-07-08T12:59:16Z'
- expect(event.description).to eq 'ActionView::MissingTemplate'
+ expect(event.description).to start_with 'Missing template posts/error2'
expect(event.occurred_at).to eq '2021-07-08T12:59:16Z'
expect(event.level).to eq 'error'
expect(event.environment).to eq 'development'
expect(event.payload).to eq parsed_event
end
+
+ context 'unusual payload' do
+ let(:modified_event) { parsed_event }
+
+ context 'missing transaction' do
+ it 'builds actor from stacktrace' do
+ modified_event.delete('transaction')
+
+ event = described_class.new(project, nil, event: modified_event).execute
+
+ expect(event.error.actor).to eq 'find()'
+ end
+ end
+
+ context 'timestamp is numeric' do
+ it 'parses timestamp' do
+ modified_event['timestamp'] = '1631015580.50'
+
+ event = described_class.new(project, nil, event: modified_event).execute
+
+ expect(event.occurred_at).to eq '2021-09-07T11:53:00.5'
+ end
+ end
+ end
end
end
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
index 4eb2b25fb64..5a517ce6a64 100644
--- a/spec/services/feature_flags/create_service_spec.rb
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -48,8 +48,9 @@ RSpec.describe FeatureFlags::CreateService do
{
name: 'feature_flag',
description: 'description',
- scopes_attributes: [{ environment_scope: '*', active: true },
- { environment_scope: 'production', active: false }]
+ version: 'new_version_flag',
+ strategies_attributes: [{ name: 'default', scopes_attributes: [{ environment_scope: '*' }], parameters: {} },
+ { name: 'default', parameters: {}, scopes_attributes: [{ environment_scope: 'production' }] }]
}
end
@@ -68,15 +69,10 @@ RSpec.describe FeatureFlags::CreateService do
end
it 'creates audit event' do
- expected_message = 'Created feature flag feature_flag '\
- 'with description "description". '\
- 'Created rule * and set it as active '\
- 'with strategies [{"name"=&gt;"default", "parameters"=&gt;{}}]. '\
- 'Created rule production and set it as inactive '\
- 'with strategies [{"name"=&gt;"default", "parameters"=&gt;{}}].'
-
expect { subject }.to change { AuditEvent.count }.by(1)
- expect(AuditEvent.last.details[:custom_message]).to eq(expected_message)
+ expect(AuditEvent.last.details[:custom_message]).to start_with('Created feature flag feature_flag with description "description".')
+ expect(AuditEvent.last.details[:custom_message]).to include('Created strategy "default" with scopes "*".')
+ expect(AuditEvent.last.details[:custom_message]).to include('Created strategy "default" with scopes "production".')
end
context 'when user is reporter' do
diff --git a/spec/services/git/base_hooks_service_spec.rb b/spec/services/git/base_hooks_service_spec.rb
index 539c294a2e7..a8d753ff124 100644
--- a/spec/services/git/base_hooks_service_spec.rb
+++ b/spec/services/git/base_hooks_service_spec.rb
@@ -19,9 +19,13 @@ RSpec.describe Git::BaseHooksService do
:push_hooks
end
- def commits
+ def limited_commits
[]
end
+
+ def commits_count
+ 0
+ end
end
end
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index a93f594b360..79c2cb1fca3 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -362,6 +362,9 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
end
end
+ let(:commits_count) { service.send(:commits_count) }
+ let(:threshold_limit) { described_class::PROCESS_COMMIT_LIMIT + 1 }
+
let(:oldrev) { project.commit(commit_ids.first).parent_id }
let(:newrev) { commit_ids.last }
@@ -373,17 +376,31 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
let(:oldrev) { Gitlab::Git::BLANK_SHA }
it 'processes a limited number of commit messages' do
+ expect(project.repository)
+ .to receive(:commits)
+ .with(newrev, limit: threshold_limit)
+ .and_call_original
+
expect(ProcessCommitWorker).to receive(:perform_async).twice
service.execute
+
+ expect(commits_count).to eq(project.repository.commit_count_for_ref(newrev))
end
end
context 'updating the default branch' do
it 'processes a limited number of commit messages' do
+ expect(project.repository)
+ .to receive(:commits_between)
+ .with(oldrev, newrev, limit: threshold_limit)
+ .and_call_original
+
expect(ProcessCommitWorker).to receive(:perform_async).twice
service.execute
+
+ expect(commits_count).to eq(project.repository.count_commits_between(oldrev, newrev))
end
end
@@ -391,9 +408,13 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
let(:newrev) { Gitlab::Git::BLANK_SHA }
it 'does not process commit messages' do
+ expect(project.repository).not_to receive(:commits)
+ expect(project.repository).not_to receive(:commits_between)
expect(ProcessCommitWorker).not_to receive(:perform_async)
service.execute
+
+ expect(commits_count).to eq(0)
end
end
@@ -402,9 +423,16 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
let(:oldrev) { Gitlab::Git::BLANK_SHA }
it 'processes a limited number of commit messages' do
+ expect(project.repository)
+ .to receive(:commits_between)
+ .with(project.default_branch, newrev, limit: threshold_limit)
+ .and_call_original
+
expect(ProcessCommitWorker).to receive(:perform_async).twice
service.execute
+
+ expect(commits_count).to eq(project.repository.count_commits_between(project.default_branch, branch))
end
end
@@ -412,9 +440,15 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
let(:branch) { 'fix' }
it 'processes a limited number of commit messages' do
+ expect(project.repository)
+ .to receive(:commits_between)
+ .with(oldrev, newrev, limit: threshold_limit)
+ .and_call_original
+
expect(ProcessCommitWorker).to receive(:perform_async).twice
service.execute
+ expect(commits_count).to eq(project.repository.count_commits_between(oldrev, newrev))
end
end
@@ -423,9 +457,13 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
let(:newrev) { Gitlab::Git::BLANK_SHA }
it 'does not process commit messages' do
+ expect(project.repository).not_to receive(:commits)
+ expect(project.repository).not_to receive(:commits_between)
expect(ProcessCommitWorker).not_to receive(:perform_async)
service.execute
+
+ expect(commits_count).to eq(0)
end
end
diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb
index b1bb9a8de23..03dac14be54 100644
--- a/spec/services/groups/group_links/create_service_spec.rb
+++ b/spec/services/groups/group_links/create_service_spec.rb
@@ -6,18 +6,20 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
let(:parent_group_user) { create(:user) }
let(:group_user) { create(:user) }
let(:child_group_user) { create(:user) }
+ let(:prevent_sharing) { false }
let_it_be(:group_parent) { create(:group, :private) }
let_it_be(:group) { create(:group, :private, parent: group_parent) }
let_it_be(:group_child) { create(:group, :private, parent: group) }
- let_it_be(:shared_group_parent, refind: true) { create(:group, :private) }
- let_it_be(:shared_group, refind: true) { create(:group, :private, parent: shared_group_parent) }
- let_it_be(:shared_group_child) { create(:group, :private, parent: shared_group) }
+ let(:ns_for_parent) { create(:namespace_settings, prevent_sharing_groups_outside_hierarchy: prevent_sharing) }
+ let(:shared_group_parent) { create(:group, :private, namespace_settings: ns_for_parent) }
+ let(:shared_group) { create(:group, :private, parent: shared_group_parent) }
+ let(:shared_group_child) { create(:group, :private, parent: shared_group) }
- let_it_be(:project_parent) { create(:project, group: shared_group_parent) }
- let_it_be(:project) { create(:project, group: shared_group) }
- let_it_be(:project_child) { create(:project, group: shared_group_child) }
+ let(:project_parent) { create(:project, group: shared_group_parent) }
+ let(:project) { create(:project, group: shared_group) }
+ let(:project_child) { create(:project, group: shared_group_child) }
let(:opts) do
{
@@ -129,9 +131,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
end
context 'sharing outside the hierarchy is disabled' do
- before do
- shared_group_parent.namespace_settings.update!(prevent_sharing_groups_outside_hierarchy: true)
- end
+ let(:prevent_sharing) { true }
it 'prevents sharing with a group outside the hierarchy' do
result = subject.execute
diff --git a/spec/services/groups/open_issues_count_service_spec.rb b/spec/services/groups/open_issues_count_service_spec.rb
index fca09bfdebe..7dd8c2a59a0 100644
--- a/spec/services/groups/open_issues_count_service_spec.rb
+++ b/spec/services/groups/open_issues_count_service_spec.rb
@@ -3,12 +3,18 @@
require 'spec_helper'
RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_caching do
- let_it_be(:group) { create(:group, :public)}
+ let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:admin) { create(:user, :admin) }
let_it_be(:user) { create(:user) }
- let_it_be(:issue) { create(:issue, :opened, project: project) }
- let_it_be(:confidential) { create(:issue, :opened, confidential: true, project: project) }
- let_it_be(:closed) { create(:issue, :closed, project: project) }
+ let_it_be(:banned_user) { create(:user, :banned) }
+
+ before do
+ create(:issue, :opened, project: project)
+ create(:issue, :opened, confidential: true, project: project)
+ create(:issue, :opened, author: banned_user, project: project)
+ create(:issue, :closed, project: project)
+ end
subject { described_class.new(group, user) }
@@ -20,28 +26,42 @@ RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_cac
it 'uses the IssuesFinder to scope issues' do
expect(IssuesFinder)
.to receive(:new)
- .with(user, group_id: group.id, state: 'opened', non_archived: true, include_subgroups: true, public_only: true)
+ .with(user, group_id: group.id, state: 'opened', non_archived: true, include_subgroups: true, public_only: true, include_hidden: false)
subject.count
end
end
describe '#count' do
- context 'when user is nil' do
- it 'does not include confidential issues in the issue count' do
- expect(described_class.new(group).count).to eq(1)
+ shared_examples 'counts public issues, does not count hidden or confidential' do
+ it 'counts only public issues' do
+ expect(subject.count).to eq(1)
+ end
+
+ it 'uses PUBLIC_COUNT_WITHOUT_HIDDEN_KEY cache key' do
+ expect(subject.cache_key).to include('group_open_public_issues_without_hidden_count')
end
end
+ context 'when user is nil' do
+ let(:user) { nil }
+
+ it_behaves_like 'counts public issues, does not count hidden or confidential'
+ end
+
context 'when user is provided' do
context 'when user can read confidential issues' do
before do
group.add_reporter(user)
end
- it 'returns the right count with confidential issues' do
+ it 'includes confidential issues and does not include hidden issues in count' do
expect(subject.count).to eq(2)
end
+
+ it 'uses TOTAL_COUNT_WITHOUT_HIDDEN_KEY cache key' do
+ expect(subject.cache_key).to include('group_open_issues_without_hidden_count')
+ end
end
context 'when user cannot read confidential issues' do
@@ -49,8 +69,24 @@ RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_cac
group.add_guest(user)
end
- it 'does not include confidential issues' do
- expect(subject.count).to eq(1)
+ it_behaves_like 'counts public issues, does not count hidden or confidential'
+ end
+
+ context 'when user is an admin' do
+ let(:user) { admin }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'includes confidential and hidden issues in count' do
+ expect(subject.count).to eq(3)
+ end
+
+ it 'uses TOTAL_COUNT_KEY cache key' do
+ expect(subject.cache_key).to include('group_open_issues_including_hidden_count')
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it_behaves_like 'counts public issues, does not count hidden or confidential'
end
end
@@ -61,11 +97,13 @@ RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_cac
describe '#clear_all_cache_keys' do
it 'calls `Rails.cache.delete` with the correct keys' do
expect(Rails.cache).to receive(:delete)
- .with(['groups', 'open_issues_count_service', 1, group.id, described_class::PUBLIC_COUNT_KEY])
+ .with(['groups', 'open_issues_count_service', 1, group.id, described_class::PUBLIC_COUNT_WITHOUT_HIDDEN_KEY])
expect(Rails.cache).to receive(:delete)
.with(['groups', 'open_issues_count_service', 1, group.id, described_class::TOTAL_COUNT_KEY])
+ expect(Rails.cache).to receive(:delete)
+ .with(['groups', 'open_issues_count_service', 1, group.id, described_class::TOTAL_COUNT_WITHOUT_HIDDEN_KEY])
- subject.clear_all_cache_keys
+ described_class.new(group).clear_all_cache_keys
end
end
end
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index e941958eb8c..fe18277b5cd 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -55,6 +55,31 @@ RSpec.describe Groups::UpdateSharedRunnersService do
expect(subject[:status]).to eq(:success)
end
end
+
+ context 'when group has pending builds' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
+ let_it_be(:project) { create(:project, namespace: group, shared_runners_enabled: false) }
+ let_it_be(:pending_build_1) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }
+ let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }
+
+ it 'updates pending builds for the group' do
+ subject
+
+ expect(pending_build_1.reload.instance_runners_enabled).to be_truthy
+ expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
+ end
+
+ context 'when shared runners is not toggled' do
+ let(:params) { { shared_runners_setting: 'invalid_enabled' } }
+
+ it 'does not update pending builds for the group' do
+ subject
+
+ expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
+ expect(pending_build_2.reload.instance_runners_enabled).to be_falsey
+ end
+ end
+ end
end
context 'disable shared Runners' do
@@ -67,6 +92,19 @@ RSpec.describe Groups::UpdateSharedRunnersService do
expect(subject[:status]).to eq(:success)
end
+
+ context 'when group has pending builds' do
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:pending_build_1) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
+ let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
+
+ it 'updates pending builds for the group' do
+ subject
+
+ expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
+ expect(pending_build_2.reload.instance_runners_enabled).to be_falsey
+ end
+ end
end
context 'allow descendants to override' do
diff --git a/spec/services/issue_rebalancing_service_spec.rb b/spec/services/issue_rebalancing_service_spec.rb
deleted file mode 100644
index 76ccb6d89ea..00000000000
--- a/spec/services/issue_rebalancing_service_spec.rb
+++ /dev/null
@@ -1,173 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe IssueRebalancingService do
- let_it_be(:project, reload: true) { create(:project) }
- let_it_be(:user) { project.creator }
- let_it_be(:start) { RelativePositioning::START_POSITION }
- let_it_be(:max_pos) { RelativePositioning::MAX_POSITION }
- let_it_be(:min_pos) { RelativePositioning::MIN_POSITION }
- let_it_be(:clump_size) { 300 }
-
- let_it_be(:unclumped, reload: true) do
- (1..clump_size).to_a.map do |i|
- create(:issue, project: project, author: user, relative_position: start + (1024 * i))
- end
- end
-
- let_it_be(:end_clump, reload: true) do
- (1..clump_size).to_a.map do |i|
- create(:issue, project: project, author: user, relative_position: max_pos - i)
- end
- end
-
- let_it_be(:start_clump, reload: true) do
- (1..clump_size).to_a.map do |i|
- create(:issue, project: project, author: user, relative_position: min_pos + i)
- end
- end
-
- before do
- stub_feature_flags(issue_rebalancing_with_retry: false)
- end
-
- def issues_in_position_order
- project.reload.issues.reorder(relative_position: :asc).to_a
- end
-
- shared_examples 'IssueRebalancingService shared examples' do
- it 'rebalances a set of issues with clumps at the end and start' do
- all_issues = start_clump + unclumped + end_clump.reverse
- service = described_class.new(Project.id_in([project.id]))
-
- expect { service.execute }.not_to change { issues_in_position_order.map(&:id) }
-
- all_issues.each(&:reset)
-
- gaps = all_issues.take(all_issues.count - 1).zip(all_issues.drop(1)).map do |a, b|
- b.relative_position - a.relative_position
- end
-
- expect(gaps).to all(be > RelativePositioning::MIN_GAP)
- expect(all_issues.first.relative_position).to be > (RelativePositioning::MIN_POSITION * 0.9999)
- expect(all_issues.last.relative_position).to be < (RelativePositioning::MAX_POSITION * 0.9999)
- end
-
- it 'is idempotent' do
- service = described_class.new(Project.id_in(project))
-
- expect do
- service.execute
- service.execute
- end.not_to change { issues_in_position_order.map(&:id) }
- end
-
- it 'does nothing if the feature flag is disabled' do
- stub_feature_flags(rebalance_issues: false)
- issue = project.issues.first
- issue.project
- issue.project.group
- old_pos = issue.relative_position
-
- service = described_class.new(Project.id_in(project))
-
- expect { service.execute }.not_to exceed_query_limit(0)
- expect(old_pos).to eq(issue.reload.relative_position)
- end
-
- it 'acts if the flag is enabled for the root namespace' do
- issue = create(:issue, project: project, author: user, relative_position: max_pos)
- stub_feature_flags(rebalance_issues: project.root_namespace)
-
- service = described_class.new(Project.id_in(project))
-
- expect { service.execute }.to change { issue.reload.relative_position }
- end
-
- it 'acts if the flag is enabled for the group' do
- issue = create(:issue, project: project, author: user, relative_position: max_pos)
- project.update!(group: create(:group))
- stub_feature_flags(rebalance_issues: issue.project.group)
-
- service = described_class.new(Project.id_in(project))
-
- expect { service.execute }.to change { issue.reload.relative_position }
- end
-
- it 'aborts if there are too many issues' do
- base = double(count: 10_001)
-
- allow(Issue).to receive(:in_projects).and_return(base)
-
- expect { described_class.new(Project.id_in(project)).execute }.to raise_error(described_class::TooManyIssues)
- end
- end
-
- shared_examples 'rebalancing is retried on statement timeout exceptions' do
- subject { described_class.new(Project.id_in(project)) }
-
- it 'retries update statement' do
- call_count = 0
- allow(subject).to receive(:run_update_query) do
- call_count += 1
- if call_count < 13
- raise(ActiveRecord::QueryCanceled)
- else
- call_count = 0 if call_count == 13 + 16 # 16 = 17 sub-batches - 1 call that succeeded as part of 5th batch
- true
- end
- end
-
- # call math:
- # batches start at 100 and are split in half after every 3 retries if ActiveRecord::StatementTimeout exception is raised.
- # We raise ActiveRecord::StatementTimeout exception for 13 calls:
- # 1. 100 => 3 calls
- # 2. 100/2=50 => 3 calls + 3 above = 6 calls, raise ActiveRecord::StatementTimeout
- # 3. 50/2=25 => 3 calls + 6 above = 9 calls, raise ActiveRecord::StatementTimeout
- # 4. 25/2=12 => 3 calls + 9 above = 12 calls, raise ActiveRecord::StatementTimeout
- # 5. 12/2=6 => 1 call + 12 above = 13 calls, run successfully
- #
- # so out of 100 elements we created batches of 6 items => 100/6 = 17 sub-batches of 6 or less elements
- #
- # project.issues.count: 900 issues, so 9 batches of 100 => 9 * (13+16) = 261
- expect(subject).to receive(:update_positions).exactly(261).times.and_call_original
-
- subject.execute
- end
- end
-
- context 'when issue_rebalancing_optimization feature flag is on' do
- before do
- stub_feature_flags(issue_rebalancing_optimization: true)
- end
-
- it_behaves_like 'IssueRebalancingService shared examples'
-
- context 'when issue_rebalancing_with_retry feature flag is on' do
- before do
- stub_feature_flags(issue_rebalancing_with_retry: true)
- end
-
- it_behaves_like 'IssueRebalancingService shared examples'
- it_behaves_like 'rebalancing is retried on statement timeout exceptions'
- end
- end
-
- context 'when issue_rebalancing_optimization feature flag is off' do
- before do
- stub_feature_flags(issue_rebalancing_optimization: false)
- end
-
- it_behaves_like 'IssueRebalancingService shared examples'
-
- context 'when issue_rebalancing_with_retry feature flag is on' do
- before do
- stub_feature_flags(issue_rebalancing_with_retry: true)
- end
-
- it_behaves_like 'IssueRebalancingService shared examples'
- it_behaves_like 'rebalancing is retried on statement timeout exceptions'
- end
- end
-end
diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb
index 3f506ec58b0..b96dd981e0f 100644
--- a/spec/services/issues/build_service_spec.rb
+++ b/spec/services/issues/build_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Issues::BuildService do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:project) { create(:project, :repository) }
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
@@ -144,6 +146,8 @@ RSpec.describe Issues::BuildService do
issue = build_issue(milestone_id: milestone.id)
expect(issue.milestone).to eq(milestone)
+ expect(issue.issue_type).to eq('issue')
+ expect(issue.work_item_type.base_type).to eq('issue')
end
it 'sets milestone to nil if it is not available for the project' do
@@ -152,6 +156,15 @@ RSpec.describe Issues::BuildService do
expect(issue.milestone).to be_nil
end
+
+ context 'when issue_type is incident' do
+ it 'sets the correct issue type' do
+ issue = build_issue(issue_type: 'incident')
+
+ expect(issue.issue_type).to eq('incident')
+ expect(issue.work_item_type.base_type).to eq('incident')
+ end
+ end
end
context 'as guest' do
@@ -165,28 +178,37 @@ RSpec.describe Issues::BuildService do
end
context 'setting issue type' do
- it 'defaults to issue if issue_type not given' do
- issue = build_issue
+ shared_examples 'builds an issue' do
+ specify do
+ issue = build_issue(issue_type: issue_type)
- expect(issue).to be_issue
+ expect(issue.issue_type).to eq(resulting_issue_type)
+ expect(issue.work_item_type_id).to eq(work_item_type_id)
+ end
end
- it 'sets issue' do
- issue = build_issue(issue_type: 'issue')
+ it 'cannot set invalid issue type' do
+ issue = build_issue(issue_type: 'project')
expect(issue).to be_issue
end
- it 'sets incident' do
- issue = build_issue(issue_type: 'incident')
-
- expect(issue).to be_incident
- end
-
- it 'cannot set invalid type' do
- issue = build_issue(issue_type: 'invalid type')
-
- expect(issue).to be_issue
+ context 'with a corresponding WorkItems::Type' do
+ let_it_be(:type_issue_id) { WorkItems::Type.default_issue_type.id }
+ let_it_be(:type_incident_id) { WorkItems::Type.default_by_type(:incident).id }
+
+ where(:issue_type, :work_item_type_id, :resulting_issue_type) do
+ nil | ref(:type_issue_id) | 'issue'
+ 'issue' | ref(:type_issue_id) | 'issue'
+ 'incident' | ref(:type_incident_id) | 'incident'
+ 'test_case' | ref(:type_issue_id) | 'issue' # update once support for test_case is enabled
+ 'requirement' | ref(:type_issue_id) | 'issue' # update once support for requirement is enabled
+ 'invalid' | ref(:type_issue_id) | 'issue'
+ end
+
+ with_them do
+ it_behaves_like 'builds an issue'
+ end
end
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index b1d4877e138..14e6b44f7b0 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -58,8 +58,11 @@ RSpec.describe Issues::CloseService do
end
it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do
- expect { service.execute(issue) }
- .to change { project.open_issues_count }.from(1).to(0)
+ expect do
+ service.execute(issue)
+
+ BatchLoader::Executor.clear_current
+ end.to change { project.open_issues_count }.from(1).to(0)
end
it 'invalidates counter cache for assignees' do
@@ -222,7 +225,7 @@ RSpec.describe Issues::CloseService do
it 'verifies the number of queries' do
recorded = ActiveRecord::QueryRecorder.new { close_issue }
- expected_queries = 25
+ expected_queries = 27
expect(recorded.count).to be <= expected_queries
expect(recorded.cached_count).to eq(0)
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 0e2b3b957a5..3988069d83a 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -43,10 +43,11 @@ RSpec.describe Issues::CreateService do
expect(issue).to be_persisted
expect(issue.title).to eq('Awesome issue')
- expect(issue.assignees).to eq [assignee]
- expect(issue.labels).to match_array labels
- expect(issue.milestone).to eq milestone
- expect(issue.due_date).to eq Date.tomorrow
+ expect(issue.assignees).to eq([assignee])
+ expect(issue.labels).to match_array(labels)
+ expect(issue.milestone).to eq(milestone)
+ expect(issue.due_date).to eq(Date.tomorrow)
+ expect(issue.work_item_type.base_type).to eq('issue')
end
context 'when skip_system_notes is true' do
@@ -91,7 +92,11 @@ RSpec.describe Issues::CreateService do
end
it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do
- expect { issue }.to change { project.open_issues_count }.from(0).to(1)
+ expect do
+ issue
+
+ BatchLoader::Executor.clear_current
+ end.to change { project.open_issues_count }.from(0).to(1)
end
context 'when current user cannot set issue metadata in the project' do
diff --git a/spec/services/issues/relative_position_rebalancing_service_spec.rb b/spec/services/issues/relative_position_rebalancing_service_spec.rb
new file mode 100644
index 00000000000..d5d81770817
--- /dev/null
+++ b/spec/services/issues/relative_position_rebalancing_service_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_shared_state do
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:user) { project.creator }
+ let_it_be(:start) { RelativePositioning::START_POSITION }
+ let_it_be(:max_pos) { RelativePositioning::MAX_POSITION }
+ let_it_be(:min_pos) { RelativePositioning::MIN_POSITION }
+ let_it_be(:clump_size) { 300 }
+
+ let_it_be(:unclumped, reload: true) do
+ (1..clump_size).to_a.map do |i|
+ create(:issue, project: project, author: user, relative_position: start + (1024 * i))
+ end
+ end
+
+ let_it_be(:end_clump, reload: true) do
+ (1..clump_size).to_a.map do |i|
+ create(:issue, project: project, author: user, relative_position: max_pos - i)
+ end
+ end
+
+ let_it_be(:start_clump, reload: true) do
+ (1..clump_size).to_a.map do |i|
+ create(:issue, project: project, author: user, relative_position: min_pos + i)
+ end
+ end
+
+ before do
+ stub_feature_flags(issue_rebalancing_with_retry: false)
+ end
+
+ def issues_in_position_order
+ project.reload.issues.reorder(relative_position: :asc).to_a
+ end
+
+ subject(:service) { described_class.new(Project.id_in(project)) }
+
+ context 'execute' do
+ it 're-balances a set of issues with clumps at the end and start' do
+ all_issues = start_clump + unclumped + end_clump.reverse
+
+ expect { service.execute }.not_to change { issues_in_position_order.map(&:id) }
+
+ all_issues.each(&:reset)
+
+ gaps = all_issues.take(all_issues.count - 1).zip(all_issues.drop(1)).map do |a, b|
+ b.relative_position - a.relative_position
+ end
+
+ expect(gaps).to all(be > RelativePositioning::MIN_GAP)
+ expect(all_issues.first.relative_position).to be > (RelativePositioning::MIN_POSITION * 0.9999)
+ expect(all_issues.last.relative_position).to be < (RelativePositioning::MAX_POSITION * 0.9999)
+ expect(project.root_namespace.issue_repositioning_disabled?).to be false
+ end
+
+ it 'is idempotent' do
+ expect do
+ service.execute
+ service.execute
+ end.not_to change { issues_in_position_order.map(&:id) }
+ end
+
+ it 'does nothing if the feature flag is disabled' do
+ stub_feature_flags(rebalance_issues: false)
+ issue = project.issues.first
+ issue.project
+ issue.project.group
+ old_pos = issue.relative_position
+
+ # fetching root namespace in the initializer triggers 2 queries:
+ # for fetching a random project from collection and fetching the root namespace.
+ expect { service.execute }.not_to exceed_query_limit(2)
+ expect(old_pos).to eq(issue.reload.relative_position)
+ end
+
+ it 'acts if the flag is enabled for the root namespace' do
+ issue = create(:issue, project: project, author: user, relative_position: max_pos)
+ stub_feature_flags(rebalance_issues: project.root_namespace)
+
+ expect { service.execute }.to change { issue.reload.relative_position }
+ end
+
+ it 'acts if the flag is enabled for the group' do
+ issue = create(:issue, project: project, author: user, relative_position: max_pos)
+ project.update!(group: create(:group))
+ stub_feature_flags(rebalance_issues: issue.project.group)
+
+ expect { service.execute }.to change { issue.reload.relative_position }
+ end
+
+ it 'aborts if there are too many rebalances running' do
+ caching = service.send(:caching)
+ allow(caching).to receive(:rebalance_in_progress?).and_return(false)
+ allow(caching).to receive(:concurrent_running_rebalances_count).and_return(10)
+ allow(service).to receive(:caching).and_return(caching)
+
+ expect { service.execute }.to raise_error(Issues::RelativePositionRebalancingService::TooManyConcurrentRebalances)
+ expect(project.root_namespace.issue_repositioning_disabled?).to be false
+ end
+
+ it 'resumes a started rebalance even if there are already too many rebalances running' do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.sadd("gitlab:issues-position-rebalances:running_rebalances", "#{::Gitlab::Issues::Rebalancing::State::PROJECT}/#{project.id}")
+ redis.sadd("gitlab:issues-position-rebalances:running_rebalances", "1/100")
+ end
+
+ caching = service.send(:caching)
+ allow(caching).to receive(:concurrent_running_rebalances_count).and_return(10)
+ allow(service).to receive(:caching).and_return(caching)
+
+ expect { service.execute }.not_to raise_error(Issues::RelativePositionRebalancingService::TooManyConcurrentRebalances)
+ end
+
+ context 're-balancing is retried on statement timeout exceptions' do
+ subject { service }
+
+ it 'retries update statement' do
+ call_count = 0
+ allow(subject).to receive(:run_update_query) do
+ call_count += 1
+ if call_count < 13
+ raise(ActiveRecord::QueryCanceled)
+ else
+ call_count = 0 if call_count == 13 + 16 # 16 = 17 sub-batches - 1 call that succeeded as part of 5th batch
+ true
+ end
+ end
+
+ # call math:
+ # batches start at 100 and are split in half after every 3 retries if ActiveRecord::StatementTimeout exception is raised.
+ # We raise ActiveRecord::StatementTimeout exception for 13 calls:
+ # 1. 100 => 3 calls
+ # 2. 100/2=50 => 3 calls + 3 above = 6 calls, raise ActiveRecord::StatementTimeout
+ # 3. 50/2=25 => 3 calls + 6 above = 9 calls, raise ActiveRecord::StatementTimeout
+ # 4. 25/2=12 => 3 calls + 9 above = 12 calls, raise ActiveRecord::StatementTimeout
+ # 5. 12/2=6 => 1 call + 12 above = 13 calls, run successfully
+ #
+ # so out of 100 elements we created batches of 6 items => 100/6 = 17 sub-batches of 6 or less elements
+ #
+ # project.issues.count: 900 issues, so 9 batches of 100 => 9 * (13+16) = 261
+ expect(subject).to receive(:update_positions).exactly(261).times.and_call_original
+
+ subject.execute
+ end
+ end
+
+ context 'when resuming a stopped rebalance' do
+ before do
+ service.send(:preload_issue_ids)
+ expect(service.send(:caching).get_cached_issue_ids(0, 300)).not_to be_empty
+ # simulate we already rebalanced half the issues
+ index = clump_size * 3 / 2 + 1
+ service.send(:caching).cache_current_index(index)
+ end
+
+ it 'rebalances the other half of issues' do
+ expect(subject).to receive(:update_positions_with_retry).exactly(5).and_call_original
+
+ subject.execute
+ end
+ end
+ end
+end
diff --git a/spec/services/issues/reopen_service_spec.rb b/spec/services/issues/reopen_service_spec.rb
index d58c27289c2..86190c4e475 100644
--- a/spec/services/issues/reopen_service_spec.rb
+++ b/spec/services/issues/reopen_service_spec.rb
@@ -39,8 +39,11 @@ RSpec.describe Issues::ReopenService do
it 'refreshes the number of opened issues' do
service = described_class.new(project: project, current_user: user)
- expect { service.execute(issue) }
- .to change { project.open_issues_count }.from(0).to(1)
+ expect do
+ service.execute(issue)
+
+ BatchLoader::Executor.clear_current
+ end.to change { project.open_issues_count }.from(0).to(1)
end
it 'deletes milestone issue counters cache' do
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 29ac7df88eb..331cf291f21 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -146,8 +146,11 @@ RSpec.describe Issues::UpdateService, :mailer do
it 'refreshes the number of open issues when the issue is made confidential', :use_clean_rails_memory_store_caching do
issue # make sure the issue is created first so our counts are correct.
- expect { update_issue(confidential: true) }
- .to change { project.open_issues_count }.from(1).to(0)
+ expect do
+ update_issue(confidential: true)
+
+ BatchLoader::Executor.clear_current
+ end.to change { project.open_issues_count }.from(1).to(0)
end
it 'enqueues ConfidentialIssueWorker when an issue is made confidential' do
@@ -189,6 +192,14 @@ RSpec.describe Issues::UpdateService, :mailer do
expect(issue.labels.pluck(:title)).to eq(['incident'])
end
+ it 'creates system note about issue type' do
+ update_issue(issue_type: 'incident')
+
+ note = find_note('changed issue type to incident')
+
+ expect(note).not_to eq(nil)
+ end
+
context 'for an issue with multiple labels' do
let(:issue) { create(:incident, project: project, labels: [label_1]) }
@@ -217,15 +228,19 @@ RSpec.describe Issues::UpdateService, :mailer do
context 'from incident to issue' do
let(:issue) { create(:incident, project: project) }
+ it 'changed from an incident to an issue type' do
+ expect { update_issue(issue_type: 'issue') }
+ .to change(issue, :issue_type).from('incident').to('issue')
+ .and(change { issue.work_item_type.base_type }.from('incident').to('issue'))
+ end
+
context 'for an incident with multiple labels' do
let(:issue) { create(:incident, project: project, labels: [label_1, label_2]) }
- before do
- update_issue(issue_type: 'issue')
- end
-
it 'removes an `incident` label if one exists on the incident' do
- expect(issue.labels).to eq([label_2])
+ expect { update_issue(issue_type: 'issue') }.to change(issue, :label_ids)
+ .from(containing_exactly(label_1.id, label_2.id))
+ .to([label_2.id])
end
end
@@ -233,12 +248,10 @@ RSpec.describe Issues::UpdateService, :mailer do
let(:issue) { create(:incident, project: project, labels: [label_1, label_2]) }
let(:params) { { label_ids: [label_1.id, label_2.id], remove_label_ids: [] } }
- before do
- update_issue(issue_type: 'issue')
- end
-
it 'adds an incident label id to remove_label_ids for it to be removed' do
- expect(issue.label_ids).to contain_exactly(label_2.id)
+ expect { update_issue(issue_type: 'issue') }.to change(issue, :label_ids)
+ .from(containing_exactly(label_1.id, label_2.id))
+ .to([label_2.id])
end
end
end
diff --git a/spec/services/members/groups/bulk_creator_service_spec.rb b/spec/services/members/groups/bulk_creator_service_spec.rb
new file mode 100644
index 00000000000..0623ae00080
--- /dev/null
+++ b/spec/services/members/groups/bulk_creator_service_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::Groups::BulkCreatorService do
+ it_behaves_like 'bulk member creation' do
+ let_it_be(:source, reload: true) { create(:group, :public) }
+ let_it_be(:member_type) { GroupMember }
+ end
+end
diff --git a/spec/services/members/mailgun/process_webhook_service_spec.rb b/spec/services/members/mailgun/process_webhook_service_spec.rb
new file mode 100644
index 00000000000..d6a21183395
--- /dev/null
+++ b/spec/services/members/mailgun/process_webhook_service_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::Mailgun::ProcessWebhookService do
+ describe '#execute', :aggregate_failures do
+ let_it_be(:member) { create(:project_member, :invited) }
+
+ let(:raw_invite_token) { member.raw_invite_token }
+ let(:payload) { { 'user-variables' => { ::Members::Mailgun::INVITE_EMAIL_TOKEN_KEY => raw_invite_token } } }
+
+ subject(:service) { described_class.new(payload).execute }
+
+ it 'marks the member invite email success as false' do
+ expect(Gitlab::AppLogger).to receive(:info).with(/^UPDATED MEMBER INVITE_EMAIL_SUCCESS/).and_call_original
+
+ expect { service }.to change { member.reload.invite_email_success }.from(true).to(false)
+ end
+
+ context 'when member can not be found' do
+ let(:raw_invite_token) { '_foobar_' }
+
+ it 'does not change member status' do
+ expect(Gitlab::AppLogger).not_to receive(:info).with(/^UPDATED MEMBER INVITE_EMAIL_SUCCESS/)
+
+ expect { service }.not_to change { member.reload.invite_email_success }
+ end
+ end
+
+ context 'when invite token is not found in payload' do
+ let(:payload) { {} }
+
+ it 'does not change member status and logs an error' do
+ expect(Gitlab::AppLogger).not_to receive(:info).with(/^UPDATED MEMBER INVITE_EMAIL_SUCCESS/)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(described_class::ProcessWebhookServiceError))
+
+ expect { service }.not_to change { member.reload.invite_email_success }
+ end
+ end
+ end
+end
diff --git a/spec/services/members/projects/bulk_creator_service_spec.rb b/spec/services/members/projects/bulk_creator_service_spec.rb
new file mode 100644
index 00000000000..7acb7d79fe7
--- /dev/null
+++ b/spec/services/members/projects/bulk_creator_service_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::Projects::BulkCreatorService do
+ it_behaves_like 'bulk member creation' do
+ let_it_be(:source, reload: true) { create(:project, :public) }
+ let_it_be(:member_type) { ProjectMember }
+ end
+end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index b3af4d67896..e3f33304aab 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -402,21 +402,6 @@ RSpec.describe MergeRequests::MergeService do
expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
end
- it 'logs and saves error if there is a squash in progress' do
- error_message = 'another squash is already in progress'
-
- allow_any_instance_of(MergeRequest).to receive(:squash_in_progress?).and_return(true)
- merge_request.update!(squash: true)
-
- service.execute(merge_request)
-
- expect(merge_request).to be_open
- expect(merge_request.merge_commit_sha).to be_nil
- expect(merge_request.squash_commit_sha).to be_nil
- expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
- end
-
it 'logs and saves error if there is an PreReceiveError exception' do
error_message = 'error message'
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 8fc12c6c2b1..0a781aee704 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -37,34 +37,26 @@ RSpec.describe MergeRequests::MergeToRefService do
expect(ref_head.id).to eq(result[:commit_id])
end
- context 'cache_merge_to_ref_calls flag enabled', :use_clean_rails_memory_store_caching do
+ context 'cache_merge_to_ref_calls parameter', :use_clean_rails_memory_store_caching do
before do
- stub_feature_flags(cache_merge_to_ref_calls: true)
-
# warm the cache
#
- service.execute(merge_request)
- end
-
- it 'caches the response', :request_store do
- expect { 3.times { service.execute(merge_request) } }
- .not_to change(Gitlab::GitalyClient, :get_request_count)
+ service.execute(merge_request, true)
end
- end
-
- context 'cache_merge_to_ref_calls flag disabled', :use_clean_rails_memory_store_caching do
- before do
- stub_feature_flags(cache_merge_to_ref_calls: false)
- # warm the cache
- #
- service.execute(merge_request)
+ context 'when true' do
+ it 'caches the response', :request_store do
+ expect { 3.times { service.execute(merge_request, true) } }
+ .not_to change(Gitlab::GitalyClient, :get_request_count)
+ end
end
- it 'does not cache the response', :request_store do
- expect(Gitlab::GitalyClient).to receive(:call).at_least(3).times.and_call_original
+ context 'when false' do
+ it 'does not cache the response', :request_store do
+ expect(Gitlab::GitalyClient).to receive(:call).at_least(3).times.and_call_original
- 3.times { service.execute(merge_request) }
+ 3.times { service.execute(merge_request, false) }
+ end
end
end
end
diff --git a/spec/services/merge_requests/mergeability_check_service_spec.rb b/spec/services/merge_requests/mergeability_check_service_spec.rb
index 65599b7e046..4f7be0f5965 100644
--- a/spec/services/merge_requests/mergeability_check_service_spec.rb
+++ b/spec/services/merge_requests/mergeability_check_service_spec.rb
@@ -132,6 +132,15 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
it_behaves_like 'mergeable merge request'
+ it 'calls MergeToRefService with cache parameter' do
+ service = instance_double(MergeRequests::MergeToRefService)
+
+ expect(MergeRequests::MergeToRefService).to receive(:new).once { service }
+ expect(service).to receive(:execute).once.with(merge_request, true).and_return(success: true)
+
+ described_class.new(merge_request).execute(recheck: true)
+ end
+
context 'when concurrent calls' do
it 'waits first lock and returns "cached" result in subsequent calls' do
threads = execute_within_threads(amount: 3)
diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb
index 149748cdabc..09f83624e05 100644
--- a/spec/services/merge_requests/squash_service_spec.rb
+++ b/spec/services/merge_requests/squash_service_spec.rb
@@ -194,23 +194,6 @@ RSpec.describe MergeRequests::SquashService do
expect(service.execute).to match(status: :error, message: a_string_including('squash'))
end
end
-
- context 'with an error in squash in progress check' do
- before do
- allow(repository).to receive(:squash_in_progress?)
- .and_raise(Gitlab::Git::Repository::GitError, error)
- end
-
- it 'logs the stage and output' do
- expect(service).to receive(:log_error).with(exception: an_instance_of(Gitlab::Git::Repository::GitError), message: 'Failed to check squash in progress')
-
- service.execute
- end
-
- it 'returns an error' do
- expect(service.execute).to match(status: :error, message: 'An error occurred while checking whether another squash is in progress.')
- end
- end
end
context 'when any other exception is thrown' do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 3c4d7d50002..a03f1f17b39 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2623,7 +2623,7 @@ RSpec.describe NotificationService, :mailer do
let_it_be(:user) { create(:user) }
it 'sends the user an email' do
- notification.user_deactivated(user.name, user.notification_email)
+ notification.user_deactivated(user.name, user.notification_email_or_default)
should_only_email(user)
end
diff --git a/spec/services/packages/composer/version_parser_service_spec.rb b/spec/services/packages/composer/version_parser_service_spec.rb
index 1a2f653c042..69253ff934e 100644
--- a/spec/services/packages/composer/version_parser_service_spec.rb
+++ b/spec/services/packages/composer/version_parser_service_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Packages::Composer::VersionParserService do
where(:tagname, :branchname, :expected_version) do
nil | 'master' | 'dev-master'
nil | 'my-feature' | 'dev-my-feature'
+ nil | '12-feature' | 'dev-12-feature'
nil | 'v1' | '1.x-dev'
nil | 'v1.x' | '1.x-dev'
nil | 'v1.7.x' | '1.7.x-dev'
diff --git a/spec/services/packages/generic/create_package_file_service_spec.rb b/spec/services/packages/generic/create_package_file_service_spec.rb
index 1c9eb53cfc7..9d6784b7721 100644
--- a/spec/services/packages/generic/create_package_file_service_spec.rb
+++ b/spec/services/packages/generic/create_package_file_service_spec.rb
@@ -105,6 +105,37 @@ RSpec.describe Packages::Generic::CreatePackageFileService do
it { expect { execute_service }.to change { project.package_files.count }.by(1) }
end
end
+
+ context 'with multiple files for the same package and the same pipeline' do
+ let(:file_2_params) { params.merge(file_name: 'myfile.tar.gz.2', file: file2) }
+ let(:file_3_params) { params.merge(file_name: 'myfile.tar.gz.3', file: file3) }
+
+ let(:temp_file2) { Tempfile.new("test2") }
+ let(:temp_file3) { Tempfile.new("test3") }
+
+ let(:file2) { UploadedFile.new(temp_file2.path, sha256: sha256) }
+ let(:file3) { UploadedFile.new(temp_file3.path, sha256: sha256) }
+
+ before do
+ FileUtils.touch(temp_file2)
+ FileUtils.touch(temp_file3)
+ expect(::Packages::Generic::FindOrCreatePackageService).to receive(:new).with(project, user, package_params).and_return(package_service).twice
+ expect(package_service).to receive(:execute).and_return(package).twice
+ end
+
+ after do
+ FileUtils.rm_f(temp_file2)
+ FileUtils.rm_f(temp_file3)
+ end
+
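+ # all three uploads target the same package within the same pipeline, so only one build_info row is expected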
+ it 'creates the build info only once' do
+ expect do
+ described_class.new(project, user, params).execute
+ described_class.new(project, user, file_2_params).execute
+ described_class.new(project, user, file_3_params).execute
+ end.to change { package.build_infos.count }.by(1)
+ end
+ end
end
end
end
diff --git a/spec/services/packages/maven/find_or_create_package_service_spec.rb b/spec/services/packages/maven/find_or_create_package_service_spec.rb
index d8b48af0121..59f5677f526 100644
--- a/spec/services/packages/maven/find_or_create_package_service_spec.rb
+++ b/spec/services/packages/maven/find_or_create_package_service_spec.rb
@@ -98,6 +98,19 @@ RSpec.describe Packages::Maven::FindOrCreatePackageService do
it 'creates a build_info' do
expect { subject }.to change { Packages::BuildInfo.count }.by(1)
end
+
+ context 'with multiple files for the same package and the same pipeline' do
+ let(:file_2_params) { params.merge(file_name: 'test2.jar') }
+ let(:file_3_params) { params.merge(file_name: 'test3.jar') }
+
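+ # repeated uploads for the same package within one pipeline should reuse the existing build info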
+ it 'creates a single build info' do
+ expect do
+ described_class.new(project, user, params).execute
+ described_class.new(project, user, file_2_params).execute
+ described_class.new(project, user, file_3_params).execute
+ end.to change { ::Packages::BuildInfo.count }.by(1)
+ end
+ end
end
context 'when package duplicates are not allowed' do
diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
index 66ff6a8d03f..d682ee12ed5 100644
--- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
+++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
- let(:package) { create(:nuget_package, :processing, :with_symbol_package) }
+ let!(:package) { create(:nuget_package, :processing, :with_symbol_package) }
let(:package_file) { package.package_files.first }
let(:service) { described_class.new(package_file) }
let(:package_name) { 'DummyProject.DummyPackage' }
@@ -63,234 +63,213 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
end
end
- shared_examples 'handling all conditions' do
- context 'with no existing package' do
- let(:package_id) { package.id }
+ context 'with no existing package' do
+ let(:package_id) { package.id }
+
+ it 'updates package and package file', :aggregate_failures do
+ expect { subject }
+ .to not_change { ::Packages::Package.count }
+ .and change { Packages::Dependency.count }.by(1)
+ .and change { Packages::DependencyLink.count }.by(1)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+
+ expect(package.reload.name).to eq(package_name)
+ expect(package.version).to eq(package_version)
+ expect(package).to be_default
+ expect(package_file.reload.file_name).to eq(package_file_name)
+ # hard reset needed to properly reload package_file.file
+ expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
+ end
- it 'updates package and package file', :aggregate_failures do
- expect { subject }
- .to not_change { ::Packages::Package.count }
- .and change { Packages::Dependency.count }.by(1)
- .and change { Packages::DependencyLink.count }.by(1)
- .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+ it_behaves_like 'taking the lease'
- expect(package.reload.name).to eq(package_name)
- expect(package.version).to eq(package_version)
- expect(package).to be_default
- expect(package_file.reload.file_name).to eq(package_file_name)
- # hard reset needed to properly reload package_file.file
- expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
- end
+ it_behaves_like 'not updating the package if the lease is taken'
+ end
- it_behaves_like 'taking the lease'
+ context 'with existing package' do
+ let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
+ let(:package_id) { existing_package.id }
- it_behaves_like 'not updating the package if the lease is taken'
- end
+ it 'links existing package and updates package file', :aggregate_failures do
+ expect(service).to receive(:try_obtain_lease).and_call_original
- context 'with existing package' do
- let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
- let(:package_id) { existing_package.id }
+ expect { subject }
+ .to change { ::Packages::Package.count }.by(-1)
+ .and change { Packages::Dependency.count }.by(0)
+ .and change { Packages::DependencyLink.count }.by(0)
+ .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+ expect(package_file.reload.file_name).to eq(package_file_name)
+ expect(package_file.package).to eq(existing_package)
+ end
- it 'link existing package and updates package file', :aggregate_failures do
- expect(service).to receive(:try_obtain_lease).and_call_original
+ it_behaves_like 'taking the lease'
- expect { subject }
- .to change { ::Packages::Package.count }.by(-1)
- .and change { Packages::Dependency.count }.by(0)
- .and change { Packages::DependencyLink.count }.by(0)
- .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
- .and change { ::Packages::Nuget::Metadatum.count }.by(0)
- expect(package_file.reload.file_name).to eq(package_file_name)
- expect(package_file.package).to eq(existing_package)
- end
+ it_behaves_like 'not updating the package if the lease is taken'
+ end
- it_behaves_like 'taking the lease'
+ context 'with a nuspec file with metadata' do
+ let(:nuspec_filepath) { 'packages/nuget/with_metadata.nuspec' }
+ let(:expected_tags) { %w(foo bar test tag1 tag2 tag3 tag4 tag5) }
- it_behaves_like 'not updating the package if the lease is taken'
+ before do
+ allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
+ allow(service)
+ .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
+ end
end
- context 'with a nuspec file with metadata' do
- let(:nuspec_filepath) { 'packages/nuget/with_metadata.nuspec' }
- let(:expected_tags) { %w(foo bar test tag1 tag2 tag3 tag4 tag5) }
+ it 'creates tags' do
+ expect(service).to receive(:try_obtain_lease).and_call_original
+ expect { subject }.to change { ::Packages::Tag.count }.by(8)
+ expect(package.reload.tags.map(&:name)).to contain_exactly(*expected_tags)
+ end
- before do
- allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
- allow(service)
- .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
- end
- end
+ context 'with existing package and tags' do
+ let!(:existing_package) { create(:nuget_package, project: package.project, name: 'DummyProject.WithMetadata', version: '1.2.3') }
+ let!(:tag1) { create(:packages_tag, package: existing_package, name: 'tag1') }
+ let!(:tag2) { create(:packages_tag, package: existing_package, name: 'tag2') }
+ let!(:tag3) { create(:packages_tag, package: existing_package, name: 'tag_not_in_metadata') }
- it 'creates tags' do
+ it 'creates tags and deletes those not in metadata' do
expect(service).to receive(:try_obtain_lease).and_call_original
- expect { subject }.to change { ::Packages::Tag.count }.by(8)
- expect(package.reload.tags.map(&:name)).to contain_exactly(*expected_tags)
+ expect { subject }.to change { ::Packages::Tag.count }.by(5)
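+ # net change of 5: six new tags from the metadata are created and tag_not_in_metadata is removed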
+ expect(existing_package.tags.map(&:name)).to contain_exactly(*expected_tags)
end
+ end
- context 'with existing package and tags' do
- let!(:existing_package) { create(:nuget_package, project: package.project, name: 'DummyProject.WithMetadata', version: '1.2.3') }
- let!(:tag1) { create(:packages_tag, package: existing_package, name: 'tag1') }
- let!(:tag2) { create(:packages_tag, package: existing_package, name: 'tag2') }
- let!(:tag3) { create(:packages_tag, package: existing_package, name: 'tag_not_in_metadata') }
-
- it 'creates tags and deletes those not in metadata' do
- expect(service).to receive(:try_obtain_lease).and_call_original
- expect { subject }.to change { ::Packages::Tag.count }.by(5)
- expect(existing_package.tags.map(&:name)).to contain_exactly(*expected_tags)
- end
- end
-
- it 'creates nuget metadatum', :aggregate_failures do
- expect { subject }
- .to not_change { ::Packages::Package.count }
- .and change { ::Packages::Nuget::Metadatum.count }.by(1)
-
- metadatum = package_file.reload.package.nuget_metadatum
- expect(metadatum.license_url).to eq('https://opensource.org/licenses/MIT')
- expect(metadatum.project_url).to eq('https://gitlab.com/gitlab-org/gitlab')
- expect(metadatum.icon_url).to eq('https://opensource.org/files/osi_keyhole_300X300_90ppi_0.png')
- end
+ it 'creates nuget metadatum', :aggregate_failures do
+ expect { subject }
+ .to not_change { ::Packages::Package.count }
+ .and change { ::Packages::Nuget::Metadatum.count }.by(1)
- context 'with too long url' do
- let_it_be(:too_long_url) { "http://localhost/#{'bananas' * 50}" }
+ metadatum = package_file.reload.package.nuget_metadatum
+ expect(metadatum.license_url).to eq('https://opensource.org/licenses/MIT')
+ expect(metadatum.project_url).to eq('https://gitlab.com/gitlab-org/gitlab')
+ expect(metadatum.icon_url).to eq('https://opensource.org/files/osi_keyhole_300X300_90ppi_0.png')
+ end
- let(:metadata) { { package_name: package_name, package_version: package_version, license_url: too_long_url } }
+ context 'with too long url' do
+ let_it_be(:too_long_url) { "http://localhost/#{'bananas' * 50}" }
- before do
- allow(service).to receive(:metadata).and_return(metadata)
- end
+ let(:metadata) { { package_name: package_name, package_version: package_version, license_url: too_long_url } }
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ before do
+ allow(service).to receive(:metadata).and_return(metadata)
end
+
+ it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
end
+ end
- context 'with nuspec file with dependencies' do
- let(:nuspec_filepath) { 'packages/nuget/with_dependencies.nuspec' }
- let(:package_name) { 'Test.Package' }
- let(:package_version) { '3.5.2' }
- let(:package_file_name) { 'test.package.3.5.2.nupkg' }
+ context 'with nuspec file with dependencies' do
+ let(:nuspec_filepath) { 'packages/nuget/with_dependencies.nuspec' }
+ let(:package_name) { 'Test.Package' }
+ let(:package_version) { '3.5.2' }
+ let(:package_file_name) { 'test.package.3.5.2.nupkg' }
- before do
- allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
- allow(service)
- .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
- end
+ before do
+ allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
+ allow(service)
+ .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
end
+ end
- it 'updates package and package file', :aggregate_failures do
- expect { subject }
- .to not_change { ::Packages::Package.count }
- .and change { Packages::Dependency.count }.by(4)
- .and change { Packages::DependencyLink.count }.by(4)
- .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(2)
-
- expect(package.reload.name).to eq(package_name)
- expect(package.version).to eq(package_version)
- expect(package).to be_default
- expect(package_file.reload.file_name).to eq(package_file_name)
- # hard reset needed to properly reload package_file.file
- expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
- end
+ it 'updates package and package file', :aggregate_failures do
+ expect { subject }
+ .to not_change { ::Packages::Package.count }
+ .and change { Packages::Dependency.count }.by(4)
+ .and change { Packages::DependencyLink.count }.by(4)
+ .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(2)
+
+ expect(package.reload.name).to eq(package_name)
+ expect(package.version).to eq(package_version)
+ expect(package).to be_default
+ expect(package_file.reload.file_name).to eq(package_file_name)
+ # hard reset needed to properly reload package_file.file
+ expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
end
+ end
- context 'with package file not containing a nuspec file' do
- before do
- allow_next_instance_of(Zip::File) do |file|
- allow(file).to receive(:glob).and_return([])
- end
+ context 'with package file not containing a nuspec file' do
+ before do
+ allow_next_instance_of(Zip::File) do |file|
+ allow(file).to receive(:glob).and_return([])
end
-
- it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError
end
- context 'with a symbol package' do
- let(:package_file) { package.package_files.last }
- let(:package_file_name) { 'dummyproject.dummypackage.1.0.0.snupkg' }
+ it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError
+ end
- context 'with no existing package' do
- let(:package_id) { package.id }
+ context 'with a symbol package' do
+ let(:package_file) { package.package_files.last }
+ let(:package_file_name) { 'dummyproject.dummypackage.1.0.0.snupkg' }
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
- end
-
- context 'with existing package' do
- let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
- let(:package_id) { existing_package.id }
+ context 'with no existing package' do
+ let(:package_id) { package.id }
- it 'link existing package and updates package file', :aggregate_failures do
- expect(service).to receive(:try_obtain_lease).and_call_original
- expect(::Packages::Nuget::SyncMetadatumService).not_to receive(:new)
- expect(::Packages::UpdateTagsService).not_to receive(:new)
+ it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ end
- expect { subject }
- .to change { ::Packages::Package.count }.by(-1)
- .and change { Packages::Dependency.count }.by(0)
- .and change { Packages::DependencyLink.count }.by(0)
- .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
- .and change { ::Packages::Nuget::Metadatum.count }.by(0)
- expect(package_file.reload.file_name).to eq(package_file_name)
- expect(package_file.package).to eq(existing_package)
- end
+ context 'with existing package' do
+ let!(:existing_package) { create(:nuget_package, project: package.project, name: package_name, version: package_version) }
+ let(:package_id) { existing_package.id }
- it_behaves_like 'taking the lease'
+ it 'links existing package and updates package file', :aggregate_failures do
+ expect(service).to receive(:try_obtain_lease).and_call_original
+ expect(::Packages::Nuget::SyncMetadatumService).not_to receive(:new)
+ expect(::Packages::UpdateTagsService).not_to receive(:new)
- it_behaves_like 'not updating the package if the lease is taken'
+ expect { subject }
+ .to change { ::Packages::Package.count }.by(-1)
+ .and change { Packages::Dependency.count }.by(0)
+ .and change { Packages::DependencyLink.count }.by(0)
+ .and change { Packages::Nuget::DependencyLinkMetadatum.count }.by(0)
+ .and change { ::Packages::Nuget::Metadatum.count }.by(0)
+ expect(package_file.reload.file_name).to eq(package_file_name)
+ expect(package_file.package).to eq(existing_package)
end
- end
-
- context 'with an invalid package name' do
- invalid_names = [
- '',
- 'My/package',
- '../../../my_package',
- '%2e%2e%2fmy_package'
- ]
- invalid_names.each do |invalid_name|
- before do
- allow(service).to receive(:package_name).and_return(invalid_name)
- end
+ it_behaves_like 'taking the lease'
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
- end
+ it_behaves_like 'not updating the package if the lease is taken'
end
+ end
- context 'with an invalid package version' do
- invalid_versions = [
- '',
- '555',
- '1.2',
- '1./2.3',
- '../../../../../1.2.3',
- '%2e%2e%2f1.2.3'
- ]
-
- invalid_versions.each do |invalid_version|
- before do
- allow(service).to receive(:package_version).and_return(invalid_version)
- end
-
- it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ context 'with an invalid package name' do
+ invalid_names = [
+ '',
+ 'My/package',
+ '../../../my_package',
+ '%2e%2e%2fmy_package'
+ ]
+
+ invalid_names.each do |invalid_name|
+ before do
+ allow(service).to receive(:package_name).and_return(invalid_name)
end
- end
- end
- context 'with packages_nuget_new_package_file_updater enabled' do
- before do
- expect(service).not_to receive(:legacy_execute)
+ it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
end
-
- it_behaves_like 'handling all conditions'
end
- context 'with packages_nuget_new_package_file_updater disabled' do
- before do
- stub_feature_flags(packages_nuget_new_package_file_updater: false)
- expect(::Packages::UpdatePackageFileService)
- .not_to receive(:new).with(package_file, instance_of(Hash)).and_call_original
- expect(service).not_to receive(:new_execute)
- end
+ context 'with an invalid package version' do
+ invalid_versions = [
+ '',
+ '555',
+ '1.2',
+ '1./2.3',
+ '../../../../../1.2.3',
+ '%2e%2e%2f1.2.3'
+ ]
+
+ invalid_versions.each do |invalid_version|
+ before do
+ allow(service).to receive(:package_version).and_return(invalid_version)
+ end
- it_behaves_like 'handling all conditions'
+ it_behaves_like 'raising an', ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError
+ end
end
end
end
diff --git a/spec/services/pages/delete_service_spec.rb b/spec/services/pages/delete_service_spec.rb
index 295abe15bf0..e02e8e72e0b 100644
--- a/spec/services/pages/delete_service_spec.rb
+++ b/spec/services/pages/delete_service_spec.rb
@@ -12,27 +12,6 @@ RSpec.describe Pages::DeleteService do
project.mark_pages_as_deployed
end
- it 'deletes published pages', :sidekiq_inline do
- expect_next_instance_of(Gitlab::PagesTransfer) do |pages_transfer|
- expect(pages_transfer).to receive(:rename_project).and_return true
- end
-
- expect(PagesWorker).to receive(:perform_in).with(5.minutes, :remove, project.namespace.full_path, anything)
-
- service.execute
- end
-
- it "doesn't remove anything from the legacy storage if local_store is disabled", :sidekiq_inline do
- allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
-
- expect(project.pages_deployed?).to be(true)
- expect(PagesWorker).not_to receive(:perform_in)
-
- service.execute
-
- expect(project.pages_deployed?).to be(false)
- end
-
it 'marks pages as not deployed' do
expect do
service.execute
diff --git a/spec/services/pages/legacy_storage_lease_spec.rb b/spec/services/pages/legacy_storage_lease_spec.rb
deleted file mode 100644
index 092dce093ff..00000000000
--- a/spec/services/pages/legacy_storage_lease_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Pages::LegacyStorageLease do
- let(:project) { create(:project) }
-
- let(:implementation) do
- Class.new do
- include ::Pages::LegacyStorageLease
-
- attr_reader :project
-
- def initialize(project)
- @project = project
- end
-
- def execute
- try_obtain_lease do
- execute_unsafe
- end
- end
-
- def execute_unsafe
- true
- end
- end
- end
-
- let(:service) { implementation.new(project) }
-
- it 'allows method to be executed' do
- expect(service).to receive(:execute_unsafe).and_call_original
-
- expect(service.execute).to eq(true)
- end
-
- context 'when another service holds the lease for the same project' do
- around do |example|
- implementation.new(project).try_obtain_lease do
- example.run
- end
- end
-
- it 'does not run guarded method' do
- expect(service).not_to receive(:execute_unsafe)
-
- expect(service.execute).to eq(nil)
- end
- end
-
- context 'when another service holds the lease for the different project' do
- around do |example|
- implementation.new(create(:project)).try_obtain_lease do
- example.run
- end
- end
-
- it 'allows method to be executed' do
- expect(service).to receive(:execute_unsafe).and_call_original
-
- expect(service.execute).to eq(true)
- end
- end
-end
diff --git a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
index 25f571a73d1..177467aac85 100644
--- a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
+++ b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
@@ -114,13 +114,5 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService do
described_class.new(project).execute
end.not_to change { project.pages_metadatum.reload.pages_deployment_id }.from(old_deployment.id)
end
-
- it 'raises exception if exclusive lease is taken' do
- described_class.new(project).try_obtain_lease do
- expect do
- described_class.new(project).execute
- end.to raise_error(described_class::ExclusiveLeaseTakenError)
- end
- end
end
end
diff --git a/spec/services/projects/batch_open_issues_count_service_spec.rb b/spec/services/projects/batch_open_issues_count_service_spec.rb
index 82d50604309..17bd5f7a37b 100644
--- a/spec/services/projects/batch_open_issues_count_service_spec.rb
+++ b/spec/services/projects/batch_open_issues_count_service_spec.rb
@@ -5,52 +5,49 @@ require 'spec_helper'
RSpec.describe Projects::BatchOpenIssuesCountService do
let!(:project_1) { create(:project) }
let!(:project_2) { create(:project) }
+ let!(:banned_user) { create(:user, :banned) }
let(:subject) { described_class.new([project_1, project_2]) }
- describe '#refresh_cache', :use_clean_rails_memory_store_caching do
+ describe '#refresh_cache_and_retrieve_data', :use_clean_rails_memory_store_caching do
before do
create(:issue, project: project_1)
create(:issue, project: project_1, confidential: true)
-
+ create(:issue, project: project_1, author: banned_user)
create(:issue, project: project_2)
create(:issue, project: project_2, confidential: true)
+ create(:issue, project: project_2, author: banned_user)
end
- context 'when cache is clean' do
+ context 'when cache is clean', :aggregate_failures do
it 'refreshes cache keys correctly' do
- subject.refresh_cache
+ expect(get_cache_key(project_1)).to eq(nil)
+ expect(get_cache_key(project_2)).to eq(nil)
- # It does not update total issues cache
- expect(Rails.cache.read(get_cache_key(subject, project_1))).to eq(nil)
- expect(Rails.cache.read(get_cache_key(subject, project_2))).to eq(nil)
+ subject.count_service.new(project_1).refresh_cache
+ subject.count_service.new(project_2).refresh_cache
- expect(Rails.cache.read(get_cache_key(subject, project_1, true))).to eq(1)
- expect(Rails.cache.read(get_cache_key(subject, project_1, true))).to eq(1)
- end
- end
-
- context 'when issues count is already cached' do
- before do
- create(:issue, project: project_2)
- subject.refresh_cache
- end
+ expect(get_cache_key(project_1)).to eq(1)
+ expect(get_cache_key(project_2)).to eq(1)
- it 'does update cache again' do
- expect(Rails.cache).not_to receive(:write)
+ expect(get_cache_key(project_1, true)).to eq(2)
+ expect(get_cache_key(project_2, true)).to eq(2)
- subject.refresh_cache
+ expect(get_cache_key(project_1, true, true)).to eq(3)
+ expect(get_cache_key(project_2, true, true)).to eq(3)
end
end
end
- def get_cache_key(subject, project, public_key = false)
+ def get_cache_key(project, with_confidential = false, with_hidden = false)
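+ # reads the cached count matching the requested visibility: public only, including confidential, or including confidential and hidden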
service = subject.count_service.new(project)
- if public_key
- service.cache_key(service.class::PUBLIC_COUNT_KEY)
+ if with_confidential && with_hidden
+ Rails.cache.read(service.cache_key(service.class::TOTAL_COUNT_KEY))
+ elsif with_confidential
+ Rails.cache.read(service.cache_key(service.class::TOTAL_COUNT_WITHOUT_HIDDEN_KEY))
else
- service.cache_key(service.class::TOTAL_COUNT_KEY)
+ Rails.cache.read(service.cache_key(service.class::PUBLIC_COUNT_WITHOUT_HIDDEN_KEY))
end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index c3928563125..e15d9341fd1 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe Projects::CreateService, '#execute' do
subject(:project) { create_project(user, opts) }
context "with 'topics' parameter" do
- let(:opts) { { topics: 'topics' } }
+ let(:opts) { { name: 'topic-project', topics: 'topics' } }
it 'keeps them as specified' do
expect(project.topic_list).to eq(%w[topics])
@@ -94,7 +94,7 @@ RSpec.describe Projects::CreateService, '#execute' do
end
context "with 'topic_list' parameter" do
- let(:opts) { { topic_list: 'topic_list' } }
+ let(:opts) { { name: 'topic-project', topic_list: 'topic_list' } }
it 'keeps them as specified' do
expect(project.topic_list).to eq(%w[topic_list])
@@ -102,7 +102,7 @@ RSpec.describe Projects::CreateService, '#execute' do
end
context "with 'tag_list' parameter (deprecated)" do
- let(:opts) { { tag_list: 'tag_list' } }
+ let(:opts) { { name: 'topic-project', tag_list: 'tag_list' } }
it 'keeps them as specified' do
expect(project.topic_list).to eq(%w[tag_list])
@@ -346,6 +346,12 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(imported_project.import_data.data).to eq(import_data[:data])
expect(imported_project.import_url).to eq('http://import-url')
end
+
+ it 'tracks for the combined_registration experiment', :experiment do
+ expect(experiment(:combined_registration)).to track(:import_project).on_next_instance
+
+ imported_project
+ end
end
context 'builds_enabled global setting' do
@@ -601,6 +607,18 @@ RSpec.describe Projects::CreateService, '#execute' do
MARKDOWN
end
end
+
+ context 'and readme_template is specified' do
+ before do
+ opts[:readme_template] = "# GitLab\nThis is a customized template."
+ end
+
+ it_behaves_like 'creates README.md'
+
+ it 'creates README.md with specified template' do
+ expect(project.repository.readme.data).to include('This is a customized template.')
+ end
+ end
end
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index d710e4a777f..3f58fa46806 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -28,7 +28,8 @@ RSpec.describe Projects::ForkService do
namespace: @from_namespace,
star_count: 107,
avatar: avatar,
- description: 'wow such project')
+ description: 'wow such project',
+ external_authorization_classification_label: 'classification-label')
@to_user = create(:user)
@to_namespace = @to_user.namespace
@from_project.add_user(@to_user, :developer)
@@ -66,6 +67,7 @@ RSpec.describe Projects::ForkService do
it { expect(to_project.description).to eq(@from_project.description) }
it { expect(to_project.avatar.file).to be_exists }
it { expect(to_project.ci_config_path).to eq(@from_project.ci_config_path) }
+ it { expect(to_project.external_authorization_classification_label).to eq(@from_project.external_authorization_classification_label) }
# This test is here because we had a bug where the from-project lost its
# avatar after being forked.
diff --git a/spec/services/projects/group_links/destroy_service_spec.rb b/spec/services/projects/group_links/destroy_service_spec.rb
index d65afb68bfe..5d07fd52230 100644
--- a/spec/services/projects/group_links/destroy_service_spec.rb
+++ b/spec/services/projects/group_links/destroy_service_spec.rb
@@ -20,54 +20,28 @@ RSpec.describe Projects::GroupLinks::DestroyService, '#execute' do
group.add_maintainer(user)
end
- context 'when the feature flag `use_specialized_worker_for_project_auth_recalculation` is enabled' do
- before do
- stub_feature_flags(use_specialized_worker_for_project_auth_recalculation: true)
- end
-
- it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
- expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
- .to receive(:perform_async).with(group_link.project.id)
-
- subject.execute(group_link)
- end
-
- it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
- expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- [[user.id]],
- batch_delay: 30.seconds, batch_size: 100)
- )
-
- subject.execute(group_link)
- end
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(group_link.project.id)
- it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
- expect { subject.execute(group_link) }.to(
- change { Ability.allowed?(user, :read_project, project) }
- .from(true).to(false))
- end
+ subject.execute(group_link)
end
- context 'when the feature flag `use_specialized_worker_for_project_auth_recalculation` is disabled' do
- before do
- stub_feature_flags(use_specialized_worker_for_project_auth_recalculation: false)
- end
-
- it 'calls UserProjectAccessChangedService to update project authorizations' do
- expect_next_instance_of(UserProjectAccessChangedService, [user.id]) do |service|
- expect(service).to receive(:execute)
- end
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
- subject.execute(group_link)
- end
+ subject.execute(group_link)
+ end
- it 'updates project authorizations of users who had access to the project via the group share' do
- expect { subject.execute(group_link) }.to(
- change { Ability.allowed?(user, :read_project, project) }
- .from(true).to(false))
- end
+ it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
+ expect { subject.execute(group_link) }.to(
+ change { Ability.allowed?(user, :read_project, project) }
+ .from(true).to(false))
end
end
diff --git a/spec/services/projects/open_issues_count_service_spec.rb b/spec/services/projects/open_issues_count_service_spec.rb
index c739fea5ecf..8710d0c0267 100644
--- a/spec/services/projects/open_issues_count_service_spec.rb
+++ b/spec/services/projects/open_issues_count_service_spec.rb
@@ -4,89 +4,102 @@ require 'spec_helper'
RSpec.describe Projects::OpenIssuesCountService, :use_clean_rails_memory_store_caching do
let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:banned_user) { create(:user, :banned) }
- subject { described_class.new(project) }
+ subject { described_class.new(project, user) }
it_behaves_like 'a counter caching service'
+ before do
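+ # one public, one confidential and one hidden (banned author) open issue, plus a closed issue that is never counted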
+ create(:issue, :opened, project: project)
+ create(:issue, :opened, confidential: true, project: project)
+ create(:issue, :opened, author: banned_user, project: project)
+ create(:issue, :closed, project: project)
+
+ described_class.new(project).refresh_cache
+ end
+
describe '#count' do
- context 'when user is nil' do
- it 'does not include confidential issues in the issue count' do
- create(:issue, :opened, project: project)
- create(:issue, :opened, confidential: true, project: project)
+ shared_examples 'counts public issues, does not count hidden or confidential' do
+ it 'counts only public issues' do
+ expect(subject.count).to eq(1)
+ end
- expect(described_class.new(project).count).to eq(1)
+ it 'uses PUBLIC_COUNT_WITHOUT_HIDDEN_KEY cache key' do
+ expect(subject.cache_key).to include('project_open_public_issues_without_hidden_count')
end
end
- context 'when user is provided' do
- let(:user) { create(:user) }
+ context 'when user is nil' do
+ let(:user) { nil }
+
+ it_behaves_like 'counts public issues, does not count hidden or confidential'
+ end
+ context 'when user is provided' do
context 'when user can read confidential issues' do
before do
project.add_reporter(user)
end
- it 'returns the right count with confidential issues' do
- create(:issue, :opened, project: project)
- create(:issue, :opened, confidential: true, project: project)
-
- expect(described_class.new(project, user).count).to eq(2)
+ it 'includes confidential issues and does not include hidden issues in count' do
+ expect(subject.count).to eq(2)
end
- it 'uses total_open_issues_count cache key' do
- expect(described_class.new(project, user).cache_key_name).to eq('total_open_issues_count')
+ it 'uses TOTAL_COUNT_WITHOUT_HIDDEN_KEY cache key' do
+ expect(subject.cache_key).to include('project_open_issues_without_hidden_count')
end
end
- context 'when user cannot read confidential issues' do
+ context 'when user cannot read confidential or hidden issues' do
before do
project.add_guest(user)
end
- it 'does not include confidential issues' do
- create(:issue, :opened, project: project)
- create(:issue, :opened, confidential: true, project: project)
+ it_behaves_like 'counts public issues, does not count hidden or confidential'
+ end
+
+ context 'when user is an admin' do
+ let_it_be(:user) { create(:user, :admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'includes confidential and hidden issues in count' do
+ expect(subject.count).to eq(3)
+ end
- expect(described_class.new(project, user).count).to eq(1)
+ it 'uses TOTAL_COUNT_KEY cache key' do
+ expect(subject.cache_key).to include('project_open_issues_including_hidden_count')
+ end
end
- it 'uses public_open_issues_count cache key' do
- expect(described_class.new(project, user).cache_key_name).to eq('public_open_issues_count')
+ context 'when admin mode is disabled' do
+ it_behaves_like 'counts public issues, does not count hidden or confidential'
end
end
end
+ end
- describe '#refresh_cache' do
- before do
- create(:issue, :opened, project: project)
- create(:issue, :opened, project: project)
- create(:issue, :opened, confidential: true, project: project)
- end
-
- context 'when cache is empty' do
- it 'refreshes cache keys correctly' do
- subject.refresh_cache
-
- expect(Rails.cache.read(subject.cache_key(described_class::PUBLIC_COUNT_KEY))).to eq(2)
- expect(Rails.cache.read(subject.cache_key(described_class::TOTAL_COUNT_KEY))).to eq(3)
- end
+ describe '#refresh_cache', :aggregate_failures do
+ context 'when cache is empty' do
+ it 'refreshes cache keys correctly' do
+ expect(Rails.cache.read(described_class.new(project).cache_key(described_class::PUBLIC_COUNT_WITHOUT_HIDDEN_KEY))).to eq(1)
+ expect(Rails.cache.read(described_class.new(project).cache_key(described_class::TOTAL_COUNT_WITHOUT_HIDDEN_KEY))).to eq(2)
+ expect(Rails.cache.read(described_class.new(project).cache_key(described_class::TOTAL_COUNT_KEY))).to eq(3)
end
+ end
- context 'when cache is outdated' do
- before do
- subject.refresh_cache
- end
-
- it 'refreshes cache keys correctly' do
- create(:issue, :opened, project: project)
- create(:issue, :opened, confidential: true, project: project)
+ context 'when cache is outdated' do
+ it 'refreshes cache keys correctly' do
+ create(:issue, :opened, project: project)
+ create(:issue, :opened, confidential: true, project: project)
+ create(:issue, :opened, author: banned_user, project: project)
- subject.refresh_cache
+ described_class.new(project).refresh_cache
- expect(Rails.cache.read(subject.cache_key(described_class::PUBLIC_COUNT_KEY))).to eq(3)
- expect(Rails.cache.read(subject.cache_key(described_class::TOTAL_COUNT_KEY))).to eq(5)
- end
+ expect(Rails.cache.read(described_class.new(project).cache_key(described_class::PUBLIC_COUNT_WITHOUT_HIDDEN_KEY))).to eq(2)
+ expect(Rails.cache.read(described_class.new(project).cache_key(described_class::TOTAL_COUNT_WITHOUT_HIDDEN_KEY))).to eq(4)
+ expect(Rails.cache.read(described_class.new(project).cache_key(described_class::TOTAL_COUNT_KEY))).to eq(6)
end
end
end
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 1d9d5f6e938..a71fafb2121 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -153,6 +153,7 @@ RSpec.describe Projects::Operations::UpdateService do
{
error_tracking_setting_attributes: {
enabled: false,
+ integrated: true,
api_host: 'http://gitlab.com/',
token: 'token',
project: {
@@ -174,6 +175,7 @@ RSpec.describe Projects::Operations::UpdateService do
project.reload
expect(project.error_tracking_setting).not_to be_enabled
+ expect(project.error_tracking_setting.integrated).to be_truthy
expect(project.error_tracking_setting.api_url).to eq(
'http://gitlab.com/api/0/projects/org/project/'
)
@@ -206,6 +208,7 @@ RSpec.describe Projects::Operations::UpdateService do
{
error_tracking_setting_attributes: {
enabled: true,
+ integrated: true,
api_host: 'http://gitlab.com/',
token: 'token',
project: {
@@ -222,6 +225,7 @@ RSpec.describe Projects::Operations::UpdateService do
expect(result[:status]).to eq(:success)
expect(project.error_tracking_setting).to be_enabled
+ expect(project.error_tracking_setting.integrated).to be_truthy
expect(project.error_tracking_setting.api_url).to eq(
'http://gitlab.com/api/0/projects/org/project/'
)
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index b71677a5e8f..d96573e26af 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -292,10 +292,37 @@ RSpec.describe Projects::TransferService do
end
end
- context 'target namespace allows developers to create projects' do
+ context 'target namespace matches current namespace' do
+ let(:group) { user.namespace }
+
+ it 'does not allow project transfer' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to eq false
+ expect(project.namespace).to eq(user.namespace)
+ expect(project.errors[:new_namespace]).to include('Project is already in this namespace.')
+ end
+ end
+
+ context 'when user does not own the project' do
+ let(:project) { create(:project, :repository, :legacy_storage) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'does not allow project transfer to the target namespace' do
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to eq false
+ expect(project.errors[:new_namespace]).to include("You don't have permission to transfer this project.")
+ end
+ end
+
+ context 'when user can create projects in the target namespace' do
let(:group) { create(:group, project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
- context 'the user is a member of the target namespace with developer permissions' do
+ context 'but has only developer permissions in the target namespace' do
before do
group.add_developer(user)
end
@@ -305,7 +332,7 @@ RSpec.describe Projects::TransferService do
expect(transfer_result).to eq false
expect(project.namespace).to eq(user.namespace)
- expect(project.errors[:new_namespace]).to include('Transfer failed, please contact an admin.')
+ expect(project.errors[:new_namespace]).to include("You don't have permission to transfer projects into that namespace.")
end
end
end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 0f21736eda0..6d0b75e0c95 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -18,17 +18,6 @@ RSpec.describe Projects::UpdatePagesService do
subject { described_class.new(project, build) }
- before do
- stub_feature_flags(skip_pages_deploy_to_legacy_storage: false)
- project.legacy_remove_pages
- end
-
- context '::TMP_EXTRACT_PATH' do
- subject { described_class::TMP_EXTRACT_PATH }
-
- it { is_expected.not_to match(Gitlab::PathRegex.namespace_format_regex) }
- end
-
context 'for new artifacts' do
context "for a valid job" do
let!(:artifacts_archive) { create(:ci_job_artifact, :correct_checksum, file: file, job: build) }
@@ -52,36 +41,6 @@ RSpec.describe Projects::UpdatePagesService do
expect(project.pages_metadatum).to be_deployed
expect(project.pages_metadatum.artifacts_archive).to eq(artifacts_archive)
expect(project.pages_deployed?).to be_truthy
-
- # Check that all expected files are extracted
- %w[index.html zero .hidden/file].each do |filename|
- expect(File.exist?(File.join(project.pages_path, 'public', filename))).to be_truthy
- end
- end
-
- it 'creates a temporary directory with the project and build ID' do
- expect(Dir).to receive(:mktmpdir).with("project-#{project.id}-build-#{build.id}-", anything).and_call_original
-
- subject.execute
- end
-
- it "doesn't deploy to legacy storage if it's disabled" do
- allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
-
- expect(execute).to eq(:success)
- expect(project.pages_deployed?).to be_truthy
-
- expect(File.exist?(File.join(project.pages_path, 'public', 'index.html'))).to eq(false)
- end
-
- it "doesn't deploy to legacy storage if skip_pages_deploy_to_legacy_storage is enabled" do
- allow(Settings.pages.local_store).to receive(:enabled).and_return(true)
- stub_feature_flags(skip_pages_deploy_to_legacy_storage: true)
-
- expect(execute).to eq(:success)
- expect(project.pages_deployed?).to be_truthy
-
- expect(File.exist?(File.join(project.pages_path, 'public', 'index.html'))).to eq(false)
end
it 'creates pages_deployment and saves it in the metadata' do
@@ -99,16 +58,6 @@ RSpec.describe Projects::UpdatePagesService do
expect(deployment.ci_build_id).to eq(build.id)
end
- it 'fails if another deployment is in progress' do
- subject.try_obtain_lease do
- expect do
- execute
- end.to raise_error("Failed to deploy pages - other deployment is in progress")
-
- expect(GenericCommitStatus.last.description).to eq("Failed to deploy pages - other deployment is in progress")
- end
- end
-
it 'does not fail if pages_metadata is absent' do
project.pages_metadatum.destroy!
project.reload
@@ -156,47 +105,10 @@ RSpec.describe Projects::UpdatePagesService do
expect(GenericCommitStatus.last.description).to eq("pages site contains 3 file entries, while limit is set to 2")
end
- it 'removes pages after destroy' do
- expect(PagesWorker).to receive(:perform_in)
- expect(project.pages_deployed?).to be_falsey
- expect(Dir.exist?(File.join(project.pages_path))).to be_falsey
-
- expect(execute).to eq(:success)
-
- expect(project.pages_metadatum).to be_deployed
- expect(project.pages_deployed?).to be_truthy
- expect(Dir.exist?(File.join(project.pages_path))).to be_truthy
-
- project.destroy!
-
- expect(Dir.exist?(File.join(project.pages_path))).to be_falsey
- expect(ProjectPagesMetadatum.find_by_project_id(project)).to be_nil
- end
-
- context 'when using empty file' do
- let(:file) { empty_file }
-
- it 'fails to extract' do
- expect { execute }
- .to raise_error(Projects::UpdatePagesService::FailedToExtractError)
- end
- end
-
- context 'when using pages with non-writeable public' do
- let(:file) { fixture_file_upload("spec/fixtures/pages_non_writeable.zip") }
-
- context 'when using RubyZip' do
- it 'succeeds to extract' do
- expect(execute).to eq(:success)
- expect(project.pages_metadatum).to be_deployed
- end
- end
- end
-
context 'when timeout happens by DNS error' do
before do
allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:extract_zip_archive!).and_raise(SocketError)
+ allow(instance).to receive(:create_pages_deployment).and_raise(SocketError)
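+ # the SocketError is raised from create_pages_deployment to simulate a DNS failure during deployment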
end
end
@@ -209,24 +121,6 @@ RSpec.describe Projects::UpdatePagesService do
end
end
- context 'when failed to extract zip artifacts' do
- before do
- expect_next_instance_of(described_class) do |instance|
- expect(instance).to receive(:extract_zip_archive!)
- .and_raise(Projects::UpdatePagesService::FailedToExtractError)
- end
- end
-
- it 'raises an error' do
- expect { execute }
- .to raise_error(Projects::UpdatePagesService::FailedToExtractError)
-
- build.reload
- expect(deploy_status).to be_failed
- expect(project.pages_metadatum).not_to be_deployed
- end
- end
-
context 'when missing artifacts metadata' do
before do
expect(build).to receive(:artifacts_metadata?).and_return(false)
@@ -338,12 +232,6 @@ RSpec.describe Projects::UpdatePagesService do
end
end
- it 'fails to remove project pages when no pages is deployed' do
- expect(PagesWorker).not_to receive(:perform_in)
- expect(project.pages_deployed?).to be_falsey
- project.destroy!
- end
-
it 'fails if no artifacts' do
expect(execute).not_to eq(:success)
end
@@ -384,38 +272,6 @@ RSpec.describe Projects::UpdatePagesService do
end
end
- context 'when file size is spoofed' do
- let(:metadata) { spy('metadata') }
-
- include_context 'pages zip with spoofed size'
-
- before do
- file = fixture_file_upload(fake_zip_path, 'pages.zip')
- metafile = fixture_file_upload('spec/fixtures/pages.zip.meta')
-
- create(:ci_job_artifact, :archive, file: file, job: build)
- create(:ci_job_artifact, :metadata, file: metafile, job: build)
-
- allow(build).to receive(:artifacts_metadata_entry).with('public/', recursive: true)
- .and_return(metadata)
- allow(metadata).to receive(:total_size).and_return(100)
-
- # to pass entries count check
- root_metadata = double('root metadata')
- allow(build).to receive(:artifacts_metadata_entry).with('', recursive: true)
- .and_return(root_metadata)
- allow(root_metadata).to receive_message_chain(:entries, :count).and_return(10)
- end
-
- it 'raises an error' do
- expect do
- subject.execute
- end.to raise_error(Projects::UpdatePagesService::FailedToExtractError,
- 'Entry public/index.html should be 1B but is larger when inflated')
- expect(deploy_status).to be_script_failure
- end
- end
-
context 'when retrying the job' do
let!(:older_deploy_job) do
create(:generic_commit_status, :failed, pipeline: pipeline,
@@ -435,18 +291,6 @@ RSpec.describe Projects::UpdatePagesService do
expect(older_deploy_job.reload).to be_retried
end
-
- context 'when FF ci_fix_commit_status_retried is disabled' do
- before do
- stub_feature_flags(ci_fix_commit_status_retried: false)
- end
-
- it 'does not mark older pages:deploy jobs retried' do
- expect(execute).to eq(:success)
-
- expect(older_deploy_job.reload).not_to be_retried
- end
- end
end
private
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index c74a8295d0a..115f3098185 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -441,6 +441,30 @@ RSpec.describe Projects::UpdateService do
end
end
+ context 'when updating #shared_runners', :https_pages_enabled do
+ let!(:pending_build) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
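+ # toggling shared runners is expected to propagate to the instance_runners_enabled flag on ci_pending_builds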
+
+ subject(:call_service) do
+ update_project(project, admin, shared_runners_enabled: shared_runners_enabled)
+ end
+
+ context 'when shared runners is toggled' do
+ let(:shared_runners_enabled) { false }
+
+ it 'updates ci pending builds' do
+ expect { call_service }.to change { pending_build.reload.instance_runners_enabled }.to(false)
+ end
+ end
+
+ context 'when shared runners is not toggled' do
+ let(:shared_runners_enabled) { true }
+
+ it 'does not update ci pending builds' do
+ expect { call_service }.to not_change { pending_build.reload.instance_runners_enabled }
+ end
+ end
+ end
+
context 'with external authorization enabled' do
before do
enable_external_authorization_service_check
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index a1b726071d6..02997096021 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -624,6 +624,18 @@ RSpec.describe QuickActions::InterpretService do
end
end
+ shared_examples 'approve command unavailable' do
+ it 'is not part of the available commands' do
+ expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :approve))
+ end
+ end
+
+ shared_examples 'unapprove command unavailable' do
+ it 'is not part of the available commands' do
+ expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :unapprove))
+ end
+ end
+
shared_examples 'shrug command' do
it 'appends ¯\_(ツ)_/¯ to the comment' do
new_content, _, _ = service.execute(content, issuable)
@@ -2135,6 +2147,66 @@ RSpec.describe QuickActions::InterpretService do
end
end
end
+
+ context 'approve command' do
+ let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:content) { '/approve' }
+
+ it 'approves the current merge request' do
+ service.execute(content, merge_request)
+
+ expect(merge_request.approved_by_users).to eq([developer])
+ end
+
+ context "when the user can't approve" do
+ before do
+ project.team.truncate
+ project.add_guest(developer)
+ end
+
+ it 'does not approve the MR' do
+ service.execute(content, merge_request)
+
+ expect(merge_request.approved_by_users).to be_empty
+ end
+ end
+
+ it_behaves_like 'approve command unavailable' do
+ let(:issuable) { issue }
+ end
+ end
+
+ context 'unapprove command' do
+ let!(:merge_request) { create(:merge_request, source_project: project) }
+ let(:content) { '/unapprove' }
+
+ before do
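+ # approve first so there is an approval to revoke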
+ service.execute('/approve', merge_request)
+ end
+
+ it 'unapproves the current merge request' do
+ service.execute(content, merge_request)
+
+ expect(merge_request.approved_by_users).to be_empty
+ end
+
+ context "when the user can't unapprove" do
+ before do
+ project.team.truncate
+ project.add_guest(developer)
+ end
+
+ it 'does not unapprove the MR' do
+ service.execute(content, merge_request)
+
+ expect(merge_request.approved_by_users).to eq([developer])
+ end
+
+ it_behaves_like 'unapprove command unavailable' do
+ let(:issuable) { issue }
+ end
+ end
+ end
end
describe '#explain' do
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index 02d60f076ca..b547ae17317 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe Repositories::ChangelogService do
recorder = ActiveRecord::QueryRecorder.new { service.execute }
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
- expect(recorder.count).to eq(11)
+ expect(recorder.count).to eq(9)
expect(changelog).to include('Title 1', 'Title 2')
end
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index 05df4e49014..c2fe565938a 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -300,8 +300,32 @@ RSpec.describe ServicePing::SubmitService do
end
end
- def stub_response(body:, status: 201)
- stub_full_request(subject.send(:url), method: :post)
+ describe '#url' do
+ let(:url) { subject.url.to_s }
+
+ context 'when Rails.env is production' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'points to the production Version app' do
+ expect(url).to eq("#{described_class::PRODUCTION_BASE_URL}/#{described_class::USAGE_DATA_PATH}")
+ end
+ end
+
+ context 'when Rails.env is not production' do
+ before do
+ stub_rails_env('development')
+ end
+
+ it 'points to the staging Version app' do
+ expect(url).to eq("#{described_class::STAGING_BASE_URL}/#{described_class::USAGE_DATA_PATH}")
+ end
+ end
+ end
+
+ def stub_response(url: subject.url, body:, status: 201)
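+ # defaults to the service URL but lets individual examples stub a different endpoint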
+ stub_full_request(url, method: :post)
.to_return(
headers: { 'Content-Type' => 'application/json' },
body: body.to_json,
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index 9cf794cde7e..dc330a5546f 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe Suggestions::ApplyService do
author = suggestions.first.note.author
expect(user.commit_email).not_to eq(user.email)
- expect(commit.author_email).to eq(author.commit_email)
+ expect(commit.author_email).to eq(author.commit_email_or_default)
expect(commit.committer_email).to eq(user.commit_email)
expect(commit.author_name).to eq(author.name)
expect(commit.committer_name).to eq(user.name)
@@ -79,7 +79,7 @@ RSpec.describe Suggestions::ApplyService do
it 'tracks apply suggestion event' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.to receive(:track_apply_suggestion_action)
- .with(user: user)
+ .with(user: user, suggestions: suggestions)
apply(suggestions)
end
@@ -333,9 +333,9 @@ RSpec.describe Suggestions::ApplyService do
end
it 'created commit by same author and committer' do
- expect(user.commit_email).to eq(author.commit_email)
+ expect(user.commit_email).to eq(author.commit_email_or_default)
expect(author).to eq(user)
- expect(commit.author_email).to eq(author.commit_email)
+ expect(commit.author_email).to eq(author.commit_email_or_default)
expect(commit.committer_email).to eq(user.commit_email)
expect(commit.author_name).to eq(author.name)
expect(commit.committer_name).to eq(user.name)
@@ -350,7 +350,7 @@ RSpec.describe Suggestions::ApplyService do
it 'created commit has authors info and committers info' do
expect(user.commit_email).not_to eq(user.email)
expect(author).not_to eq(user)
- expect(commit.author_email).to eq(author.commit_email)
+ expect(commit.author_email).to eq(author.commit_email_or_default)
expect(commit.committer_email).to eq(user.commit_email)
expect(commit.author_name).to eq(author.name)
expect(commit.committer_name).to eq(user.name)
@@ -359,7 +359,7 @@ RSpec.describe Suggestions::ApplyService do
end
context 'multiple suggestions' do
- let(:author_emails) { suggestions.map {|s| s.note.author.commit_email } }
+ let(:author_emails) { suggestions.map {|s| s.note.author.commit_email_or_default } }
let(:first_author) { suggestion.note.author }
let(:commit) { project.repository.commit }
@@ -369,8 +369,8 @@ RSpec.describe Suggestions::ApplyService do
end
it 'uses first authors information' do
- expect(author_emails).to include(first_author.commit_email).exactly(3)
- expect(commit.author_email).to eq(first_author.commit_email)
+ expect(author_emails).to include(first_author.commit_email_or_default).exactly(3)
+ expect(commit.author_email).to eq(first_author.commit_email_or_default)
end
end
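The assertions above switch from commit_email to commit_email_or_default, which reads like a nil-safe fallback on the User model. A plausible, unverified sketch (the fallback name default_commit_email is purely illustrative):

    # Assumption: commit_email can be unset, in which case some default address
    # (called default_commit_email here only for illustration) is used instead.
    def commit_email_or_default
      commit_email.presence || default_commit_email
    end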
diff --git a/spec/services/suggestions/create_service_spec.rb b/spec/services/suggestions/create_service_spec.rb
index 5148d6756fc..a4e62431128 100644
--- a/spec/services/suggestions/create_service_spec.rb
+++ b/spec/services/suggestions/create_service_spec.rb
@@ -159,7 +159,7 @@ RSpec.describe Suggestions::CreateService do
it 'tracks add suggestion event' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.to receive(:track_add_suggestion_action)
- .with(user: note.author)
+ .with(note: note)
subject.execute
end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 5aff5149dcf..1a421999ffb 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -793,4 +793,16 @@ RSpec.describe SystemNoteService do
described_class.log_resolving_alert(alert, monitoring_tool)
end
end
+
+ describe '.change_issue_type' do
+ let(:incident) { build(:incident) }
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:change_issue_type)
+ end
+
+ described_class.change_issue_type(incident, author)
+ end
+ end
end
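The new '.change_issue_type' example stubs the next SystemNotes::IssuablesService instance, which points at a plain delegation along these lines (keyword names are assumptions consistent with the spec, not a verified excerpt):

    # Sketch of the delegation the spec above exercises.
    def change_issue_type(issue, author)
      ::SystemNotes::IssuablesService
        .new(noteable: issue, project: issue.project, author: author)
        .change_issue_type
    end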
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 1ea3c241d27..71a28a89cd8 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -773,4 +773,16 @@ RSpec.describe ::SystemNotes::IssuablesService do
expect(event.state).to eq('closed')
end
end
+
+ describe '#change_issue_type' do
+ let(:noteable) { create(:incident, project: project) }
+
+ subject { service.change_issue_type }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'issue_type' }
+ end
+
+ it { expect(subject.note).to eq "changed issue type to incident" }
+ end
end
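On the IssuablesService side, the expected note body and the 'issue_type' action suggest something like the following; the NoteSummary call mirrors how sibling system notes are typically built and is an assumption here:

    # Sketch only: produces the "changed issue type to incident" body asserted above.
    def change_issue_type
      body = "changed issue type to #{noteable.issue_type.to_s.humanize(capitalize: false)}"

      create_note(NoteSummary.new(noteable, project, author, body, action: 'issue_type'))
    end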
diff --git a/spec/services/todos/destroy/design_service_spec.rb b/spec/services/todos/destroy/design_service_spec.rb
new file mode 100644
index 00000000000..61a6718dc9d
--- /dev/null
+++ b/spec/services/todos/destroy/design_service_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Todos::Destroy::DesignService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
+ let_it_be(:design) { create(:design) }
+ let_it_be(:design_2) { create(:design) }
+ let_it_be(:design_3) { create(:design) }
+
+ let_it_be(:create_action) { create(:design_action, design: design) }
+ let_it_be(:create_action_2) { create(:design_action, design: design_2) }
+
+ describe '#execute' do
+ before do
+ create(:todo, user: user, target: design)
+ create(:todo, user: user_2, target: design)
+ create(:todo, user: user, target: design_2)
+ create(:todo, user: user, target: design_3)
+ end
+
+ subject { described_class.new([design.id, design_2.id, design_3.id]).execute }
+
+ context 'when the design has been archived' do
+ let_it_be(:archive_action) { create(:design_action, design: design, event: :deletion) }
+ let_it_be(:archive_action_2) { create(:design_action, design: design_3, event: :deletion) }
+
+ it 'removes todos for that design' do
+ expect { subject }.to change { Todo.count }.from(4).to(1)
+ end
+ end
+
+ context 'when no design has been archived' do
+ it 'does not remove any todos' do
+ expect { subject }.not_to change { Todo.count }.from(4)
+ end
+ end
+ end
+end
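The new spec fixes the contract for Todos::Destroy::DesignService: given a list of design ids, only todos whose design has a deletion action are removed. One possible shape, with the DesignManagement model and column names treated as assumptions inferred from the factories above:

    # Hedged sketch, not the shipped implementation.
    module Todos
      module Destroy
        class DesignService
          attr_reader :design_ids

          def initialize(design_ids)
            @design_ids = design_ids
          end

          def execute
            Todo.where(target_type: 'DesignManagement::Design', target_id: deleted_design_ids).delete_all
          end

          private

          # Assumes a DesignManagement::Action model with a design_id column and
          # an event enum that includes :deletion, per the :design_action factory.
          def deleted_design_ids
            DesignManagement::Action.where(design_id: design_ids, event: :deletion).select(:design_id)
          end
        end
      end
    end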
diff --git a/spec/services/users/ban_service_spec.rb b/spec/services/users/ban_service_spec.rb
index 6f49ee08782..79f3cbeb46d 100644
--- a/spec/services/users/ban_service_spec.rb
+++ b/spec/services/users/ban_service_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Users::BanService do
response = ban_user
expect(response[:status]).to eq(:error)
- expect(response[:message]).to match(/State cannot transition/)
+ expect(response[:message]).to match('You cannot ban blocked users.')
end
it_behaves_like 'does not modify the BannedUser record or user state'
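The message change above implies the ban path now rejects blocked users explicitly instead of surfacing a generic state-machine error; the guard presumably reads roughly like this (a sketch, not the shipped code):

    # Inside Users::BanService#execute, per the message asserted above (sketch).
    return error(_('You cannot ban blocked users.')) if user.blocked?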
diff --git a/spec/services/users/dismiss_group_callout_service_spec.rb b/spec/services/users/dismiss_group_callout_service_spec.rb
new file mode 100644
index 00000000000..d74602a7606
--- /dev/null
+++ b/spec/services/users/dismiss_group_callout_service_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::DismissGroupCalloutService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let(:params) { { feature_name: feature_name, group_id: group.id } }
+ let(:feature_name) { Users::GroupCallout.feature_names.each_key.first }
+
+ subject(:execute) do
+ described_class.new(
+ container: nil, current_user: user, params: params
+ ).execute
+ end
+
+ it_behaves_like 'dismissing user callout', Users::GroupCallout
+
+ it 'sets the group_id' do
+ expect(execute.group_id).to eq(group.id)
+ end
+ end
+end
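A plausible sketch of the service under test, assuming it mirrors the user-callout variant and records the dismissal keyed by user, group and feature name. Only the constructor contract comes from the spec; the base class and persistence details are assumptions:

    # Hedged sketch; the real service may differ in base class and persistence.
    module Users
      class DismissGroupCalloutService < ::BaseContainerService
        def execute
          callout = Users::GroupCallout.find_or_initialize_by(
            user_id: current_user.id,
            group_id: params[:group_id],
            feature_name: params[:feature_name]
          )

          callout.update(dismissed_at: Time.current)
          callout
        end
      end
    end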
diff --git a/spec/services/users/dismiss_user_callout_service_spec.rb b/spec/services/users/dismiss_user_callout_service_spec.rb
index 22f84a939f7..6bf9961eb74 100644
--- a/spec/services/users/dismiss_user_callout_service_spec.rb
+++ b/spec/services/users/dismiss_user_callout_service_spec.rb
@@ -3,25 +3,18 @@
require 'spec_helper'
RSpec.describe Users::DismissUserCalloutService do
- let(:user) { create(:user) }
-
- let(:service) do
- described_class.new(
- container: nil, current_user: user, params: { feature_name: UserCallout.feature_names.each_key.first }
- )
- end
-
describe '#execute' do
- subject(:execute) { service.execute }
+ let_it_be(:user) { create(:user) }
- it 'returns a user callout' do
- expect(execute).to be_an_instance_of(UserCallout)
- end
+ let(:params) { { feature_name: feature_name } }
+ let(:feature_name) { UserCallout.feature_names.each_key.first }
- it 'sets the dismisse_at attribute to current time' do
- freeze_time do
- expect(execute).to have_attributes(dismissed_at: Time.current)
- end
+ subject(:execute) do
+ described_class.new(
+ container: nil, current_user: user, params: params
+ ).execute
end
+
+ it_behaves_like 'dismissing user callout', UserCallout
end
end
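Both dismissal specs now defer the return-value assertions to a shared example. Reconstructed from the expectations that were previously inlined (instance type and dismissed_at), it presumably looks something like:

    # Sketch grounded in the inline assertions replaced above, not a verified
    # copy of the 'dismissing user callout' shared example.
    RSpec.shared_examples 'dismissing user callout' do |model|
      it 'dismisses the callout at the current time' do
        freeze_time do
          expect(execute).to be_an_instance_of(model)
          expect(execute).to have_attributes(feature_name: feature_name, dismissed_at: Time.current)
        end
      end
    end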
diff --git a/spec/services/users/migrate_to_ghost_user_service_spec.rb b/spec/services/users/migrate_to_ghost_user_service_spec.rb
index c9c8f9a74d3..c36889f20ec 100644
--- a/spec/services/users/migrate_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_to_ghost_user_service_spec.rb
@@ -92,23 +92,5 @@ RSpec.describe Users::MigrateToGhostUserService do
let(:created_record) { create(:review, author: user) }
end
end
-
- context "when record migration fails with a rollback exception" do
- before do
- expect_any_instance_of(ActiveRecord::Associations::CollectionProxy)
- .to receive(:update_all).and_raise(ActiveRecord::Rollback)
- end
-
- context "for records that were already migrated" do
- let!(:issue) { create(:issue, project: project, author: user) }
- let!(:merge_request) { create(:merge_request, source_project: project, author: user, target_branch: "first") }
-
- it "reverses the migration" do
- service.execute
-
- expect(issue.reload.author).to eq(user)
- end
- end
- end
end
end
diff --git a/spec/services/users/reject_service_spec.rb b/spec/services/users/reject_service_spec.rb
index b0094a7c47e..5a243e876ac 100644
--- a/spec/services/users/reject_service_spec.rb
+++ b/spec/services/users/reject_service_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Users::RejectService do
it 'returns error result' do
expect(subject[:status]).to eq(:error)
expect(subject[:message])
- .to match(/This user does not have a pending request/)
+ .to match(/User does not have a pending request/)
end
end
end
@@ -44,7 +44,7 @@ RSpec.describe Users::RejectService do
it 'emails the user on rejection' do
expect_next_instance_of(NotificationService) do |notification|
- allow(notification).to receive(:user_admin_rejection).with(user.name, user.notification_email)
+ allow(notification).to receive(:user_admin_rejection).with(user.name, user.notification_email_or_default)
end
subject
diff --git a/spec/services/users/unban_service_spec.rb b/spec/services/users/unban_service_spec.rb
index b2b3140ccb3..d536baafdcc 100644
--- a/spec/services/users/unban_service_spec.rb
+++ b/spec/services/users/unban_service_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Users::UnbanService do
response = unban_user
expect(response[:status]).to eq(:error)
- expect(response[:message]).to match(/State cannot transition/)
+ expect(response[:message]).to match('You cannot unban active users.')
end
it_behaves_like 'does not modify the BannedUser record or user state'
diff --git a/spec/services/wiki_pages/event_create_service_spec.rb b/spec/services/wiki_pages/event_create_service_spec.rb
index 6bc6a678189..8476f872e98 100644
--- a/spec/services/wiki_pages/event_create_service_spec.rb
+++ b/spec/services/wiki_pages/event_create_service_spec.rb
@@ -34,10 +34,6 @@ RSpec.describe WikiPages::EventCreateService do
it 'does not create an event' do
expect { response }.not_to change(Event, :count)
end
-
- it 'does not create a metadata record' do
- expect { response }.not_to change(WikiPage::Meta, :count)
- end
end
it 'returns a successful response' do