summaryrefslogtreecommitdiff
path: root/spec/services
diff options
context:
space:
mode:
Diffstat (limited to 'spec/services')
-rw-r--r--spec/services/admin/propagate_service_template_spec.rb2
-rw-r--r--spec/services/authorized_project_update/periodic_recalculate_service_spec.rb2
-rw-r--r--spec/services/authorized_project_update/project_recalculate_service_spec.rb76
-rw-r--r--spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb21
-rw-r--r--spec/services/bulk_imports/file_decompression_service_spec.rb99
-rw-r--r--spec/services/bulk_imports/file_download_service_spec.rb133
-rw-r--r--spec/services/bulk_imports/relation_export_service_spec.rb2
-rw-r--r--spec/services/bulk_update_integration_service_spec.rb8
-rw-r--r--spec/services/ci/append_build_trace_service_spec.rb2
-rw-r--r--spec/services/ci/create_downstream_pipeline_service_spec.rb47
-rw-r--r--spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb13
-rw-r--r--spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/needs_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb3
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb47
-rw-r--r--spec/services/ci/pipeline_creation/start_pipeline_service_spec.rb20
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service.rb2
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb1
-rw-r--r--spec/services/ci/register_job_service_spec.rb147
-rw-r--r--spec/services/ci/retry_build_service_spec.rb10
-rw-r--r--spec/services/ci/update_build_queue_service_spec.rb368
-rw-r--r--spec/services/ci/update_build_state_service_spec.rb42
-rw-r--r--spec/services/clusters/applications/create_service_spec.rb7
-rw-r--r--spec/services/clusters/cleanup/app_service_spec.rb118
-rw-r--r--spec/services/clusters/destroy_service_spec.rb6
-rw-r--r--spec/services/clusters/gcp/finalize_creation_service_spec.rb6
-rw-r--r--spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb126
-rw-r--r--spec/services/commits/cherry_pick_service_spec.rb13
-rw-r--r--spec/services/container_expiration_policies/cleanup_service_spec.rb286
-rw-r--r--spec/services/deployments/update_environment_service_spec.rb36
-rw-r--r--spec/services/design_management/copy_design_collection/copy_service_spec.rb8
-rw-r--r--spec/services/discussions/resolve_service_spec.rb45
-rw-r--r--spec/services/feature_flags/disable_service_spec.rb92
-rw-r--r--spec/services/feature_flags/enable_service_spec.rb154
-rw-r--r--spec/services/feature_flags/update_service_spec.rb145
-rw-r--r--spec/services/groups/create_service_spec.rb2
-rw-r--r--spec/services/groups/destroy_service_spec.rb2
-rw-r--r--spec/services/groups/group_links/create_service_spec.rb46
-rw-r--r--spec/services/groups/participants_service_spec.rb37
-rw-r--r--spec/services/import_export_clean_up_service_spec.rb77
-rw-r--r--spec/services/issue_rebalancing_service_spec.rb21
-rw-r--r--spec/services/issues/close_service_spec.rb85
-rw-r--r--spec/services/issues/create_service_spec.rb24
-rw-r--r--spec/services/issues/update_service_spec.rb90
-rw-r--r--spec/services/issues/zoom_link_service_spec.rb10
-rw-r--r--spec/services/jira_import/users_importer_spec.rb72
-rw-r--r--spec/services/lfs/push_service_spec.rb17
-rw-r--r--spec/services/members/create_service_spec.rb45
-rw-r--r--spec/services/members/invite_service_spec.rb16
-rw-r--r--spec/services/merge_requests/build_service_spec.rb70
-rw-r--r--spec/services/merge_requests/create_service_spec.rb2
-rw-r--r--spec/services/merge_requests/handle_assignees_change_service_spec.rb22
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb4
-rw-r--r--spec/services/merge_requests/update_assignees_service_spec.rb47
-rw-r--r--spec/services/merge_requests/update_service_spec.rb8
-rw-r--r--spec/services/namespace_settings/update_service_spec.rb32
-rw-r--r--spec/services/namespaces/in_product_marketing_emails_service_spec.rb29
-rw-r--r--spec/services/notes/create_service_spec.rb4
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb16
-rw-r--r--spec/services/notification_recipients/builder/default_spec.rb16
-rw-r--r--spec/services/packages/debian/create_distribution_service_spec.rb9
-rw-r--r--spec/services/packages/debian/destroy_distribution_service_spec.rb78
-rw-r--r--spec/services/packages/debian/extract_changes_metadata_service_spec.rb30
-rw-r--r--spec/services/packages/debian/generate_distribution_service_spec.rb175
-rw-r--r--spec/services/packages/debian/parse_debian822_service_spec.rb2
-rw-r--r--spec/services/packages/debian/process_changes_service_spec.rb3
-rw-r--r--spec/services/packages/debian/update_distribution_service_spec.rb2
-rw-r--r--spec/services/packages/helm/extract_file_metadata_service_spec.rb59
-rw-r--r--spec/services/packages/nuget/metadata_extraction_service_spec.rb7
-rw-r--r--spec/services/packages/nuget/update_package_from_metadata_service_spec.rb28
-rw-r--r--spec/services/pages/delete_service_spec.rb37
-rw-r--r--spec/services/pod_logs/elasticsearch_service_spec.rb18
-rw-r--r--spec/services/projects/create_service_spec.rb72
-rw-r--r--spec/services/projects/destroy_service_spec.rb17
-rw-r--r--spec/services/projects/group_links/create_service_spec.rb24
-rw-r--r--spec/services/projects/group_links/destroy_service_spec.rb58
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb39
-rw-r--r--spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb2
-rw-r--r--spec/services/projects/transfer_service_spec.rb2
-rw-r--r--spec/services/projects/update_service_spec.rb25
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb10
-rw-r--r--spec/services/security/ci_configuration/sast_parser_service_spec.rb30
-rw-r--r--spec/services/snippets/create_service_spec.rb2
-rw-r--r--spec/services/snippets/update_service_spec.rb2
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb2
-rw-r--r--spec/services/user_project_access_changed_service_spec.rb35
-rw-r--r--spec/services/users/activity_service_spec.rb47
-rw-r--r--spec/services/users/authorized_build_service_spec.rb16
-rw-r--r--spec/services/users/build_service_spec.rb139
-rw-r--r--spec/services/users/update_assigned_open_issue_count_service_spec.rb49
-rw-r--r--spec/services/web_hook_service_spec.rb96
91 files changed, 1972 insertions, 2028 deletions
diff --git a/spec/services/admin/propagate_service_template_spec.rb b/spec/services/admin/propagate_service_template_spec.rb
index 406da790a66..1bcf9af78ce 100644
--- a/spec/services/admin/propagate_service_template_spec.rb
+++ b/spec/services/admin/propagate_service_template_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Admin::PropagateServiceTemplate do
describe '.propagate' do
let_it_be(:project) { create(:project) }
let!(:service_template) do
- PushoverService.create!(
+ Integrations::Pushover.create!(
template: true,
active: true,
push_events: false,
diff --git a/spec/services/authorized_project_update/periodic_recalculate_service_spec.rb b/spec/services/authorized_project_update/periodic_recalculate_service_spec.rb
index c776e013fdf..782f6858870 100644
--- a/spec/services/authorized_project_update/periodic_recalculate_service_spec.rb
+++ b/spec/services/authorized_project_update/periodic_recalculate_service_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe AuthorizedProjectUpdate::PeriodicRecalculateService do
end
it 'calls AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker' do
- (1..User.maximum(:id)).each_slice(batch_size).with_index do |batch, index|
+ (1..User.maximum(:id)).each_slice(batch_size).with_index(1) do |batch, index|
delay = AuthorizedProjectUpdate::PeriodicRecalculateService::DELAY_INTERVAL * index
expect(AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker).to(
diff --git a/spec/services/authorized_project_update/project_recalculate_service_spec.rb b/spec/services/authorized_project_update/project_recalculate_service_spec.rb
new file mode 100644
index 00000000000..c339faaeabf
--- /dev/null
+++ b/spec/services/authorized_project_update/project_recalculate_service_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuthorizedProjectUpdate::ProjectRecalculateService, '#execute' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:execute) { described_class.new(project).execute }
+
+ it 'returns success' do
+ expect(execute.success?).to eq(true)
+ end
+
+ context 'when there are no changes to be made' do
+ it 'does not change authorizations' do
+ expect { execute }.not_to(change { ProjectAuthorization.count })
+ end
+ end
+
+ context 'when there are changes to be made' do
+ let(:user) { create(:user) }
+
+ context 'when addition is required' do
+ before do
+ project.add_developer(user)
+ project.project_authorizations.where(user: user).delete_all
+ end
+
+ it 'adds a new authorization record' do
+ expect { execute }.to(
+ change { project.project_authorizations.where(user: user).count }
+ .from(0).to(1)
+ )
+ end
+
+ it 'adds a new authorization record with the correct access level' do
+ execute
+
+ project_authorization = project.project_authorizations.where(
+ user: user,
+ access_level: Gitlab::Access::DEVELOPER
+ )
+
+ expect(project_authorization).to exist
+ end
+ end
+
+ context 'when removal is required' do
+ before do
+ create(:project_authorization, user: user, project: project)
+ end
+
+ it 'removes the authorization record' do
+ expect { execute }.to(
+ change { project.project_authorizations.where(user: user).count }
+ .from(1).to(0)
+ )
+ end
+ end
+
+ context 'when an update in access level is required' do
+ before do
+ project.add_developer(user)
+ project.project_authorizations.where(user: user).delete_all
+ create(:project_authorization, project: project, user: user, access_level: Gitlab::Access::GUEST)
+ end
+
+ it 'updates the authorization of the user to the correct access level' do
+ expect { execute }.to(
+ change { project.project_authorizations.find_by(user: user).access_level }
+ .from(Gitlab::Access::GUEST).to(Gitlab::Access::DEVELOPER)
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb b/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb
deleted file mode 100644
index 95e2c0380bf..00000000000
--- a/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe AuthorizedProjectUpdate::RecalculateForUserRangeService do
- describe '#execute' do
- let_it_be(:users) { create_list(:user, 2) }
-
- it 'calls Users::RefreshAuthorizedProjectsService' do
- user_ids = users.map(&:id)
-
- User.where(id: user_ids).select(:id).each do |user|
- expect(Users::RefreshAuthorizedProjectsService).to(
- receive(:new).with(user, source: described_class.name).and_call_original)
- end
-
- range = user_ids.minmax
- described_class.new(*range).execute
- end
- end
-end
diff --git a/spec/services/bulk_imports/file_decompression_service_spec.rb b/spec/services/bulk_imports/file_decompression_service_spec.rb
new file mode 100644
index 00000000000..4e8f78c8243
--- /dev/null
+++ b/spec/services/bulk_imports/file_decompression_service_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::FileDecompressionService do
+ let_it_be(:tmpdir) { Dir.mktmpdir }
+ let_it_be(:ndjson_filename) { 'labels.ndjson' }
+ let_it_be(:ndjson_filepath) { File.join(tmpdir, ndjson_filename) }
+ let_it_be(:gz_filename) { "#{ndjson_filename}.gz" }
+ let_it_be(:gz_filepath) { "spec/fixtures/bulk_imports/gz/#{gz_filename}" }
+
+ before do
+ FileUtils.copy_file(gz_filepath, File.join(tmpdir, gz_filename))
+ FileUtils.remove_entry(ndjson_filepath) if File.exist?(ndjson_filepath)
+ end
+
+ after(:all) do
+ FileUtils.remove_entry(tmpdir)
+ end
+
+ subject { described_class.new(dir: tmpdir, filename: gz_filename) }
+
+ describe '#execute' do
+ it 'decompresses specified file' do
+ subject.execute
+
+ expect(File.exist?(File.join(tmpdir, ndjson_filename))).to eq(true)
+ expect(File.open(ndjson_filepath, &:readline)).to include('title', 'description')
+ end
+
+ context 'when validate_import_decompressed_archive_size feature flag is enabled' do
+ before do
+ stub_feature_flags(validate_import_decompressed_archive_size: true)
+ end
+
+ it 'performs decompressed file size validation' do
+ expect_next_instance_of(Gitlab::ImportExport::DecompressedArchiveSizeValidator) do |validator|
+ expect(validator).to receive(:valid?).and_return(true)
+ end
+
+ subject.execute
+ end
+ end
+
+ context 'when validate_import_decompressed_archive_size feature flag is disabled' do
+ before do
+ stub_feature_flags(validate_import_decompressed_archive_size: false)
+ end
+
+ it 'does not perform decompressed file size validation' do
+ expect(Gitlab::ImportExport::DecompressedArchiveSizeValidator).not_to receive(:new)
+
+ subject.execute
+ end
+ end
+
+ context 'when dir is not in tmpdir' do
+ subject { described_class.new(dir: '/etc', filename: 'filename') }
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid target directory')
+ end
+ end
+
+ context 'when compressed file is a symlink' do
+ let_it_be(:symlink) { File.join(tmpdir, 'symlink.gz') }
+
+ before do
+ FileUtils.ln_s(File.join(tmpdir, gz_filename), symlink)
+ end
+
+ subject { described_class.new(dir: tmpdir, filename: 'symlink.gz') }
+
+ it 'raises an error and removes the file' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid file')
+
+ expect(File.exist?(symlink)).to eq(false)
+ end
+ end
+
+ context 'when decompressed file is a symlink' do
+ let_it_be(:symlink) { File.join(tmpdir, 'symlink') }
+
+ before do
+ FileUtils.ln_s(File.join(tmpdir, ndjson_filename), symlink)
+
+ subject.instance_variable_set(:@decompressed_filepath, symlink)
+ end
+
+ subject { described_class.new(dir: tmpdir, filename: gz_filename) }
+
+ it 'raises an error and removes the file' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid file')
+
+ expect(File.exist?(symlink)).to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
new file mode 100644
index 00000000000..0961ddce553
--- /dev/null
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::FileDownloadService do
+ describe '#execute' do
+ let_it_be(:config) { build(:bulk_import_configuration) }
+ let_it_be(:content_type) { 'application/octet-stream' }
+ let_it_be(:filename) { 'file_download_service_spec' }
+ let_it_be(:tmpdir) { Dir.tmpdir }
+ let_it_be(:filepath) { File.join(tmpdir, filename) }
+
+ let(:chunk_double) { double('chunk', size: 1000, code: 200) }
+ let(:response_double) do
+ double(
+ code: 200,
+ success?: true,
+ parsed_response: {},
+ headers: {
+ 'content-length' => 100,
+ 'content-type' => content_type
+ }
+ )
+ end
+
+ subject { described_class.new(configuration: config, relative_url: '/test', dir: tmpdir, filename: filename) }
+
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:head).and_return(response_double)
+ allow(client).to receive(:stream).and_yield(chunk_double)
+ end
+ end
+
+ shared_examples 'downloads file' do
+ it 'downloads file' do
+ subject.execute
+
+ expect(File.exist?(filepath)).to eq(true)
+ expect(File.read(filepath)).to include('chunk')
+ end
+ end
+
+ include_examples 'downloads file'
+
+ context 'when content-type is application/gzip' do
+ let_it_be(:content_type) { 'application/gzip' }
+
+ include_examples 'downloads file'
+ end
+
+ context 'when url is not valid' do
+ it 'raises an error' do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+
+ double = instance_double(BulkImports::Configuration, url: 'https://localhost', access_token: 'token')
+ service = described_class.new(configuration: double, relative_url: '/test', dir: tmpdir, filename: filename)
+
+ expect { service.execute }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ end
+ end
+
+ context 'when content-type is not valid' do
+ let(:content_type) { 'invalid' }
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content type')
+ end
+ end
+
+ context 'when content-length is not valid' do
+ context 'when content-length exceeds limit' do
+ before do
+ stub_const("#{described_class}::FILE_SIZE_LIMIT", 1)
+ end
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content length')
+ end
+ end
+
+ context 'when content-length is missing' do
+ let(:response_double) { double(success?: true, headers: { 'content-type' => content_type }) }
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content length')
+ end
+ end
+ end
+
+ context 'when partially downloaded file exceeds limit' do
+ before do
+ stub_const("#{described_class}::FILE_SIZE_LIMIT", 150)
+ end
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid downloaded file')
+ end
+ end
+
+ context 'when chunk code is not 200' do
+ let(:chunk_double) { double('chunk', size: 1000, code: 307) }
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'File download error 307')
+ end
+ end
+
+ context 'when file is a symlink' do
+ let_it_be(:symlink) { File.join(tmpdir, 'symlink') }
+
+ before do
+ FileUtils.ln_s(File.join(tmpdir, filename), symlink)
+ end
+
+ subject { described_class.new(configuration: config, relative_url: '/test', dir: tmpdir, filename: 'symlink') }
+
+ it 'raises an error and removes the file' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid downloaded file')
+
+ expect(File.exist?(symlink)).to eq(false)
+ end
+ end
+
+ context 'when dir is not in tmpdir' do
+ subject { described_class.new(configuration: config, relative_url: '/test', dir: '/etc', filename: filename) }
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid target directory')
+ end
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/relation_export_service_spec.rb b/spec/services/bulk_imports/relation_export_service_spec.rb
index bf286998df2..333cd9201d8 100644
--- a/spec/services/bulk_imports/relation_export_service_spec.rb
+++ b/spec/services/bulk_imports/relation_export_service_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe BulkImports::RelationExportService do
let(:upload) { create(:bulk_import_export_upload, export: export) }
it 'removes existing export before exporting' do
- upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/labels.ndjson.gz'))
+ upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/gz/labels.ndjson.gz'))
expect_any_instance_of(BulkImports::ExportUpload) do |upload|
expect(upload).to receive(:remove_export_file!)
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
index cd50a2a5708..a866e0852bc 100644
--- a/spec/services/bulk_update_integration_service_spec.rb
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -17,14 +17,14 @@ RSpec.describe BulkUpdateIntegrationService do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:group_integration) do
- JiraService.create!(
+ Integrations::Jira.create!(
group: group,
url: 'http://group.jira.com'
)
end
let_it_be(:subgroup_integration) do
- JiraService.create!(
+ Integrations::Jira.create!(
inherit_from_id: group_integration.id,
group: subgroup,
url: 'http://subgroup.jira.com',
@@ -33,7 +33,7 @@ RSpec.describe BulkUpdateIntegrationService do
end
let_it_be(:excluded_integration) do
- JiraService.create!(
+ Integrations::Jira.create!(
group: create(:group),
url: 'http://another.jira.com',
push_events: false
@@ -41,7 +41,7 @@ RSpec.describe BulkUpdateIntegrationService do
end
let_it_be(:integration) do
- JiraService.create!(
+ Integrations::Jira.create!(
project: create(:project, group: subgroup),
inherit_from_id: subgroup_integration.id,
url: 'http://project.jira.com',
diff --git a/spec/services/ci/append_build_trace_service_spec.rb b/spec/services/ci/append_build_trace_service_spec.rb
index a0a7f594881..8812680b665 100644
--- a/spec/services/ci/append_build_trace_service_spec.rb
+++ b/spec/services/ci/append_build_trace_service_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Ci::AppendBuildTraceService do
expect(::Gitlab::ErrorTracking)
.to receive(:log_exception)
- .with(anything, hash_including(chunk_index: 0, chunk_store: 'redis'))
+ .with(anything, hash_including(chunk_index: 0, chunk_store: 'redis_trace_chunks'))
result = described_class
.new(build, content_range: '0-128')
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index 8bab7856375..18bd59a17f0 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -136,7 +136,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
bridge_id: bridge.id, project_id: bridge.project.id)
.and_call_original
expect(Ci::CreatePipelineService).not_to receive(:new)
- expect(service.execute(bridge)).to be_nil
+ expect(service.execute(bridge)).to eq({ message: "Already has a downstream pipeline", status: :error })
end
end
@@ -393,6 +393,51 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
end
end
end
+
+ context 'when multi-project pipeline runs from child pipelines bridge job' do
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(rspec: { script: 'rspec' }))
+ end
+
+ # instantiate new service, to clear memoized values from child pipeline run
+ subject(:execute_with_trigger_project_bridge) do
+ described_class.new(upstream_project, user).execute(trigger_project_bridge)
+ end
+
+ let!(:child_pipeline) do
+ service.execute(bridge)
+ bridge.downstream_pipeline
+ end
+
+ let!(:trigger_downstream_project) do
+ {
+ trigger: {
+ project: downstream_project.full_path,
+ branch: 'feature'
+ }
+ }
+ end
+
+ let!(:trigger_project_bridge) do
+ create(
+ :ci_bridge, status: :pending,
+ user: user,
+ options: trigger_downstream_project,
+ pipeline: child_pipeline
+ )
+ end
+
+ it 'creates a new pipeline' do
+ expect { execute_with_trigger_project_bridge }
+ .to change { Ci::Pipeline.count }.by(1)
+
+ new_pipeline = trigger_project_bridge.downstream_pipeline
+
+ expect(new_pipeline.child?).to eq(false)
+ expect(new_pipeline.triggered_by_pipeline).to eq child_pipeline
+ expect(trigger_project_bridge.reload).not_to be_failed
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
index 9ccf289df7c..7193e5bd7d4 100644
--- a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
+++ b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe Ci::CreatePipelineService do
before do
stub_ci_pipeline_yaml_file(config)
- stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: true)
end
context 'when created successfully' do
@@ -35,18 +34,6 @@ RSpec.describe Ci::CreatePipelineService do
/jobs:test may allow multiple pipelines to run/
)
end
-
- context 'when feature flag is disabled for the particular warning' do
- before do
- stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: false)
- end
-
- it 'does not contain warnings' do
- expect(pipeline.error_messages.map(&:content)).to be_empty
-
- expect(pipeline.warning_messages.map(&:content)).to be_empty
- end
- end
end
context 'when no warnings are raised' do
diff --git a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
index b3b8e34dd8e..7fd32288893 100644
--- a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
@@ -53,6 +53,8 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
end
context 'when sidekiq processes the job', :sidekiq_inline do
+ let_it_be(:runner) { create(:ci_runner, :online) }
+
it 'transitions to pending status and triggers a downstream pipeline' do
pipeline = create_pipeline!
diff --git a/spec/services/ci/create_pipeline_service/needs_spec.rb b/spec/services/ci/create_pipeline_service/needs_spec.rb
index 4521067cd52..3b4a6178b8f 100644
--- a/spec/services/ci/create_pipeline_service/needs_spec.rb
+++ b/spec/services/ci/create_pipeline_service/needs_spec.rb
@@ -211,7 +211,7 @@ RSpec.describe Ci::CreatePipelineService do
deploy_a = processables.find { |processable| processable.name == 'deploy_a' }
deploy_b = processables.find { |processable| processable.name == 'deploy_b' }
- expect(pipeline).to be_persisted
+ expect(pipeline).to be_created_successfully
expect(build_a.status).to eq('pending')
expect(test_a.status).to eq('created')
expect(test_b.status).to eq('pending')
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 9fdce1ae926..052727401dd 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Ci::CreatePipelineService do
let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:user, reload: true) { project.owner }
+ let_it_be(:runner) { create(:ci_runner, :online, tag_list: %w[postgres mysql ruby]) }
let(:ref_name) { 'refs/heads/master' }
@@ -532,7 +533,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'pull it from Auto-DevOps' do
pipeline = execute_service
expect(pipeline).to be_auto_devops_source
- expect(pipeline.builds.map(&:name)).to match_array(%w[brakeman-sast build code_quality eslint-sast secret_detection_default_branch semgrep-sast test])
+ expect(pipeline.builds.map(&:name)).to match_array(%w[brakeman-sast build code_quality eslint-sast secret_detection semgrep-sast test])
end
end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index 97c65dc005e..e6d9f208096 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -203,53 +203,6 @@ RSpec.describe Ci::JobArtifacts::CreateService do
end
end
- context 'when artifact type is cluster_applications' do
- let(:artifacts_file) do
- file_to_upload('spec/fixtures/helm/helm_list_v2_prometheus_missing.json.gz', sha256: artifacts_sha256)
- end
-
- let(:params) do
- {
- 'artifact_type' => 'cluster_applications',
- 'artifact_format' => 'gzip'
- }.with_indifferent_access
- end
-
- it 'calls cluster applications parse service' do
- expect_next_instance_of(Clusters::ParseClusterApplicationsArtifactService) do |service|
- expect(service).to receive(:execute).once.and_call_original
- end
-
- subject
- end
-
- context 'when there is a deployment cluster' do
- let(:user) { project.owner }
-
- before do
- job.update!(user: user)
- end
-
- it 'calls cluster applications parse service with job and job user', :aggregate_failures do
- expect(Clusters::ParseClusterApplicationsArtifactService).to receive(:new).with(job, user).and_call_original
-
- subject
- end
- end
-
- context 'when ci_synchronous_artifact_parsing feature flag is disabled' do
- before do
- stub_feature_flags(ci_synchronous_artifact_parsing: false)
- end
-
- it 'does not call parse service' do
- expect(Clusters::ParseClusterApplicationsArtifactService).not_to receive(:new)
-
- expect(subject[:status]).to eq(:success)
- end
- end
- end
-
shared_examples 'rescues object storage error' do |klass, message, expected_message|
it "handles #{klass}" do
allow_next_instance_of(JobArtifactUploader) do |uploader|
diff --git a/spec/services/ci/pipeline_creation/start_pipeline_service_spec.rb b/spec/services/ci/pipeline_creation/start_pipeline_service_spec.rb
new file mode 100644
index 00000000000..2aa810e8ea1
--- /dev/null
+++ b/spec/services/ci/pipeline_creation/start_pipeline_service_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineCreation::StartPipelineService do
+ let(:pipeline) { build(:ci_pipeline) }
+
+ subject(:service) { described_class.new(pipeline) }
+
+ describe '#execute' do
+ it 'calls the pipeline process service' do
+ expect(Ci::ProcessPipelineService)
+ .to receive(:new)
+ .with(pipeline)
+ .and_return(double('service', execute: true))
+
+ service.execute
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index 13c924a3089..34d9b60217f 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -859,6 +859,8 @@ RSpec.shared_examples 'Pipeline Processing Service' do
end
context 'when a bridge job has parallel:matrix config', :sidekiq_inline do
+ let_it_be(:runner) { create(:ci_runner, :online) }
+
let(:parent_config) do
<<-EOY
test:
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
index 572808cd2db..9c8e6fd3292 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
@@ -3,6 +3,7 @@
RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.owner }
+ let_it_be(:runner) { create(:ci_runner, :online) }
where(:test_file_path) do
Dir.glob(Rails.root.join('spec/services/ci/pipeline_processing/test_cases/*.yml'))
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 839a3c53f07..c4b1e2133ed 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -11,9 +11,37 @@ module Ci
let!(:shared_runner) { create(:ci_runner, :instance) }
let!(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
- let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
describe '#execute' do
+ context 'checks database loadbalancing stickiness' do
+ subject { described_class.new(shared_runner).execute }
+
+ before do
+ project.update!(shared_runners_enabled: false)
+ end
+
+ it 'result is valid if replica did caught-up' do
+ allow(Gitlab::Database::LoadBalancing).to receive(:enable?)
+ .and_return(true)
+
+ expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?)
+ .with(:runner, shared_runner.id) { true }
+
+ expect(subject).to be_valid
+ end
+
+ it 'result is invalid if replica did not caught-up' do
+ allow(Gitlab::Database::LoadBalancing).to receive(:enable?)
+ .and_return(true)
+
+ expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:all_caught_up?)
+ .with(:runner, shared_runner.id) { false }
+
+ expect(subject).not_to be_valid
+ end
+ end
+
shared_examples 'handles runner assignment' do
context 'runner follow tag list' do
it "picks build with the same tag" do
@@ -76,11 +104,11 @@ module Ci
let!(:project3) { create :project, shared_runners_enabled: true }
let!(:pipeline3) { create :ci_pipeline, project: project3 }
let!(:build1_project1) { pending_job }
- let!(:build2_project1) { FactoryBot.create :ci_build, pipeline: pipeline }
- let!(:build3_project1) { FactoryBot.create :ci_build, pipeline: pipeline }
- let!(:build1_project2) { FactoryBot.create :ci_build, pipeline: pipeline2 }
- let!(:build2_project2) { FactoryBot.create :ci_build, pipeline: pipeline2 }
- let!(:build1_project3) { FactoryBot.create :ci_build, pipeline: pipeline3 }
+ let!(:build2_project1) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ let!(:build3_project1) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
+ let!(:build1_project2) { create(:ci_build, :pending, :queued, pipeline: pipeline2) }
+ let!(:build2_project2) { create(:ci_build, :pending, :queued, pipeline: pipeline2) }
+ let!(:build1_project3) { create(:ci_build, :pending, :queued, pipeline: pipeline3) }
context 'when using fair scheduling' do
context 'when all builds are pending' do
@@ -227,17 +255,17 @@ module Ci
let!(:pipeline3) { create(:ci_pipeline, project: project3) }
let!(:build1_project1) { pending_job }
- let!(:build2_project1) { create(:ci_build, pipeline: pipeline) }
- let!(:build3_project1) { create(:ci_build, pipeline: pipeline) }
- let!(:build1_project2) { create(:ci_build, pipeline: pipeline2) }
- let!(:build2_project2) { create(:ci_build, pipeline: pipeline2) }
- let!(:build1_project3) { create(:ci_build, pipeline: pipeline3) }
+ let!(:build2_project1) { create(:ci_build, :queued, pipeline: pipeline) }
+ let!(:build3_project1) { create(:ci_build, :queued, pipeline: pipeline) }
+ let!(:build1_project2) { create(:ci_build, :queued, pipeline: pipeline2) }
+ let!(:build2_project2) { create(:ci_build, :queued, pipeline: pipeline2) }
+ let!(:build1_project3) { create(:ci_build, :queued, pipeline: pipeline3) }
# these shouldn't influence the scheduling
let!(:unrelated_group) { create(:group) }
let!(:unrelated_project) { create(:project, group_runners_enabled: true, group: unrelated_group) }
let!(:unrelated_pipeline) { create(:ci_pipeline, project: unrelated_project) }
- let!(:build1_unrelated_project) { create(:ci_build, pipeline: unrelated_pipeline) }
+ let!(:build1_unrelated_project) { create(:ci_build, :pending, :queued, pipeline: unrelated_pipeline) }
let!(:unrelated_group_runner) { create(:ci_runner, :group, groups: [unrelated_group]) }
it 'does not consider builds from other group runners' do
@@ -318,7 +346,7 @@ module Ci
subject { described_class.new(specific_runner).execute }
context 'with multiple builds are in queue' do
- let!(:other_build) { create :ci_build, pipeline: pipeline }
+ let!(:other_build) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
before do
allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
@@ -359,7 +387,7 @@ module Ci
let!(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
context 'when a job is protected' do
- let!(:pending_job) { create(:ci_build, :protected, pipeline: pipeline) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, :protected, pipeline: pipeline) }
it 'picks the job' do
expect(execute(specific_runner)).to eq(pending_job)
@@ -367,7 +395,7 @@ module Ci
end
context 'when a job is unprotected' do
- let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
it 'picks the job' do
expect(execute(specific_runner)).to eq(pending_job)
@@ -375,7 +403,7 @@ module Ci
end
context 'when protected attribute of a job is nil' do
- let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
before do
pending_job.update_attribute(:protected, nil)
@@ -391,7 +419,7 @@ module Ci
let!(:specific_runner) { create(:ci_runner, :project, :ref_protected, projects: [project]) }
context 'when a job is protected' do
- let!(:pending_job) { create(:ci_build, :protected, pipeline: pipeline) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, :protected, pipeline: pipeline) }
it 'picks the job' do
expect(execute(specific_runner)).to eq(pending_job)
@@ -399,7 +427,7 @@ module Ci
end
context 'when a job is unprotected' do
- let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
it 'does not pick the job' do
expect(execute(specific_runner)).to be_nil
@@ -407,7 +435,7 @@ module Ci
end
context 'when protected attribute of a job is nil' do
- let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
before do
pending_job.update_attribute(:protected, nil)
@@ -421,7 +449,7 @@ module Ci
context 'runner feature set is verified' do
let(:options) { { artifacts: { reports: { junit: "junit.xml" } } } }
- let!(:pending_job) { create(:ci_build, :pending, pipeline: pipeline, options: options) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline, options: options) }
subject { execute(specific_runner, params) }
@@ -457,7 +485,7 @@ module Ci
shared_examples 'validation is active' do
context 'when depended job has not been completed yet' do
- let!(:pre_stage_job) { create(:ci_build, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ let!(:pre_stage_job) { create(:ci_build, :pending, :queued, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
it { expect(subject).to eq(pending_job) }
end
@@ -494,7 +522,7 @@ module Ci
shared_examples 'validation is not active' do
context 'when depended job has not been completed yet' do
- let!(:pre_stage_job) { create(:ci_build, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ let!(:pre_stage_job) { create(:ci_build, :pending, :queued, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
it { expect(subject).to eq(pending_job) }
end
@@ -519,7 +547,7 @@ module Ci
let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
let!(:pending_job) do
- create(:ci_build, :pending,
+ create(:ci_build, :pending, :queued,
pipeline: pipeline, stage_idx: 1,
options: { script: ["bash"], dependencies: ['test'] })
end
@@ -530,7 +558,7 @@ module Ci
end
context 'when build is degenerated' do
- let!(:pending_job) { create(:ci_build, :pending, :degenerated, pipeline: pipeline) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, :degenerated, pipeline: pipeline) }
subject { execute(specific_runner, {}) }
@@ -545,7 +573,7 @@ module Ci
context 'when build has data integrity problem' do
let!(:pending_job) do
- create(:ci_build, :pending, pipeline: pipeline)
+ create(:ci_build, :pending, :queued, pipeline: pipeline)
end
before do
@@ -570,7 +598,7 @@ module Ci
context 'when build fails to be run!' do
let!(:pending_job) do
- create(:ci_build, :pending, pipeline: pipeline)
+ create(:ci_build, :pending, :queued, pipeline: pipeline)
end
before do
@@ -612,12 +640,12 @@ module Ci
context 'when only some builds can be matched by runner' do
let!(:specific_runner) { create(:ci_runner, :project, projects: [project], tag_list: %w[matching]) }
- let!(:pending_job) { create(:ci_build, pipeline: pipeline, tag_list: %w[matching]) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline, tag_list: %w[matching]) }
before do
# create additional matching and non-matching jobs
- create_list(:ci_build, 2, pipeline: pipeline, tag_list: %w[matching])
- create(:ci_build, pipeline: pipeline, tag_list: %w[non-matching])
+ create_list(:ci_build, 2, :pending, :queued, pipeline: pipeline, tag_list: %w[matching])
+ create(:ci_build, :pending, :queued, pipeline: pipeline, tag_list: %w[non-matching])
end
it 'observes queue size of only matching jobs' do
@@ -665,7 +693,7 @@ module Ci
end
context 'when there is another build in queue' do
- let!(:next_pending_job) { create(:ci_build, pipeline: pipeline) }
+ let!(:next_pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
it 'skips this build and picks another build' do
expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
@@ -683,11 +711,7 @@ module Ci
end
end
- context 'when ci_register_job_service_one_by_one is enabled' do
- before do
- stub_feature_flags(ci_register_job_service_one_by_one: true)
- end
-
+ context 'when a long queue is created' do
it 'picks builds one-by-one' do
expect(Ci::Build).to receive(:find).with(pending_job.id).and_call_original
@@ -697,9 +721,17 @@ module Ci
include_examples 'handles runner assignment'
end
- context 'when ci_register_job_service_one_by_one is disabled' do
+ context 'when joining with pending builds table' do
+ before do
+ stub_feature_flags(ci_pending_builds_queue_join: true)
+ end
+
+ include_examples 'handles runner assignment'
+ end
+
+ context 'when not joining with pending builds table' do
before do
- stub_feature_flags(ci_register_job_service_one_by_one: false)
+ stub_feature_flags(ci_pending_builds_queue_join: false)
end
include_examples 'handles runner assignment'
@@ -747,8 +779,8 @@ module Ci
end
context 'when project already has running jobs' do
- let!(:build2) { create( :ci_build, :running, pipeline: pipeline, runner: shared_runner) }
- let!(:build3) { create( :ci_build, :running, pipeline: pipeline, runner: shared_runner) }
+ let!(:build2) { create(:ci_build, :running, pipeline: pipeline, runner: shared_runner) }
+ let!(:build3) { create(:ci_build, :running, pipeline: pipeline, runner: shared_runner) }
it 'counts job queuing time histogram with expected labels' do
allow(attempt_counter).to receive(:increment)
@@ -831,42 +863,21 @@ module Ci
end
context 'when max queue depth is reached' do
- let!(:pending_job) { create(:ci_build, :pending, :degenerated, pipeline: pipeline) }
- let!(:pending_job_2) { create(:ci_build, :pending, :degenerated, pipeline: pipeline) }
- let!(:pending_job_3) { create(:ci_build, :pending, pipeline: pipeline) }
+ let!(:pending_job) { create(:ci_build, :pending, :queued, :degenerated, pipeline: pipeline) }
+ let!(:pending_job_2) { create(:ci_build, :pending, :queued, :degenerated, pipeline: pipeline) }
+ let!(:pending_job_3) { create(:ci_build, :pending, :queued, pipeline: pipeline) }
before do
stub_const("#{described_class}::MAX_QUEUE_DEPTH", 2)
end
- context 'when feature is enabled' do
- before do
- stub_feature_flags(gitlab_ci_builds_queue_limit: true)
- end
+ it 'returns 409 conflict' do
+ expect(Ci::Build.pending.unstarted.count).to eq 3
- it 'returns 409 conflict' do
- expect(Ci::Build.pending.unstarted.count).to eq 3
+ result = described_class.new(specific_runner).execute
- result = described_class.new(specific_runner).execute
-
- expect(result).not_to be_valid
- expect(result.build).to be_nil
- end
- end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(gitlab_ci_builds_queue_limit: false)
- end
-
- it 'returns a valid result' do
- expect(Ci::Build.pending.unstarted.count).to eq 3
-
- result = described_class.new(specific_runner).execute
-
- expect(result).to be_valid
- expect(result.build).to eq pending_job_3
- end
+ expect(result).not_to be_valid
+ expect(result.build).to be_nil
end
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 86bda868625..c71bec31984 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Ci::RetryBuildService do
job_artifacts_metadata job_artifacts_trace job_artifacts_junit
job_artifacts_sast job_artifacts_secret_detection job_artifacts_dependency_scanning
job_artifacts_container_scanning job_artifacts_dast
- job_artifacts_license_management job_artifacts_license_scanning
+ job_artifacts_license_scanning
job_artifacts_performance job_artifacts_browser_performance job_artifacts_load_performance
job_artifacts_lsif job_artifacts_terraform job_artifacts_cluster_applications
job_artifacts_codequality job_artifacts_metrics scheduled_at
@@ -59,13 +59,14 @@ RSpec.describe Ci::RetryBuildService do
metadata runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size commands
resource resource_group_id processed security_scans author
- pipeline_id report_results pending_state pages_deployments].freeze
+ pipeline_id report_results pending_state pages_deployments
+ queuing_entry runtime_metadata].freeze
shared_examples 'build duplication' do
let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
let_it_be(:build) do
- create(:ci_build, :failed, :expired, :erased, :queued, :coverage, :tags,
+ create(:ci_build, :failed, :picked, :expired, :erased, :queued, :coverage, :tags,
:allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group,
description: 'my-job', stage: 'test', stage_id: stage.id,
pipeline: pipeline, auto_canceled_by: another_pipeline,
@@ -73,9 +74,6 @@ RSpec.describe Ci::RetryBuildService do
end
before_all do
- # Test correctly behaviour of deprecated artifact because it can be still in use
- stub_feature_flags(drop_license_management_artifact: false)
-
# Make sure that build has both `stage_id` and `stage` because FactoryBot
# can reset one of the fields when assigning another. We plan to deprecate
# and remove legacy `stage` column in the future.
diff --git a/spec/services/ci/update_build_queue_service_spec.rb b/spec/services/ci/update_build_queue_service_spec.rb
index 2d9f80a249d..44d7809b85f 100644
--- a/spec/services/ci/update_build_queue_service_spec.rb
+++ b/spec/services/ci/update_build_queue_service_spec.rb
@@ -4,154 +4,344 @@ require 'spec_helper'
RSpec.describe Ci::UpdateBuildQueueService do
let(:project) { create(:project, :repository) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ describe 'pending builds queue push / pop' do
+ describe '#push' do
+ let(:transition) { double('transition') }
+
+ before do
+ allow(transition).to receive(:to).and_return('pending')
+ allow(transition).to receive(:within_transaction).and_yield
+ end
+
+ context 'when pending build can be created' do
+ it 'creates a new pending build in transaction' do
+ queued = subject.push(build, transition)
+
+ expect(queued).to eq build.id
+ end
+
+ it 'increments queue push metric' do
+ metrics = spy('metrics')
+
+ described_class.new(metrics).push(build, transition)
+
+ expect(metrics)
+ .to have_received(:increment_queue_operation)
+ .with(:build_queue_push)
+ end
+ end
- shared_examples 'refreshes runner' do
- it 'ticks runner queue value' do
- expect { subject.execute(build) }.to change { runner.ensure_runner_queue_value }
+ context 'when invalid transition is detected' do
+ it 'raises an error' do
+ allow(transition).to receive(:to).and_return('created')
+
+ expect { subject.push(build, transition) }
+ .to raise_error(described_class::InvalidQueueTransition)
+ end
+ end
+
+ context 'when duplicate entry exists' do
+ before do
+ ::Ci::PendingBuild.create!(build: build, project: project)
+ end
+
+ it 'does nothing and returns build id' do
+ queued = subject.push(build, transition)
+
+ expect(queued).to eq build.id
+ end
+ end
end
- end
- shared_examples 'does not refresh runner' do
- it 'ticks runner queue value' do
- expect { subject.execute(build) }.not_to change { runner.ensure_runner_queue_value }
+ describe '#pop' do
+ let(:transition) { double('transition') }
+
+ before do
+ allow(transition).to receive(:from).and_return('pending')
+ allow(transition).to receive(:within_transaction).and_yield
+ end
+
+ context 'when pending build exists' do
+ before do
+ Ci::PendingBuild.create!(build: build, project: project)
+ end
+
+ it 'removes pending build in a transaction' do
+ dequeued = subject.pop(build, transition)
+
+ expect(dequeued).to eq build.id
+ end
+
+ it 'increments queue pop metric' do
+ metrics = spy('metrics')
+
+ described_class.new(metrics).pop(build, transition)
+
+ expect(metrics)
+ .to have_received(:increment_queue_operation)
+ .with(:build_queue_pop)
+ end
+ end
+
+ context 'when pending build does not exist' do
+ it 'does nothing if there is no pending build to remove' do
+ dequeued = subject.pop(build, transition)
+
+ expect(dequeued).to be_nil
+ end
+ end
+
+ context 'when invalid transition is detected' do
+ it 'raises an error' do
+ allow(transition).to receive(:from).and_return('created')
+
+ expect { subject.pop(build, transition) }
+ .to raise_error(described_class::InvalidQueueTransition)
+ end
+ end
end
end
- shared_examples 'matching build' do
- context 'when there is a online runner that can pick build' do
+ describe 'shared runner builds tracking' do
+ let(:runner) { create(:ci_runner, :instance_type) }
+ let(:build) { create(:ci_build, runner: runner, pipeline: pipeline) }
+
+ describe '#track' do
+ let(:transition) { double('transition') }
+
before do
- runner.update!(contacted_at: 30.minutes.ago)
+ allow(transition).to receive(:to).and_return('running')
+ allow(transition).to receive(:within_transaction).and_yield
end
- it_behaves_like 'refreshes runner'
+ context 'when a shared runner build can be tracked' do
+ it 'creates a new shared runner build tracking entry' do
+ build_id = subject.track(build, transition)
+
+ expect(build_id).to eq build.id
+ end
+
+ it 'increments new shared runner build metric' do
+ metrics = spy('metrics')
- it 'avoids running redundant queries' do
- expect(Ci::Runner).not_to receive(:owned_or_instance_wide)
+ described_class.new(metrics).track(build, transition)
- subject.execute(build)
+ expect(metrics)
+ .to have_received(:increment_queue_operation)
+ .with(:shared_runner_build_new)
+ end
end
- context 'when feature flag ci_reduce_queries_when_ticking_runner_queue is disabled' do
+ context 'when invalid transition is detected' do
+ it 'raises an error' do
+ allow(transition).to receive(:to).and_return('pending')
+
+ expect { subject.track(build, transition) }
+ .to raise_error(described_class::InvalidQueueTransition)
+ end
+ end
+
+ context 'when duplicate entry exists' do
before do
- stub_feature_flags(ci_reduce_queries_when_ticking_runner_queue: false)
- stub_feature_flags(ci_runners_short_circuit_assignable_for: false)
+ ::Ci::RunningBuild.create!(
+ build: build, project: project, runner: runner, runner_type: runner.runner_type
+ )
end
- it 'runs redundant queries using `owned_or_instance_wide` scope' do
- expect(Ci::Runner).to receive(:owned_or_instance_wide).and_call_original
+ it 'does nothing and returns build id' do
+ build_id = subject.track(build, transition)
- subject.execute(build)
+ expect(build_id).to eq build.id
end
end
end
- end
- shared_examples 'mismatching tags' do
- context 'when there is no runner that can pick build due to tag mismatch' do
+ describe '#untrack' do
+ let(:transition) { double('transition') }
+
before do
- build.tag_list = [:docker]
+ allow(transition).to receive(:from).and_return('running')
+ allow(transition).to receive(:within_transaction).and_yield
end
- it_behaves_like 'does not refresh runner'
+ context 'when shared runner build tracking entry exists' do
+ before do
+ Ci::RunningBuild.create!(
+ build: build, project: project, runner: runner, runner_type: runner.runner_type
+ )
+ end
+
+ it 'removes shared runner build' do
+ build_id = subject.untrack(build, transition)
+
+ expect(build_id).to eq build.id
+ end
+
+ it 'increments shared runner build done metric' do
+ metrics = spy('metrics')
+
+ described_class.new(metrics).untrack(build, transition)
+
+ expect(metrics)
+ .to have_received(:increment_queue_operation)
+ .with(:shared_runner_build_done)
+ end
+ end
+
+ context 'when tracking entry does not exist' do
+ it 'does nothing if there is no tracking entry to remove' do
+ build_id = subject.untrack(build, transition)
+
+ expect(build_id).to be_nil
+ end
+ end
+
+ context 'when invalid transition is detected' do
+ it 'raises an error' do
+ allow(transition).to receive(:from).and_return('pending')
+
+ expect { subject.untrack(build, transition) }
+ .to raise_error(described_class::InvalidQueueTransition)
+ end
+ end
end
end
- shared_examples 'recent runner queue' do
- context 'when there is runner with expired cache' do
- before do
- runner.update!(contacted_at: Ci::Runner.recent_queue_deadline)
+ describe '#tick' do
+ shared_examples 'refreshes runner' do
+ it 'ticks runner queue value' do
+ expect { subject.tick(build) }.to change { runner.ensure_runner_queue_value }
end
+ end
- it_behaves_like 'does not refresh runner'
+ shared_examples 'does not refresh runner' do
+ it 'does not tick runner queue value' do
+ expect { subject.tick(build) }.not_to change { runner.ensure_runner_queue_value }
+ end
end
- end
- context 'when updating specific runners' do
- let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ shared_examples 'matching build' do
+ context 'when there is a online runner that can pick build' do
+ before do
+ runner.update!(contacted_at: 30.minutes.ago)
+ end
- it_behaves_like 'matching build'
- it_behaves_like 'mismatching tags'
- it_behaves_like 'recent runner queue'
+ it_behaves_like 'refreshes runner'
- context 'when the runner is assigned to another project' do
- let(:another_project) { create(:project) }
- let(:runner) { create(:ci_runner, :project, projects: [another_project]) }
+ it 'avoids running redundant queries' do
+ expect(Ci::Runner).not_to receive(:owned_or_instance_wide)
- it_behaves_like 'does not refresh runner'
+ subject.tick(build)
+ end
+ end
end
- end
- context 'when updating shared runners' do
- let(:runner) { create(:ci_runner, :instance) }
-
- it_behaves_like 'matching build'
- it_behaves_like 'mismatching tags'
- it_behaves_like 'recent runner queue'
+ shared_examples 'mismatching tags' do
+ context 'when there is no runner that can pick build due to tag mismatch' do
+ before do
+ build.tag_list = [:docker]
+ end
- context 'when there is no runner that can pick build due to being disabled on project' do
- before do
- build.project.shared_runners_enabled = false
+ it_behaves_like 'does not refresh runner'
end
+ end
- it_behaves_like 'does not refresh runner'
+ shared_examples 'recent runner queue' do
+ context 'when there is runner with expired cache' do
+ before do
+ runner.update!(contacted_at: Ci::Runner.recent_queue_deadline)
+ end
+
+ it_behaves_like 'does not refresh runner'
+ end
end
- end
- context 'when updating group runners' do
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
- let(:runner) { create(:ci_runner, :group, groups: [group]) }
+ context 'when updating specific runners' do
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
- it_behaves_like 'matching build'
- it_behaves_like 'mismatching tags'
- it_behaves_like 'recent runner queue'
+ it_behaves_like 'matching build'
+ it_behaves_like 'mismatching tags'
+ it_behaves_like 'recent runner queue'
- context 'when there is no runner that can pick build due to being disabled on project' do
- before do
- build.project.group_runners_enabled = false
- end
+ context 'when the runner is assigned to another project' do
+ let(:another_project) { create(:project) }
+ let(:runner) { create(:ci_runner, :project, projects: [another_project]) }
- it_behaves_like 'does not refresh runner'
+ it_behaves_like 'does not refresh runner'
+ end
end
- end
- context 'avoids N+1 queries', :request_store do
- let!(:build) { create(:ci_build, pipeline: pipeline, tag_list: %w[a b]) }
- let!(:project_runner) { create(:ci_runner, :project, :online, projects: [project], tag_list: %w[a b c]) }
+ context 'when updating shared runners' do
+ let(:runner) { create(:ci_runner, :instance) }
- context 'when ci_preload_runner_tags and ci_reduce_queries_when_ticking_runner_queue are enabled' do
- before do
- stub_feature_flags(
- ci_reduce_queries_when_ticking_runner_queue: true,
- ci_preload_runner_tags: true
- )
+ it_behaves_like 'matching build'
+ it_behaves_like 'mismatching tags'
+ it_behaves_like 'recent runner queue'
+
+ context 'when there is no runner that can pick build due to being disabled on project' do
+ before do
+ build.project.shared_runners_enabled = false
+ end
+
+ it_behaves_like 'does not refresh runner'
end
+ end
- it 'does execute the same amount of queries regardless of number of runners' do
- control_count = ActiveRecord::QueryRecorder.new { subject.execute(build) }.count
+ context 'when updating group runners' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+ let(:runner) { create(:ci_runner, :group, groups: [group]) }
- create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d])
+ it_behaves_like 'matching build'
+ it_behaves_like 'mismatching tags'
+ it_behaves_like 'recent runner queue'
- expect { subject.execute(build) }.not_to exceed_all_query_limit(control_count)
+ context 'when there is no runner that can pick build due to being disabled on project' do
+ before do
+ build.project.group_runners_enabled = false
+ end
+
+ it_behaves_like 'does not refresh runner'
end
end
- context 'when ci_preload_runner_tags and ci_reduce_queries_when_ticking_runner_queue are disabled' do
- before do
- stub_feature_flags(
- ci_reduce_queries_when_ticking_runner_queue: false,
- ci_preload_runner_tags: false
- )
+ context 'avoids N+1 queries', :request_store do
+ let!(:build) { create(:ci_build, pipeline: pipeline, tag_list: %w[a b]) }
+ let!(:project_runner) { create(:ci_runner, :project, :online, projects: [project], tag_list: %w[a b c]) }
+
+ context 'when ci_preload_runner_tags is enabled' do
+ before do
+ stub_feature_flags(
+ ci_preload_runner_tags: true
+ )
+ end
+
+ it 'does execute the same amount of queries regardless of number of runners' do
+ control_count = ActiveRecord::QueryRecorder.new { subject.tick(build) }.count
+
+ create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d])
+
+ expect { subject.tick(build) }.not_to exceed_all_query_limit(control_count)
+ end
end
- it 'does execute more queries for more runners' do
- control_count = ActiveRecord::QueryRecorder.new { subject.execute(build) }.count
+ context 'when ci_preload_runner_tags are disabled' do
+ before do
+ stub_feature_flags(
+ ci_preload_runner_tags: false
+ )
+ end
+
+ it 'does execute more queries for more runners' do
+ control_count = ActiveRecord::QueryRecorder.new { subject.tick(build) }.count
- create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d])
+ create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d])
- expect { subject.execute(build) }.to exceed_all_query_limit(control_count)
+ expect { subject.tick(build) }.to exceed_all_query_limit(control_count)
+ end
end
end
end
diff --git a/spec/services/ci/update_build_state_service_spec.rb b/spec/services/ci/update_build_state_service_spec.rb
index 63190cc5d49..5bb3843da8f 100644
--- a/spec/services/ci/update_build_state_service_spec.rb
+++ b/spec/services/ci/update_build_state_service_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe Ci::UpdateBuildStateService do
- let(:project) { create(:project) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
let(:metrics) { spy('metrics') }
@@ -14,6 +15,24 @@ RSpec.describe Ci::UpdateBuildStateService do
stub_feature_flags(ci_enable_live_trace: true)
end
+ context 'when build has unknown failure reason' do
+ let(:params) do
+ {
+ output: { checksum: 'crc32:12345678', bytesize: 123 },
+ state: 'failed',
+ failure_reason: 'no idea here',
+ exit_code: 42
+ }
+ end
+
+ it 'updates a build status' do
+ result = subject.execute
+
+ expect(build).to be_failed
+ expect(result.status).to eq 200
+ end
+ end
+
context 'when build does not have checksum' do
context 'when state has changed' do
let(:params) { { state: 'success' } }
@@ -47,25 +66,6 @@ RSpec.describe Ci::UpdateBuildStateService do
end
end
- context 'when request payload carries a trace' do
- let(:params) { { state: 'success', trace: 'overwritten' } }
-
- it 'overwrites a trace' do
- result = subject.execute
-
- expect(build.trace.raw).to eq 'overwritten'
- expect(result.status).to eq 200
- end
-
- it 'updates overwrite operation metric' do
- execute_with_stubbed_metrics!
-
- expect(metrics)
- .to have_received(:increment_trace_operation)
- .with(operation: :overwrite)
- end
- end
-
context 'when state is unknown' do
let(:params) { { state: 'unknown' } }
diff --git a/spec/services/clusters/applications/create_service_spec.rb b/spec/services/clusters/applications/create_service_spec.rb
index f3b420510a6..eb907377ca8 100644
--- a/spec/services/clusters/applications/create_service_spec.rb
+++ b/spec/services/clusters/applications/create_service_spec.rb
@@ -46,8 +46,7 @@ RSpec.describe Clusters::Applications::CreateService do
context 'ingress application' do
let(:params) do
{
- application: 'ingress',
- modsecurity_enabled: true
+ application: 'ingress'
}
end
@@ -64,10 +63,6 @@ RSpec.describe Clusters::Applications::CreateService do
cluster.reload
end.to change(cluster, :application_ingress)
end
-
- it 'sets modsecurity_enabled' do
- expect(subject.modsecurity_enabled).to eq(true)
- end
end
context 'cert manager application' do
diff --git a/spec/services/clusters/cleanup/app_service_spec.rb b/spec/services/clusters/cleanup/app_service_spec.rb
deleted file mode 100644
index ea1194d2100..00000000000
--- a/spec/services/clusters/cleanup/app_service_spec.rb
+++ /dev/null
@@ -1,118 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Cleanup::AppService do
- describe '#execute' do
- let!(:cluster) { create(:cluster, :project, :cleanup_uninstalling_applications, provider_type: :gcp) }
- let(:service) { described_class.new(cluster) }
- let(:logger) { service.send(:logger) }
- let(:log_meta) do
- {
- service: described_class.name,
- cluster_id: cluster.id,
- execution_count: 0
- }
- end
-
- subject { service.execute }
-
- shared_examples 'does not reschedule itself' do
- it 'does not reschedule itself' do
- expect(Clusters::Cleanup::AppWorker).not_to receive(:perform_in)
- end
- end
-
- context 'when cluster has no applications available or transitioning applications' do
- it_behaves_like 'does not reschedule itself'
-
- it 'transitions cluster to cleanup_removing_project_namespaces' do
- expect { subject }
- .to change { cluster.reload.cleanup_status_name }
- .from(:cleanup_uninstalling_applications)
- .to(:cleanup_removing_project_namespaces)
- end
-
- it 'schedules Clusters::Cleanup::ProjectNamespaceWorker' do
- expect(Clusters::Cleanup::ProjectNamespaceWorker).to receive(:perform_async).with(cluster.id)
- subject
- end
-
- it 'logs all events' do
- expect(logger).to receive(:info)
- .with(log_meta.merge(event: :schedule_remove_project_namespaces))
-
- subject
- end
- end
-
- context 'when cluster has uninstallable applications' do
- shared_examples 'reschedules itself' do
- it 'reschedules itself' do
- expect(Clusters::Cleanup::AppWorker)
- .to receive(:perform_in)
- .with(1.minute, cluster.id, 1)
-
- subject
- end
- end
-
- context 'has applications with dependencies' do
- let!(:helm) { create(:clusters_applications_helm, :installed, cluster: cluster) }
- let!(:ingress) { create(:clusters_applications_ingress, :installed, cluster: cluster) }
- let!(:cert_manager) { create(:clusters_applications_cert_manager, :installed, cluster: cluster) }
- let!(:jupyter) { create(:clusters_applications_jupyter, :installed, cluster: cluster) }
-
- it_behaves_like 'reschedules itself'
-
- it 'only uninstalls apps that are not dependencies for other installed apps' do
- expect(Clusters::Applications::UninstallWorker)
- .to receive(:perform_async).with(helm.name, helm.id)
- .and_call_original
-
- expect(Clusters::Applications::UninstallWorker)
- .not_to receive(:perform_async).with(ingress.name, ingress.id)
-
- expect(Clusters::Applications::UninstallWorker)
- .to receive(:perform_async).with(cert_manager.name, cert_manager.id)
- .and_call_original
-
- expect(Clusters::Applications::UninstallWorker)
- .to receive(:perform_async).with(jupyter.name, jupyter.id)
- .and_call_original
-
- subject
- end
-
- it 'logs application uninstalls and next execution' do
- expect(logger).to receive(:info)
- .with(log_meta.merge(event: :uninstalling_app, application: kind_of(String))).exactly(3).times
- expect(logger).to receive(:info)
- .with(log_meta.merge(event: :scheduling_execution, next_execution: 1))
-
- subject
- end
-
- context 'cluster is not cleanup_uninstalling_applications' do
- let!(:cluster) { create(:cluster, :project, provider_type: :gcp) }
-
- it_behaves_like 'does not reschedule itself'
- end
- end
-
- context 'when applications are still uninstalling/scheduled/depending on others' do
- let!(:helm) { create(:clusters_applications_helm, :installed, cluster: cluster) }
- let!(:ingress) { create(:clusters_applications_ingress, :scheduled, cluster: cluster) }
- let!(:runner) { create(:clusters_applications_runner, :uninstalling, cluster: cluster) }
-
- it_behaves_like 'reschedules itself'
-
- it 'does not call the uninstallation service' do
- expect(Clusters::Applications::UninstallWorker).not_to receive(:new)
-
- subject
- end
- end
- end
- end
-end
diff --git a/spec/services/clusters/destroy_service_spec.rb b/spec/services/clusters/destroy_service_spec.rb
index 76d9cc34b5d..dc600c9e830 100644
--- a/spec/services/clusters/destroy_service_spec.rb
+++ b/spec/services/clusters/destroy_service_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Clusters::DestroyService do
let(:params) { { cleanup: 'true' } }
before do
- allow(Clusters::Cleanup::AppWorker).to receive(:perform_async)
+ allow(Clusters::Cleanup::ProjectNamespaceWorker).to receive(:perform_async)
end
it 'does not destroy cluster' do
@@ -45,10 +45,10 @@ RSpec.describe Clusters::DestroyService do
expect(Clusters::Cluster.where(id: cluster.id).exists?).not_to be_falsey
end
- it 'transition cluster#cleanup_status from cleanup_not_started to cleanup_uninstalling_applications' do
+    it 'transitions cluster#cleanup_status from cleanup_not_started to cleanup_removing_project_namespaces' do
expect { subject }.to change { cluster.cleanup_status_name }
.from(:cleanup_not_started)
- .to(:cleanup_uninstalling_applications)
+ .to(:cleanup_removing_project_namespaces)
end
end
end
diff --git a/spec/services/clusters/gcp/finalize_creation_service_spec.rb b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
index d8c95a70bd0..9c553d0eec2 100644
--- a/spec/services/clusters/gcp/finalize_creation_service_spec.rb
+++ b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
@@ -11,8 +11,6 @@ RSpec.describe Clusters::Gcp::FinalizeCreationService, '#execute' do
let(:platform) { cluster.platform }
let(:endpoint) { '111.111.111.111' }
let(:api_url) { 'https://' + endpoint }
- let(:username) { 'sample-username' }
- let(:password) { 'sample-password' }
let(:secret_name) { 'gitlab-token' }
let(:token) { 'sample-token' }
let(:namespace) { "#{cluster.project.path}-#{cluster.project.id}" }
@@ -34,8 +32,6 @@ RSpec.describe Clusters::Gcp::FinalizeCreationService, '#execute' do
expect(provider.endpoint).to eq(endpoint)
expect(platform.api_url).to eq(api_url)
expect(platform.ca_cert).to eq(Base64.decode64(load_sample_cert).strip)
- expect(platform.username).to eq(username)
- expect(platform.password).to eq(password)
expect(platform.token).to eq(token)
end
end
@@ -83,7 +79,7 @@ RSpec.describe Clusters::Gcp::FinalizeCreationService, '#execute' do
shared_context 'kubernetes information successfully fetched' do
before do
stub_cloud_platform_get_zone_cluster(
- provider.gcp_project_id, provider.zone, cluster.name, { endpoint: endpoint, username: username, password: password }
+ provider.gcp_project_id, provider.zone, cluster.name, { endpoint: endpoint }
)
stub_kubeclient_discover(api_url)
diff --git a/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb b/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb
deleted file mode 100644
index 1f6ad218927..00000000000
--- a/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::ParseClusterApplicationsArtifactService do
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- before do
- project.add_maintainer(user)
- end
-
- describe 'RELEASE_NAMES' do
- it 'is included in Cluster application names', :aggregate_failures do
- described_class::RELEASE_NAMES.each do |release_name|
- expect(Clusters::Cluster::APPLICATIONS).to include(release_name)
- end
- end
- end
-
- describe '.new' do
- let(:job) { build(:ci_build) }
-
- it 'sets the project and current user', :aggregate_failures do
- service = described_class.new(job, user)
-
- expect(service.project).to eq(job.project)
- expect(service.current_user).to eq(user)
- end
- end
-
- describe '#execute' do
- let_it_be(:cluster, reload: true) { create(:cluster, projects: [project]) }
- let_it_be(:deployment, reload: true) { create(:deployment, cluster: cluster) }
-
- let(:job) { deployment.deployable }
- let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job) }
-
- it 'calls Gitlab::Kubernetes::Helm::Parsers::ListV2' do
- expect(Gitlab::Kubernetes::Helm::Parsers::ListV2).to receive(:new).and_call_original
-
- result = described_class.new(job, user).execute(artifact)
-
- expect(result[:status]).to eq(:success)
- end
-
- context 'artifact is not of cluster_applications type' do
- let(:artifact) { create(:ci_job_artifact, :archive) }
- let(:job) { artifact.job }
-
- it 'raise ArgumentError' do
- expect do
- described_class.new(job, user).execute(artifact)
- end.to raise_error(ArgumentError, 'Artifact is not cluster_applications file type')
- end
- end
-
- context 'artifact exceeds acceptable size' do
- it 'returns an error' do
- stub_const("#{described_class}::MAX_ACCEPTABLE_ARTIFACT_SIZE", 1.byte)
-
- result = described_class.new(job, user).execute(artifact)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Cluster_applications artifact too big. Maximum allowable size: 1 Byte')
- end
- end
-
- context 'job has no deployment' do
- let(:job) { build(:ci_build) }
-
- it 'returns an error' do
- result = described_class.new(job, user).execute(artifact)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('No deployment found for this job')
- end
- end
-
- context 'job has no deployment cluster' do
- let(:deployment) { create(:deployment) }
- let(:job) { deployment.deployable }
-
- it 'returns an error' do
- result = described_class.new(job, user).execute(artifact)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('No deployment cluster found for this job')
- end
- end
-
- context 'blob is empty' do
- let(:file) { fixture_file_upload(Rails.root.join("spec/fixtures/helm/helm_list_v2_empty_blob.json.gz")) }
- let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job, file: file) }
-
- it 'returns success' do
- result = described_class.new(job, user).execute(artifact)
-
- expect(result[:status]).to eq(:success)
- end
- end
-
- context 'job has deployment cluster' do
- context 'current user does not have access to deployment cluster' do
- let(:other_user) { create(:user) }
-
- it 'returns an error' do
- result = described_class.new(job, other_user).execute(artifact)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('No deployment cluster found for this job')
- end
- end
-
- it 'does not affect unpermitted cluster applications' do
- expect(Clusters::ParseClusterApplicationsArtifactService::RELEASE_NAMES).to contain_exactly('cilium')
- end
-
- Clusters::ParseClusterApplicationsArtifactService::RELEASE_NAMES.each do |release_name|
- context release_name do
- include_examples 'parse cluster applications artifact', release_name
- end
- end
- end
- end
-end
diff --git a/spec/services/commits/cherry_pick_service_spec.rb b/spec/services/commits/cherry_pick_service_spec.rb
index 8fad5164b77..2565e17ac90 100644
--- a/spec/services/commits/cherry_pick_service_spec.rb
+++ b/spec/services/commits/cherry_pick_service_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Commits::CherryPickService do
repository.add_branch(user, branch_name, merge_base_sha)
end
- def cherry_pick(sha, branch_name)
+ def cherry_pick(sha, branch_name, message: nil)
commit = project.commit(sha)
described_class.new(
@@ -32,7 +32,8 @@ RSpec.describe Commits::CherryPickService do
user,
commit: commit,
start_branch: branch_name,
- branch_name: branch_name
+ branch_name: branch_name,
+ message: message
).execute
end
@@ -45,6 +46,14 @@ RSpec.describe Commits::CherryPickService do
head = repository.find_branch(branch_name).target
expect(head).not_to eq(merge_base_sha)
end
+
+ it 'supports a custom commit message' do
+ result = cherry_pick(merge_commit_sha, branch_name, message: 'foo')
+ branch = repository.find_branch(branch_name)
+
+ expect(result[:status]).to eq(:success)
+ expect(branch.dereferenced_target.message).to eq('foo')
+ end
end
it_behaves_like 'successful cherry-pick'
diff --git a/spec/services/container_expiration_policies/cleanup_service_spec.rb b/spec/services/container_expiration_policies/cleanup_service_spec.rb
index c6faae7449d..5f284b9dd8b 100644
--- a/spec/services/container_expiration_policies/cleanup_service_spec.rb
+++ b/spec/services/container_expiration_policies/cleanup_service_spec.rb
@@ -9,37 +9,68 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
let(:service) { described_class.new(repository) }
describe '#execute' do
+ let(:policy) { repository.project.container_expiration_policy }
+
subject { service.execute }
- shared_examples 'cleaning up a container repository' do
- context 'with a successful cleanup tags service execution' do
- let(:cleanup_tags_service_params) { project.container_expiration_policy.policy_params.merge('container_expiration_policy' => true) }
- let(:cleanup_tags_service) { instance_double(Projects::ContainerRepository::CleanupTagsService) }
+ before do
+ policy.update!(enabled: true)
+ policy.update_column(:next_run_at, 5.minutes.ago)
+ end
- it 'completely clean up the repository' do
- expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).with(project, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
- expect(cleanup_tags_service).to receive(:execute).with(repository).and_return(status: :success)
+ context 'with a successful cleanup tags service execution' do
+ let(:cleanup_tags_service_params) { project.container_expiration_policy.policy_params.merge('container_expiration_policy' => true) }
+ let(:cleanup_tags_service) { instance_double(Projects::ContainerRepository::CleanupTagsService) }
- response = subject
+      it 'completely cleans up the repository' do
+ expect(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:new).with(project, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
+ expect(cleanup_tags_service).to receive(:execute).with(repository).and_return(status: :success)
- aggregate_failures "checking the response and container repositories" do
- expect(response.success?).to eq(true)
- expect(response.payload).to include(cleanup_status: :finished, container_repository_id: repository.id)
- expect(ContainerRepository.waiting_for_cleanup.count).to eq(0)
- expect(repository.reload.cleanup_unscheduled?).to be_truthy
- expect(repository.expiration_policy_completed_at).not_to eq(nil)
- expect(repository.expiration_policy_started_at).not_to eq(nil)
- end
+ response = subject
+
+ aggregate_failures "checking the response and container repositories" do
+ expect(response.success?).to eq(true)
+ expect(response.payload).to include(cleanup_status: :finished, container_repository_id: repository.id)
+ expect(ContainerRepository.waiting_for_cleanup.count).to eq(0)
+ expect(repository.reload.cleanup_unscheduled?).to be_truthy
+ expect(repository.expiration_policy_completed_at).not_to eq(nil)
+ expect(repository.expiration_policy_started_at).not_to eq(nil)
end
end
+ end
- context 'without a successful cleanup tags service execution' do
- let(:cleanup_tags_service_response) { { status: :error, message: 'timeout' } }
+ context 'without a successful cleanup tags service execution' do
+ let(:cleanup_tags_service_response) { { status: :error, message: 'timeout' } }
- before do
- expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).and_return(double(execute: cleanup_tags_service_response))
+ before do
+ expect(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:new).and_return(double(execute: cleanup_tags_service_response))
+ end
+
+      it 'partially cleans up the repository' do
+ response = subject
+
+ aggregate_failures "checking the response and container repositories" do
+ expect(response.success?).to eq(true)
+ expect(response.payload).to include(cleanup_status: :unfinished, container_repository_id: repository.id)
+ expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
+ expect(repository.reload.cleanup_unfinished?).to be_truthy
+ expect(repository.expiration_policy_started_at).not_to eq(nil)
+ expect(repository.expiration_policy_completed_at).to eq(nil)
+ end
+ end
+
+ context 'with a truncated cleanup tags service response' do
+ let(:cleanup_tags_service_response) do
+ {
+ status: :error,
+ original_size: 1000,
+ before_truncate_size: 800,
+ after_truncate_size: 200,
+ before_delete_size: 100,
+ deleted_size: 100
+ }
end
it 'partially clean up the repository' do
@@ -47,179 +78,134 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
aggregate_failures "checking the response and container repositories" do
expect(response.success?).to eq(true)
- expect(response.payload).to include(cleanup_status: :unfinished, container_repository_id: repository.id)
+ expect(response.payload)
+ .to include(
+ cleanup_status: :unfinished,
+ container_repository_id: repository.id,
+ cleanup_tags_service_original_size: 1000,
+ cleanup_tags_service_before_truncate_size: 800,
+ cleanup_tags_service_after_truncate_size: 200,
+ cleanup_tags_service_before_delete_size: 100,
+ cleanup_tags_service_deleted_size: 100
+ )
expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
expect(repository.reload.cleanup_unfinished?).to be_truthy
expect(repository.expiration_policy_started_at).not_to eq(nil)
expect(repository.expiration_policy_completed_at).to eq(nil)
end
end
-
- context 'with a truncated cleanup tags service response' do
- let(:cleanup_tags_service_response) do
- {
- status: :error,
- original_size: 1000,
- before_truncate_size: 800,
- after_truncate_size: 200,
- before_delete_size: 100,
- deleted_size: 100
- }
- end
-
- it 'partially clean up the repository' do
- response = subject
-
- aggregate_failures "checking the response and container repositories" do
- expect(response.success?).to eq(true)
- expect(response.payload)
- .to include(
- cleanup_status: :unfinished,
- container_repository_id: repository.id,
- cleanup_tags_service_original_size: 1000,
- cleanup_tags_service_before_truncate_size: 800,
- cleanup_tags_service_after_truncate_size: 200,
- cleanup_tags_service_before_delete_size: 100,
- cleanup_tags_service_deleted_size: 100
- )
- expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
- expect(repository.reload.cleanup_unfinished?).to be_truthy
- expect(repository.expiration_policy_started_at).not_to eq(nil)
- expect(repository.expiration_policy_completed_at).to eq(nil)
- end
- end
- end
end
+ end
- context 'with no repository' do
- let(:service) { described_class.new(nil) }
+ context 'with no repository' do
+ let(:service) { described_class.new(nil) }
- it 'returns an error response' do
- expect(subject.success?).to eq(false)
- expect(subject.message).to eq('no repository')
- end
+ it 'returns an error response' do
+ expect(subject.success?).to eq(false)
+ expect(subject.message).to eq('no repository')
end
+ end
- context 'with an invalid policy' do
- let(:policy) { repository.project.container_expiration_policy }
+ context 'with an invalid policy' do
+ let(:policy) { repository.project.container_expiration_policy }
- before do
- policy.name_regex = nil
- policy.enabled = true
- repository.expiration_policy_cleanup_status = :cleanup_ongoing
- end
+ before do
+ policy.name_regex = nil
+ policy.enabled = true
+ repository.expiration_policy_cleanup_status = :cleanup_ongoing
+ end
- it 'returns an error response' do
- expect { subject }.to change { repository.expiration_policy_cleanup_status }.from('cleanup_ongoing').to('cleanup_unscheduled')
- expect(subject.success?).to eq(false)
- expect(subject.message).to eq('invalid policy')
- expect(policy).not_to be_enabled
- end
+ it 'returns an error response' do
+ expect { subject }.to change { repository.expiration_policy_cleanup_status }.from('cleanup_ongoing').to('cleanup_unscheduled')
+ expect(subject.success?).to eq(false)
+ expect(subject.message).to eq('invalid policy')
+ expect(policy).not_to be_enabled
end
+ end
- context 'with a network error' do
- before do
- expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).and_raise(Faraday::TimeoutError)
- end
+ context 'with a network error' do
+ before do
+ expect(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:new).and_raise(Faraday::TimeoutError)
+ end
- it 'raises an error' do
- expect { subject }.to raise_error(Faraday::TimeoutError)
+ it 'raises an error' do
+ expect { subject }.to raise_error(Faraday::TimeoutError)
- expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
- expect(repository.reload.cleanup_unfinished?).to be_truthy
- expect(repository.expiration_policy_started_at).not_to eq(nil)
- expect(repository.expiration_policy_completed_at).to eq(nil)
- end
+ expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
+ expect(repository.reload.cleanup_unfinished?).to be_truthy
+ expect(repository.expiration_policy_started_at).not_to eq(nil)
+ expect(repository.expiration_policy_completed_at).to eq(nil)
end
end
- context 'with loopless enabled' do
- let(:policy) { repository.project.container_expiration_policy }
+ context 'next run scheduling' do
+ let_it_be_with_reload(:repository2) { create(:container_repository, project: project) }
+ let_it_be_with_reload(:repository3) { create(:container_repository, project: project) }
before do
- policy.update!(enabled: true)
- policy.update_column(:next_run_at, 5.minutes.ago)
+ cleanup_tags_service = instance_double(Projects::ContainerRepository::CleanupTagsService)
+ allow(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:new).and_return(cleanup_tags_service)
+ allow(cleanup_tags_service).to receive(:execute).and_return(status: :success)
end
- it_behaves_like 'cleaning up a container repository'
-
- context 'next run scheduling' do
- let_it_be_with_reload(:repository2) { create(:container_repository, project: project) }
- let_it_be_with_reload(:repository3) { create(:container_repository, project: project) }
+ shared_examples 'not scheduling the next run' do
+      it 'does not schedule the next run' do
+ expect(policy).not_to receive(:schedule_next_run!)
- before do
- cleanup_tags_service = instance_double(Projects::ContainerRepository::CleanupTagsService)
- allow(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).and_return(cleanup_tags_service)
- allow(cleanup_tags_service).to receive(:execute).and_return(status: :success)
- end
-
- shared_examples 'not scheduling the next run' do
- it 'does not scheduled the next run' do
- expect(policy).not_to receive(:schedule_next_run!)
-
- expect { subject }.not_to change { policy.reload.next_run_at }
- end
+ expect { subject }.not_to change { policy.reload.next_run_at }
end
+ end
- shared_examples 'scheduling the next run' do
- it 'schedules the next run' do
- expect(policy).to receive(:schedule_next_run!).and_call_original
+ shared_examples 'scheduling the next run' do
+ it 'schedules the next run' do
+ expect(policy).to receive(:schedule_next_run!).and_call_original
- expect { subject }.to change { policy.reload.next_run_at }
- end
+ expect { subject }.to change { policy.reload.next_run_at }
end
+ end
- context 'with cleanups started_at before policy next_run_at' do
- before do
- ContainerRepository.update_all(expiration_policy_started_at: 10.minutes.ago)
- end
-
- it_behaves_like 'not scheduling the next run'
+ context 'with cleanups started_at before policy next_run_at' do
+ before do
+ ContainerRepository.update_all(expiration_policy_started_at: 10.minutes.ago)
end
- context 'with cleanups started_at around policy next_run_at' do
- before do
- repository3.update!(expiration_policy_started_at: policy.next_run_at + 10.minutes.ago)
- end
+ it_behaves_like 'not scheduling the next run'
+ end
- it_behaves_like 'not scheduling the next run'
+ context 'with cleanups started_at around policy next_run_at' do
+ before do
+ repository3.update!(expiration_policy_started_at: policy.next_run_at + 10.minutes.ago)
end
- context 'with only the current repository started_at before the policy next_run_at' do
- before do
- repository2.update!(expiration_policy_started_at: policy.next_run_at + 10.minutes)
- repository3.update!(expiration_policy_started_at: policy.next_run_at + 12.minutes)
- end
+ it_behaves_like 'not scheduling the next run'
+ end
- it_behaves_like 'scheduling the next run'
+ context 'with only the current repository started_at before the policy next_run_at' do
+ before do
+ repository2.update!(expiration_policy_started_at: policy.next_run_at + 10.minutes)
+ repository3.update!(expiration_policy_started_at: policy.next_run_at + 12.minutes)
end
- context 'with cleanups started_at after policy next_run_at' do
- before do
- ContainerRepository.update_all(expiration_policy_started_at: policy.next_run_at + 10.minutes)
- end
+ it_behaves_like 'scheduling the next run'
+ end
- it_behaves_like 'scheduling the next run'
+ context 'with cleanups started_at after policy next_run_at' do
+ before do
+ ContainerRepository.update_all(expiration_policy_started_at: policy.next_run_at + 10.minutes)
end
- context 'with a future policy next_run_at' do
- before do
- policy.update_column(:next_run_at, 5.minutes.from_now)
- end
+ it_behaves_like 'scheduling the next run'
+ end
- it_behaves_like 'not scheduling the next run'
+ context 'with a future policy next_run_at' do
+ before do
+ policy.update_column(:next_run_at, 5.minutes.from_now)
end
- end
- end
- context 'with loopless disabled' do
- before do
- stub_feature_flags(container_registry_expiration_policies_loopless: false)
+ it_behaves_like 'not scheduling the next run'
end
-
- it_behaves_like 'cleaning up a container repository'
end
end
end
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 4d15258a186..6996563fdb8 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -95,6 +95,42 @@ RSpec.describe Deployments::UpdateEnvironmentService do
end
end
+ context 'when external URL is specified and the tier is unset' do
+ let(:options) { { name: 'production', url: external_url } }
+
+ before do
+ environment.update_columns(external_url: external_url, tier: nil)
+ job.update!(environment: 'production')
+ end
+
+ context 'when external URL is valid' do
+ let(:external_url) { 'https://google.com' }
+
+ it 'succeeds to update the tier automatically' do
+ expect { subject.execute }.to change { environment.tier }.from(nil).to('production')
+ end
+ end
+
+ context 'when external URL is invalid' do
+ let(:external_url) { 'google.com' }
+
+ it 'fails to update the tier due to validation error' do
+ expect { subject.execute }.not_to change { environment.tier }
+ end
+
+ it 'tracks an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(an_instance_of(described_class::EnvironmentUpdateFailure),
+ project_id: project.id,
+ environment_id: environment.id,
+ reason: %q{External url is blocked: Only allowed schemes are http, https})
+ .once
+
+ subject.execute
+ end
+ end
+ end
+
context 'when variables are used' do
let(:options) do
{ name: 'review-apps/$CI_COMMIT_REF_NAME',
diff --git a/spec/services/design_management/copy_design_collection/copy_service_spec.rb b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
index 03242487b53..186d2481c19 100644
--- a/spec/services/design_management/copy_design_collection/copy_service_spec.rb
+++ b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
@@ -195,6 +195,14 @@ RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitla
expect { subject }.to change { target_repository.branch_names }.from([]).to(['master'])
end
+ it 'does not create default branch when one exists' do
+ target_repository.create_if_not_exists
+ target_repository.create_file(user, '.meta', '.gitlab', branch_name: 'new-branch', message: 'message')
+
+ expect { subject }.not_to change { target_repository.branch_names }
+ expect(target_repository.branch_names).to eq(['new-branch'])
+ end
+
it 'leaves the design collection in the correct copy state' do
subject
diff --git a/spec/services/discussions/resolve_service_spec.rb b/spec/services/discussions/resolve_service_spec.rb
index 2e30455eb0a..24de1d90526 100644
--- a/spec/services/discussions/resolve_service_spec.rb
+++ b/spec/services/discussions/resolve_service_spec.rb
@@ -121,5 +121,50 @@ RSpec.describe Discussions::ResolveService do
service.execute
end
end
+
+ context 'when resolving a discussion' do
+ def resolve_discussion(discussion, user)
+ described_class.new(project, user, one_or_more_discussions: discussion).execute
+ end
+
+ context 'in a design' do
+ let_it_be(:design) { create(:design, :with_file, issue: create(:issue, project: project)) }
+ let_it_be(:user_1) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
+ let_it_be(:discussion_1) { create(:diff_note_on_design, noteable: design, project: project, author: user_1).to_discussion }
+ let_it_be(:discussion_2) { create(:diff_note_on_design, noteable: design, project: project, author: user_2).to_discussion }
+
+ before do
+ project.add_developer(user_1)
+ project.add_developer(user_2)
+ end
+
+ context 'when user resolving discussion has open todos' do
+ let!(:user_1_todo_for_discussion_1) { create(:todo, :pending, user: user_1, target: design, note: discussion_1.notes.first, project: project) }
+ let!(:user_1_todo_2_for_discussion_1) { create(:todo, :pending, user: user_1, target: design, note: discussion_1.notes.first, project: project) }
+ let!(:user_1_todo_for_discussion_2) { create(:todo, :pending, user: user_1, target: design, note: discussion_2.notes.first, project: project) }
+ let!(:user_2_todo_for_discussion_1) { create(:todo, :pending, user: user_2, target: design, note: discussion_1.notes.first, project: project) }
+
+ it 'marks user todos for given discussion as done' do
+ resolve_discussion(discussion_1, user_1)
+
+ expect(user_1_todo_for_discussion_1.reload).to be_done
+ expect(user_1_todo_2_for_discussion_1.reload).to be_done
+ expect(user_1_todo_for_discussion_2.reload).to be_pending
+ expect(user_2_todo_for_discussion_1.reload).to be_pending
+ end
+ end
+ end
+
+ context 'in a merge request' do
+ let!(:user_todo_for_discussion) { create(:todo, :pending, user: user, target: merge_request, note: discussion.notes.first, project: project) }
+
+ it 'does not mark user todo as done' do
+ resolve_discussion(discussion, user)
+
+ expect(user_todo_for_discussion).to be_pending
+ end
+ end
+ end
end
end
diff --git a/spec/services/feature_flags/disable_service_spec.rb b/spec/services/feature_flags/disable_service_spec.rb
deleted file mode 100644
index 4b2137be35c..00000000000
--- a/spec/services/feature_flags/disable_service_spec.rb
+++ /dev/null
@@ -1,92 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe FeatureFlags::DisableService do
- include FeatureFlagHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- let(:params) { {} }
- let(:service) { described_class.new(project, user, params) }
-
- before_all do
- project.add_developer(user)
- end
-
- describe '#execute' do
- subject { service.execute }
-
- context 'with params to disable default strategy on prd scope' do
- let(:params) do
- {
- name: 'awesome',
- environment_scope: 'prd',
- strategy: { name: 'userWithId', parameters: { 'userIds': 'User:1' } }.deep_stringify_keys
- }
- end
-
- context 'when there is a persisted feature flag' do
- let!(:feature_flag) { create_flag(project, params[:name]) }
-
- context 'when there is a persisted scope' do
- let!(:scope) do
- create_scope(feature_flag, params[:environment_scope], true, strategies)
- end
-
- context 'when there is a persisted strategy' do
- let(:strategies) do
- [
- { name: 'userWithId', parameters: { 'userIds': 'User:1' } }.deep_stringify_keys,
- { name: 'userWithId', parameters: { 'userIds': 'User:2' } }.deep_stringify_keys
- ]
- end
-
- it 'deletes the specified strategy' do
- subject
-
- scope.reload
- expect(scope.strategies.count).to eq(1)
- expect(scope.strategies).not_to include(params[:strategy])
- end
-
- context 'when strategies will be empty' do
- let(:strategies) { [params[:strategy]] }
-
- it 'deletes the persisted scope' do
- subject
-
- expect(feature_flag.scopes.exists?(environment_scope: params[:environment_scope]))
- .to eq(false)
- end
- end
- end
-
- context 'when there is no persisted strategy' do
- let(:strategies) { [{ name: 'default', parameters: {} }] }
-
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to include('Strategy not found')
- end
- end
- end
-
- context 'when there is no persisted scope' do
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to include('Feature Flag Scope not found')
- end
- end
- end
-
- context 'when there is no persisted feature flag' do
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to include('Feature Flag not found')
- end
- end
- end
- end
-end
diff --git a/spec/services/feature_flags/enable_service_spec.rb b/spec/services/feature_flags/enable_service_spec.rb
deleted file mode 100644
index c0008b1933f..00000000000
--- a/spec/services/feature_flags/enable_service_spec.rb
+++ /dev/null
@@ -1,154 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe FeatureFlags::EnableService do
- include FeatureFlagHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- let(:params) { {} }
- let(:service) { described_class.new(project, user, params) }
-
- before_all do
- project.add_developer(user)
- end
-
- describe '#execute' do
- subject { service.execute }
-
- context 'with params to enable default strategy on prd scope' do
- let(:params) do
- {
- name: 'awesome',
- environment_scope: 'prd',
- strategy: { name: 'default', parameters: {} }.stringify_keys
- }
- end
-
- context 'when there is no persisted feature flag' do
- it 'creates a new feature flag with scope' do
- feature_flag = subject[:feature_flag]
- scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
- expect(subject[:status]).to eq(:success)
- expect(feature_flag.name).to eq(params[:name])
- expect(feature_flag.default_scope).not_to be_active
- expect(scope).to be_active
- expect(scope.strategies).to include(params[:strategy])
- end
-
- context 'when params include default scope' do
- let(:params) do
- {
- name: 'awesome',
- environment_scope: '*',
- strategy: { name: 'userWithId', parameters: { 'userIds': 'abc' } }.deep_stringify_keys
- }
- end
-
- it 'create a new feature flag with an active default scope with the specified strategy' do
- feature_flag = subject[:feature_flag]
- expect(subject[:status]).to eq(:success)
- expect(feature_flag.default_scope).to be_active
- expect(feature_flag.default_scope.strategies).to include(params[:strategy])
- end
- end
- end
-
- context 'when there is a persisted feature flag' do
- let!(:feature_flag) { create_flag(project, params[:name]) }
-
- context 'when there is no persisted scope' do
- it 'creates a new scope for the persisted feature flag' do
- feature_flag = subject[:feature_flag]
- scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
- expect(subject[:status]).to eq(:success)
- expect(feature_flag.name).to eq(params[:name])
- expect(scope).to be_active
- expect(scope.strategies).to include(params[:strategy])
- end
- end
-
- context 'when there is a persisted scope' do
- let!(:feature_flag_scope) do
- create_scope(feature_flag, params[:environment_scope], active, strategies)
- end
-
- let(:active) { true }
-
- context 'when the persisted scope does not have the specified strategy yet' do
- let(:strategies) { [{ name: 'userWithId', parameters: { 'userIds': 'abc' } }] }
-
- it 'adds the specified strategy to the scope' do
- subject
-
- feature_flag_scope.reload
- expect(feature_flag_scope.strategies).to include(params[:strategy])
- end
-
- context 'when the persisted scope is inactive' do
- let(:active) { false }
-
- it 'reactivates the scope' do
- expect { subject }
- .to change { feature_flag_scope.reload.active }.from(false).to(true)
- end
- end
- end
-
- context 'when the persisted scope has the specified strategy already' do
- let(:strategies) { [params[:strategy]] }
-
- it 'does not add a duplicated strategy to the scope' do
- expect { subject }
- .not_to change { feature_flag_scope.reload.strategies.count }
- end
- end
- end
- end
- end
-
- context 'when strategy is not specified in params' do
- let(:params) do
- {
- name: 'awesome',
- environment_scope: 'prd'
- }
- end
-
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to include('Scopes strategies must be an array of strategy hashes')
- end
- end
-
- context 'when environment scope is not specified in params' do
- let(:params) do
- {
- name: 'awesome',
- strategy: { name: 'default', parameters: {} }.stringify_keys
- }
- end
-
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to include("Scopes environment scope can't be blank")
- end
- end
-
- context 'when name is not specified in params' do
- let(:params) do
- {
- environment_scope: 'prd',
- strategy: { name: 'default', parameters: {} }.stringify_keys
- }
- end
-
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to include("Name can't be blank")
- end
- end
- end
-end
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
index 1a127a0d472..d838549891a 100644
--- a/spec/services/feature_flags/update_service_spec.rb
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -121,150 +121,5 @@ RSpec.describe FeatureFlags::UpdateService do
subject
end
end
-
- context 'when scope active state is changed' do
- let(:params) do
- {
- scopes_attributes: [{ id: feature_flag.scopes.first.id, active: false }]
- }
- end
-
- it 'creates audit event about changing active state' do
- expect { subject }.to change { AuditEvent.count }.by(1)
- expect(audit_event_message).to(
- include("Updated rule <strong>*</strong> active state "\
- "from <strong>true</strong> to <strong>false</strong>.")
- )
- end
- end
-
- context 'when scope is renamed' do
- let(:changed_scope) { feature_flag.scopes.create!(environment_scope: 'review', active: true) }
- let(:params) do
- {
- scopes_attributes: [{ id: changed_scope.id, environment_scope: 'staging' }]
- }
- end
-
- it 'creates audit event with changed name' do
- expect { subject }.to change { AuditEvent.count }.by(1)
- expect(audit_event_message).to(
- include("Updated rule <strong>staging</strong> environment scope "\
- "from <strong>review</strong> to <strong>staging</strong>.")
- )
- end
-
- context 'when scope can not be updated' do
- let(:params) do
- {
- scopes_attributes: [{ id: changed_scope.id, environment_scope: '' }]
- }
- end
-
- it 'returns error status' do
- expect(subject[:status]).to eq(:error)
- end
-
- it 'returns error messages' do
- expect(subject[:message]).to include("Scopes environment scope can't be blank")
- end
-
- it 'does not create audit event' do
- expect { subject }.not_to change { AuditEvent.count }
- end
- end
- end
-
- context 'when scope is deleted' do
- let(:deleted_scope) { feature_flag.scopes.create!(environment_scope: 'review', active: true) }
- let(:params) do
- {
- scopes_attributes: [{ id: deleted_scope.id, '_destroy': true }]
- }
- end
-
- it 'creates audit event with deleted scope' do
- expect { subject }.to change { AuditEvent.count }.by(1)
- expect(audit_event_message).to include("Deleted rule <strong>review</strong>.")
- end
-
- context 'when scope can not be deleted' do
- before do
- allow(deleted_scope).to receive(:destroy).and_return(false)
- end
-
- it 'does not create audit event' do
- expect do
- subject
- end.to not_change { AuditEvent.count }.and raise_error(ActiveRecord::RecordNotDestroyed)
- end
- end
- end
-
- context 'when new scope is being added' do
- let(:new_environment_scope) { 'review' }
- let(:params) do
- {
- scopes_attributes: [{ environment_scope: new_environment_scope, active: true }]
- }
- end
-
- it 'creates audit event with new scope' do
- expected = 'Created rule <strong>review</strong> and set it as <strong>active</strong> '\
- 'with strategies <strong>[{"name"=>"default", "parameters"=>{}}]</strong>.'
-
- subject
-
- expect(audit_event_message).to include(expected)
- end
-
- context 'when scope can not be created' do
- let(:new_environment_scope) { '' }
-
- it 'returns error status' do
- expect(subject[:status]).to eq(:error)
- end
-
- it 'returns error messages' do
- expect(subject[:message]).to include("Scopes environment scope can't be blank")
- end
-
- it 'does not create audit event' do
- expect { subject }.not_to change { AuditEvent.count }
- end
- end
- end
-
- context 'when the strategy is changed' do
- let(:scope) do
- create(:operations_feature_flag_scope,
- feature_flag: feature_flag,
- environment_scope: 'sandbox',
- strategies: [{ name: "default", parameters: {} }])
- end
-
- let(:params) do
- {
- scopes_attributes: [{
- id: scope.id,
- environment_scope: 'sandbox',
- strategies: [{
- name: 'gradualRolloutUserId',
- parameters: {
- groupId: 'mygroup',
- percentage: "40"
- }
- }]
- }]
- }
- end
-
- it 'creates an audit event' do
- expected = %r{Updated rule <strong>sandbox</strong> strategies from <strong>.*</strong> to <strong>.*</strong>.}
-
- expect { subject }.to change { AuditEvent.count }.by(1)
- expect(audit_event_message).to match(expected)
- end
- end
end
end
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index dca5497de06..b59ee894fe8 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe Groups::CreateService, '#execute' do
end
it 'create the chat team with the group' do
- allow_any_instance_of(Mattermost::Team).to receive(:create)
+ allow_any_instance_of(::Mattermost::Team).to receive(:create)
.and_return({ 'name' => 'tanuki', 'id' => 'lskdjfwlekfjsdifjj' })
expect { subject }.to change { ChatTeam.count }.from(0).to(1)
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index a5fce315d91..c794acdf76d 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Groups::DestroyService do
let!(:chat_team) { create(:chat_team, namespace: group) }
it 'destroys the team too' do
- expect_next_instance_of(Mattermost::Team) do |instance|
+ expect_next_instance_of(::Mattermost::Team) do |instance|
expect(instance).to receive(:destroy)
end
diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb
index df994b9f2a3..b1bb9a8de23 100644
--- a/spec/services/groups/group_links/create_service_spec.rb
+++ b/spec/services/groups/group_links/create_service_spec.rb
@@ -11,8 +11,8 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
let_it_be(:group) { create(:group, :private, parent: group_parent) }
let_it_be(:group_child) { create(:group, :private, parent: group) }
- let_it_be(:shared_group_parent) { create(:group, :private) }
- let_it_be(:shared_group) { create(:group, :private, parent: shared_group_parent) }
+ let_it_be(:shared_group_parent, refind: true) { create(:group, :private) }
+ let_it_be(:shared_group, refind: true) { create(:group, :private, parent: shared_group_parent) }
let_it_be(:shared_group_child) { create(:group, :private, parent: shared_group) }
let_it_be(:project_parent) { create(:project, group: shared_group_parent) }
@@ -28,7 +28,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
let(:user) { group_user }
- subject { described_class.new(group, user, opts) }
+ subject { described_class.new(shared_group, group, user, opts) }
before do
group.add_guest(group_user)
@@ -36,11 +36,11 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
end
it 'adds group to another group' do
- expect { subject.execute(shared_group) }.to change { group.shared_group_links.count }.from(0).to(1)
+ expect { subject.execute }.to change { group.shared_group_links.count }.from(0).to(1)
end
it 'returns false if shared group is blank' do
- expect { subject.execute(nil) }.not_to change { group.shared_group_links.count }
+ expect { described_class.new(nil, group, user, opts) }.not_to change { group.shared_group_links.count }
end
context 'user does not have access to group' do
@@ -51,7 +51,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
end
it 'returns error' do
- result = subject.execute(shared_group)
+ result = subject.execute
expect(result[:status]).to eq(:error)
expect(result[:http_status]).to eq(404)
@@ -67,7 +67,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
end
it 'returns error' do
- result = subject.execute(shared_group)
+ result = subject.execute
expect(result[:status]).to eq(:error)
expect(result[:http_status]).to eq(404)
@@ -85,7 +85,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
it 'is executed only for the direct members of the group' do
expect(UserProjectAccessChangedService).to receive(:new).with(contain_exactly(group_user.id)).and_call_original
- subject.execute(shared_group)
+ subject.execute
end
end
@@ -94,7 +94,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
let(:user) { group_user }
it 'create proper authorizations' do
- subject.execute(shared_group)
+ subject.execute
expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
expect(Ability.allowed?(user, :read_project, project)).to be_truthy
@@ -106,7 +106,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
let(:user) { parent_group_user }
it 'create proper authorizations' do
- subject.execute(shared_group)
+ subject.execute
expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
expect(Ability.allowed?(user, :read_project, project)).to be_falsey
@@ -118,7 +118,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
let(:user) { child_group_user }
it 'create proper authorizations' do
- subject.execute(shared_group)
+ subject.execute
expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
expect(Ability.allowed?(user, :read_project, project)).to be_falsey
@@ -127,4 +127,28 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
end
end
end
+
+ context 'sharing outside the hierarchy is disabled' do
+ before do
+ shared_group_parent.namespace_settings.update!(prevent_sharing_groups_outside_hierarchy: true)
+ end
+
+ it 'prevents sharing with a group outside the hierarchy' do
+ result = subject.execute
+
+ expect(group.reload.shared_group_links.count).to eq(0)
+ expect(result[:status]).to eq(:error)
+ expect(result[:http_status]).to eq(404)
+ end
+
+ it 'allows sharing with a group within the hierarchy' do
+ sibling_group = create(:group, :private, parent: shared_group_parent)
+ sibling_group.add_guest(group_user)
+
+ result = described_class.new(shared_group, sibling_group, user, opts).execute
+
+ expect(sibling_group.reload.shared_group_links.count).to eq(1)
+ expect(result[:status]).to eq(:success)
+ end
+ end
end
diff --git a/spec/services/groups/participants_service_spec.rb b/spec/services/groups/participants_service_spec.rb
new file mode 100644
index 00000000000..750aead277f
--- /dev/null
+++ b/spec/services/groups/participants_service_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::ParticipantsService do
+ describe '#group_members' do
+ let(:user) { create(:user) }
+ let(:parent_group) { create(:group) }
+ let(:group) { create(:group, parent: parent_group) }
+ let(:subgroup) { create(:group, parent: group) }
+ let(:subproject) { create(:project, group: subgroup) }
+
+ it 'returns all members in parent groups, sub-groups, and sub-projects' do
+ parent_group.add_developer(create(:user))
+ subgroup.add_developer(create(:user))
+ subproject.add_developer(create(:user))
+
+ result = described_class.new(group, user).execute(nil)
+
+ expected_users = (group.self_and_hierarchy.flat_map(&:users) + subproject.users)
+ .map { |user| user_to_autocompletable(user) }
+
+ expect(expected_users.count).to eq(3)
+ expect(result).to include(*expected_users)
+ end
+ end
+
+ def user_to_autocompletable(user)
+ {
+ type: user.class.name,
+ username: user.username,
+ name: user.name,
+ avatar_url: user.avatar_url,
+ availability: user&.status&.availability
+ }
+ end
+end
diff --git a/spec/services/import_export_clean_up_service_spec.rb b/spec/services/import_export_clean_up_service_spec.rb
index 4101b13adf9..2bcdfa6dd8f 100644
--- a/spec/services/import_export_clean_up_service_spec.rb
+++ b/spec/services/import_export_clean_up_service_spec.rb
@@ -8,7 +8,13 @@ RSpec.describe ImportExportCleanUpService do
let(:tmp_import_export_folder) { 'tmp/gitlab_exports' }
- context 'when the import/export directory does not exist' do
+ before do
+ allow_next_instance_of(Gitlab::Import::Logger) do |logger|
+ allow(logger).to receive(:info)
+ end
+ end
+
+ context 'when the import/export tmp storage directory does not exist' do
it 'does not remove any archives' do
path = '/invalid/path/'
stub_repository_downloads_path(path)
@@ -19,49 +25,84 @@ RSpec.describe ImportExportCleanUpService do
end
end
- context 'when the import/export directory exists' do
- it 'removes old files' do
- in_directory_with_files(mtime: 2.days.ago) do |dir, files|
- service.execute
-
- files.each { |file| expect(File.exist?(file)).to eq false }
- expect(File.directory?(dir)).to eq false
+ context 'when the import/export tmp storage directory exists' do
+ shared_examples 'removes old tmp files' do |subdir|
+ it 'removes old files and logs' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ message: 'Removed Import/Export tmp directory',
+ dir_path: anything
+ )
+ end
+
+ validate_cleanup(subdir: subdir, mtime: 2.days.ago, expected: false)
end
- end
- it 'does not remove new files' do
- in_directory_with_files(mtime: 2.hours.ago) do |dir, files|
- service.execute
+ it 'does not remove new files or logs' do
+ expect(Gitlab::Import::Logger).not_to receive(:new)
- files.each { |file| expect(File.exist?(file)).to eq true }
- expect(File.directory?(dir)).to eq true
+ validate_cleanup(subdir: subdir, mtime: 2.hours.ago, expected: true)
end
end
+
+ include_examples 'removes old tmp files', '@hashed'
+ include_examples 'removes old tmp files', '@groups'
end
context 'with uploader exports' do
- it 'removes old files' do
+ it 'removes old files and logs' do
upload = create(:import_export_upload,
updated_at: 2.days.ago,
export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ message: 'Removed Import/Export export_file',
+ project_id: upload.project_id,
+ group_id: upload.group_id
+ )
+ end
+
expect { service.execute }.to change { upload.reload.export_file.file.nil? }.to(true)
+
+ expect(ImportExportUpload.where(export_file: nil)).to include(upload)
end
- it 'does not remove new files' do
+ it 'does not remove new files or logs' do
upload = create(:import_export_upload,
updated_at: 1.hour.ago,
export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
+ expect(Gitlab::Import::Logger).not_to receive(:new)
+
expect { service.execute }.not_to change { upload.reload.export_file.file.nil? }
+
+ expect(ImportExportUpload.where.not(export_file: nil)).to include(upload)
+ end
+ end
+
+ def validate_cleanup(subdir:, mtime:, expected:)
+ in_directory_with_files(mtime: mtime, subdir: subdir) do |dir, files|
+ service.execute
+
+ files.each { |file| expect(File.exist?(file)).to eq(expected) }
+ expect(File.directory?(dir)).to eq(expected)
end
end
- def in_directory_with_files(mtime:)
+ def in_directory_with_files(mtime:, subdir:)
Dir.mktmpdir do |tmpdir|
stub_repository_downloads_path(tmpdir)
- dir = File.join(tmpdir, tmp_import_export_folder, 'subfolder')
+ hashed = Digest::SHA2.hexdigest(subdir)
+ subdir_path = [subdir, hashed[0..1], hashed[2..3], hashed, hashed[4..10]]
+ dir = File.join(tmpdir, tmp_import_export_folder, *[subdir_path])
+
FileUtils.mkdir_p(dir)
+ File.utime(mtime.to_i, mtime.to_i, dir)
files = FileUtils.touch(file_list(dir) + [dir], mtime: mtime.to_time)
diff --git a/spec/services/issue_rebalancing_service_spec.rb b/spec/services/issue_rebalancing_service_spec.rb
index 1c7f74264b7..76ccb6d89ea 100644
--- a/spec/services/issue_rebalancing_service_spec.rb
+++ b/spec/services/issue_rebalancing_service_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe IssueRebalancingService do
shared_examples 'IssueRebalancingService shared examples' do
it 'rebalances a set of issues with clumps at the end and start' do
all_issues = start_clump + unclumped + end_clump.reverse
- service = described_class.new(project.issues.first)
+ service = described_class.new(Project.id_in([project.id]))
expect { service.execute }.not_to change { issues_in_position_order.map(&:id) }
@@ -55,7 +55,7 @@ RSpec.describe IssueRebalancingService do
end
it 'is idempotent' do
- service = described_class.new(project.issues.first)
+ service = described_class.new(Project.id_in(project))
expect do
service.execute
@@ -70,17 +70,17 @@ RSpec.describe IssueRebalancingService do
issue.project.group
old_pos = issue.relative_position
- service = described_class.new(issue)
+ service = described_class.new(Project.id_in(project))
expect { service.execute }.not_to exceed_query_limit(0)
expect(old_pos).to eq(issue.reload.relative_position)
end
- it 'acts if the flag is enabled for the project' do
+ it 'acts if the flag is enabled for the root namespace' do
issue = create(:issue, project: project, author: user, relative_position: max_pos)
- stub_feature_flags(rebalance_issues: issue.project)
+ stub_feature_flags(rebalance_issues: project.root_namespace)
- service = described_class.new(issue)
+ service = described_class.new(Project.id_in(project))
expect { service.execute }.to change { issue.reload.relative_position }
end
@@ -90,23 +90,22 @@ RSpec.describe IssueRebalancingService do
project.update!(group: create(:group))
stub_feature_flags(rebalance_issues: issue.project.group)
- service = described_class.new(issue)
+ service = described_class.new(Project.id_in(project))
expect { service.execute }.to change { issue.reload.relative_position }
end
it 'aborts if there are too many issues' do
- issue = project.issues.first
base = double(count: 10_001)
- allow(Issue).to receive(:relative_positioning_query_base).with(issue).and_return(base)
+ allow(Issue).to receive(:in_projects).and_return(base)
- expect { described_class.new(issue).execute }.to raise_error(described_class::TooManyIssues)
+ expect { described_class.new(Project.id_in(project)).execute }.to raise_error(described_class::TooManyIssues)
end
end
shared_examples 'rebalancing is retried on statement timeout exceptions' do
- subject { described_class.new(project.issues.first) }
+ subject { described_class.new(Project.id_in(project)) }
it 'retries update statement' do
call_count = 0
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 8950bdd465f..0b315422be8 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -3,50 +3,22 @@
require 'spec_helper'
RSpec.describe Issues::CloseService do
- subject(:close_issue) { described_class.new(project: project, current_user: user).close_issue(issue) }
-
- let_it_be(:project, refind: true) { create(:project, :repository) }
- let_it_be(:label1) { create(:label, project: project) }
- let_it_be(:label2) { create(:label, project: project, remove_on_close: true) }
- let_it_be(:author) { create(:user) }
- let_it_be(:user) { create(:user, email: "user@example.com") }
- let_it_be(:user2) { create(:user, email: "user2@example.com") }
- let_it_be(:guest) { create(:user) }
- let_it_be(:closing_merge_request) { create(:merge_request, source_project: project) }
-
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user, email: "user@example.com") }
+ let(:user2) { create(:user, email: "user2@example.com") }
+ let(:guest) { create(:user) }
+ let(:issue) { create(:issue, title: "My issue", project: project, assignees: [user2], author: create(:user)) }
let(:external_issue) { ExternalIssue.new('JIRA-123', project) }
- let!(:issue) { create(:issue, title: "My issue", project: project, assignees: [user2], author: author) }
+ let(:closing_merge_request) { create(:merge_request, source_project: project) }
+ let(:closing_commit) { create(:commit, project: project) }
+ let!(:todo) { create(:todo, :assigned, user: user, project: project, target: issue, author: user2) }
- before_all do
+ before do
project.add_maintainer(user)
project.add_developer(user2)
project.add_guest(guest)
end
- shared_examples 'removes labels marked for removal from issue when closed' do
- before do
- issue.update!(label_ids: [label1.id, label2.id])
- end
-
- it 'removes labels marked for removal' do
- expect do
- close_issue
- end.to change { issue.reload.label_ids }.from(containing_exactly(label1.id, label2.id)).to(containing_exactly(label1.id))
- end
-
- it 'creates system notes for the removed labels' do
- expect do
- close_issue
- end.to change(ResourceLabelEvent, :count).by(1)
-
- expect(ResourceLabelEvent.last.slice(:action, :issue_id, :label_id)).to eq(
- 'action' => 'remove',
- 'issue_id' => issue.id,
- 'label_id' => label2.id
- )
- end
- end
-
describe '#execute' do
let(:service) { described_class.new(project: project, current_user: user) }
@@ -131,7 +103,7 @@ RSpec.describe Issues::CloseService do
end
context 'with an active external issue tracker not supporting close_issue' do
- let!(:external_issue_tracker) { create(:bugzilla_service, project: project) }
+ let!(:external_issue_tracker) { create(:bugzilla_integration, project: project) }
it 'does not close the issue on the external issue tracker' do
project.reload
@@ -149,8 +121,6 @@ RSpec.describe Issues::CloseService do
end
end
- it_behaves_like 'removes labels marked for removal from issue when closed'
-
it 'mentions closure via a merge request' do
close_issue
@@ -214,18 +184,10 @@ RSpec.describe Issues::CloseService do
end
context "closed by a commit", :sidekiq_might_not_need_inline do
- subject(:close_issue) do
+ it 'mentions closure via a commit' do
perform_enqueued_jobs do
described_class.new(project: project, current_user: user).close_issue(issue, closed_via: closing_commit)
end
- end
-
- let(:closing_commit) { create(:commit, project: project) }
-
- it_behaves_like 'removes labels marked for removal from issue when closed'
-
- it 'mentions closure via a commit' do
- close_issue
email = ActionMailer::Base.deliveries.last
@@ -237,8 +199,9 @@ RSpec.describe Issues::CloseService do
context 'when user cannot read the commit' do
it 'does not mention the commit id' do
project.project_feature.update_attribute(:repository_access_level, ProjectFeature::DISABLED)
-
- close_issue
+ perform_enqueued_jobs do
+ described_class.new(project: project, current_user: user).close_issue(issue, closed_via: closing_commit)
+ end
email = ActionMailer::Base.deliveries.last
body_text = email.body.parts.map(&:body).join(" ")
@@ -259,14 +222,12 @@ RSpec.describe Issues::CloseService do
it 'verifies the number of queries' do
recorded = ActiveRecord::QueryRecorder.new { close_issue }
- expected_queries = 32
+ expected_queries = 24
expect(recorded.count).to be <= expected_queries
expect(recorded.cached_count).to eq(0)
end
- it_behaves_like 'removes labels marked for removal from issue when closed'
-
it 'closes the issue' do
close_issue
@@ -296,8 +257,6 @@ RSpec.describe Issues::CloseService do
end
it 'marks todos as done' do
- todo = create(:todo, :assigned, user: user, project: project, target: issue, author: user2)
-
close_issue
expect(todo.reload).to be_done
@@ -362,32 +321,26 @@ RSpec.describe Issues::CloseService do
end
context 'when issue is not confidential' do
- it_behaves_like 'removes labels marked for removal from issue when closed'
-
it 'executes issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
- close_issue
+ described_class.new(project: project, current_user: user).close_issue(issue)
end
end
context 'when issue is confidential' do
- let(:issue) { create(:issue, :confidential, project: project) }
-
- it_behaves_like 'removes labels marked for removal from issue when closed'
-
it 'executes confidential issue hooks' do
+ issue = create(:issue, :confidential, project: project)
+
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
- close_issue
+ described_class.new(project: project, current_user: user).close_issue(issue)
end
end
context 'internal issues disabled' do
- let!(:todo) { create(:todo, :assigned, user: user, project: project, target: issue, author: user2) }
-
before do
project.issues_enabled = false
project.save!
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 9c84242d8ae..94810d6134a 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -18,8 +18,8 @@ RSpec.describe Issues::CreateService do
let_it_be(:labels) { create_pair(:label, project: project) }
before_all do
- project.add_maintainer(user)
- project.add_maintainer(assignee)
+ project.add_guest(user)
+ project.add_guest(assignee)
end
let(:opts) do
@@ -78,8 +78,8 @@ RSpec.describe Issues::CreateService do
opts.merge!(title: '')
end
- it 'does not create an incident label prematurely' do
- expect { subject }.not_to change(Label, :count)
+ it 'does not apply an incident label prematurely' do
+ expect { subject }.to not_change(LabelLink, :count).and not_change(Issue, :count)
end
end
end
@@ -88,15 +88,11 @@ RSpec.describe Issues::CreateService do
expect { issue }.to change { project.open_issues_count }.from(0).to(1)
end
- context 'when current user cannot admin issues in the project' do
- let_it_be(:guest) { create(:user) }
-
- before_all do
- project.add_guest(guest)
- end
+ context 'when current user cannot set issue metadata in the project' do
+ let_it_be(:non_member) { create(:user) }
- it 'filters out params that cannot be set without the :admin_issue permission' do
- issue = described_class.new(project: project, current_user: guest, params: opts).execute
+ it 'filters out params that cannot be set without the :set_issue_metadata permission' do
+ issue = described_class.new(project: project, current_user: non_member, params: opts).execute
expect(issue).to be_persisted
expect(issue.title).to eq('Awesome issue')
@@ -107,8 +103,8 @@ RSpec.describe Issues::CreateService do
expect(issue.due_date).to be_nil
end
- it 'creates confidential issues' do
- issue = described_class.new(project: project, current_user: guest, params: { confidential: true }).execute
+ it 'can create confidential issues' do
+ issue = described_class.new(project: project, current_user: non_member, params: { confidential: true }).execute
expect(issue.confidential).to be_truthy
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 8c97dd95ced..b95d94e3784 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -158,6 +158,90 @@ RSpec.describe Issues::UpdateService, :mailer do
end
end
+ context 'changing issue_type' do
+ let!(:label_1) { create(:label, project: project, title: 'incident') }
+ let!(:label_2) { create(:label, project: project, title: 'missed-sla') }
+
+ before do
+ stub_licensed_features(quality_management: true)
+ end
+
+ context 'from issue to incident' do
+      it 'adds an `incident` label if one does not exist' do
+ expect { update_issue(issue_type: 'incident') }.to change(issue.labels, :count).by(1)
+ expect(issue.labels.pluck(:title)).to eq(['incident'])
+ end
+
+ context 'for an issue with multiple labels' do
+ let(:issue) { create(:incident, project: project, labels: [label_1]) }
+
+ before do
+ update_issue(issue_type: 'incident')
+ end
+
+        it 'does not add an `incident` label if one already exists' do
+ expect(issue.labels).to eq([label_1])
+ end
+ end
+
+ context 'filtering the incident label' do
+ let(:params) { { add_label_ids: [] } }
+
+ before do
+ update_issue(issue_type: 'incident')
+ end
+
+        it 'creates and adds an incident label id to add_label_ids' do
+ expect(issue.label_ids).to contain_exactly(label_1.id)
+ end
+ end
+ end
+
+ context 'from incident to issue' do
+ let(:issue) { create(:incident, project: project) }
+
+ context 'for an incident with multiple labels' do
+ let(:issue) { create(:incident, project: project, labels: [label_1, label_2]) }
+
+ before do
+ update_issue(issue_type: 'issue')
+ end
+
+ it 'removes an `incident` label if one exists on the incident' do
+ expect(issue.labels).to eq([label_2])
+ end
+ end
+
+ context 'filtering the incident label' do
+ let(:issue) { create(:incident, project: project, labels: [label_1, label_2]) }
+ let(:params) { { label_ids: [label_1.id, label_2.id], remove_label_ids: [] } }
+
+ before do
+ update_issue(issue_type: 'issue')
+ end
+
+ it 'adds an incident label id to remove_label_ids for it to be removed' do
+ expect(issue.label_ids).to contain_exactly(label_2.id)
+ end
+ end
+ end
+
+ context 'from issue to restricted issue types' do
+ context 'without sufficient permissions' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_guest(user)
+ end
+
+ it 'does nothing to the labels' do
+ expect { update_issue(issue_type: 'issue') }.not_to change(issue.labels, :count)
+ expect(issue.reload.labels).to eq([])
+ end
+ end
+ end
+ end
+
it 'updates open issue counter for assignees when issue is reassigned' do
update_issue(assignee_ids: [user2.id])
@@ -225,7 +309,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts[:move_between_ids] = [issue1.id, issue2.id]
- expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, project.id)
+ expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
update_issue(opts)
expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
@@ -239,7 +323,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts[:move_between_ids] = [issue1.id, issue2.id]
- expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, project.id)
+ expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
update_issue(opts)
expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
@@ -253,7 +337,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts[:move_between_ids] = [issue1.id, issue2.id]
- expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, project.id)
+ expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id)
update_issue(opts)
expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
diff --git a/spec/services/issues/zoom_link_service_spec.rb b/spec/services/issues/zoom_link_service_spec.rb
index 19db892fcae..d662d9fa978 100644
--- a/spec/services/issues/zoom_link_service_spec.rb
+++ b/spec/services/issues/zoom_link_service_spec.rb
@@ -53,7 +53,10 @@ RSpec.describe Issues::ZoomLinkService do
category: 'IncidentManagement::ZoomIntegration',
action: 'add_zoom_meeting',
label: 'Issue ID',
- value: issue.id
+ value: issue.id,
+ user: user,
+ project: project,
+ namespace: project.namespace
)
end
@@ -192,7 +195,10 @@ RSpec.describe Issues::ZoomLinkService do
category: 'IncidentManagement::ZoomIntegration',
action: 'remove_zoom_meeting',
label: 'Issue ID',
- value: issue.id
+ value: issue.id,
+ user: user,
+ project: project,
+ namespace: project.namespace
)
end
end
diff --git a/spec/services/jira_import/users_importer_spec.rb b/spec/services/jira_import/users_importer_spec.rb
index c825f899f80..2e8c556d62c 100644
--- a/spec/services/jira_import/users_importer_spec.rb
+++ b/spec/services/jira_import/users_importer_spec.rb
@@ -43,39 +43,37 @@ RSpec.describe JiraImport::UsersImporter do
end
end
- RSpec.shared_examples 'maps jira users to gitlab users' do
+ RSpec.shared_examples 'maps Jira users to GitLab users' do |users_mapper_service:|
context 'when Jira import is configured correctly' do
- let_it_be(:jira_service) { create(:jira_service, project: project, active: true) }
- let(:client) { double }
+ let_it_be(:jira_service) { create(:jira_service, project: project, active: true, url: "http://jira.example.net") }
- before do
- expect(importer).to receive(:client).at_least(1).and_return(client)
- allow(client).to receive_message_chain(:ServerInfo, :all, :deploymentType).and_return(deployment_type)
- end
-
- context 'when jira client raises an error' do
+ context 'when users mapper service raises an error' do
let(:error) { Timeout::Error.new }
it 'returns an error response' do
- expect(client).to receive(:get).and_raise(error)
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(error, project_id: project.id)
+ expect_next_instance_of(users_mapper_service) do |instance|
+ expect(instance).to receive(:execute).and_raise(error)
+ end
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(error, project_id: project.id)
expect(subject.error?).to be_truthy
expect(subject.message).to include('There was an error when communicating to Jira')
end
end
- context 'when jira client returns result' do
- context 'when jira client returns an empty array' do
- let(:jira_users) { [] }
-
+ context 'when users mapper service returns result' do
+ context 'when users mapper service returns an empty array' do
it 'returns nil payload' do
+ expect_next_instance_of(users_mapper_service) do |instance|
+ expect(instance).to receive(:execute).and_return([])
+ end
+
expect(subject.success?).to be_truthy
expect(subject.payload).to be_empty
end
end
- context 'when jira client returns an results' do
+ context 'when Jira client returns any users' do
let_it_be(:project_member) { create(:user, email: 'sample@jira.com') }
let_it_be(:group_member) { create(:user, name: 'user-name2') }
let_it_be(:other_user) { create(:user) }
@@ -86,6 +84,10 @@ RSpec.describe JiraImport::UsersImporter do
end
it 'returns the mapped users' do
+ expect_next_instance_of(users_mapper_service) do |instance|
+ expect(instance).to receive(:execute).and_return(mapped_users)
+ end
+
expect(subject.success?).to be_truthy
expect(subject.payload).to eq(mapped_users)
end
@@ -95,43 +97,23 @@ RSpec.describe JiraImport::UsersImporter do
end
context 'when Jira instance is of Server deployment type' do
- let(:deployment_type) { 'Server' }
- let(:url) { "/rest/api/2/user/search?username=''&maxResults=50&startAt=#{start_at}" }
- let(:jira_users) do
- [
- { 'key' => 'acc1', 'name' => 'user-name1', 'emailAddress' => 'sample@jira.com' },
- { 'key' => 'acc2', 'name' => 'user-name2' }
- ]
- end
-
before do
- allow_next_instance_of(JiraImport::ServerUsersMapperService) do |instance|
- allow(instance).to receive(:client).and_return(client)
- allow(client).to receive(:get).with(url).and_return(jira_users)
- end
+ allow(project).to receive(:jira_service).and_return(jira_service)
+
+ jira_service.data_fields.deployment_server!
end
- it_behaves_like 'maps jira users to gitlab users'
+ it_behaves_like 'maps Jira users to GitLab users', users_mapper_service: JiraImport::ServerUsersMapperService
end
- context 'when Jira instance is of Cloud deploymet type' do
- let(:deployment_type) { 'Cloud' }
- let(:url) { "/rest/api/2/users?maxResults=50&startAt=#{start_at}" }
- let(:jira_users) do
- [
- { 'accountId' => 'acc1', 'displayName' => 'user-name1', 'emailAddress' => 'sample@jira.com' },
- { 'accountId' => 'acc2', 'displayName' => 'user-name2' }
- ]
- end
-
+ context 'when Jira instance is of Cloud deployment type' do
before do
- allow_next_instance_of(JiraImport::CloudUsersMapperService) do |instance|
- allow(instance).to receive(:client).and_return(client)
- allow(client).to receive(:get).with(url).and_return(jira_users)
- end
+ allow(project).to receive(:jira_service).and_return(jira_service)
+
+ jira_service.data_fields.deployment_cloud!
end
- it_behaves_like 'maps jira users to gitlab users'
+ it_behaves_like 'maps Jira users to GitLab users', users_mapper_service: JiraImport::CloudUsersMapperService
end
end
end
diff --git a/spec/services/lfs/push_service_spec.rb b/spec/services/lfs/push_service_spec.rb
index 58fb2f3fb9b..e1564ca2359 100644
--- a/spec/services/lfs/push_service_spec.rb
+++ b/spec/services/lfs/push_service_spec.rb
@@ -8,13 +8,14 @@ RSpec.describe Lfs::PushService do
let_it_be(:project) { create(:forked_project_with_submodules) }
let_it_be(:remote_mirror) { create(:remote_mirror, project: project, enabled: true) }
- let_it_be(:lfs_object) { create_linked_lfs_object(project, :project) }
let(:params) { { url: remote_mirror.bare_url, credentials: remote_mirror.credentials } }
subject(:service) { described_class.new(project, nil, params) }
describe "#execute" do
+ let_it_be(:lfs_object) { create_linked_lfs_object(project, :project) }
+
it 'uploads the object when upload is requested' do
stub_lfs_batch(lfs_object)
@@ -25,14 +26,6 @@ RSpec.describe Lfs::PushService do
expect(service.execute).to eq(status: :success)
end
- it 'does nothing if there are no LFS objects' do
- lfs_object.destroy!
-
- expect(lfs_client).not_to receive(:upload!)
-
- expect(service.execute).to eq(status: :success)
- end
-
it 'does not upload the object when upload is not requested' do
stub_lfs_batch(lfs_object, upload: false)
@@ -88,6 +81,12 @@ RSpec.describe Lfs::PushService do
end
end
+ it 'does nothing if there are no LFS objects' do
+ expect(lfs_client).not_to receive(:upload!)
+
+ expect(service.execute).to eq(status: :success)
+ end
+
def create_linked_lfs_object(project, type)
create(:lfs_objects_project, project: project, repository_type: type).lfs_object
end
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 916941e1111..ffe63a8a94b 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -8,7 +8,8 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
let_it_be(:member) { create(:user) }
let_it_be(:user_ids) { member.id.to_s }
let_it_be(:access_level) { Gitlab::Access::GUEST }
- let(:params) { { user_ids: user_ids, access_level: access_level } }
+ let(:additional_params) { { invite_source: '_invite_source_' } }
+ let(:params) { { user_ids: user_ids, access_level: access_level }.merge(additional_params) }
subject(:execute_service) { described_class.new(user, params.merge({ source: source })).execute }
@@ -82,4 +83,46 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
expect(OnboardingProgress.completed?(source.namespace, :user_added)).to be(false)
end
end
+
+ context 'when tracking the invite source', :snowplow do
+ context 'when invite_source is not passed' do
+ let(:additional_params) { {} }
+
+ it 'tracks the invite source as unknown' do
+ expect { execute_service }.to raise_error(ArgumentError, 'No invite source provided.')
+
+ expect_no_snowplow_event
+ end
+ end
+
+ context 'when invite_source is passed' do
+ it 'tracks the invite source from params' do
+ execute_service
+
+ expect_snowplow_event(
+ category: described_class.name,
+ action: 'create_member',
+ label: '_invite_source_',
+ property: 'existing_user',
+ user: user
+ )
+ end
+ end
+
+ context 'when it is a net_new_user' do
+ let(:additional_params) { { invite_source: '_invite_source_', user_ids: 'email@example.org' } }
+
+ it 'tracks the invite source from params' do
+ execute_service
+
+ expect_snowplow_event(
+ category: described_class.name,
+ action: 'create_member',
+ label: '_invite_source_',
+ property: 'net_new_user',
+ user: user
+ )
+ end
+ end
+ end
end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index d7fd7d5b2ca..c530e3d0c53 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_shared_state, :sidekiq_inline do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
let_it_be(:user) { project.owner }
let_it_be(:project_user) { create(:user) }
let_it_be(:namespace) { project.namespace }
let(:params) { {} }
- let(:base_params) { { access_level: Gitlab::Access::GUEST, source: project } }
+ let(:base_params) { { access_level: Gitlab::Access::GUEST, source: project, invite_source: '_invite_source_' } }
subject(:result) { described_class.new(user, base_params.merge(params) ).execute }
@@ -23,6 +23,18 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
it_behaves_like 'records an onboarding progress action', :user_added
end
+ context 'when email belongs to an existing user as a secondary email' do
+ let(:secondary_email) { create(:email, email: 'secondary@example.com', user: project_user) }
+ let(:params) { { email: secondary_email.email } }
+
+ it 'adds an existing user to members', :aggregate_failures do
+ expect_to_create_members(count: 1)
+ expect(result[:status]).to eq(:success)
+ expect(project.users).to include project_user
+ expect(project.members.last).not_to be_invite
+ end
+ end
+
context 'when email is not a valid email' do
let(:params) { { email: '_bogus_' } }
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index 5a6a9df3f44..d10f82289bd 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -251,22 +251,22 @@ RSpec.describe MergeRequests::BuildService do
end
context 'when the source branch matches an issue' do
- where(:issue_tracker, :source_branch, :closing_message) do
- :jira | 'FOO-123-fix-issue' | 'Closes FOO-123'
- :jira | 'fix-issue' | nil
- :custom_issue_tracker | '123-fix-issue' | 'Closes #123'
- :custom_issue_tracker | 'fix-issue' | nil
- :internal | '123-fix-issue' | 'Closes #123'
- :internal | 'fix-issue' | nil
+ where(:factory, :source_branch, :closing_message) do
+ :jira_service | 'FOO-123-fix-issue' | 'Closes FOO-123'
+ :jira_service | 'fix-issue' | nil
+ :custom_issue_tracker_integration | '123-fix-issue' | 'Closes #123'
+ :custom_issue_tracker_integration | 'fix-issue' | nil
+ nil | '123-fix-issue' | 'Closes #123'
+ nil | 'fix-issue' | nil
end
with_them do
before do
- if issue_tracker == :internal
- issue.update!(iid: 123)
- else
- create(:"#{issue_tracker}_service", project: project)
+ if factory
+ create(factory, project: project)
project.reload
+ else
+ issue.update!(iid: 123)
end
end
@@ -350,23 +350,23 @@ RSpec.describe MergeRequests::BuildService do
end
context 'when the source branch matches an issue' do
- where(:issue_tracker, :source_branch, :title, :closing_message) do
- :jira | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
- :jira | 'fix-issue' | 'Fix issue' | nil
- :custom_issue_tracker | '123-fix-issue' | 'Resolve #123 "Fix issue"' | 'Closes #123'
- :custom_issue_tracker | 'fix-issue' | 'Fix issue' | nil
- :internal | '123-fix-issue' | 'Resolve "A bug"' | 'Closes #123'
- :internal | 'fix-issue' | 'Fix issue' | nil
- :internal | '124-fix-issue' | '124 fix issue' | nil
+ where(:factory, :source_branch, :title, :closing_message) do
+ :jira_service | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
+ :jira_service | 'fix-issue' | 'Fix issue' | nil
+ :custom_issue_tracker_integration | '123-fix-issue' | 'Resolve #123 "Fix issue"' | 'Closes #123'
+ :custom_issue_tracker_integration | 'fix-issue' | 'Fix issue' | nil
+ nil | '123-fix-issue' | 'Resolve "A bug"' | 'Closes #123'
+ nil | 'fix-issue' | 'Fix issue' | nil
+ nil | '124-fix-issue' | '124 fix issue' | nil
end
with_them do
before do
- if issue_tracker == :internal
- issue.update!(iid: 123)
- else
- create(:"#{issue_tracker}_service", project: project)
+ if factory
+ create(factory, project: project)
project.reload
+ else
+ issue.update!(iid: 123)
end
end
@@ -399,23 +399,23 @@ RSpec.describe MergeRequests::BuildService do
end
context 'when the source branch matches an issue' do
- where(:issue_tracker, :source_branch, :title, :closing_message) do
- :jira | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
- :jira | 'fix-issue' | 'Fix issue' | nil
- :custom_issue_tracker | '123-fix-issue' | 'Resolve #123 "Fix issue"' | 'Closes #123'
- :custom_issue_tracker | 'fix-issue' | 'Fix issue' | nil
- :internal | '123-fix-issue' | 'Resolve "A bug"' | 'Closes #123'
- :internal | 'fix-issue' | 'Fix issue' | nil
- :internal | '124-fix-issue' | '124 fix issue' | nil
+ where(:factory, :source_branch, :title, :closing_message) do
+ :jira_service | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
+ :jira_service | 'fix-issue' | 'Fix issue' | nil
+ :custom_issue_tracker_integration | '123-fix-issue' | 'Resolve #123 "Fix issue"' | 'Closes #123'
+ :custom_issue_tracker_integration | 'fix-issue' | 'Fix issue' | nil
+ nil | '123-fix-issue' | 'Resolve "A bug"' | 'Closes #123'
+ nil | 'fix-issue' | 'Fix issue' | nil
+ nil | '124-fix-issue' | '124 fix issue' | nil
end
with_them do
before do
- if issue_tracker == :internal
- issue.update!(iid: 123)
- else
- create(:"#{issue_tracker}_service", project: project)
+ if factory
+ create(factory, project: project)
project.reload
+ else
+ issue.update!(iid: 123)
end
end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index b2351ab53bd..da547716e1e 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
let(:opts) do
{
title: 'Awesome merge_request',
- description: "well this is not done yet\n/wip",
+ description: "well this is not done yet\n/draft",
source_branch: 'feature',
target_branch: 'master',
assignees: [user2]
diff --git a/spec/services/merge_requests/handle_assignees_change_service_spec.rb b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
index 0bf18f16abb..f9eed6eea2d 100644
--- a/spec/services/merge_requests/handle_assignees_change_service_spec.rb
+++ b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:assignee) { create(:user) }
- let_it_be(:merge_request) { create(:merge_request, author: user, source_project: project, assignees: [assignee]) }
+ let_it_be_with_reload(:merge_request) { create(:merge_request, author: user, source_project: project, assignees: [assignee]) }
let_it_be(:old_assignees) { create_list(:user, 3) }
let(:options) { {} }
@@ -45,13 +45,27 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
service.execute(merge_request, old_assignees, options)
end
+ let(:note) { merge_request.notes.system.last }
+ let(:removed_note) { "unassigned #{old_assignees.map(&:to_reference).to_sentence}" }
+
+ context 'when unassigning all users' do
+ before do
+ merge_request.update!(assignee_ids: [])
+ end
+
+ it 'creates assignee note' do
+ execute
+
+ expect(note).not_to be_nil
+ expect(note.note).to eq removed_note
+ end
+ end
+
it 'creates assignee note' do
execute
- note = merge_request.notes.last
-
expect(note).not_to be_nil
- expect(note.note).to include "assigned to #{assignee.to_reference} and unassigned #{old_assignees.map(&:to_reference).to_sentence}"
+ expect(note.note).to include "assigned to #{assignee.to_reference} and #{removed_note}"
end
it 'sends email notifications to old and new assignees', :mailer, :sidekiq_inline do
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index ac39fb59c62..503c0282bd6 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -181,7 +181,7 @@ RSpec.describe MergeRequests::MergeService do
commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
allow(merge_request).to receive(:commits).and_return([commit])
- expect_any_instance_of(JiraService).to receive(:close_issue).with(merge_request, jira_issue, user).once
+ expect_any_instance_of(Integrations::Jira).to receive(:close_issue).with(merge_request, jira_issue, user).once
service.execute(merge_request)
end
@@ -193,7 +193,7 @@ RSpec.describe MergeRequests::MergeService do
commit = double('commit', safe_message: "Fixes #{jira_issue.to_reference}")
allow(merge_request).to receive(:commits).and_return([commit])
- expect_any_instance_of(JiraService).not_to receive(:close_issue)
+ expect_any_instance_of(Integrations::Jira).not_to receive(:close_issue)
service.execute(merge_request)
end
diff --git a/spec/services/merge_requests/update_assignees_service_spec.rb b/spec/services/merge_requests/update_assignees_service_spec.rb
index 076161c9029..3a0b17c2768 100644
--- a/spec/services/merge_requests/update_assignees_service_spec.rb
+++ b/spec/services/merge_requests/update_assignees_service_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
description: "FYI #{user2.to_reference}",
assignee_ids: [user3.id],
source_project: project,
+ target_project: project,
author: create(:user))
end
@@ -24,6 +25,7 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
project.add_maintainer(user)
project.add_developer(user2)
project.add_developer(user3)
+ merge_request.errors.clear
end
let(:service) { described_class.new(project: project, current_user: user, params: opts) }
@@ -32,35 +34,53 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
describe 'execute' do
def update_merge_request
service.execute(merge_request)
- merge_request.reload
+ end
+
+ shared_examples 'removing all assignees' do
+ it 'removes all assignees' do
+ expect(update_merge_request).to have_attributes(assignees: be_empty, errors: be_none)
+ end
+
+ it 'enqueues the correct background work' do
+ expect_next(MergeRequests::HandleAssigneesChangeService, project: project, current_user: user) do |service|
+ expect(service)
+ .to receive(:async_execute)
+ .with(merge_request, [user3], execute_hooks: true)
+ end
+
+ update_merge_request
+ end
end
context 'when the parameters are valid' do
context 'when using sentinel values' do
- let(:opts) { { assignee_ids: [0] } }
+ context 'when using assignee_ids' do
+ let(:opts) { { assignee_ids: [0] } }
+
+ it_behaves_like 'removing all assignees'
+ end
- it 'removes all assignees' do
- expect { update_merge_request }.to change(merge_request, :assignees).to([])
+ context 'when using assignee_id' do
+ let(:opts) { { assignee_id: 0 } }
+
+ it_behaves_like 'removing all assignees'
end
end
- context 'the assignee_ids parameter is the empty list' do
+ context 'when the assignee_ids parameter is the empty list' do
let(:opts) { { assignee_ids: [] } }
- it 'removes all assignees' do
- expect { update_merge_request }.to change(merge_request, :assignees).to([])
- end
+ it_behaves_like 'removing all assignees'
end
it 'updates the MR, and queues the more expensive work for later' do
expect_next(MergeRequests::HandleAssigneesChangeService, project: project, current_user: user) do |service|
expect(service)
- .to receive(:async_execute)
- .with(merge_request, [user3], execute_hooks: true)
+ .to receive(:async_execute).with(merge_request, [user3], execute_hooks: true)
end
expect { update_merge_request }
- .to change(merge_request, :assignees).to([user2])
+ .to change { merge_request.reload.assignees }.from([user3]).to([user2])
.and change(merge_request, :updated_at)
.and change(merge_request, :updated_by).to(user)
end
@@ -68,7 +88,10 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
it 'does not update the assignees if they do not have access' do
opts[:assignee_ids] = [create(:user).id]
- expect { update_merge_request }.not_to change(merge_request, :assignee_ids)
+ expect(update_merge_request).to have_attributes(
+ assignees: [user3],
+ errors: be_any
+ )
end
it 'is more efficient than using the full update-service' do
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index a85fbd77d70..6ec2b158d30 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -297,6 +297,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
reviewers: [],
milestone: nil,
total_time_spent: 0,
+ time_change: 0,
description: "FYI #{user2.to_reference}"
}
)
@@ -768,6 +769,13 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
update_merge_request({ target_branch: 'target' })
end
+
+ it "does not try to mark as unchecked if it's already unchecked" do
+ expect(merge_request).to receive(:unchecked?).and_return(true)
+ expect(merge_request).not_to receive(:mark_as_unchecked)
+
+ update_merge_request({ target_branch: "target" })
+ end
end
context 'when auto merge is enabled and target branch changed' do
diff --git a/spec/services/namespace_settings/update_service_spec.rb b/spec/services/namespace_settings/update_service_spec.rb
index 887d56df099..8e176dbc6cd 100644
--- a/spec/services/namespace_settings/update_service_spec.rb
+++ b/spec/services/namespace_settings/update_service_spec.rb
@@ -75,5 +75,37 @@ RSpec.describe NamespaceSettings::UpdateService do
end
end
end
+
+ context "updating :prevent_sharing_groups_outside_hierarchy" do
+ let(:settings) { { prevent_sharing_groups_outside_hierarchy: true } }
+
+ context 'when user is a group owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'changes settings' do
+ expect { service.execute }
+ .to change { group.namespace_settings.prevent_sharing_groups_outside_hierarchy }
+ .from(false).to(true)
+ end
+ end
+
+ context 'when user is not a group owner' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'does not change settings' do
+ expect { service.execute }.not_to change { group.namespace_settings.prevent_sharing_groups_outside_hierarchy }
+ end
+
+ it 'returns the group owner error' do
+ service.execute
+
+ expect(group.namespace_settings.errors.messages[:prevent_sharing_groups_outside_hierarchy]).to include('can only be changed by a group admin.')
+ end
+ end
+ end
end
end
diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
index 3094f574184..2bf02e541f9 100644
--- a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
+++ b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
@@ -41,22 +41,23 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
using RSpec::Parameterized::TableSyntax
where(:track, :interval, :actions_completed) do
- :create | 1 | { created_at: frozen_time - 2.days }
- :create | 5 | { created_at: frozen_time - 6.days }
- :create | 10 | { created_at: frozen_time - 11.days }
- :verify | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days }
- :verify | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days }
- :verify | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days }
- :trial | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days }
- :trial | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days }
- :trial | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days }
- :team | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days, trial_started_at: frozen_time - 2.days }
- :team | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days, trial_started_at: frozen_time - 6.days }
- :team | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days, trial_started_at: frozen_time - 11.days }
+ :create | 1 | { created_at: frozen_time - 2.days }
+ :create | 5 | { created_at: frozen_time - 6.days }
+ :create | 10 | { created_at: frozen_time - 11.days }
+ :verify | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days }
+ :verify | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days }
+ :verify | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days }
+ :trial | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days }
+ :trial | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days }
+ :trial | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days }
+ :team | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days, trial_started_at: frozen_time - 2.days }
+ :team | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days, trial_started_at: frozen_time - 6.days }
+ :team | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days, trial_started_at: frozen_time - 11.days }
+ :experience | 30 | { created_at: frozen_time - 31.days, git_write_at: frozen_time - 31.days }
end
with_them do
- it { is_expected.to send_in_product_marketing_email(user.id, group.id, track, described_class::INTERVAL_DAYS.index(interval)) }
+ it { is_expected.to send_in_product_marketing_email(user.id, group.id, track, described_class::TRACKS[track][:interval_days].index(interval)) }
end
end
@@ -235,7 +236,7 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
let(:track) { :foo }
before do
- stub_const("#{described_class}::TRACKS", { bar: :git_write })
+ stub_const("#{described_class}::TRACKS", { bar: {} })
end
it { expect { subject }.to raise_error(ArgumentError, 'Track foo not defined') }
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 31263feb947..5b4d6188b66 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -307,7 +307,7 @@ RSpec.describe Notes::CreateService do
),
# Set WIP status
QuickAction.new(
- action_text: "/wip",
+ action_text: "/draft",
before_action: -> {
issuable.reload.update!(title: "title")
},
@@ -317,7 +317,7 @@ RSpec.describe Notes::CreateService do
),
# Remove WIP status
QuickAction.new(
- action_text: "/wip",
+ action_text: "/draft",
before_action: -> {
issuable.reload.update!(title: "WIP: title")
},
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index 9692bb08379..cb7d0163cac 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -130,6 +130,17 @@ RSpec.describe Notes::QuickActionsService do
end
end
+ describe '/estimate' do
+ let(:note_text) { '/estimate 1h' }
+
+ it 'adds time estimate to noteable' do
+ content = execute(note)
+
+ expect(content).to be_empty
+ expect(note.noteable.time_estimate).to eq(3600)
+ end
+ end
+
describe 'note with command & text' do
describe '/close, /label, /assign & /milestone' do
let(:note_text) do
@@ -302,6 +313,11 @@ RSpec.describe Notes::QuickActionsService do
end
it_behaves_like 'note on noteable that supports quick actions' do
+ let_it_be(:incident, reload: true) { create(:incident, project: project) }
+ let(:note) { build(:note_on_issue, project: project, noteable: incident) }
+ end
+
+ it_behaves_like 'note on noteable that supports quick actions' do
let(:merge_request) { create(:merge_request, source_project: project) }
let(:note) { build(:note_on_merge_request, project: project, noteable: merge_request) }
end
diff --git a/spec/services/notification_recipients/builder/default_spec.rb b/spec/services/notification_recipients/builder/default_spec.rb
index 994138ea828..c142cc11384 100644
--- a/spec/services/notification_recipients/builder/default_spec.rb
+++ b/spec/services/notification_recipients/builder/default_spec.rb
@@ -160,21 +160,7 @@ RSpec.describe NotificationRecipients::Builder::Default do
end
end
- before do
- stub_feature_flags(notification_setting_recipient_refactor: enabled)
- end
-
- context 'with notification_setting_recipient_refactor enabled' do
- let(:enabled) { true }
-
- it_behaves_like 'custom notification recipients'
- end
-
- context 'with notification_setting_recipient_refactor disabled' do
- let(:enabled) { false }
-
- it_behaves_like 'custom notification recipients'
- end
+ it_behaves_like 'custom notification recipients'
end
end
end
diff --git a/spec/services/packages/debian/create_distribution_service_spec.rb b/spec/services/packages/debian/create_distribution_service_spec.rb
index 87cf1070075..ecf82c6a1db 100644
--- a/spec/services/packages/debian/create_distribution_service_spec.rb
+++ b/spec/services/packages/debian/create_distribution_service_spec.rb
@@ -4,8 +4,12 @@ require 'spec_helper'
RSpec.describe Packages::Debian::CreateDistributionService do
RSpec.shared_examples 'Create Debian Distribution' do |expected_message, expected_components, expected_architectures|
+ let_it_be(:container) { create(container_type) } # rubocop:disable Rails/SaveBang
+
it 'returns ServiceResponse', :aggregate_failures do
if expected_message.nil?
+ expect(::Packages::Debian::GenerateDistributionWorker).to receive(:perform_async).with(container_type, an_instance_of(Integer))
+
expect { response }
.to change { container.debian_distributions.klass.all.count }
.from(0).to(1)
@@ -18,6 +22,7 @@ RSpec.describe Packages::Debian::CreateDistributionService do
.and not_change { Packages::Debian::ProjectComponentFile.count }
.and not_change { Packages::Debian::GroupComponentFile.count }
else
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
expect { response }
.to not_change { container.debian_distributions.klass.all.count }
.and not_change { container.debian_distributions.count }
@@ -109,13 +114,13 @@ RSpec.describe Packages::Debian::CreateDistributionService do
let(:response) { subject.execute }
context 'within a projet' do
- let_it_be(:container) { create(:project) }
+ let_it_be(:container_type) { :project }
it_behaves_like 'Debian Create Distribution Service'
end
context 'within a group' do
- let_it_be(:container) { create(:group) }
+ let_it_be(:container_type) { :group }
it_behaves_like 'Debian Create Distribution Service'
end
diff --git a/spec/services/packages/debian/destroy_distribution_service_spec.rb b/spec/services/packages/debian/destroy_distribution_service_spec.rb
deleted file mode 100644
index e4c43884bb4..00000000000
--- a/spec/services/packages/debian/destroy_distribution_service_spec.rb
+++ /dev/null
@@ -1,78 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Packages::Debian::DestroyDistributionService do
- RSpec.shared_examples 'Destroy Debian Distribution' do |expected_message|
- it 'returns ServiceResponse', :aggregate_failures do
- if expected_message.nil?
- expect { response }
- .to change { container.debian_distributions.klass.all.count }
- .from(1).to(0)
- .and change { container.debian_distributions.count }
- .from(1).to(0)
- .and change { component1.class.all.count }
- .from(2).to(0)
- .and change { architecture1.class.all.count }
- .from(3).to(0)
- .and change { component_file1.class.all.count }
- .from(4).to(0)
- else
- expect { response }
- .to not_change { container.debian_distributions.klass.all.count }
- .and not_change { container.debian_distributions.count }
- .and not_change { component1.class.all.count }
- .and not_change { architecture1.class.all.count }
- .and not_change { component_file1.class.all.count }
- end
-
- expect(response).to be_a(ServiceResponse)
- expect(response.success?).to eq(expected_message.nil?)
- expect(response.error?).to eq(!expected_message.nil?)
- expect(response.message).to eq(expected_message)
-
- if expected_message.nil?
- expect(response.payload).to eq({})
- else
- expect(response.payload).to eq(distribution: distribution)
- end
- end
- end
-
- RSpec.shared_examples 'Debian Destroy Distribution Service' do |container_type, can_freeze|
- context "with a Debian #{container_type} distribution" do
- let_it_be(:container, freeze: can_freeze) { create(container_type) } # rubocop:disable Rails/SaveBang
- let_it_be(:distribution, freeze: can_freeze) { create("debian_#{container_type}_distribution", container: container) }
- let_it_be(:component1, freeze: can_freeze) { create("debian_#{container_type}_component", distribution: distribution, name: 'component1') }
- let_it_be(:component2, freeze: can_freeze) { create("debian_#{container_type}_component", distribution: distribution, name: 'component2') }
- let_it_be(:architecture0, freeze: true) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'all') }
- let_it_be(:architecture1, freeze: can_freeze) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'architecture1') }
- let_it_be(:architecture2, freeze: can_freeze) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'architecture2') }
- let_it_be(:component_file1, freeze: can_freeze) { create("debian_#{container_type}_component_file", :source, component: component1) }
- let_it_be(:component_file2, freeze: can_freeze) { create("debian_#{container_type}_component_file", component: component1, architecture: architecture1) }
- let_it_be(:component_file3, freeze: can_freeze) { create("debian_#{container_type}_component_file", :source, component: component2) }
- let_it_be(:component_file4, freeze: can_freeze) { create("debian_#{container_type}_component_file", component: component2, architecture: architecture2) }
-
- subject { described_class.new(distribution) }
-
- let(:response) { subject.execute }
-
- context 'with a distribution' do
- it_behaves_like 'Destroy Debian Distribution'
- end
-
- context 'when destroy fails' do
- let(:distribution) { create("debian_#{container_type}_distribution", container: container) }
-
- before do
- expect(distribution).to receive(:destroy).and_return(false)
- end
-
- it_behaves_like 'Destroy Debian Distribution', "Unable to destroy Debian #{container_type} distribution"
- end
- end
- end
-
- it_behaves_like 'Debian Destroy Distribution Service', :project, true
- it_behaves_like 'Debian Destroy Distribution Service', :group, false
-end
diff --git a/spec/services/packages/debian/extract_changes_metadata_service_spec.rb b/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
index 2a92b8ed26e..ced846866c2 100644
--- a/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
+++ b/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
@@ -6,8 +6,10 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
let_it_be(:distribution) { create(:debian_project_distribution, codename: 'unstable') }
let_it_be(:incoming) { create(:debian_incoming, project: distribution.project) }
- let(:package_file) { incoming.package_files.last }
- let(:service) { described_class.new(package_file) }
+ let(:source_file) { incoming.package_files.first }
+ let(:dsc_file) { incoming.package_files.second }
+ let(:changes_file) { incoming.package_files.last }
+ let(:service) { described_class.new(changes_file) }
subject { service.execute }
@@ -23,7 +25,7 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
end
context 'with invalid package file' do
- let(:package_file) { incoming.package_files.first }
+ let(:changes_file) { incoming.package_files.first }
it 'raise ArgumentError', :aggregate_failures do
expect { subject }.to raise_error(described_class::ExtractionError, "is not a changes file")
@@ -31,14 +33,14 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
end
context 'with invalid metadata' do
- let(:md5_dsc) { '3b0817804f669e16cdefac583ad88f0e 671 libs optional sample_1.2.3~alpha2.dsc' }
- let(:md5_source) { 'd79b34f58f61ff4ad696d9bd0b8daa68 864 libs optional sample_1.2.3~alpha2.tar.xz' }
+ let(:md5_dsc) { "#{dsc_file.file_md5} 671 libs optional sample_1.2.3~alpha2.dsc" }
+ let(:md5_source) { "#{source_file.file_md5} 864 libs optional sample_1.2.3~alpha2.tar.xz" }
let(:md5s) { "#{md5_dsc}\n#{md5_source}" }
- let(:sha1_dsc) { '32ecbd674f0bfd310df68484d87752490685a8d6 671 sample_1.2.3~alpha2.dsc' }
- let(:sha1_source) { '5f8bba5574eb01ac3b1f5e2988e8c29307788236 864 sample_1.2.3~alpha2.tar.xz' }
+ let(:sha1_dsc) { "#{dsc_file.file_sha1} 671 sample_1.2.3~alpha2.dsc" }
+ let(:sha1_source) { "#{source_file.file_sha1} 864 sample_1.2.3~alpha2.tar.xz" }
let(:sha1s) { "#{sha1_dsc}\n#{sha1_source}" }
- let(:sha256_dsc) { '844f79825b7e8aaa191e514b58a81f9ac1e58e2180134b0c9512fa66d896d7ba 671 sample_1.2.3~alpha2.dsc' }
- let(:sha256_source) { 'b5a599e88e7cbdda3bde808160a21ba1dd1ec76b2ec8d4912aae769648d68362 864 sample_1.2.3~alpha2.tar.xz' }
+ let(:sha256_dsc) { "#{dsc_file.file_sha256} 671 sample_1.2.3~alpha2.dsc" }
+ let(:sha256_source) { "#{source_file.file_sha256} 864 sample_1.2.3~alpha2.tar.xz" }
let(:sha256s) { "#{sha256_dsc}\n#{sha256_source}" }
let(:fields) { { 'Files' => md5s, 'Checksums-Sha1' => sha1s, 'Checksums-Sha256' => sha256s } }
let(:metadata) { { file_type: :changes, architecture: 'amd64', fields: fields } }
@@ -82,7 +84,7 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
end
context 'with different size in Checksums-Sha1' do
- let(:sha1_dsc) { '32ecbd674f0bfd310df68484d87752490685a8d6 42 sample_1.2.3~alpha2.dsc' }
+ let(:sha1_dsc) { "#{dsc_file.file_sha1} 42 sample_1.2.3~alpha2.dsc" }
it 'raise ArgumentError', :aggregate_failures do
expect { subject }.to raise_error(described_class::ExtractionError, "Size for sample_1.2.3~alpha2.dsc in Files and Checksums-Sha1 differ")
@@ -99,7 +101,7 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
end
context 'with different size in Checksums-Sha256' do
- let(:sha256_dsc) { '844f79825b7e8aaa191e514b58a81f9ac1e58e2180134b0c9512fa66d896d7ba 42 sample_1.2.3~alpha2.dsc' }
+ let(:sha256_dsc) { "#{dsc_file.file_sha256} 42 sample_1.2.3~alpha2.dsc" }
it 'raise ArgumentError', :aggregate_failures do
expect { subject }.to raise_error(described_class::ExtractionError, "Size for sample_1.2.3~alpha2.dsc in Files and Checksums-Sha256 differ")
@@ -126,7 +128,7 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
let(:md5_dsc) { '1234567890123456789012345678012 671 libs optional sample_1.2.3~alpha2.dsc' }
it 'raise ArgumentError', :aggregate_failures do
- expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Md5sum mismatch for sample_1.2.3~alpha2.dsc: 3b0817804f669e16cdefac583ad88f0e != 1234567890123456789012345678012")
+ expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Md5sum mismatch for sample_1.2.3~alpha2.dsc: #{dsc_file.file_md5} != 1234567890123456789012345678012")
end
end
@@ -134,7 +136,7 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
let(:sha1_dsc) { '1234567890123456789012345678901234567890 671 sample_1.2.3~alpha2.dsc' }
it 'raise ArgumentError', :aggregate_failures do
- expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Sha1sum mismatch for sample_1.2.3~alpha2.dsc: 32ecbd674f0bfd310df68484d87752490685a8d6 != 1234567890123456789012345678901234567890")
+ expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Sha1sum mismatch for sample_1.2.3~alpha2.dsc: #{dsc_file.file_sha1} != 1234567890123456789012345678901234567890")
end
end
@@ -142,7 +144,7 @@ RSpec.describe Packages::Debian::ExtractChangesMetadataService do
let(:sha256_dsc) { '1234567890123456789012345678901234567890123456789012345678901234 671 sample_1.2.3~alpha2.dsc' }
it 'raise ArgumentError', :aggregate_failures do
- expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Sha256sum mismatch for sample_1.2.3~alpha2.dsc: 844f79825b7e8aaa191e514b58a81f9ac1e58e2180134b0c9512fa66d896d7ba != 1234567890123456789012345678901234567890123456789012345678901234")
+ expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Sha256sum mismatch for sample_1.2.3~alpha2.dsc: #{dsc_file.file_sha256} != 1234567890123456789012345678901234567890123456789012345678901234")
end
end
end
diff --git a/spec/services/packages/debian/generate_distribution_service_spec.rb b/spec/services/packages/debian/generate_distribution_service_spec.rb
index 0547d18c8bc..a162e492e7e 100644
--- a/spec/services/packages/debian/generate_distribution_service_spec.rb
+++ b/spec/services/packages/debian/generate_distribution_service_spec.rb
@@ -1,182 +1,25 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe Packages::Debian::GenerateDistributionService do
- let_it_be(:group) { create(:group, :public) }
- let_it_be(:project) { create(:project, :public, group: group) }
- let_it_be(:project_distribution) { create("debian_project_distribution", container: project, codename: 'unstable', valid_time_duration_seconds: 48.hours.to_i) }
-
- let_it_be(:incoming) { create(:debian_incoming, project: project) }
-
- before_all do
- ::Packages::Debian::ProcessChangesService.new(incoming.package_files.last, nil).execute
- end
-
- let(:service) { described_class.new(distribution) }
-
describe '#execute' do
- subject { service.execute }
-
- shared_examples 'Generate Distribution' do |container_type|
- context "for #{container_type}" do
- if container_type == :group
- let_it_be(:container) { group }
- let_it_be(:distribution, reload: true) { create('debian_group_distribution', container: group, codename: 'unstable', valid_time_duration_seconds: 48.hours.to_i) }
- else
- let_it_be(:container) { project }
- let_it_be(:distribution, reload: true) { project_distribution }
- end
-
- context 'with components and architectures' do
- let_it_be(:component_main ) { create("debian_#{container_type}_component", distribution: distribution, name: 'main') }
- let_it_be(:component_contrib) { create("debian_#{container_type}_component", distribution: distribution, name: 'contrib') }
-
- let_it_be(:architecture_all ) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'all') }
- let_it_be(:architecture_amd64) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'amd64') }
- let_it_be(:architecture_arm64) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'arm64') }
-
- let_it_be(:component_file1) { create("debian_#{container_type}_component_file", component: component_main, architecture: architecture_all, created_at: '2020-01-24T09:00:00.000Z') } # destroyed
- let_it_be(:component_file2) { create("debian_#{container_type}_component_file", component: component_main, architecture: architecture_amd64, created_at: '2020-01-24T10:29:59.000Z') } # destroyed
- let_it_be(:component_file3) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_all, created_at: '2020-01-24T10:30:00.000Z') } # kept
- let_it_be(:component_file4) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_amd64, created_at: '2020-01-24T11:30:00.000Z') } # kept
-
- def check_component_file(component_name, component_file_type, architecture_name, expected_content)
- component_file = distribution
- .component_files
- .with_component_name(component_name)
- .with_file_type(component_file_type)
- .with_architecture_name(architecture_name)
- .last
+ subject { described_class.new(distribution).execute }
- expect(component_file).not_to be_nil
- expect(component_file.file.exists?).to eq(!expected_content.nil?)
+ include_context 'with published Debian package'
- unless expected_content.nil?
- component_file.file.use_file do |file_path|
- expect(File.read(file_path)).to eq(expected_content)
- end
- end
- end
-
- it 'updates distribution and component files', :aggregate_failures do
- travel_to(Time.utc(2020, 01, 25, 15, 17, 18, 123456)) do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
-
- expect { subject }
- .to not_change { Packages::Package.count }
- .and not_change { Packages::PackageFile.count }
- .and change { distribution.component_files.count }.from(4).to(2 + 6)
-
- expected_main_amd64_content = <<~EOF
- Package: libsample0
- Source: sample
- Version: 1.2.3~alpha2
- Installed-Size: 7
- Maintainer: John Doe <john.doe@example.com>
- Architecture: amd64
- Description: Some mostly empty lib
- Used in GitLab tests.
- .
- Testing another paragraph.
- Multi-Arch: same
- Homepage: https://gitlab.com/
- Section: libs
- Priority: optional
- Filename: pool/unstable/#{project.id}/s/sample/libsample0_1.2.3~alpha2_amd64.deb
- Size: 409600
- MD5sum: fb0842b21adc44207996296fe14439dd
- SHA256: 1c383a525bfcba619c7305ccd106d61db501a6bbaf0003bf8d0c429fbdb7fcc1
-
- Package: sample-dev
- Source: sample (1.2.3~alpha2)
- Version: 1.2.3~binary
- Installed-Size: 7
- Maintainer: John Doe <john.doe@example.com>
- Architecture: amd64
- Depends: libsample0 (= 1.2.3~binary)
- Description: Some mostly empty developpement files
- Used in GitLab tests.
- .
- Testing another paragraph.
- Multi-Arch: same
- Homepage: https://gitlab.com/
- Section: libdevel
- Priority: optional
- Filename: pool/unstable/#{project.id}/s/sample/sample-dev_1.2.3~binary_amd64.deb
- Size: 409600
- MD5sum: d2afbd28e4d74430d22f9504e18bfdf5
- SHA256: 9fbeee2191ce4dab5288fad5ecac1bd369f58fef9a992a880eadf0caf25f086d
- EOF
-
- check_component_file('main', :packages, 'all', nil)
- check_component_file('main', :packages, 'amd64', expected_main_amd64_content)
- check_component_file('main', :packages, 'arm64', nil)
-
- check_component_file('contrib', :packages, 'all', nil)
- check_component_file('contrib', :packages, 'amd64', nil)
- check_component_file('contrib', :packages, 'arm64', nil)
-
- size = expected_main_amd64_content.length
- md5sum = Digest::MD5.hexdigest(expected_main_amd64_content)
- sha256 = Digest::SHA256.hexdigest(expected_main_amd64_content)
-
- expected_release_content = <<~EOF
- Codename: unstable
- Date: Sat, 25 Jan 2020 15:17:18 +0000
- Valid-Until: Mon, 27 Jan 2020 15:17:18 +0000
- Architectures: all amd64 arm64
- Components: contrib main
- MD5Sum:
- d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-all/Packages
- d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-amd64/Packages
- d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-arm64/Packages
- d41d8cd98f00b204e9800998ecf8427e 0 main/binary-all/Packages
- #{md5sum} #{size} main/binary-amd64/Packages
- d41d8cd98f00b204e9800998ecf8427e 0 main/binary-arm64/Packages
- SHA256:
- e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-all/Packages
- e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-amd64/Packages
- e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-arm64/Packages
- e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-all/Packages
- #{sha256} #{size} main/binary-amd64/Packages
- e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-arm64/Packages
- EOF
+ [:project, :group].each do |container_type|
+ context "for #{container_type}" do
+ include_context 'with Debian distribution', container_type
- distribution.file.use_file do |file_path|
- expect(File.read(file_path)).to eq(expected_release_content)
- end
- end
- end
+ context 'with Debian components and architectures' do
+ it_behaves_like 'Generate Debian Distribution and component files'
end
context 'without components and architectures' do
- it 'updates distribution and component files', :aggregate_failures do
- travel_to(Time.utc(2020, 01, 25, 15, 17, 18, 123456)) do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
-
- expect { subject }
- .to not_change { Packages::Package.count }
- .and not_change { Packages::PackageFile.count }
- .and not_change { distribution.component_files.count }
-
- expected_release_content = <<~EOF
- Codename: unstable
- Date: Sat, 25 Jan 2020 15:17:18 +0000
- Valid-Until: Mon, 27 Jan 2020 15:17:18 +0000
- MD5Sum:
- SHA256:
- EOF
-
- distribution.file.use_file do |file_path|
- expect(File.read(file_path)).to eq(expected_release_content)
- end
- end
- end
+ it_behaves_like 'Generate minimal Debian Distribution'
end
end
end
-
- it_behaves_like 'Generate Distribution', :project
- it_behaves_like 'Generate Distribution', :group
end
end
diff --git a/spec/services/packages/debian/parse_debian822_service_spec.rb b/spec/services/packages/debian/parse_debian822_service_spec.rb
index f43e38991ce..cad4e81f350 100644
--- a/spec/services/packages/debian/parse_debian822_service_spec.rb
+++ b/spec/services/packages/debian/parse_debian822_service_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe Packages::Debian::ParseDebian822Service do
'Architecture' => 'any',
'Multi-Arch' => 'same',
'Depends' => 'libsample0 (= ${binary:Version}), ${misc:Depends}',
- 'Description' => "Some mostly empty developpement files\nUsed in GitLab tests.\n\nTesting another paragraph."
+ 'Description' => "Some mostly empty development files\nUsed in GitLab tests.\n\nTesting another paragraph."
},
'Package: libsample0' => {
'Package' => 'libsample0',
diff --git a/spec/services/packages/debian/process_changes_service_spec.rb b/spec/services/packages/debian/process_changes_service_spec.rb
index f23471659bc..3069a2806b2 100644
--- a/spec/services/packages/debian/process_changes_service_spec.rb
+++ b/spec/services/packages/debian/process_changes_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Packages::Debian::ProcessChangesService do
context 'with valid package file' do
it 'updates package and package file', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).to receive(:perform_async).with(:project, distribution.id)
expect { subject.execute }
.to change { Packages::Package.count }.from(1).to(2)
.and not_change { Packages::PackageFile.count }
@@ -30,6 +31,7 @@ RSpec.describe Packages::Debian::ProcessChangesService do
let(:package_file) { incoming.package_files.first }
it 'raise ExtractionError', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
expect { subject.execute }
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
@@ -47,6 +49,7 @@ RSpec.describe Packages::Debian::ProcessChangesService do
end
it 'remove the package file', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
expect { subject.execute }
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
diff --git a/spec/services/packages/debian/update_distribution_service_spec.rb b/spec/services/packages/debian/update_distribution_service_spec.rb
index 852fc713e34..2aa34a62111 100644
--- a/spec/services/packages/debian/update_distribution_service_spec.rb
+++ b/spec/services/packages/debian/update_distribution_service_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe Packages::Debian::UpdateDistributionService do
RSpec.shared_examples 'Update Debian Distribution' do |expected_message, expected_components, expected_architectures, component_file_delta = 0|
it 'returns ServiceResponse', :aggregate_failures do
expect(distribution).to receive(:update).with(simple_params).and_call_original if expected_message.nil?
+ expect(::Packages::Debian::GenerateDistributionWorker).to receive(:perform_async).with(distribution.class.container_type, distribution.id).and_call_original if expected_message.nil?
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async) unless expected_message.nil?
if component_file_delta.zero?
expect { response }
diff --git a/spec/services/packages/helm/extract_file_metadata_service_spec.rb b/spec/services/packages/helm/extract_file_metadata_service_spec.rb
new file mode 100644
index 00000000000..ea196190e24
--- /dev/null
+++ b/spec/services/packages/helm/extract_file_metadata_service_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Helm::ExtractFileMetadataService do
+ let_it_be(:package_file) { create(:helm_package_file) }
+
+ let(:service) { described_class.new(package_file) }
+
+ let(:expected) do
+ {
+ 'apiVersion' => 'v2',
+ 'description' => 'File, Block, and Object Storage Services for your Cloud-Native Environment',
+ 'icon' => 'https://rook.io/images/rook-logo.svg',
+ 'name' => 'rook-ceph',
+ 'sources' => ['https://github.com/rook/rook'],
+ 'version' => 'v1.5.8'
+ }
+ end
+
+ subject { service.execute }
+
+ context 'with a valid file' do
+ it { is_expected.to eq(expected) }
+ end
+
+ context 'without Chart.yaml' do
+ before do
+ expect_next_instances_of(Gem::Package::TarReader::Entry, 14) do |entry|
+ expect(entry).to receive(:full_name).exactly(:once).and_wrap_original do |m, *args|
+ m.call(*args) + '_suffix'
+ end
+ end
+ end
+
+ it { expect { subject }.to raise_error(described_class::ExtractionError, 'Chart.yaml not found within a directory') }
+ end
+
+ context 'with Chart.yaml at root' do
+ before do
+ expect_next_instances_of(Gem::Package::TarReader::Entry, 14) do |entry|
+ expect(entry).to receive(:full_name).exactly(:once) do
+ 'Chart.yaml'
+ end
+ end
+ end
+
+ it { expect { subject }.to raise_error(described_class::ExtractionError, 'Chart.yaml not found within a directory') }
+ end
+
+ context 'with an invalid YAML' do
+ before do
+ expect_next_instance_of(Gem::Package::TarReader::Entry) do |entry|
+ expect(entry).to receive(:read).and_return('{')
+ end
+ end
+
+ it { expect { subject }.to raise_error(described_class::ExtractionError, 'Error while parsing Chart.yaml: (<unknown>): did not find expected node content while parsing a flow node at line 2 column 1') }
+ end
+end
diff --git a/spec/services/packages/nuget/metadata_extraction_service_spec.rb b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
index 39fc0f9e6a1..79428b58bd9 100644
--- a/spec/services/packages/nuget/metadata_extraction_service_spec.rb
+++ b/spec/services/packages/nuget/metadata_extraction_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Packages::Nuget::MetadataExtractionService do
- let(:package_file) { create(:nuget_package).package_files.first }
+ let_it_be(:package_file) { create(:nuget_package).package_files.first }
+
let(:service) { described_class.new(package_file.id) }
describe '#execute' do
@@ -28,7 +29,7 @@ RSpec.describe Packages::Nuget::MetadataExtractionService do
context 'with nuspec file' do
before do
- allow(service).to receive(:nuspec_file).and_return(fixture_file(nuspec_filepath))
+ allow(service).to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
end
context 'with dependencies' do
@@ -57,7 +58,7 @@ RSpec.describe Packages::Nuget::MetadataExtractionService do
let_it_be(:nuspec_filepath) { 'packages/nuget/with_metadata.nuspec' }
before do
- allow(service).to receive(:nuspec_file).and_return(fixture_file(nuspec_filepath))
+ allow(service).to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
end
it { expect(subject[:license_url]).to eq('https://opensource.org/licenses/MIT') }
diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
index c1cce46a54c..ffe1a5b7646 100644
--- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
+++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
let(:package_version) { '1.0.0' }
let(:package_file_name) { 'dummyproject.dummypackage.1.0.0.nupkg' }
- RSpec.shared_examples 'raising an' do |error_class|
+ shared_examples 'raising an' do |error_class|
it "raises an #{error_class}" do
expect { subject }.to raise_error(error_class)
end
@@ -21,11 +21,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
describe '#execute' do
subject { service.execute }
- before do
- stub_package_file_object_storage(enabled: true, direct_upload: true)
- end
-
- RSpec.shared_examples 'taking the lease' do
+ shared_examples 'taking the lease' do
before do
allow(service).to receive(:lease_release?).and_return(false)
end
@@ -39,7 +35,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
end
end
- RSpec.shared_examples 'not updating the package if the lease is taken' do
+ shared_examples 'not updating the package if the lease is taken' do
context 'without obtaining the exclusive lease' do
let(:lease_key) { "packages:nuget:update_package_from_metadata_service:package:#{package_id}" }
let(:metadata) { { package_name: package_name, package_version: package_version } }
@@ -117,9 +113,10 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
let(:expected_tags) { %w(foo bar test tag1 tag2 tag3 tag4 tag5) }
before do
- allow_any_instance_of(Packages::Nuget::MetadataExtractionService)
- .to receive(:nuspec_file)
- .and_return(fixture_file(nuspec_filepath))
+ allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
+ allow(service)
+ .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
+ end
end
it 'creates tags' do
@@ -172,9 +169,10 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
let(:package_file_name) { 'test.package.3.5.2.nupkg' }
before do
- allow_any_instance_of(Packages::Nuget::MetadataExtractionService)
- .to receive(:nuspec_file)
- .and_return(fixture_file(nuspec_filepath))
+ allow_next_instance_of(Packages::Nuget::MetadataExtractionService) do |service|
+ allow(service)
+ .to receive(:nuspec_file_content).and_return(fixture_file(nuspec_filepath))
+ end
end
it 'updates package and package file' do
@@ -195,7 +193,9 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
context 'with package file not containing a nuspec file' do
before do
- allow_any_instance_of(Zip::File).to receive(:glob).and_return([])
+ allow_next_instance_of(Zip::File) do |file|
+ allow(file).to receive(:glob).and_return([])
+ end
end
it_behaves_like 'raising an', ::Packages::Nuget::MetadataExtractionService::ExtractionError
diff --git a/spec/services/pages/delete_service_spec.rb b/spec/services/pages/delete_service_spec.rb
index a79c89a1c35..295abe15bf0 100644
--- a/spec/services/pages/delete_service_spec.rb
+++ b/spec/services/pages/delete_service_spec.rb
@@ -6,7 +6,6 @@ RSpec.describe Pages::DeleteService do
let_it_be(:admin) { create(:admin) }
let(:project) { create(:project, path: "my.project")}
- let!(:domain) { create(:pages_domain, project: project) }
let(:service) { described_class.new(project, admin)}
before do
@@ -14,8 +13,6 @@ RSpec.describe Pages::DeleteService do
end
it 'deletes published pages', :sidekiq_inline do
- expect(project.pages_deployed?).to be(true)
-
expect_next_instance_of(Gitlab::PagesTransfer) do |pages_transfer|
expect(pages_transfer).to receive(:rename_project).and_return true
end
@@ -23,11 +20,9 @@ RSpec.describe Pages::DeleteService do
expect(PagesWorker).to receive(:perform_in).with(5.minutes, :remove, project.namespace.full_path, anything)
service.execute
-
- expect(project.pages_deployed?).to be(false)
end
- it "doesn't remove anything from the legacy storage", :sidekiq_inline do
+ it "doesn't remove anything from the legacy storage if local_store is disabled", :sidekiq_inline do
allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
expect(project.pages_deployed?).to be(true)
@@ -38,12 +33,20 @@ RSpec.describe Pages::DeleteService do
expect(project.pages_deployed?).to be(false)
end
- it 'deletes all domains', :sidekiq_inline do
- expect(project.pages_domains.count).to eq(1)
+ it 'marks pages as not deployed' do
+ expect do
+ service.execute
+ end.to change { project.reload.pages_deployed? }.from(true).to(false)
+ end
+
+ it 'deletes all domains' do
+ domain = create(:pages_domain, project: project)
+ unrelated_domain = create(:pages_domain)
service.execute
- expect(project.reload.pages_domains.count).to eq(0)
+ expect(PagesDomain.find_by_id(domain.id)).to eq(nil)
+ expect(PagesDomain.find_by_id(unrelated_domain.id)).to be
end
it 'schedules a destruction of pages deployments' do
@@ -61,20 +64,4 @@ RSpec.describe Pages::DeleteService do
service.execute
end.to change { PagesDeployment.count }.by(-1)
end
-
- it 'marks pages as not deployed, deletes domains and schedules worker to remove pages from disk' do
- expect(project.pages_deployed?).to eq(true)
- expect(project.pages_domains.count).to eq(1)
-
- service.execute
-
- expect(project.pages_deployed?).to eq(false)
- expect(project.pages_domains.count).to eq(0)
-
- expect_next_instance_of(Gitlab::PagesTransfer) do |pages_transfer|
- expect(pages_transfer).to receive(:rename_project).and_return true
- end
-
- Sidekiq::Worker.drain_all
- end
end
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
index e233abcd96a..598b162aee4 100644
--- a/spec/services/pod_logs/elasticsearch_service_spec.rb
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -34,11 +34,11 @@ RSpec.describe ::PodLogs::ElasticsearchService do
describe '#get_raw_pods' do
before do
- create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
+ create(:clusters_integrations_elastic_stack, cluster: cluster)
end
it 'returns success with elasticsearch response' do
- allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ allow_any_instance_of(::Clusters::Integrations::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
@@ -53,7 +53,7 @@ RSpec.describe ::PodLogs::ElasticsearchService do
end
it 'returns an error when ES is unreachable' do
- allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ allow_any_instance_of(::Clusters::Integrations::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(nil)
@@ -64,7 +64,7 @@ RSpec.describe ::PodLogs::ElasticsearchService do
end
it 'handles server errors from elasticsearch' do
- allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ allow_any_instance_of(::Clusters::Integrations::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
@@ -247,11 +247,11 @@ RSpec.describe ::PodLogs::ElasticsearchService do
let(:expected_cursor) { '9999934,1572449784442' }
before do
- create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
+ create(:clusters_integrations_elastic_stack, cluster: cluster)
end
it 'returns the logs' do
- allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ allow_any_instance_of(::Clusters::Integrations::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
@@ -267,7 +267,7 @@ RSpec.describe ::PodLogs::ElasticsearchService do
end
it 'returns an error when ES is unreachable' do
- allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ allow_any_instance_of(::Clusters::Integrations::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(nil)
@@ -278,7 +278,7 @@ RSpec.describe ::PodLogs::ElasticsearchService do
end
it 'handles server errors from elasticsearch' do
- allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ allow_any_instance_of(::Clusters::Integrations::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
@@ -292,7 +292,7 @@ RSpec.describe ::PodLogs::ElasticsearchService do
end
it 'handles cursor errors from elasticsearch' do
- allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ allow_any_instance_of(::Clusters::Integrations::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index cd659bf5e60..ac0b6cc8ef1 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -82,6 +82,34 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
+ describe 'topics' do
+ subject(:project) { create_project(user, opts) }
+
+ context "with 'topics' parameter" do
+ let(:opts) { { topics: 'topics' } }
+
+ it 'keeps them as specified' do
+ expect(project.topic_list).to eq(%w[topics])
+ end
+ end
+
+ context "with 'topic_list' parameter" do
+ let(:opts) { { topic_list: 'topic_list' } }
+
+ it 'keeps them as specified' do
+ expect(project.topic_list).to eq(%w[topic_list])
+ end
+ end
+
+ context "with 'tag_list' parameter (deprecated)" do
+ let(:opts) { { tag_list: 'tag_list' } }
+
+ it 'keeps them as specified' do
+ expect(project.topic_list).to eq(%w[tag_list])
+ end
+ end
+ end
+
context 'user namespace' do
it do
project = create_project(user, opts)
@@ -270,7 +298,7 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'error handling' do
it 'handles invalid options' do
- opts[:default_branch] = 'master'
+ opts[:invalid] = 'option'
expect(create_project(user, opts)).to eq(nil)
end
end
@@ -663,7 +691,7 @@ RSpec.describe Projects::CreateService, '#execute' do
stub_feature_flags(projects_post_creation_worker: false)
end
- context 'Prometheus application is shared via group cluster' do
+ context 'Prometheus integration is shared via group cluster' do
let(:cluster) { create(:cluster, :group, groups: [group]) }
let(:group) do
create(:group).tap do |group|
@@ -672,7 +700,7 @@ RSpec.describe Projects::CreateService, '#execute' do
end
before do
- create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ create(:clusters_integrations_prometheus, cluster: cluster)
end
it 'creates PrometheusService record', :aggregate_failures do
@@ -685,11 +713,11 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
- context 'Prometheus application is shared via instance cluster' do
+ context 'Prometheus integration is shared via instance cluster' do
let(:cluster) { create(:cluster, :instance) }
before do
- create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ create(:clusters_integrations_prometheus, cluster: cluster)
end
it 'creates PrometheusService record', :aggregate_failures do
@@ -712,7 +740,7 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
- context 'shared Prometheus application is not available' do
+ context 'shared Prometheus integration is not available' do
it 'does not persist PrometheusService record', :aggregate_failures do
project = create_project(user, opts)
@@ -778,7 +806,7 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
- context 'with specialized_project_authorization_workers' do
+ context 'with specialized project_authorization workers' do
let_it_be(:other_user) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -809,7 +837,7 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(AuthorizedProjectUpdate::ProjectCreateWorker).to(
receive(:perform_async).and_call_original
)
- expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
receive(:bulk_perform_in)
.with(1.hour,
array_including([user.id], [other_user.id]),
@@ -819,34 +847,6 @@ RSpec.describe Projects::CreateService, '#execute' do
create_project(user, opts)
end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(specialized_project_authorization_workers: false)
- end
-
- it 'updates authorization for current_user' do
- project = create_project(user, opts)
-
- expect(
- Ability.allowed?(user, :read_project, project)
- ).to be_truthy
- end
-
- it 'uses AuthorizedProjectsWorker' do
- expect(AuthorizedProjectsWorker).to(
- receive(:bulk_perform_async).with(array_including([user.id], [other_user.id])).and_call_original
- )
- expect(AuthorizedProjectUpdate::ProjectCreateWorker).not_to(
- receive(:perform_async)
- )
- expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).not_to(
- receive(:bulk_perform_in)
- )
-
- create_project(user, opts)
- end
- end
end
def create_project(user, opts)
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index ff582279d71..c6b2b1e2b21 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -447,23 +447,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
it_behaves_like 'handles errors thrown during async destroy', "Failed to remove webhooks"
end
-
- context 'when "destroy_webhooks_before_the_project" flag is disabled' do
- before do
- stub_feature_flags(destroy_webhooks_before_the_project: false)
- end
-
- it 'does not call WebHooks::DestroyService' do
- expect(WebHooks::DestroyService).not_to receive(:new)
-
- expect do
- destroy_project(project, user)
- end.to change(WebHook, :count).by(-2)
- .and change(WebHookLog, :count).by(-1)
-
- expect(another_project_web_hook.reload).to be
- end
- end
end
context 'error while destroying', :sidekiq_inline do
diff --git a/spec/services/projects/group_links/create_service_spec.rb b/spec/services/projects/group_links/create_service_spec.rb
index c249a51fc56..9bc780fe177 100644
--- a/spec/services/projects/group_links/create_service_spec.rb
+++ b/spec/services/projects/group_links/create_service_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
expect { subject.execute(create(:group)) }.not_to change { project.project_group_links.count }
end
- context 'with specialized_project_authorization_workers' do
+ context 'with specialized project_authorization workers' do
let_it_be(:other_user) { create(:user) }
before do
@@ -54,7 +54,7 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
.with(project.id, group.id, group_access)
.and_call_original
)
- expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
receive(:bulk_perform_in)
.with(1.hour,
array_including([user.id], [other_user.id]),
@@ -64,25 +64,5 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
subject.execute(group)
end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(specialized_project_authorization_project_share_worker: false)
- end
-
- it 'uses AuthorizedProjectsWorker' do
- expect(AuthorizedProjectsWorker).to(
- receive(:bulk_perform_async).with(array_including([user.id], [other_user.id])).and_call_original
- )
- expect(AuthorizedProjectUpdate::ProjectCreateWorker).not_to(
- receive(:perform_async)
- )
- expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).not_to(
- receive(:bulk_perform_in)
- )
-
- subject.execute(group)
- end
- end
end
end
diff --git a/spec/services/projects/group_links/destroy_service_spec.rb b/spec/services/projects/group_links/destroy_service_spec.rb
index 459b79b2d7d..d60e9a01e54 100644
--- a/spec/services/projects/group_links/destroy_service_spec.rb
+++ b/spec/services/projects/group_links/destroy_service_spec.rb
@@ -14,12 +14,60 @@ RSpec.describe Projects::GroupLinks::DestroyService, '#execute' do
expect { subject.execute(group_link) }.to change { project.project_group_links.count }.from(1).to(0)
end
- it 'updates authorization' do
- group.add_maintainer(user)
+ context 'project authorizations refresh' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ context 'when the feature flag `use_specialized_worker_for_project_auth_recalculation` is enabled' do
+ before do
+ stub_feature_flags(use_specialized_worker_for_project_auth_recalculation: true)
+ end
+
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
+ expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
+ .to receive(:perform_async).with(group_link.project.id)
+
+ subject.execute(group_link)
+ end
+
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
+
+ subject.execute(group_link)
+ end
- expect { subject.execute(group_link) }.to(
- change { Ability.allowed?(user, :read_project, project) }
- .from(true).to(false))
+ it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
+ expect { subject.execute(group_link) }.to(
+ change { Ability.allowed?(user, :read_project, project) }
+ .from(true).to(false))
+ end
+ end
+
+ context 'when the feature flag `use_specialized_worker_for_project_auth_recalculation` is disabled' do
+ before do
+ stub_feature_flags(use_specialized_worker_for_project_auth_recalculation: false)
+ end
+
+ it 'calls UserProjectAccessChangedService to update project authorizations' do
+ expect_next_instance_of(UserProjectAccessChangedService, [user.id]) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject.execute(group_link)
+ end
+
+ it 'updates project authorizations of users who had access to the project via the group share' do
+ expect { subject.execute(group_link) }.to(
+ change { Ability.allowed?(user, :read_project, project) }
+ .from(true).to(false))
+ end
+ end
end
it 'returns false if group_link is blank' do
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index bfc8225b654..5235c64d451 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -45,9 +45,9 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
before do
- create(:clusters_applications_prometheus, :installed,
+ create(:clusters_integrations_prometheus,
cluster: prd_cluster, alert_manager_token: token)
- create(:clusters_applications_prometheus, :installed,
+ create(:clusters_integrations_prometheus,
cluster: stg_cluster, alert_manager_token: nil)
end
@@ -62,41 +62,6 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
end
- context 'with project specific cluster using prometheus application' do
- where(:cluster_enabled, :status, :configured_token, :token_input, :result) do
- true | :installed | token | token | :success
- true | :installed | nil | nil | :success
- true | :updated | token | token | :success
- true | :updating | token | token | :failure
- true | :installed | token | 'x' | :failure
- true | :installed | nil | token | :failure
- true | :installed | token | nil | :failure
- true | nil | token | token | :failure
- false | :installed | token | token | :failure
- end
-
- with_them do
- before do
- cluster.update!(enabled: cluster_enabled)
-
- if status
- create(:clusters_applications_prometheus, status,
- cluster: cluster,
- alert_manager_token: configured_token)
- end
- end
-
- case result = params[:result]
- when :success
- include_examples 'processes one firing and one resolved prometheus alerts'
- when :failure
- it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
- else
- raise "invalid result: #{result.inspect}"
- end
- end
- end
-
context 'with project specific cluster using prometheus integration' do
where(:cluster_enabled, :integration_enabled, :configured_token, :token_input, :result) do
true | true | token | token | :success
diff --git a/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb b/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb
index 2dc4a56368b..76830396104 100644
--- a/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb
+++ b/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::ScheduleBulkRepositoryShardMovesService do
it_behaves_like 'moves repository shard in bulk' do
- let_it_be_with_reload(:container) { create(:project, :repository).tap { |project| project.track_project_repository } }
+ let_it_be_with_reload(:container) { create(:project, :repository) }
let(:move_service_klass) { Projects::RepositoryStorageMove }
let(:bulk_worker_klass) { ::Projects::ScheduleBulkRepositoryShardMovesWorker }
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 8498b752610..3171abfb36f 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -195,8 +195,6 @@ RSpec.describe Projects::TransferService do
end
it 'does not update storage location' do
- create(:project_repository, project: project)
-
attempt_project_transfer
expect(project.project_repository).to have_attributes(
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index b9e909e8615..e1b22da2e61 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -587,6 +587,31 @@ RSpec.describe Projects::UpdateService do
it_behaves_like 'the transfer was not scheduled'
end
end
+
+ describe 'when updating topics' do
+ let(:project) { create(:project, topic_list: 'topic1, topic2') }
+
+ it 'update using topics' do
+ result = update_project(project, user, { topics: 'topics' })
+
+ expect(result[:status]).to eq(:success)
+ expect(project.topic_list).to eq(%w[topics])
+ end
+
+ it 'update using topic_list' do
+ result = update_project(project, user, { topic_list: 'topic_list' })
+
+ expect(result[:status]).to eq(:success)
+ expect(project.topic_list).to eq(%w[topic_list])
+ end
+
+ it 'update using tag_list (deprecated)' do
+ result = update_project(project, user, { tag_list: 'tag_list' })
+
+ expect(result[:status]).to eq(:success)
+ expect(project.topic_list).to eq(%w[tag_list])
+ end
+ end
end
describe '#run_auto_devops_pipeline?' do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index f3ad69bae13..4af76bc65ab 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -1202,16 +1202,6 @@ RSpec.describe QuickActions::InterpretService do
end
it_behaves_like 'draft command' do
- let(:content) { '/wip' }
- let(:issuable) { merge_request }
- end
-
- it_behaves_like 'undraft command' do
- let(:content) { '/wip' }
- let(:issuable) { merge_request }
- end
-
- it_behaves_like 'draft command' do
let(:content) { '/draft' }
let(:issuable) { merge_request }
end
diff --git a/spec/services/security/ci_configuration/sast_parser_service_spec.rb b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
index 4ebaddcfa4e..4fe99f20879 100644
--- a/spec/services/security/ci_configuration/sast_parser_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Security::CiConfiguration::SastParserService do
let(:configuration) { described_class.new(project).configuration }
let(:secure_analyzers_prefix) { configuration['global'][0] }
let(:sast_excluded_paths) { configuration['global'][1] }
- let(:sast_analyzer_image_tag) { configuration['global'][2] }
let(:sast_pipeline_stage) { configuration['pipeline'][0] }
let(:sast_search_max_depth) { configuration['pipeline'][1] }
let(:bandit) { configuration['analyzers'][0] }
@@ -19,7 +18,6 @@ RSpec.describe Security::CiConfiguration::SastParserService do
it 'parses the configuration for SAST' do
expect(secure_analyzers_prefix['default_value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers')
expect(sast_excluded_paths['default_value']).to eql('spec, test, tests, tmp')
- expect(sast_analyzer_image_tag['default_value']).to eql('2')
expect(sast_pipeline_stage['default_value']).to eql('test')
expect(sast_search_max_depth['default_value']).to eql('4')
expect(brakeman['enabled']).to be(true)
@@ -32,7 +30,6 @@ RSpec.describe Security::CiConfiguration::SastParserService do
allow(project.repository).to receive(:blob_data_at).and_return(gitlab_ci_yml_content)
expect(secure_analyzers_prefix['value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers2')
expect(sast_excluded_paths['value']).to eql('spec, executables')
- expect(sast_analyzer_image_tag['value']).to eql('2')
expect(sast_pipeline_stage['value']).to eql('our_custom_security_stage')
expect(sast_search_max_depth['value']).to eql('8')
expect(brakeman['enabled']).to be(false)
@@ -40,15 +37,6 @@ RSpec.describe Security::CiConfiguration::SastParserService do
expect(sast_brakeman_level['value']).to eql('2')
end
- context 'SAST_DEFAULT_ANALYZERS is set' do
- it 'enables analyzers correctly' do
- allow(project.repository).to receive(:blob_data_at).and_return(gitlab_ci_yml_default_analyzers_content)
-
- expect(brakeman['enabled']).to be(false)
- expect(bandit['enabled']).to be(true)
- end
- end
-
context 'SAST_EXCLUDED_ANALYZERS is set' do
it 'enables analyzers correctly' do
allow(project.repository).to receive(:blob_data_at).and_return(gitlab_ci_yml_excluded_analyzers_content)
@@ -64,7 +52,23 @@ RSpec.describe Security::CiConfiguration::SastParserService do
allow(project.repository).to receive(:blob_data_at).and_return(nil)
expect(secure_analyzers_prefix['value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers')
expect(sast_excluded_paths['value']).to eql('spec, test, tests, tmp')
- expect(sast_analyzer_image_tag['value']).to eql('2')
+ expect(sast_pipeline_stage['value']).to eql('test')
+ expect(sast_search_max_depth['value']).to eql('4')
+ expect(brakeman['enabled']).to be(true)
+ expect(sast_brakeman_level['value']).to eql('1')
+ end
+ end
+
+ context 'when .gitlab-ci.yml does not include the sast job' do
+ before do
+ allow(project.repository).to receive(:blob_data_at).and_return(
+ File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ )
+ end
+
+ it 'populates the current values with the default values' do
+ expect(secure_analyzers_prefix['value']).to eql('registry.gitlab.com/gitlab-org/security-products/analyzers')
+ expect(sast_excluded_paths['value']).to eql('spec, test, tests, tmp')
expect(sast_pipeline_stage['value']).to eql('test')
expect(sast_search_max_depth['value']).to eql('4')
expect(brakeman['enabled']).to be(true)
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index 32a09e1afc8..eb6e85eb408 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Snippets::CreateService do
let(:extra_opts) { {} }
let(:creator) { admin }
- subject { described_class.new(project, creator, opts).execute }
+ subject { described_class.new(project: project, current_user: creator, params: opts).execute }
let(:snippet) { subject.payload[:snippet] }
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index e737c00ae67..46bc62e11ef 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Snippets::UpdateService do
let(:extra_opts) { {} }
let(:options) { base_opts.merge(extra_opts) }
let(:updater) { user }
- let(:service) { Snippets::UpdateService.new(project, updater, options) }
+ let(:service) { Snippets::UpdateService.new(project: project, current_user: updater, params: options) }
subject { service.execute(snippet) }
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index ae18bc23c17..0eb327ea7f1 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -735,7 +735,7 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
it 'is true with issue tracker not supporting referencing' do
- create(:bugzilla_service, project: project)
+ create(:bugzilla_integration, project: project)
project.reload
expect(service.cross_reference_disallowed?(noteable)).to be_truthy
diff --git a/spec/services/user_project_access_changed_service_spec.rb b/spec/services/user_project_access_changed_service_spec.rb
index 070782992e7..4723619afd2 100644
--- a/spec/services/user_project_access_changed_service_spec.rb
+++ b/spec/services/user_project_access_changed_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe UserProjectAccessChangedService do
end
it 'permits low-priority operation' do
- expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
receive(:bulk_perform_in).with(
described_class::DELAY,
[[1], [2]],
@@ -31,4 +31,37 @@ RSpec.describe UserProjectAccessChangedService do
priority: described_class::LOW_PRIORITY)
end
end
+
+ context 'with load balancing enabled' do
+ let(:service) { UserProjectAccessChangedService.new([1, 2]) }
+
+ before do
+ allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
+
+ expect(AuthorizedProjectsWorker).to receive(:bulk_perform_and_wait)
+ .with([[1], [2]])
+ .and_return(10)
+ end
+
+ it 'sticks all the updated users and returns the original result', :aggregate_failures do
+ expect(Gitlab::Database::LoadBalancing::Sticking).to receive(:bulk_stick).with(:user, [1, 2])
+
+ expect(service.execute).to eq(10)
+ end
+
+ it 'avoids N+1 cached queries', :use_sql_query_cache, :request_store do
+ # Run this once to establish a baseline
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ service.execute
+ end
+
+ service = UserProjectAccessChangedService.new([1, 2, 3, 4, 5])
+
+ allow(AuthorizedProjectsWorker).to receive(:bulk_perform_and_wait)
+ .with([[1], [2], [3], [4], [5]])
+ .and_return(10)
+
+ expect { service.execute }.not_to exceed_all_query_limit(control_count.count)
+ end
+ end
end
diff --git a/spec/services/users/activity_service_spec.rb b/spec/services/users/activity_service_spec.rb
index 4bbf6a2bcb8..cfafa9eff45 100644
--- a/spec/services/users/activity_service_spec.rb
+++ b/spec/services/users/activity_service_spec.rb
@@ -84,4 +84,51 @@ RSpec.describe Users::ActivityService do
end
end
end
+
+ context 'with DB Load Balancing', :request_store, :redis, :clean_gitlab_redis_shared_state do
+ include_context 'clear DB Load Balancing configuration'
+
+ let(:user) { create(:user, last_activity_on: last_activity_on) }
+
+ context 'when last activity is in the past' do
+ let(:user) { create(:user, last_activity_on: Date.today - 1.week) }
+
+ context 'database load balancing is configured' do
+ before do
+ # Do not pollute AR for other tests, but rather simulate effect of configure_proxy.
+ allow(ActiveRecord::Base.singleton_class).to receive(:prepend)
+ ::Gitlab::Database::LoadBalancing.configure_proxy
+ allow(ActiveRecord::Base).to receive(:connection).and_return(::Gitlab::Database::LoadBalancing.proxy)
+ end
+
+ let(:service) do
+ service = described_class.new(user)
+
+ ::Gitlab::Database::LoadBalancing::Session.clear_session
+
+ service
+ end
+
+ it 'does not stick to primary' do
+ expect(::Gitlab::Database::LoadBalancing::Session.current).not_to be_performed_write
+
+ service.execute
+
+ expect(user.last_activity_on).to eq(Date.today)
+ expect(::Gitlab::Database::LoadBalancing::Session.current).to be_performed_write
+ expect(::Gitlab::Database::LoadBalancing::Session.current).not_to be_using_primary
+ end
+ end
+
+ context 'database load balancing is not configured' do
+ let(:service) { described_class.new(user) }
+
+ it 'updates user without error' do
+ service.execute
+
+ expect(user.last_activity_on).to eq(Date.today)
+ end
+ end
+ end
+ end
end
diff --git a/spec/services/users/authorized_build_service_spec.rb b/spec/services/users/authorized_build_service_spec.rb
new file mode 100644
index 00000000000..57a122cbf35
--- /dev/null
+++ b/spec/services/users/authorized_build_service_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::AuthorizedBuildService do
+ describe '#execute' do
+ let_it_be(:current_user) { create(:user) }
+
+ let(:params) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) }
+
+ subject(:user) { described_class.new(current_user, params).execute }
+
+ it_behaves_like 'common user build items'
+ it_behaves_like 'current user not admin build items'
+ end
+end
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index e8786c677d1..98fe6d9b5ba 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -11,148 +11,19 @@ RSpec.describe Users::BuildService do
let(:params) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) }
let(:service) { described_class.new(current_user, params) }
- shared_examples_for 'common build items' do
- it { is_expected.to be_valid }
-
- it 'sets the created_by_id' do
- expect(user.created_by_id).to eq(current_user&.id)
- end
-
- it 'calls UpdateCanonicalEmailService' do
- expect(Users::UpdateCanonicalEmailService).to receive(:new).and_call_original
-
- user
- end
-
- context 'when user_type is provided' do
- context 'when project_bot' do
- before do
- params.merge!({ user_type: :project_bot })
- end
-
- it { expect(user.project_bot?).to be true }
- end
-
- context 'when not a project_bot' do
- before do
- params.merge!({ user_type: :alert_bot })
- end
-
- it { expect(user).to be_human }
- end
- end
- end
-
- shared_examples_for 'current user not admin' do
- context 'with "user_default_external" application setting' do
- where(:user_default_external, :external, :email, :user_default_internal_regex, :result) do
- true | nil | 'fl@example.com' | nil | true
- true | true | 'fl@example.com' | nil | true
- true | false | 'fl@example.com' | nil | true # admin difference
-
- true | nil | 'fl@example.com' | '' | true
- true | true | 'fl@example.com' | '' | true
- true | false | 'fl@example.com' | '' | true # admin difference
-
- true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
- true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
- true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
-
- true | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
- true | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
- true | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true # admin difference
-
- false | nil | 'fl@example.com' | nil | false
- false | true | 'fl@example.com' | nil | false # admin difference
- false | false | 'fl@example.com' | nil | false
-
- false | nil | 'fl@example.com' | '' | false
- false | true | 'fl@example.com' | '' | false # admin difference
- false | false | 'fl@example.com' | '' | false
-
- false | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
- false | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
- false | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
-
- false | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
- false | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
- false | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
- end
-
- with_them do
- before do
- stub_application_setting(user_default_external: user_default_external)
- stub_application_setting(user_default_internal_regex: user_default_internal_regex)
-
- params.merge!({ external: external, email: email }.compact)
- end
-
- it 'sets the value of Gitlab::CurrentSettings.user_default_external' do
- expect(user.external).to eq(result)
- end
- end
- end
-
- context 'when "send_user_confirmation_email" application setting is true' do
- before do
- stub_application_setting(send_user_confirmation_email: true, signup_enabled?: true)
- end
-
- it 'does not confirm the user' do
- expect(user).not_to be_confirmed
- end
- end
-
- context 'when "send_user_confirmation_email" application setting is false' do
- before do
- stub_application_setting(send_user_confirmation_email: false, signup_enabled?: true)
- end
-
- it 'confirms the user' do
- expect(user).to be_confirmed
- end
- end
-
- context 'with allowed params' do
- let(:params) do
- {
- email: 1,
- name: 1,
- password: 1,
- password_automatically_set: 1,
- username: 1,
- user_type: 'project_bot'
- }
- end
-
- it 'sets all allowed attributes' do
- expect(User).to receive(:new).with(hash_including(params)).and_call_original
-
- user
- end
- end
- end
-
context 'with nil current_user' do
subject(:user) { service.execute }
- it_behaves_like 'common build items'
- it_behaves_like 'current user not admin'
+ it_behaves_like 'common user build items'
+ it_behaves_like 'current user not admin build items'
end
context 'with non admin current_user' do
let_it_be(:current_user) { create(:user) }
- let(:service) { described_class.new(current_user, params) }
-
- subject(:user) { service.execute(skip_authorization: true) }
-
- it 'raises AccessDeniedError exception when authorization is not skipped' do
- expect { service.execute }.to raise_error Gitlab::Access::AccessDeniedError
+ it 'raises AccessDeniedError exception' do
+ expect { described_class.new(current_user, params).execute }.to raise_error Gitlab::Access::AccessDeniedError
end
-
- it_behaves_like 'common build items'
- it_behaves_like 'current user not admin'
end
context 'with an admin current_user' do
@@ -163,7 +34,7 @@ RSpec.describe Users::BuildService do
subject(:user) { service.execute }
- it_behaves_like 'common build items'
+ it_behaves_like 'common user build items'
context 'with allowed params' do
let(:params) do
diff --git a/spec/services/users/update_assigned_open_issue_count_service_spec.rb b/spec/services/users/update_assigned_open_issue_count_service_spec.rb
deleted file mode 100644
index 55fc60a7893..00000000000
--- a/spec/services/users/update_assigned_open_issue_count_service_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Users::UpdateAssignedOpenIssueCountService do
- let_it_be(:user) { create(:user) }
-
- describe '#initialize' do
- context 'incorrect arguments provided' do
- it 'raises an error if there are no target user' do
- expect { described_class.new(target_user: nil) }.to raise_error(ArgumentError, /Please provide a target user/)
- expect { described_class.new(target_user: "nonsense") }.to raise_error(ArgumentError, /Please provide a target user/)
- end
- end
-
- context 'when correct arguments provided' do
- it 'is successful' do
- expect { described_class.new(target_user: user) }.not_to raise_error
- end
- end
- end
-
- describe "#execute", :clean_gitlab_redis_cache do
- let(:fake_update_service) { double }
- let(:fake_issue_count_service) { double }
- let(:provided_value) { nil }
-
- subject { described_class.new(target_user: user).execute }
-
- context 'successful' do
- it 'returns a success response' do
- expect(subject).to be_success
- end
-
- it 'writes the cache with the new value' do
- expect(Rails.cache).to receive(:write).with(['users', user.id, 'assigned_open_issues_count'], 0, expires_in: User::COUNT_CACHE_VALIDITY_PERIOD)
-
- subject
- end
-
- it 'calls the issues finder to get the latest value' do
- expect(IssuesFinder).to receive(:new).with(user, assignee_id: user.id, state: 'opened', non_archived: true).and_return(fake_issue_count_service)
- expect(fake_issue_count_service).to receive(:execute)
-
- subject
- end
- end
- end
-end
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index b3fd4e33640..5f53d6f34d8 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -128,11 +128,10 @@ RSpec.describe WebHookService do
end
it 'handles exceptions' do
- exceptions = [
- SocketError, OpenSSL::SSL::SSLError, Errno::ECONNRESET, Errno::ECONNREFUSED,
- Errno::EHOSTUNREACH, Net::OpenTimeout, Net::ReadTimeout,
- Gitlab::HTTP::BlockedUrlError, Gitlab::HTTP::RedirectionTooDeep
+ exceptions = Gitlab::HTTP::HTTP_ERRORS + [
+ Gitlab::Json::LimitedEncoder::LimitExceeded, URI::InvalidURIError
]
+
exceptions.each do |exception_class|
exception = exception_class.new('Exception message')
project_hook.enable!
@@ -175,13 +174,19 @@ RSpec.describe WebHookService do
context 'execution logging' do
let(:hook_log) { project_hook.web_hook_logs.last }
+ def run_service
+ service_instance.execute
+ ::WebHooks::LogExecutionWorker.drain
+ project_hook.reload
+ end
+
context 'with success' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
end
it 'log successful execution' do
- service_instance.execute
+ run_service
expect(hook_log.trigger).to eq('push_hooks')
expect(hook_log.url).to eq(project_hook.url)
@@ -192,12 +197,16 @@ RSpec.describe WebHookService do
expect(hook_log.internal_error_message).to be_nil
end
+ it 'does not log in the service itself' do
+ expect { service_instance.execute }.not_to change(::WebHookLog, :count)
+ end
+
it 'does not increment the failure count' do
- expect { service_instance.execute }.not_to change(project_hook, :recent_failures)
+ expect { run_service }.not_to change(project_hook, :recent_failures)
end
it 'does not change the disabled_until attribute' do
- expect { service_instance.execute }.not_to change(project_hook, :disabled_until)
+ expect { run_service }.not_to change(project_hook, :disabled_until)
end
context 'when the hook had previously failed' do
@@ -206,7 +215,7 @@ RSpec.describe WebHookService do
end
it 'resets the failure count' do
- expect { service_instance.execute }.to change(project_hook, :recent_failures).to(0)
+ expect { run_service }.to change(project_hook, :recent_failures).to(0)
end
end
end
@@ -217,7 +226,7 @@ RSpec.describe WebHookService do
end
it 'logs failed execution' do
- service_instance.execute
+ run_service
expect(hook_log).to have_attributes(
trigger: eq('push_hooks'),
@@ -231,17 +240,17 @@ RSpec.describe WebHookService do
end
it 'increments the failure count' do
- expect { service_instance.execute }.to change(project_hook, :recent_failures).by(1)
+ expect { run_service }.to change(project_hook, :recent_failures).by(1)
end
it 'does not change the disabled_until attribute' do
- expect { service_instance.execute }.not_to change(project_hook, :disabled_until)
+ expect { run_service }.not_to change(project_hook, :disabled_until)
end
it 'does not allow the failure count to overflow' do
project_hook.update!(recent_failures: 32767)
- expect { service_instance.execute }.not_to change(project_hook, :recent_failures)
+ expect { run_service }.not_to change(project_hook, :recent_failures)
end
context 'when the web_hooks_disable_failed FF is disabled' do
@@ -253,7 +262,7 @@ RSpec.describe WebHookService do
it 'does not allow the failure count to overflow' do
project_hook.update!(recent_failures: 32767)
- expect { service_instance.execute }.not_to change(project_hook, :recent_failures)
+ expect { run_service }.not_to change(project_hook, :recent_failures)
end
end
end
@@ -264,7 +273,7 @@ RSpec.describe WebHookService do
end
it 'log failed execution' do
- service_instance.execute
+ run_service
expect(hook_log.trigger).to eq('push_hooks')
expect(hook_log.url).to eq(project_hook.url)
@@ -276,16 +285,15 @@ RSpec.describe WebHookService do
end
it 'does not increment the failure count' do
- expect { service_instance.execute }.not_to change(project_hook, :recent_failures)
+ expect { run_service }.not_to change(project_hook, :recent_failures)
end
- it 'sets the disabled_until attribute' do
- expect { service_instance.execute }
- .to change(project_hook, :disabled_until).to(project_hook.next_backoff.from_now)
+ it 'backs off' do
+ expect { run_service }.to change(project_hook, :disabled_until)
end
it 'increases the backoff count' do
- expect { service_instance.execute }.to change(project_hook, :backoff_count).by(1)
+ expect { run_service }.to change(project_hook, :backoff_count).by(1)
end
context 'when the previous cool-off was near the maximum' do
@@ -294,11 +302,7 @@ RSpec.describe WebHookService do
end
it 'sets the disabled_until attribute' do
- expect { service_instance.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
- end
-
- it 'sets the last_backoff attribute' do
- expect { service_instance.execute }.to change(project_hook, :backoff_count).by(1)
+ expect { run_service }.to change(project_hook, :disabled_until).to(1.day.from_now)
end
end
@@ -308,11 +312,7 @@ RSpec.describe WebHookService do
end
it 'sets the disabled_until attribute' do
- expect { service_instance.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
- end
-
- it 'sets the last_backoff attribute' do
- expect { service_instance.execute }.to change(project_hook, :backoff_count).by(1)
+ expect { run_service }.to change(project_hook, :disabled_until).to(1.day.from_now)
end
end
end
@@ -320,7 +320,7 @@ RSpec.describe WebHookService do
context 'with unsafe response body' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: "\xBB")
- service_instance.execute
+ run_service
end
it 'log successful execution' do
@@ -375,15 +375,18 @@ RSpec.describe WebHookService do
it 'does not queue a worker and logs an error' do
expect(WebHookWorker).not_to receive(:perform_async)
- payload = {
- message: 'Webhook rate limit exceeded',
- hook_id: project_hook.id,
- hook_type: 'ProjectHook',
- hook_name: 'push_hooks'
- }
-
- expect(Gitlab::AuthLogger).to receive(:error).with(payload)
- expect(Gitlab::AppLogger).to receive(:error).with(payload)
+ expect(Gitlab::AuthLogger).to receive(:error).with(
+ include(
+ message: 'Webhook rate limit exceeded',
+ hook_id: project_hook.id,
+ hook_type: 'ProjectHook',
+ hook_name: 'push_hooks',
+ "correlation_id" => kind_of(String),
+ "meta.project" => project.full_path,
+ "meta.related_class" => 'ProjectHook',
+ "meta.root_namespace" => project.root_namespace.full_path
+ )
+ )
service_instance.async_execute
end
@@ -403,7 +406,6 @@ RSpec.describe WebHookService do
it 'stops queueing workers and logs errors' do
expect(Gitlab::AuthLogger).to receive(:error).twice
- expect(Gitlab::AppLogger).to receive(:error).twice
2.times { service_instance.async_execute }
end
@@ -430,5 +432,19 @@ RSpec.describe WebHookService do
end
end
end
+
+ context 'when hook has custom context attributes' do
+ it 'includes the attributes in the worker context' do
+ expect(WebHookWorker).to receive(:perform_async) do
+ expect(Gitlab::ApplicationContext.current).to include(
+ 'meta.project' => project_hook.project.full_path,
+ 'meta.root_namespace' => project.root_ancestor.path,
+ 'meta.related_class' => 'ProjectHook'
+ )
+ end
+
+ service_instance.async_execute
+ end
+ end
end
end