path: root/spec/services
author     GitLab Bot <gitlab-bot@gitlab.com>  2022-12-20 14:22:11 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-12-20 14:22:11 +0000
commit     0c872e02b2c822e3397515ec324051ff540f0cd5 (patch)
tree       ce2fb6ce7030e4dad0f4118d21ab6453e5938cdd /spec/services
parent     f7e05a6853b12f02911494c4b3fe53d9540d74fc (diff)
download   gitlab-ce-0c872e02b2c822e3397515ec324051ff540f0cd5.tar.gz
Add latest changes from gitlab-org/gitlab@15-7-stable-ee (tag: v15.7.0-rc42)
Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/admin/set_feature_flag_service_spec.rb | 199
-rw-r--r--  spec/services/bulk_imports/create_service_spec.rb | 118
-rw-r--r--  spec/services/bulk_imports/file_download_service_spec.rb | 80
-rw-r--r--  spec/services/chat_names/find_user_service_spec.rb | 20
-rw-r--r--  spec/services/ci/append_build_trace_service_spec.rb | 32
-rw-r--r--  spec/services/ci/create_downstream_pipeline_service_spec.rb | 237
-rw-r--r--  spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb | 2
-rw-r--r--  spec/services/ci/create_pipeline_service/environment_spec.rb | 18
-rw-r--r--  spec/services/ci/create_pipeline_service/logger_spec.rb | 43
-rw-r--r--  spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb | 2
-rw-r--r--  spec/services/ci/create_pipeline_service/partitioning_spec.rb | 21
-rw-r--r--  spec/services/ci/create_pipeline_service/rules_spec.rb | 8
-rw-r--r--  spec/services/ci/create_pipeline_service/scripts_spec.rb | 112
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 33
-rw-r--r--  spec/services/ci/enqueue_job_service_spec.rb | 81
-rw-r--r--  spec/services/ci/generate_kubeconfig_service_spec.rb | 110
-rw-r--r--  spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb | 2
-rw-r--r--  spec/services/ci/pipeline_schedules/calculate_next_run_service_spec.rb | 107
-rw-r--r--  spec/services/ci/pipeline_trigger_service_spec.rb | 3
-rw-r--r--  spec/services/ci/pipelines/add_job_service_spec.rb | 4
-rw-r--r--  spec/services/ci/process_build_service_spec.rb | 72
-rw-r--r--  spec/services/ci/reset_skipped_jobs_service_spec.rb (renamed from spec/services/ci/after_requeue_job_service_spec.rb) | 18
-rw-r--r--  spec/services/ci/retry_job_service_spec.rb | 45
-rw-r--r--  spec/services/ci/retry_pipeline_service_spec.rb | 5
-rw-r--r--  spec/services/ci/runners/assign_runner_service_spec.rb | 2
-rw-r--r--  spec/services/ci/runners/bulk_delete_runners_service_spec.rb | 2
-rw-r--r--  spec/services/ci/runners/process_runner_version_update_service_spec.rb | 2
-rw-r--r--  spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb | 2
-rw-r--r--  spec/services/ci/runners/register_runner_service_spec.rb | 2
-rw-r--r--  spec/services/ci/runners/reset_registration_token_service_spec.rb | 2
-rw-r--r--  spec/services/ci/runners/set_runner_associated_projects_service_spec.rb | 2
-rw-r--r--  spec/services/ci/runners/unassign_runner_service_spec.rb | 2
-rw-r--r--  spec/services/ci/runners/unregister_runner_service_spec.rb | 2
-rw-r--r--  spec/services/ci/runners/update_runner_service_spec.rb | 2
-rw-r--r--  spec/services/ci/test_failure_history_service_spec.rb | 37
-rw-r--r--  spec/services/ci/track_failed_build_service_spec.rb | 23
-rw-r--r--  spec/services/ci/unlock_artifacts_service_spec.rb | 27
-rw-r--r--  spec/services/clusters/agents/filter_authorizations_service_spec.rb | 100
-rw-r--r--  spec/services/clusters/agents/refresh_authorization_service_spec.rb | 10
-rw-r--r--  spec/services/clusters/applications/install_service_spec.rb | 80
-rw-r--r--  spec/services/clusters/applications/prometheus_config_service_spec.rb | 162
-rw-r--r--  spec/services/clusters/applications/upgrade_service_spec.rb | 80
-rw-r--r--  spec/services/database/consistency_check_service_spec.rb | 2
-rw-r--r--  spec/services/deployments/update_environment_service_spec.rb | 2
-rw-r--r--  spec/services/environments/create_for_build_service_spec.rb | 8
-rw-r--r--  spec/services/environments/stop_service_spec.rb | 5
-rw-r--r--  spec/services/event_create_service_spec.rb | 109
-rw-r--r--  spec/services/feature_flags/hook_service_spec.rb | 8
-rw-r--r--  spec/services/google_cloud/fetch_google_ip_list_service_spec.rb | 4
-rw-r--r--  spec/services/groups/destroy_service_spec.rb | 20
-rw-r--r--  spec/services/groups/import_export/import_service_spec.rb | 42
-rw-r--r--  spec/services/import/bitbucket_server_service_spec.rb | 19
-rw-r--r--  spec/services/import/github/gists_import_service_spec.rb | 47
-rw-r--r--  spec/services/import/github_service_spec.rb | 23
-rw-r--r--  spec/services/incident_management/incidents/create_service_spec.rb | 24
-rw-r--r--  spec/services/incident_management/link_alerts/create_service_spec.rb | 98
-rw-r--r--  spec/services/incident_management/link_alerts/destroy_service_spec.rb | 88
-rw-r--r--  spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb | 14
-rw-r--r--  spec/services/incident_management/pager_duty/process_webhook_service_spec.rb | 2
-rw-r--r--  spec/services/incident_management/timeline_events/create_service_spec.rb | 18
-rw-r--r--  spec/services/incident_management/timeline_events/destroy_service_spec.rb | 9
-rw-r--r--  spec/services/incident_management/timeline_events/update_service_spec.rb | 168
-rw-r--r--  spec/services/issuable/discussions_list_service_spec.rb | 13
-rw-r--r--  spec/services/issue_links/create_service_spec.rb | 8
-rw-r--r--  spec/services/issue_links/destroy_service_spec.rb | 8
-rw-r--r--  spec/services/issues/close_service_spec.rb | 44
-rw-r--r--  spec/services/issues/create_service_spec.rb | 27
-rw-r--r--  spec/services/issues/move_service_spec.rb | 21
-rw-r--r--  spec/services/issues/reopen_service_spec.rb | 8
-rw-r--r--  spec/services/issues/update_service_spec.rb | 31
-rw-r--r--  spec/services/issues/zoom_link_service_spec.rb | 8
-rw-r--r--  spec/services/jira_connect/create_asymmetric_jwt_service_spec.rb | 27
-rw-r--r--  spec/services/jira_connect_installations/proxy_lifecycle_event_service_spec.rb | 154
-rw-r--r--  spec/services/jira_connect_installations/update_service_spec.rb | 186
-rw-r--r--  spec/services/markup/rendering_service_spec.rb | 51
-rw-r--r--  spec/services/merge_requests/after_create_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/approval_service_spec.rb | 8
-rw-r--r--  spec/services/merge_requests/build_service_spec.rb | 99
-rw-r--r--  spec/services/merge_requests/create_pipeline_service_spec.rb | 21
-rw-r--r--  spec/services/merge_requests/remove_approval_service_spec.rb | 8
-rw-r--r--  spec/services/ml/experiment_tracking/candidate_repository_spec.rb | 78
-rw-r--r--  spec/services/ml/experiment_tracking/experiment_repository_spec.rb | 39
-rw-r--r--  spec/services/notes/create_service_spec.rb | 8
-rw-r--r--  spec/services/notification_service_spec.rb | 10
-rw-r--r--  spec/services/packages/debian/process_changes_service_spec.rb | 4
-rw-r--r--  spec/services/packages/debian/process_package_file_service_spec.rb | 161
-rw-r--r--  spec/services/pages_domains/retry_acme_order_service_spec.rb | 36
-rw-r--r--  spec/services/personal_access_tokens/revoke_service_spec.rb | 44
-rw-r--r--  spec/services/projects/after_rename_service_spec.rb | 29
-rw-r--r--  spec/services/projects/container_repository/destroy_service_spec.rb | 51
-rw-r--r--  spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb | 16
-rw-r--r--  spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb | 8
-rw-r--r--  spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb | 10
-rw-r--r--  spec/services/projects/create_service_spec.rb | 35
-rw-r--r--  spec/services/projects/destroy_service_spec.rb | 33
-rw-r--r--  spec/services/projects/download_service_spec.rb | 4
-rw-r--r--  spec/services/projects/import_export/export_service_spec.rb | 17
-rw-r--r--  spec/services/projects/import_export/parallel_export_service_spec.rb | 98
-rw-r--r--  spec/services/projects/import_service_spec.rb | 20
-rw-r--r--  spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb | 64
-rw-r--r--  spec/services/projects/lfs_pointers/lfs_download_service_spec.rb | 16
-rw-r--r--  spec/services/projects/lfs_pointers/lfs_import_service_spec.rb | 14
-rw-r--r--  spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb | 64
-rw-r--r--  spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb | 3
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 30
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb | 7
-rw-r--r--  spec/services/projects/update_service_spec.rb | 39
-rw-r--r--  spec/services/protected_branches/api_service_spec.rb | 69
-rw-r--r--  spec/services/protected_branches/cache_service_spec.rb | 186
-rw-r--r--  spec/services/protected_branches/create_service_spec.rb | 97
-rw-r--r--  spec/services/protected_branches/destroy_service_spec.rb | 59
-rw-r--r--  spec/services/protected_branches/update_service_spec.rb | 74
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 18
-rw-r--r--  spec/services/repositories/housekeeping_service_spec.rb | 2
-rw-r--r--  spec/services/search_service_spec.rb | 2
-rw-r--r--  spec/services/security/merge_reports_service_spec.rb | 26
-rw-r--r--  spec/services/service_ping/submit_service_ping_service_spec.rb | 2
-rw-r--r--  spec/services/timelogs/create_service_spec.rb | 6
-rw-r--r--  spec/services/todo_service_spec.rb | 99
-rw-r--r--  spec/services/users/assigned_issues_count_service_spec.rb | 57
-rw-r--r--  spec/services/users/keys_count_service_spec.rb | 6
-rw-r--r--  spec/services/users/migrate_records_to_ghost_user_service_spec.rb | 14
-rw-r--r--  spec/services/users/registrations_build_service_spec.rb | 4
-rw-r--r--  spec/services/web_hooks/log_execution_service_spec.rb | 39
-rw-r--r--  spec/services/work_items/create_and_link_service_spec.rb | 4
-rw-r--r--  spec/services/work_items/create_service_spec.rb | 4
-rw-r--r--  spec/services/work_items/parent_links/create_service_spec.rb | 6
-rw-r--r--  spec/services/work_items/update_service_spec.rb | 2
-rw-r--r--  spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb | 6
129 files changed, 3779 insertions, 1286 deletions
diff --git a/spec/services/admin/set_feature_flag_service_spec.rb b/spec/services/admin/set_feature_flag_service_spec.rb
index 9a9c5545e23..45ee914558a 100644
--- a/spec/services/admin/set_feature_flag_service_spec.rb
+++ b/spec/services/admin/set_feature_flag_service_spec.rb
@@ -13,11 +13,69 @@ RSpec.describe Admin::SetFeatureFlagService do
# Find any `development` feature flag name
let(:known_feature_flag) do
Feature::Definition.definitions
- .values.find(&:development?)
+ .values.find { |defn| defn.development? && !defn.default_enabled }
+ end
+
+ describe 'sequences of executions' do
+ subject(:flag) do
+ Feature.get(feature_name) # rubocop: disable Gitlab/AvoidFeatureGet
+ end
+
+ context 'if we enable_percentage_of_actors and then disable' do
+ before do
+ described_class
+ .new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '50.0' })
+ .execute
+
+ described_class
+ .new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '0.0' })
+ .execute
+ end
+
+ it 'leaves the flag off' do
+ expect(flag.state).to eq(:off)
+ end
+ end
+
+ context 'if we enable and then enable_percentage_of_actors' do
+ before do
+ described_class
+ .new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '100.0' })
+ .execute
+ end
+
+ it 'reports an error' do
+ result = described_class
+ .new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '50.0' })
+ .execute
+
+ expect(flag.state).to eq(:on)
+ expect(result).to be_error
+ end
+
+ context 'if we disable the flag first' do
+ before do
+ described_class
+ .new(feature_flag_name: feature_name, params: { value: 'false' })
+ .execute
+ end
+
+ it 'sets the percentage of actors' do
+ result = described_class
+ .new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '50.0' })
+ .execute
+
+ expect(flag.state).to eq(:conditional)
+ expect(result).not_to be_error
+ end
+ end
+ end
end
describe '#execute' do
before do
+ unstub_all_feature_flags
+
Feature.reset
Flipper.unregister_groups
Flipper.register(:perf_team) do |actor|
@@ -25,7 +83,87 @@ RSpec.describe Admin::SetFeatureFlagService do
end
end
- subject { service.execute }
+ subject(:result) { service.execute }
+
+ context 'when we cannot interpret the operation' do
+ let(:params) { { value: 'wibble', key: 'unknown' } }
+
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes(reason: :illegal_operation) }
+ it { is_expected.to have_attributes(message: %(Cannot set '#{feature_name}' ("unknown") to "wibble")) }
+
+ context 'when the key is absent' do
+ let(:params) { { value: 'wibble' } }
+
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes(reason: :illegal_operation) }
+ it { is_expected.to have_attributes(message: %(Cannot set '#{feature_name}' to "wibble")) }
+ end
+ end
+
+ context 'when the value to set cannot be parsed' do
+ let(:params) { { value: 'wibble', key: 'percentage_of_actors' } }
+
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes(reason: :illegal_operation) }
+ it { is_expected.to have_attributes(message: 'Not a percentage') }
+ end
+
+ context 'when value is "remove_opt_out"' do
+ before do
+ Feature.enable(feature_name)
+ end
+
+ context 'without a target' do
+ let(:params) { { value: 'remove_opt_out' } }
+
+ it 'returns an error' do
+ expect(result).to be_error
+ expect(result.reason).to eq(:illegal_operation)
+ end
+ end
+
+ context 'with a target' do
+ let(:params) { { value: 'remove_opt_out', user: user.username } }
+
+ context 'when there is currently no opt-out' do
+ it 'returns an error' do
+ expect(result).to be_error
+ expect(result.reason).to eq(:illegal_operation)
+ expect(Feature).to be_enabled(feature_name, user)
+ end
+ end
+
+ context 'when there is currently an opt-out' do
+ before do
+ Feature.opt_out(feature_name, user)
+ end
+
+ it 'removes the opt out' do
+ expect(result).to be_success
+ expect(Feature).to be_enabled(feature_name, user)
+ end
+ end
+ end
+ end
+
+ context 'when value is "opt_out"' do
+ let(:params) { { value: 'opt_out', namespace: group.full_path, user: user.username } }
+
+ it 'opts the user and group out' do
+ expect(Feature).to receive(:opt_out).with(feature_name, user)
+ expect(Feature).to receive(:opt_out).with(feature_name, group)
+ expect(result).to be_success
+ end
+
+ context 'without a target' do
+ let(:params) { { value: 'opt_out' } }
+
+ it { is_expected.to be_error }
+
+ it { is_expected.to have_attributes(reason: :illegal_operation) }
+ end
+ end
context 'when enabling the feature flag' do
let(:params) { { value: 'true' } }
@@ -38,6 +176,18 @@ RSpec.describe Admin::SetFeatureFlagService do
expect(feature_flag.name).to eq(feature_name)
end
+ context 'when the flag is default_enabled' do
+ let(:known_feature_flag) do
+ Feature::Definition.definitions
+ .values.find { |defn| defn.development? && defn.default_enabled }
+ end
+
+ it 'leaves the flag enabled' do
+ expect(subject).to be_success
+ expect(Feature).to be_enabled(feature_name)
+ end
+ end
+
it 'logs the event' do
expect(Feature.logger).to receive(:info).once
@@ -52,6 +202,31 @@ RSpec.describe Admin::SetFeatureFlagService do
expect(subject).to be_success
end
+ context 'when the flag has been opted out for user' do
+ before do
+ Feature.enable(feature_name)
+ Feature.opt_out(feature_name, user)
+ end
+
+ it 'records an error' do
+ expect(subject).to be_error
+ expect(subject.reason).to eq(:illegal_operation)
+ expect(Feature).not_to be_enabled(feature_name, user)
+ end
+ end
+
+ context 'when the flag is default_enabled' do
+ let(:known_feature_flag) do
+ Feature::Definition.definitions
+ .values.find { |defn| defn.development? && defn.default_enabled }
+ end
+
+ it 'leaves the feature enabled' do
+ expect(subject).to be_success
+ expect(Feature).to be_enabled(feature_name, user)
+ end
+ end
+
context 'when user does not exist' do
let(:params) { { value: 'true', user: 'unknown-user' } }
@@ -166,6 +341,16 @@ RSpec.describe Admin::SetFeatureFlagService do
expect(subject).to be_success
end
end
+
+ context 'with a target' do
+ before do
+ params[:user] = user.username
+ end
+
+ it { is_expected.to be_error }
+
+ it { is_expected.to have_attributes(reason: :illegal_operation) }
+ end
end
context 'when enabling given a percentage of actors' do
@@ -184,6 +369,16 @@ RSpec.describe Admin::SetFeatureFlagService do
expect(subject).to be_success
end
end
+
+ context 'with a target' do
+ before do
+ params[:user] = user.username
+ end
+
+ it { is_expected.to be_error }
+
+ it { is_expected.to have_attributes(reason: :illegal_operation) }
+ end
end
end
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index bf174f5d5a2..f1e5533139e 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe BulkImports::CreateService do
let(:user) { create(:user) }
let(:credentials) { { url: 'http://gitlab.example', access_token: 'token' } }
+ let(:destination_group) { create(:group, path: 'destination1') }
+ let_it_be(:parent_group) { create(:group, path: 'parent-group') }
let(:params) do
[
{
@@ -39,10 +41,12 @@ RSpec.describe BulkImports::CreateService do
before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
allow(instance).to receive(:instance_version).and_return(source_version)
+ allow(instance).to receive(:instance_enterprise).and_return(false)
end
end
it 'creates bulk import' do
+ parent_group.add_owner(user)
expect { subject.execute }.to change { BulkImport.count }.by(1)
last_bulk_import = BulkImport.last
@@ -50,11 +54,21 @@ RSpec.describe BulkImports::CreateService do
expect(last_bulk_import.user).to eq(user)
expect(last_bulk_import.source_version).to eq(source_version.to_s)
expect(last_bulk_import.user).to eq(user)
+ expect(last_bulk_import.source_enterprise).to eq(false)
+
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'bulk_import_group'
)
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
+ )
end
it 'creates bulk import entities' do
@@ -87,5 +101,109 @@ RSpec.describe BulkImports::CreateService do
expect(result).to be_error
expect(result.message).to eq("Validation failed: Source full path can't be blank")
end
+
+ describe '#user-role' do
+ context 'when there is a parent_namespace and the user is a member' do
+ let(:group2) { create(:group, path: 'destination200', source_id: parent_group.id ) }
+ let(:params) do
+ [
+ {
+ source_type: 'group_entity',
+ source_full_path: 'full/path/to/group1',
+ destination_slug: 'destination200',
+ destination_namespace: 'parent-group'
+ }
+ ]
+ end
+
+ it 'defines access_level from parent namespace membership' do
+ parent_group.add_guest(user)
+ subject.execute
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Guest', import_type: 'bulk_import_group' }
+ )
+ end
+ end
+
+ context 'when there is a parent_namespace and the user is not a member' do
+ let(:params) do
+ [
+ {
+ source_type: 'group_entity',
+ source_full_path: 'full/path/to/group1',
+ destination_slug: 'destination-group-1',
+ destination_namespace: 'parent-group'
+ }
+ ]
+ end
+
+ it 'defines access_level as not a member' do
+ subject.execute
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Not a member', import_type: 'bulk_import_group' }
+ )
+ end
+ end
+
+ context 'when there is a destination_namespace but no parent_namespace' do
+ let(:params) do
+ [
+ {
+ source_type: 'group_entity',
+ source_full_path: 'full/path/to/group1',
+ destination_slug: 'destination-group-1',
+ destination_namespace: 'destination1'
+ }
+ ]
+ end
+
+ it 'defines access_level from destination_namespace' do
+ destination_group.add_developer(user)
+ subject.execute
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Developer', import_type: 'bulk_import_group' }
+ )
+ end
+ end
+
+ context 'when there is no destination_namespace or parent_namespace' do
+ let(:params) do
+ [
+ {
+ source_type: 'group_entity',
+ source_full_path: 'full/path/to/group1',
+ destination_slug: 'destinationational mcdestiny',
+ destination_namespace: 'destinational-mcdestiny'
+ }
+ ]
+ end
+
+ it 'defines access_level as owner' do
+ subject.execute
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
+ )
+ end
+ end
+ end
end
end
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index ec9cc719e24..27f77b678e3 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -14,22 +14,19 @@ RSpec.describe BulkImports::FileDownloadService do
let_it_be(:filepath) { File.join(tmpdir, filename) }
let_it_be(:content_length) { 1000 }
- let(:chunk_double) { double('chunk', size: 100, code: 200) }
-
- let(:response_double) do
- double(
- code: 200,
- success?: true,
- parsed_response: {},
- headers: {
- 'content-length' => content_length,
- 'content-type' => content_type,
- 'content-disposition' => content_disposition
- }
- )
+ let(:headers) do
+ {
+ 'content-length' => content_length,
+ 'content-type' => content_type,
+ 'content-disposition' => content_disposition
+ }
end
- subject do
+ let(:chunk_size) { 100 }
+ let(:chunk_code) { 200 }
+ let(:chunk_double) { double('chunk', size: chunk_size, code: chunk_code, http_response: double(to_hash: headers)) }
+
+ subject(:service) do
described_class.new(
configuration: config,
relative_url: '/test',
@@ -42,9 +39,10 @@ RSpec.describe BulkImports::FileDownloadService do
before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
- allow(client).to receive(:head).and_return(response_double)
allow(client).to receive(:stream).and_yield(chunk_double)
end
+
+ allow(service).to receive(:response_headers).and_return(headers)
end
shared_examples 'downloads file' do
@@ -136,7 +134,7 @@ RSpec.describe BulkImports::FileDownloadService do
end
context 'when chunk code is not 200' do
- let(:chunk_double) { double('chunk', size: 1000, code: 500) }
+ let(:chunk_code) { 500 }
it 'raises an error' do
expect { subject.execute }.to raise_error(
@@ -146,7 +144,7 @@ RSpec.describe BulkImports::FileDownloadService do
end
context 'when chunk code is redirection' do
- let(:chunk_double) { double('redirection', size: 1000, code: 303) }
+ let(:chunk_code) { 303 }
it 'does not write a redirection chunk' do
expect { subject.execute }.not_to raise_error
@@ -157,10 +155,9 @@ RSpec.describe BulkImports::FileDownloadService do
context 'when redirection chunk appears at a later stage of the download' do
it 'raises an error' do
another_chunk_double = double('another redirection', size: 1000, code: 303)
- data_chunk = double('data chunk', size: 1000, code: 200)
+ data_chunk = double('data chunk', size: 1000, code: 200, http_response: double(to_hash: {}))
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
- allow(client).to receive(:head).and_return(response_double)
allow(client)
.to receive(:stream)
.and_yield(chunk_double)
@@ -177,6 +174,51 @@ RSpec.describe BulkImports::FileDownloadService do
end
end
+ describe 'remote content validation' do
+ context 'on redirect chunk' do
+ let(:chunk_code) { 303 }
+
+ it 'does not run content type & length validations' do
+ expect(service).not_to receive(:validate_content_type)
+ expect(service).not_to receive(:validate_content_length)
+
+ service.execute
+ end
+ end
+
+ context 'when there is one data chunk' do
+ it 'validates content type & length' do
+ expect(service).to receive(:validate_content_type)
+ expect(service).to receive(:validate_content_length)
+
+ service.execute
+ end
+ end
+
+ context 'when there are multiple data chunks' do
+ it 'validates content type & length only once' do
+ data_chunk = double(
+ 'data chunk',
+ size: 1000,
+ code: 200,
+ http_response: double(to_hash: {})
+ )
+
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client)
+ .to receive(:stream)
+ .and_yield(chunk_double)
+ .and_yield(data_chunk)
+ end
+
+ expect(service).to receive(:validate_content_type).once
+ expect(service).to receive(:validate_content_length).once
+
+ service.execute
+ end
+ end
+ end
+
context 'when file is a symlink' do
let_it_be(:symlink) { File.join(tmpdir, 'symlink') }
diff --git a/spec/services/chat_names/find_user_service_spec.rb b/spec/services/chat_names/find_user_service_spec.rb
index 4b0a1204558..10cb0a2f065 100644
--- a/spec/services/chat_names/find_user_service_spec.rb
+++ b/spec/services/chat_names/find_user_service_spec.rb
@@ -4,17 +4,16 @@ require 'spec_helper'
RSpec.describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state do
describe '#execute' do
- let(:integration) { create(:integration) }
-
- subject { described_class.new(integration, params).execute }
+ subject { described_class.new(team_id, user_id).execute }
context 'find user mapping' do
- let(:user) { create(:user) }
- let!(:chat_name) { create(:chat_name, user: user, integration: integration) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:chat_name) { create(:chat_name, user: user) }
- context 'when existing user is requested' do
- let(:params) { { team_id: chat_name.team_id, user_id: chat_name.chat_id } }
+ let(:team_id) { chat_name.team_id }
+ let(:user_id) { chat_name.chat_id }
+ context 'when existing user is requested' do
it 'returns the existing chat_name' do
is_expected.to eq(chat_name)
end
@@ -28,7 +27,8 @@ RSpec.describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state do
end
it 'only updates an existing timestamp once within a certain time frame' do
- service = described_class.new(integration, params)
+ chat_name = create(:chat_name, user: user)
+ service = described_class.new(team_id, user_id)
expect(chat_name.last_used_at).to be_nil
@@ -43,9 +43,9 @@ RSpec.describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state do
end
context 'when different user is requested' do
- let(:params) { { team_id: chat_name.team_id, user_id: 'non-existing-user' } }
+ let(:user_id) { 'non-existing-user' }
- it 'returns existing user' do
+ it 'returns nil' do
is_expected.to be_nil
end
end
diff --git a/spec/services/ci/append_build_trace_service_spec.rb b/spec/services/ci/append_build_trace_service_spec.rb
index 487dbacbe90..20f7967d1f1 100644
--- a/spec/services/ci/append_build_trace_service_spec.rb
+++ b/spec/services/ci/append_build_trace_service_spec.rb
@@ -76,4 +76,36 @@ RSpec.describe Ci::AppendBuildTraceService do
expect(build.failure_reason).to eq 'trace_size_exceeded'
end
end
+
+ context 'when debug_trace param is provided' do
+ let(:metadata) { Ci::BuildMetadata.find_by(build_id: build) }
+ let(:stream_size) { 192.kilobytes }
+ let(:body_data) { 'x' * stream_size }
+ let(:content_range) { "#{body_start}-#{stream_size}" }
+
+ context 'when sending the first trace' do
+ let(:body_start) { 0 }
+
+ it 'updates build metadata debug_trace_enabled' do
+ described_class
+ .new(build, content_range: content_range, debug_trace: true)
+ .execute(body_data)
+
+ expect(metadata.debug_trace_enabled).to be(true)
+ end
+ end
+
+ context 'when sending the second trace' do
+ let(:body_start) { 1 }
+
+ it 'does not update build metadata debug_trace_enabled', :aggregate_failures do
+ query_recorder = ActiveRecord::QueryRecorder.new do
+ described_class.new(build, content_range: content_range, debug_trace: true).execute(body_data)
+ end
+
+ expect(metadata.debug_trace_enabled).to be(false)
+ expect(query_recorder.log).not_to include(/p_ci_builds_metadata/)
+ end
+ end
+ end
end
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index 9c02c5218f1..bcdb2b4f796 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
+RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category: :continuous_integration do
include Ci::SourcePipelineHelpers
# Using let_it_be on user and projects for these specs can cause
@@ -13,7 +13,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
let(:downstream_project) { create(:project, :repository) }
let!(:upstream_pipeline) do
- create(:ci_pipeline, :running, project: upstream_project)
+ create(:ci_pipeline, :created, project: upstream_project)
end
let(:trigger) do
@@ -33,6 +33,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
end
let(:service) { described_class.new(upstream_project, user) }
+ let(:pipeline) { subject.payload }
before do
upstream_project.add_developer(user)
@@ -40,6 +41,12 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
subject { service.execute(bridge) }
+ shared_context 'when ci_bridge_remove_sourced_pipelines is disabled' do
+ before do
+ stub_feature_flags(ci_bridge_remove_sourced_pipelines: false)
+ end
+ end
+
context 'when downstream project has not been found' do
let(:trigger) do
{ trigger: { project: 'unknown/project' } }
@@ -48,6 +55,8 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'does not create a pipeline' do
expect { subject }
.not_to change { Ci::Pipeline.count }
+ expect(subject).to be_error
+ expect(subject.message).to eq("Pre-conditions not met")
end
it 'changes pipeline bridge job status to failed' do
@@ -63,9 +72,11 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'does not create a new pipeline' do
expect { subject }
.not_to change { Ci::Pipeline.count }
+ expect(subject).to be_error
+ expect(subject.message).to eq("Pre-conditions not met")
end
- it 'changes status of the bridge build' do
+ it 'changes status of the bridge build to failed' do
subject
expect(bridge.reload).to be_failed
@@ -82,9 +93,11 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'does not create a new pipeline' do
expect { subject }
.not_to change { Ci::Pipeline.count }
+ expect(subject).to be_error
+ expect(subject.message).to eq("Pre-conditions not met")
end
- it 'changes status of the bridge build' do
+ it 'changes status of the bridge build to failed' do
subject
expect(bridge.reload).to be_failed
@@ -103,35 +116,51 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates only one new pipeline' do
expect { subject }
.to change { Ci::Pipeline.count }.by(1)
+ expect(subject).to be_success
end
it 'creates a new pipeline in a downstream project' do
- pipeline = subject
-
expect(pipeline.user).to eq bridge.user
expect(pipeline.project).to eq downstream_project
- expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
+ expect(bridge.reload.sourced_pipeline.pipeline).to eq pipeline
expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
expect(pipeline.source_bridge).to eq bridge
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
+ context 'when ci_bridge_remove_sourced_pipelines is disabled' do
+ include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
+
+ it 'creates a new pipeline in a downstream project' do
+ expect(pipeline.user).to eq bridge.user
+ expect(pipeline.project).to eq downstream_project
+ expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
+ expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
+ expect(pipeline.source_bridge).to eq bridge
+ expect(pipeline.source_bridge).to be_a ::Ci::Bridge
+ end
+ end
+
it_behaves_like 'logs downstream pipeline creation' do
+ let(:downstream_pipeline) { pipeline }
let(:expected_root_pipeline) { upstream_pipeline }
let(:expected_hierarchy_size) { 2 }
let(:expected_downstream_relationship) { :multi_project }
end
it 'updates bridge status when downstream pipeline gets processed' do
- pipeline = subject
-
expect(pipeline.reload).to be_created
expect(bridge.reload).to be_success
end
- context 'when bridge job has already any downstream pipelines' do
+ it 'triggers the upstream pipeline duration calculation', :sidekiq_inline do
+ expect { subject }
+ .to change { upstream_pipeline.reload.duration }.from(nil).to(an_instance_of(Integer))
+ end
+
+ context 'when bridge job has already any downstream pipeline' do
before do
- bridge.sourced_pipelines.create!(
+ bridge.create_sourced_pipeline!(
source_pipeline: bridge.pipeline,
source_project: bridge.project,
project: bridge.project,
@@ -147,7 +176,33 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
bridge_id: bridge.id, project_id: bridge.project.id)
.and_call_original
expect(Ci::CreatePipelineService).not_to receive(:new)
- expect(subject).to eq({ message: "Already has a downstream pipeline", status: :error })
+ expect(subject).to be_error
+ expect(subject.message).to eq("Already has a downstream pipeline")
+ end
+
+ context 'when ci_bridge_remove_sourced_pipelines is disabled' do
+ include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
+
+ before do
+ bridge.sourced_pipelines.create!(
+ source_pipeline: bridge.pipeline,
+ source_project: bridge.project,
+ project: bridge.project,
+ pipeline: create(:ci_pipeline, project: bridge.project)
+ )
+ end
+
+ it 'logs an error and exits' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(described_class::DuplicateDownstreamPipelineError),
+ bridge_id: bridge.id, project_id: bridge.project.id)
+ .and_call_original
+ expect(Ci::CreatePipelineService).not_to receive(:new)
+ expect(subject).to be_error
+ expect(subject.message).to eq("Already has a downstream pipeline")
+ end
end
end
@@ -157,8 +212,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
end
it 'is using default branch name' do
- pipeline = subject
-
expect(pipeline.ref).to eq 'master'
end
end
@@ -171,22 +224,33 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates only one new pipeline' do
expect { subject }
.to change { Ci::Pipeline.count }.by(1)
+ expect(subject).to be_error
+ expect(subject.message).to match_array(["jobs job config should implement a script: or a trigger: keyword"])
end
it 'creates a new pipeline in a downstream project' do
- pipeline = subject
-
expect(pipeline.user).to eq bridge.user
expect(pipeline.project).to eq downstream_project
- expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
+ expect(bridge.reload.sourced_pipeline.pipeline).to eq pipeline
expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
expect(pipeline.source_bridge).to eq bridge
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- it 'updates the bridge status when downstream pipeline gets processed' do
- pipeline = subject
+ context 'when ci_bridge_remove_sourced_pipelines is disabled' do
+ include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
+
+ it 'creates a new pipeline in a downstream project' do
+ expect(pipeline.user).to eq bridge.user
+ expect(pipeline.project).to eq downstream_project
+ expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
+ expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
+ expect(pipeline.source_bridge).to eq bridge
+ expect(pipeline.source_bridge).to be_a ::Ci::Bridge
+ end
+ end
+ it 'updates the bridge status when downstream pipeline gets processed' do
expect(pipeline.reload).to be_failed
expect(bridge.reload).to be_failed
end
@@ -201,6 +265,8 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'does not create a new pipeline' do
expect { subject }
.not_to change { Ci::Pipeline.count }
+ expect(subject).to be_error
+ expect(subject.message).to eq("Pre-conditions not met")
end
it 'changes status of the bridge build' do
@@ -222,32 +288,39 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates only one new pipeline' do
expect { subject }
.to change { Ci::Pipeline.count }.by(1)
+ expect(subject).to be_success
end
it 'creates a child pipeline in the same project' do
- pipeline = subject
- pipeline.reload
-
expect(pipeline.builds.map(&:name)).to match_array(%w[rspec echo])
expect(pipeline.user).to eq bridge.user
expect(pipeline.project).to eq bridge.project
- expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
+ expect(bridge.reload.sourced_pipeline.pipeline).to eq pipeline
expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
expect(pipeline.source_bridge).to eq bridge
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- it 'updates bridge status when downstream pipeline gets processed' do
- pipeline = subject
+ context 'when ci_bridge_remove_sourced_pipelines is disabled' do
+ include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
+
+ it 'creates a child pipeline in the same project' do
+ expect(pipeline.builds.map(&:name)).to match_array(%w[rspec echo])
+ expect(pipeline.user).to eq bridge.user
+ expect(pipeline.project).to eq bridge.project
+ expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
+ expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
+ expect(pipeline.source_bridge).to eq bridge
+ expect(pipeline.source_bridge).to be_a ::Ci::Bridge
+ end
+ end
+ it 'updates bridge status when downstream pipeline gets processed' do
expect(pipeline.reload).to be_created
expect(bridge.reload).to be_success
end
it 'propagates parent pipeline settings to the child pipeline' do
- pipeline = subject
- pipeline.reload
-
expect(pipeline.ref).to eq(upstream_pipeline.ref)
expect(pipeline.sha).to eq(upstream_pipeline.sha)
expect(pipeline.source_sha).to eq(upstream_pipeline.source_sha)
@@ -276,6 +349,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it_behaves_like 'creates a child pipeline'
it_behaves_like 'logs downstream pipeline creation' do
+ let(:downstream_pipeline) { pipeline }
let(:expected_root_pipeline) { upstream_pipeline }
let(:expected_hierarchy_size) { 2 }
let(:expected_downstream_relationship) { :parent_child }
@@ -283,6 +357,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'updates the bridge job to success' do
expect { subject }.to change { bridge.status }.to 'success'
+ expect(subject).to be_success
end
context 'when bridge uses "depend" strategy' do
@@ -292,8 +367,9 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
}
end
- it 'does not update the bridge job status' do
- expect { subject }.not_to change { bridge.status }
+ it 'update the bridge job to running status' do
+ expect { subject }.to change { bridge.status }.from('pending').to('running')
+ expect(subject).to be_success
end
end
@@ -323,8 +399,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it_behaves_like 'creates a child pipeline'
it 'propagates the merge request to the child pipeline' do
- pipeline = subject
-
expect(pipeline.merge_request).to eq(merge_request)
expect(pipeline).to be_merge_request
end
@@ -341,11 +415,13 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates the pipeline' do
expect { subject }
.to change { Ci::Pipeline.count }.by(1)
+ expect(subject).to be_success
expect(bridge.reload).to be_success
end
it_behaves_like 'logs downstream pipeline creation' do
+ let(:downstream_pipeline) { pipeline }
let(:expected_root_pipeline) { upstream_pipeline.parent_pipeline }
let(:expected_hierarchy_size) { 3 }
let(:expected_downstream_relationship) { :parent_child }
@@ -394,6 +470,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'create the pipeline' do
expect { subject }.to change { Ci::Pipeline.count }.by(1)
+ expect(subject).to be_success
end
end
@@ -406,11 +483,10 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates a new pipeline allowing variables to be passed downstream' do
expect { subject }.to change { Ci::Pipeline.count }.by(1)
+ expect(subject).to be_success
end
it 'passes variables downstream from the bridge' do
- pipeline = subject
-
pipeline.variables.map(&:key).tap do |variables|
expect(variables).to include 'BRIDGE'
end
@@ -466,6 +542,8 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'does not create a new pipeline' do
expect { subject }
.not_to change { Ci::Pipeline.count }
+ expect(subject).to be_error
+ expect(subject.message).to eq("Pre-conditions not met")
end
it 'changes status of the bridge build' do
@@ -480,6 +558,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates a new pipeline' do
expect { subject }
.to change { Ci::Pipeline.count }
+ expect(subject).to be_success
end
it 'expect bridge build not to be failed' do
@@ -559,18 +638,16 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates only one new pipeline' do
expect { subject }
.to change { Ci::Pipeline.count }.by(1)
+ expect(subject).to be_error
+ expect(subject.message).to match_array(["jobs invalid config should implement a script: or a trigger: keyword"])
end
it 'creates a new pipeline in the downstream project' do
- pipeline = subject
-
expect(pipeline.user).to eq bridge.user
expect(pipeline.project).to eq downstream_project
end
it 'drops the bridge' do
- pipeline = subject
-
expect(pipeline.reload).to be_failed
expect(bridge.reload).to be_failed
expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
@@ -585,15 +662,10 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
bridge.drop!
end
- it 'tracks the exception' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(
- instance_of(Ci::Bridge::InvalidTransitionError),
- bridge_id: bridge.id,
- downstream_pipeline_id: kind_of(Numeric))
-
- subject
+ it 'returns the error' do
+ expect { subject }.not_to change(downstream_project.ci_pipelines, :count)
+ expect(subject).to be_error
+ expect(subject.message).to eq('Can not run the bridge')
end
end
@@ -603,8 +675,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
end
it 'passes bridge variables to downstream pipeline' do
- pipeline = subject
-
expect(pipeline.variables.first)
.to have_attributes(key: 'BRIDGE', value: 'var')
end
@@ -616,8 +686,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
end
it 'does not pass pipeline variables directly downstream' do
- pipeline = subject
-
pipeline.variables.map(&:key).tap do |variables|
expect(variables).not_to include 'PIPELINE_VARIABLE'
end
@@ -629,8 +697,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
end
it 'makes it possible to pass pipeline variable downstream' do
- pipeline = subject
-
pipeline.variables.find_by(key: 'BRIDGE').tap do |variable|
expect(variable.value).to eq 'my-value-var'
end
@@ -644,11 +710,11 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'does not create a new pipeline' do
expect { subject }
.not_to change { Ci::Pipeline.count }
+ expect(subject).to be_error
+ expect(subject.message).to match_array(["Insufficient permissions to set pipeline variables"])
end
it 'ignores variables passed downstream from the bridge' do
- pipeline = subject
-
pipeline.variables.map(&:key).tap do |variables|
expect(variables).not_to include 'BRIDGE'
end
@@ -668,7 +734,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
# TODO: Move this context into a feature spec that uses
# multiple pipeline processing services. Location TBD in:
# https://gitlab.com/gitlab-org/gitlab/issues/36216
- context 'when configured with bridge job rules' do
+ context 'when configured with bridge job rules', :sidekiq_inline do
before do
stub_ci_pipeline_yaml_file(config)
downstream_project.add_maintainer(upstream_project.first_owner)
@@ -701,6 +767,8 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates the downstream pipeline' do
expect { subject }
.to change(downstream_project.ci_pipelines, :count).by(1)
+ expect(subject).to be_error
+ expect(subject.message).to eq("Already has a downstream pipeline")
end
end
end
@@ -731,6 +799,8 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'does not create a pipeline and drops the bridge' do
expect { subject }.not_to change(downstream_project.ci_pipelines, :count)
+ expect(subject).to be_error
+ expect(subject.message).to match_array(["Reference not found"])
expect(bridge.reload).to be_failed
expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
@@ -754,6 +824,8 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'does not create a pipeline and drops the bridge' do
expect { subject }.not_to change(downstream_project.ci_pipelines, :count)
+ expect(subject).to be_error
+ expect(subject.message).to match_array(["No stages / jobs for this pipeline."])
expect(bridge.reload).to be_failed
expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
@@ -776,6 +848,10 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates the pipeline but drops the bridge' do
expect { subject }.to change(downstream_project.ci_pipelines, :count).by(1)
+ expect(subject).to be_error
+ expect(subject.message).to eq(
+ ["test job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post"]
+ )
expect(bridge.reload).to be_failed
expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
@@ -808,6 +884,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates the pipeline' do
expect { subject }.to change(downstream_project.ci_pipelines, :count).by(1)
+ expect(subject).to be_success
expect(bridge.reload).to be_success
end
@@ -822,6 +899,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
context 'when a downstream pipeline has sibling pipelines' do
it_behaves_like 'logs downstream pipeline creation' do
+ let(:downstream_pipeline) { pipeline }
let(:expected_root_pipeline) { upstream_pipeline }
let(:expected_downstream_relationship) { :multi_project }
@@ -839,23 +917,47 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
let_it_be(:child) { create(:ci_pipeline, child_of: parent) }
let_it_be(:sibling) { create(:ci_pipeline, child_of: parent) }
- before do
- stub_const("#{described_class}::MAX_HIERARCHY_SIZE", 3)
- end
-
+ let(:project) { build(:project, :repository) }
let(:bridge) do
- create(:ci_bridge, status: :pending, user: user, options: trigger, pipeline: child)
+ create(:ci_bridge, status: :pending, user: user, options: trigger, pipeline: child, project: project)
end
- it 'does not create a new pipeline' do
- expect { subject }.not_to change { Ci::Pipeline.count }
+ context 'when limit was specified by admin' do
+ before do
+ project.actual_limits.update!(pipeline_hierarchy_size: 3)
+ end
+
+ it 'does not create a new pipeline' do
+ expect { subject }.not_to change { Ci::Pipeline.count }
+ end
+
+ it 'drops the trigger job with an explanatory reason' do
+ subject
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq('reached_max_pipeline_hierarchy_size')
+ end
end
- it 'drops the trigger job with an explanatory reason' do
- subject
+ context 'when there was no limit specified by admin' do
+ before do
+ allow(bridge.pipeline).to receive(:complete_hierarchy_count).and_return(1000)
+ end
- expect(bridge.reload).to be_failed
- expect(bridge.failure_reason).to eq('reached_max_pipeline_hierarchy_size')
+ context 'when pipeline count reaches the default limit of 1000' do
+ it 'does not create a new pipeline' do
+ expect { subject }.not_to change { Ci::Pipeline.count }
+ expect(subject).to be_error
+ expect(subject.message).to eq("Pre-conditions not met")
+ end
+
+ it 'drops the trigger job with an explanatory reason' do
+ subject
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq('reached_max_pipeline_hierarchy_size')
+ end
+ end
end
context 'with :ci_limit_complete_hierarchy_size disabled' do
@@ -865,6 +967,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'creates a new pipeline' do
expect { subject }.to change { Ci::Pipeline.count }.by(1)
+ expect(subject).to be_success
end
it 'marks the bridge job as successful' do
diff --git a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
index 74d3534eb45..0d5017a763f 100644
--- a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
pipeline = create_pipeline!
test = pipeline.statuses.find_by(name: 'instrumentation_test')
- expect(test).to be_pending
+ expect(test).to be_running
expect(pipeline.triggered_pipelines.count).to eq(1)
end
diff --git a/spec/services/ci/create_pipeline_service/environment_spec.rb b/spec/services/ci/create_pipeline_service/environment_spec.rb
index 438cb6ac895..b713cad2cad 100644
--- a/spec/services/ci/create_pipeline_service/environment_spec.rb
+++ b/spec/services/ci/create_pipeline_service/environment_spec.rb
@@ -46,6 +46,24 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
end
+ context 'when branch pipeline creates a dynamic environment' do
+ before do
+ config = YAML.dump(
+ review_app: {
+ script: 'echo',
+ environment: { name: "review/$CI_COMMIT_REF_NAME" }
+ })
+
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'does not associate merge request with the environment' do
+ is_expected.to be_created_successfully
+
+ expect(Environment.find_by_name('review/master').merge_request).to be_nil
+ end
+ end
+
context 'when variables are dependent on stage name' do
let(:config) do
<<~YAML
diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb
index 3045f8e92b1..ccb15bfa684 100644
--- a/spec/services/ci/create_pipeline_service/logger_spec.rb
+++ b/spec/services/ci/create_pipeline_service/logger_spec.rb
@@ -2,8 +2,10 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
- context 'pipeline logger' do
+RSpec.describe Ci::CreatePipelineService, # rubocop: disable RSpec/FilePath
+ :yaml_processor_feature_flag_corectness,
+ feature_category: :continuous_integration do
+ describe 'pipeline logger' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -12,17 +14,11 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
let(:pipeline) { service.execute(:push).payload }
let(:file_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
- before do
- stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
- end
-
let(:counters) do
{
'count' => a_kind_of(Numeric),
- 'avg' => a_kind_of(Numeric),
- 'sum' => a_kind_of(Numeric),
'max' => a_kind_of(Numeric),
- 'min' => a_kind_of(Numeric)
+ 'sum' => a_kind_of(Numeric)
}
end
@@ -34,15 +30,22 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
'pipeline_persisted' => true,
'project_id' => project.id,
'pipeline_creation_service_duration_s' => a_kind_of(Numeric),
- 'pipeline_creation_duration_s' => counters,
- 'pipeline_size_count' => counters,
- 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters,
+ 'pipeline_creation_duration_s' => a_kind_of(Numeric),
+ 'pipeline_size_count' => a_kind_of(Numeric),
+ 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => a_kind_of(Numeric),
'pipeline_seed_build_inclusion_duration_s' => counters,
+ 'pipeline_seed_build_errors_duration_s' => counters,
+ 'pipeline_seed_build_to_resource_duration_s' => counters,
+ 'pipeline_seed_stage_seeds_duration_s' => counters,
'pipeline_builds_tags_count' => a_kind_of(Numeric),
'pipeline_builds_distinct_tags_count' => a_kind_of(Numeric)
}
end
+ before do
+ stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
+ end
+
context 'when the duration is under the threshold' do
it 'does not create a log entry but it collects the data' do
expect(Gitlab::AppJsonLogger).not_to receive(:info)
@@ -51,9 +54,9 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(service.logger.observations_hash)
.to match(
a_hash_including(
- 'pipeline_creation_duration_s' => counters,
- 'pipeline_size_count' => counters,
- 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
+ 'pipeline_creation_duration_s' => a_kind_of(Numeric),
+ 'pipeline_size_count' => a_kind_of(Numeric),
+ 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => a_kind_of(Numeric)
)
)
end
@@ -62,7 +65,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when the durations exceeds the threshold' do
let(:timer) do
proc do
- @timer = @timer.to_i + 30
+ @timer = @timer.to_i + 30 # rubocop: disable RSpec/InstanceVariable
end
end
@@ -88,17 +91,15 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
'pipeline_persisted' => false,
'project_id' => project.id,
'pipeline_creation_service_duration_s' => a_kind_of(Numeric),
- 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
+ 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => a_kind_of(Numeric)
}
end
- before do
+ it 'creates a log entry' do
allow_next_instance_of(Ci::Pipeline) do |pipeline|
expect(pipeline).to receive(:save!).and_raise { RuntimeError }
end
- end
- it 'creates a log entry' do
expect(Gitlab::AppJsonLogger)
.to receive(:info)
.with(a_hash_including(loggable_data))
@@ -125,7 +126,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when the size exceeds the threshold' do
before do
allow_next_instance_of(Ci::Pipeline) do |pipeline|
- allow(pipeline).to receive(:total_size) { 5000 }
+ allow(pipeline).to receive(:total_size).and_return(5000)
end
end
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index 513cbbed6cd..eb17935967c 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -108,7 +108,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :yaml_processor_feature_fl
pipeline = create_pipeline!
test = pipeline.statuses.find_by(name: 'instrumentation_test')
- expect(test).to be_pending
+ expect(test).to be_running
expect(pipeline.triggered_pipelines.count).to eq(1)
end
diff --git a/spec/services/ci/create_pipeline_service/partitioning_spec.rb b/spec/services/ci/create_pipeline_service/partitioning_spec.rb
index f34d103d965..a87135cefdd 100644
--- a/spec/services/ci/create_pipeline_service/partitioning_spec.rb
+++ b/spec/services/ci/create_pipeline_service/partitioning_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, :aggregate_failures,
-:ci_partitionable do
+:ci_partitionable, feature_category: :continuous_integration do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -15,8 +15,13 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
- test
- deploy
+ needs:build:
+ stage: build
+ script: echo "needs..."
+
build:
stage: build
+ needs: ["needs:build"]
script: make build
test:
@@ -95,6 +100,12 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(pipeline.variables.size).to eq(2)
expect(variables_partition_ids).to eq([current_partition_id])
end
+
+ it 'assigns partition_id to needs' do
+ needs = find_need('build')
+
+ expect(needs.partition_id).to eq(current_partition_id)
+ end
end
context 'with parent child pipelines' do
@@ -144,4 +155,12 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
.find { |job| job.name == name }
.metadata
end
+
+ def find_need(name)
+ pipeline
+ .processables
+ .find { |job| job.name == name }
+ .needs
+ .first
+ end
end
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index 5fdefb2b306..b866293393b 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -912,7 +912,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when outside freeze period' do
it 'creates two jobs' do
- Timecop.freeze(2020, 4, 10, 22, 59) do
+ travel_to(Time.utc(2020, 4, 10, 22, 59)) do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('test-job', 'deploy-job')
end
@@ -921,7 +921,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when inside freeze period' do
it 'creates one job' do
- Timecop.freeze(2020, 4, 10, 23, 1) do
+ travel_to(Time.utc(2020, 4, 10, 23, 1)) do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('test-job')
end
@@ -946,7 +946,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when outside freeze period' do
it 'creates two jobs' do
- Timecop.freeze(2020, 4, 10, 22, 59) do
+ travel_to(Time.utc(2020, 4, 10, 22, 59)) do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('deploy-job')
end
@@ -955,7 +955,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when inside freeze period' do
it 'does not create the pipeline', :aggregate_failures do
- Timecop.freeze(2020, 4, 10, 23, 1) do
+ travel_to(Time.utc(2020, 4, 10, 23, 1)) do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end
diff --git a/spec/services/ci/create_pipeline_service/scripts_spec.rb b/spec/services/ci/create_pipeline_service/scripts_spec.rb
new file mode 100644
index 00000000000..50b558e505a
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/scripts_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.first_owner }
+
+ let(:service) { described_class.new(project, user, { ref: 'master' }) }
+ let(:pipeline) { service.execute(:push).payload }
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ context 'when job has script and nested before_script and after_script' do
+ let(:config) do
+ <<-CI_CONFIG
+ default:
+ before_script: echo 'hello default before_script'
+ after_script: echo 'hello default after_script'
+
+ job:
+ before_script: echo 'hello job before_script'
+ after_script: echo 'hello job after_script'
+ script: echo 'hello job script'
+ CI_CONFIG
+ end
+
+ it 'creates a job with script data' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.builds.first).to have_attributes(
+ name: 'job',
+ stage: 'test',
+ options: { script: ["echo 'hello job script'"],
+ before_script: ["echo 'hello job before_script'"],
+ after_script: ["echo 'hello job after_script'"] }
+ )
+ end
+ end
+
+ context 'when job has hooks and default hooks' do
+ let(:config) do
+ <<-CI_CONFIG
+ default:
+ hooks:
+ pre_get_sources_script:
+ - echo 'hello default pre_get_sources_script'
+
+ job1:
+ hooks:
+ pre_get_sources_script:
+ - echo 'hello job1 pre_get_sources_script'
+ script: echo 'hello job1 script'
+
+ job2:
+ script: echo 'hello job2 script'
+
+ job3:
+ inherit:
+ default: false
+ script: echo 'hello job3 script'
+ CI_CONFIG
+ end
+
+ it 'creates jobs with hook data' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.builds.find_by(name: 'job1')).to have_attributes(
+ name: 'job1',
+ stage: 'test',
+ options: { script: ["echo 'hello job1 script'"],
+ hooks: { pre_get_sources_script: ["echo 'hello job1 pre_get_sources_script'"] } }
+ )
+ expect(pipeline.builds.find_by(name: 'job2')).to have_attributes(
+ name: 'job2',
+ stage: 'test',
+ options: { script: ["echo 'hello job2 script'"],
+ hooks: { pre_get_sources_script: ["echo 'hello default pre_get_sources_script'"] } }
+ )
+ expect(pipeline.builds.find_by(name: 'job3')).to have_attributes(
+ name: 'job3',
+ stage: 'test',
+ options: { script: ["echo 'hello job3 script'"] }
+ )
+ end
+
+ context 'when the FF ci_hooks_pre_get_sources_script is disabled' do
+ before do
+ stub_feature_flags(ci_hooks_pre_get_sources_script: false)
+ end
+
+ it 'creates jobs without hook data' do
+ expect(pipeline).to be_created_successfully
+ expect(pipeline.builds.find_by(name: 'job1')).to have_attributes(
+ name: 'job1',
+ stage: 'test',
+ options: { script: ["echo 'hello job1 script'"] }
+ )
+ expect(pipeline.builds.find_by(name: 'job2')).to have_attributes(
+ name: 'job2',
+ stage: 'test',
+ options: { script: ["echo 'hello job2 script'"] }
+ )
+ expect(pipeline.builds.find_by(name: 'job3')).to have_attributes(
+ name: 'job3',
+ stage: 'test',
+ options: { script: ["echo 'hello job3 script'"] }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 67c13649c6f..8628e95ba80 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -79,11 +79,11 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
let(:accepted_n_plus_ones) do
1 + # SELECT "ci_instance_variables"
- 1 + # INSERT INTO "ci_stages"
- 1 + # SELECT "ci_builds".* FROM "ci_builds"
- 1 + # INSERT INTO "ci_builds"
- 1 + # INSERT INTO "ci_builds_metadata"
- 1 # SELECT "taggings".* FROM "taggings"
+ 1 + # INSERT INTO "ci_stages"
+ 1 + # SELECT "ci_builds".* FROM "ci_builds"
+ 1 + # INSERT INTO "ci_builds"
+ 1 + # INSERT INTO "ci_builds_metadata"
+ 1 # SELECT "taggings".* FROM "taggings"
end
end
end
@@ -710,6 +710,29 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
end
+ context 'when the configuration includes ID tokens' do
+ it 'creates variables for the ID tokens' do
+ config = YAML.dump({
+ job_with_id_tokens: {
+ script: 'ls',
+ id_tokens: {
+ 'TEST_ID_TOKEN' => {
+ aud: 'https://gitlab.com'
+ }
+ }
+ }
+ })
+ stub_ci_pipeline_yaml_file(config)
+
+ result = execute_service.payload
+
+ expect(result).to be_persisted
+ expect(result.builds.first.id_tokens).to eq({
+ 'TEST_ID_TOKEN' => { 'aud' => 'https://gitlab.com' }
+ })
+ end
+ end
+
context 'with manual actions' do
before do
config = YAML.dump({ deploy: { script: 'ls', when: 'manual' } })
diff --git a/spec/services/ci/enqueue_job_service_spec.rb b/spec/services/ci/enqueue_job_service_spec.rb
new file mode 100644
index 00000000000..c2bb0bb2bb5
--- /dev/null
+++ b/spec/services/ci/enqueue_job_service_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::EnqueueJobService, '#execute', feature_category: :continuous_integration do
+ let_it_be(:project) { create(:project) }
+ let(:user) { create(:user, developer_projects: [project]) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:build) { create(:ci_build, :manual, pipeline: pipeline) }
+
+ let(:service) do
+ described_class.new(build, current_user: user)
+ end
+
+ subject(:execute) { service.execute }
+
+ it 'assigns the user to the job' do
+ expect { execute }.to change { build.reload.user }.to(user)
+ end
+
+ it 'calls enqueue!' do
+ expect(build).to receive(:enqueue!)
+ execute
+ end
+
+ it 'calls Ci::ResetSkippedJobsService' do
+ expect_next_instance_of(Ci::ResetSkippedJobsService) do |service|
+ expect(service).to receive(:execute).with(build)
+ end
+
+ execute
+ end
+
+ it 'returns the job' do
+ expect(execute).to eq(build)
+ end
+
+ context 'when variables are supplied' do
+ let(:job_variables) do
+ [{ key: 'first', secret_value: 'first' },
+ { key: 'second', secret_value: 'second' }]
+ end
+
+ let(:service) do
+ described_class.new(build, current_user: user, variables: job_variables)
+ end
+
+ it 'assigns the variables to the job' do
+ execute
+ expect(build.reload.job_variables.map(&:key)).to contain_exactly('first', 'second')
+ end
+ end
+
+ context 'when the job transition is invalid' do
+ let(:bridge) { create(:ci_bridge, :failed, pipeline: pipeline, project: project) }
+
+ let(:service) do
+ described_class.new(bridge, current_user: user)
+ end
+
+ it 'raises StateMachines::InvalidTransition' do
+ expect { execute }.to raise_error StateMachines::InvalidTransition
+ end
+ end
+
+ context 'when a transition block is supplied' do
+ let(:bridge) { create(:ci_bridge, :playable, pipeline: pipeline) }
+
+ let(:service) do
+ described_class.new(bridge, current_user: user)
+ end
+
+ subject(:execute) { service.execute(&:pending!) }
+
+ it 'calls the transition block instead of enqueue!' do
+ expect(bridge).to receive(:pending!)
+ expect(bridge).not_to receive(:enqueue!)
+ execute
+ end
+ end
+end
diff --git a/spec/services/ci/generate_kubeconfig_service_spec.rb b/spec/services/ci/generate_kubeconfig_service_spec.rb
index bfde39780dd..c0858b0f0c9 100644
--- a/spec/services/ci/generate_kubeconfig_service_spec.rb
+++ b/spec/services/ci/generate_kubeconfig_service_spec.rb
@@ -4,52 +4,98 @@ require 'spec_helper'
RSpec.describe Ci::GenerateKubeconfigService do
describe '#execute' do
- let(:project) { create(:project) }
- let(:build) { create(:ci_build, project: project) }
- let(:pipeline) { build.pipeline }
- let(:agent1) { create(:cluster_agent, project: project) }
- let(:agent2) { create(:cluster_agent) }
- let(:authorization1) { create(:agent_project_authorization, agent: agent1) }
- let(:authorization2) { create(:agent_project_authorization, agent: agent2) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project) }
+ let_it_be(:build) { create(:ci_build, project: project, pipeline: pipeline) }
- let(:template) { instance_double(Gitlab::Kubernetes::Kubeconfig::Template) }
+ let_it_be(:agent_project) { create(:project, group: group, name: 'project-containing-agent-config') }
- subject { described_class.new(pipeline, token: build.token).execute }
+ let_it_be(:project_agent_authorization) do
+ agent = create(:cluster_agent, project: agent_project)
+ create(:agent_project_authorization, agent: agent, project: project)
+ end
+
+ let_it_be(:group_agent_authorization) do
+ agent = create(:cluster_agent, project: agent_project)
+ create(:agent_group_authorization, agent: agent, group: group)
+ end
+
+ let(:template) do
+ instance_double(
+ Gitlab::Kubernetes::Kubeconfig::Template,
+ add_cluster: nil,
+ add_user: nil,
+ add_context: nil
+ )
+ end
+
+ let(:agent_authorizations) { [project_agent_authorization, group_agent_authorization] }
+ let(:filter_service) do
+ instance_double(
+ ::Clusters::Agents::FilterAuthorizationsService,
+ execute: agent_authorizations
+ )
+ end
+
+ subject(:execute) { described_class.new(pipeline, token: build.token, environment: nil).execute }
before do
- expect(Gitlab::Kubernetes::Kubeconfig::Template).to receive(:new).and_return(template)
- expect(pipeline).to receive(:cluster_agent_authorizations).and_return([authorization1, authorization2])
+ allow(Gitlab::Kubernetes::Kubeconfig::Template).to receive(:new).and_return(template)
+ allow(::Clusters::Agents::FilterAuthorizationsService).to receive(:new).and_return(filter_service)
+ end
+
+ it 'returns a Kubeconfig Template' do
+ expect(execute).to eq(template)
end
- it 'adds a cluster, and a user and context for each available agent' do
+ it 'adds a cluster' do
expect(template).to receive(:add_cluster).with(
name: 'gitlab',
url: Gitlab::Kas.tunnel_url
).once
- expect(template).to receive(:add_user).with(
- name: "agent:#{agent1.id}",
- token: "ci:#{agent1.id}:#{build.token}"
- )
- expect(template).to receive(:add_user).with(
- name: "agent:#{agent2.id}",
- token: "ci:#{agent2.id}:#{build.token}"
- )
+ execute
+ end
- expect(template).to receive(:add_context).with(
- name: "#{project.full_path}:#{agent1.name}",
- namespace: 'production',
- cluster: 'gitlab',
- user: "agent:#{agent1.id}"
- )
- expect(template).to receive(:add_context).with(
- name: "#{agent2.project.full_path}:#{agent2.name}",
- namespace: 'production',
- cluster: 'gitlab',
- user: "agent:#{agent2.id}"
+ it "filters the pipeline's agents by `nil` environment" do
+ expect(::Clusters::Agents::FilterAuthorizationsService).to receive(:new).with(
+ pipeline.cluster_agent_authorizations,
+ environment: nil
)
- expect(subject).to eq(template)
+ execute
+ end
+
+ it 'adds user and context for all eligible agents', :aggregate_failures do
+ agent_authorizations.each do |authorization|
+ expect(template).to receive(:add_user).with(
+ name: "agent:#{authorization.agent.id}",
+ token: "ci:#{authorization.agent.id}:#{build.token}"
+ )
+
+ expect(template).to receive(:add_context).with(
+ name: "#{agent_project.full_path}:#{authorization.agent.name}",
+ namespace: 'production',
+ cluster: 'gitlab',
+ user: "agent:#{authorization.agent.id}"
+ )
+ end
+
+ execute
+ end
+
+ context 'when environment is specified' do
+ subject(:execute) { described_class.new(pipeline, token: build.token, environment: 'production').execute }
+
+ it "filters the pipeline's agents by the specified environment" do
+ expect(::Clusters::Agents::FilterAuthorizationsService).to receive(:new).with(
+ pipeline.cluster_agent_authorizations,
+ environment: 'production'
+ )
+
+ execute
+ end
end
end
end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 1fbefc1fa22..2f2af9f6c85 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
+RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category: :continuous_integration do
describe 'Pipeline Processing Service Tests With Yaml' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
diff --git a/spec/services/ci/pipeline_schedules/calculate_next_run_service_spec.rb b/spec/services/ci/pipeline_schedules/calculate_next_run_service_spec.rb
new file mode 100644
index 00000000000..182c5bebbc1
--- /dev/null
+++ b/spec/services/ci/pipeline_schedules/calculate_next_run_service_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+# rubocop:disable Layout/LineLength
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineSchedules::CalculateNextRunService, feature_category: :continuous_integration do
+ let_it_be(:project) { create(:project, :public, :repository) }
+
+ describe '#execute' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:run_service) do
+ described_class.new(project).execute(pipeline_schedule,
+ fallback_method: pipeline_schedule.method(:calculate_next_run_at))
+ end
+
+ let(:pipeline_schedule) { create(:ci_pipeline_schedule, cron: schedule_cron) }
+ let(:daily_limit_of_144_runs) { 1.day / 10.minutes }
+ let(:daily_limit_of_24_runs) { 1.day / 1.hour }
+
+ before do
+ allow(Settings).to receive(:cron_jobs) { { 'pipeline_schedule_worker' => { 'cron' => worker_cron } } }
+ create(:plan_limits, :default_plan, ci_daily_pipeline_schedule_triggers: plan_limit) if plan_limit
+ end
+
+ context "when there is invalid or no plan limits" do
+ where(:worker_cron, :schedule_cron, :plan_limit, :now, :expected_result) do
+ '0 1 2 3 *' | '0 1 * * *' | nil | Time.zone.local(2021, 3, 2, 1, 0) | Time.zone.local(2022, 3, 2, 1, 0)
+ '*/5 * * * *' | '*/1 * * * *' | nil | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 11, 5)
+ '*/5 * * * *' | '0 * * * *' | nil | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 12, 5)
+ # 1.day / 2.hours => 12 times a day and it is invalid because there is a minimum for plan limits.
+ # See: https://docs.gitlab.com/ee/administration/instance_limits.html#limit-the-number-of-pipelines-created-by-a-pipeline-schedule-per-day
+ '*/5 * * * *' | '0 * * * *' | 1.day / 2.hours | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 12, 5)
+ end
+
+ with_them do
+ it 'calls fallback method to get next_run_at' do
+ travel_to(now) do
+ expect(pipeline_schedule).to receive(:calculate_next_run_at).and_call_original
+
+ result = run_service
+
+ expect(result).to eq(expected_result)
+ end
+ end
+ end
+ end
+
+ context "when the workers next run matches schedule's earliest run" do
+ where(:worker_cron, :schedule_cron, :plan_limit, :now, :expected_result) do
+ '*/5 * * * *' | '0 * * * *' | daily_limit_of_144_runs | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 12, 0)
+ '*/5 * * * *' | '*/5 * * * *' | daily_limit_of_144_runs | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 11, 10)
+ '*/5 * * * *' | '0 1 * * *' | daily_limit_of_144_runs | Time.zone.local(2021, 5, 27, 1, 0) | Time.zone.local(2021, 5, 28, 1, 0)
+ '*/5 * * * *' | '0 2 * * *' | daily_limit_of_144_runs | Time.zone.local(2021, 5, 27, 1, 0) | Time.zone.local(2021, 5, 27, 2, 0)
+ '*/5 * * * *' | '0 3 * * *' | daily_limit_of_144_runs | Time.zone.local(2021, 5, 27, 1, 0) | Time.zone.local(2021, 5, 27, 3, 0)
+ '*/5 * * * *' | '0 1 1 * *' | daily_limit_of_144_runs | Time.zone.local(2021, 5, 1, 1, 0) | Time.zone.local(2021, 6, 1, 1, 0)
+ '*/9 * * * *' | '0 1 1 * *' | daily_limit_of_144_runs | Time.zone.local(2021, 5, 1, 1, 9) | Time.zone.local(2021, 6, 1, 1, 0)
+ '*/5 * * * *' | '45 21 1 2 *' | daily_limit_of_144_runs | Time.zone.local(2021, 2, 1, 21, 45) | Time.zone.local(2022, 2, 1, 21, 45)
+ end
+
+ with_them do
+ it 'calculates the next_run_at to be earliest point of match' do
+ travel_to(now) do
+ result = run_service
+
+ expect(result).to eq(expected_result)
+ end
+ end
+ end
+ end
+
+ context "when next_run_at is restricted by plan limit" do
+ where(:worker_cron, :schedule_cron, :plan_limit, :now, :expected_result) do
+ '*/5 * * * *' | '59 14 * * *' | daily_limit_of_24_runs | Time.zone.local(2021, 5, 1, 15, 0) | Time.zone.local(2021, 5, 2, 15, 0)
+ '*/5 * * * *' | '*/1 * * * *' | daily_limit_of_24_runs | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 12, 0)
+ '*/5 * * * *' | '*/1 * * * *' | daily_limit_of_144_runs | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 11, 10)
+ '*/5 * * * *' | '*/1 * * * *' | (1.day / 7.minutes).to_i | Time.zone.local(2021, 5, 27, 11, 0) | Time.zone.local(2021, 5, 27, 11, 10)
+ end
+
+ with_them do
+ it 'calculates the next_run_at based on next available limit' do
+ travel_to(now) do
+ result = run_service
+
+ expect(result).to eq(expected_result)
+ end
+ end
+ end
+ end
+
+ context "when next_run_at is restricted by worker's availability" do
+ where(:worker_cron, :schedule_cron, :plan_limit, :now, :expected_result) do
+ '0 1 2 3 *' | '0 1 * * *' | daily_limit_of_144_runs | Time.zone.local(2021, 3, 2, 1, 0) | Time.zone.local(2022, 3, 2, 1, 0)
+ end
+
+ with_them do
+ it 'calculates the next_run_at using worker_cron' do
+ travel_to(now) do
+ result = run_service
+
+ expect(result).to eq(expected_result)
+ end
+ end
+ end
+ end
+ end
+end
+# rubocop:enable Layout/LineLength
diff --git a/spec/services/ci/pipeline_trigger_service_spec.rb b/spec/services/ci/pipeline_trigger_service_spec.rb
index 4b3e774ff3c..4946367380e 100644
--- a/spec/services/ci/pipeline_trigger_service_spec.rb
+++ b/spec/services/ci/pipeline_trigger_service_spec.rb
@@ -197,8 +197,7 @@ RSpec.describe Ci::PipelineTriggerService do
end
it_behaves_like 'logs downstream pipeline creation' do
- subject { result[:pipeline] }
-
+ let(:downstream_pipeline) { result[:pipeline] }
let(:expected_root_pipeline) { pipeline }
let(:expected_hierarchy_size) { 2 }
let(:expected_downstream_relationship) { :multi_project }
diff --git a/spec/services/ci/pipelines/add_job_service_spec.rb b/spec/services/ci/pipelines/add_job_service_spec.rb
index e735b2752d9..c62aa9506bd 100644
--- a/spec/services/ci/pipelines/add_job_service_spec.rb
+++ b/spec/services/ci/pipelines/add_job_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::Pipelines::AddJobService do
include ExclusiveLeaseHelpers
- let_it_be(:pipeline) { create(:ci_pipeline) }
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline) }
let(:job) { build(:ci_build) }
@@ -35,7 +35,7 @@ RSpec.describe Ci::Pipelines::AddJobService do
end
it 'assigns partition_id to job and metadata' do
- pipeline.partition_id = 123
+ pipeline.partition_id = ci_testing_partition_id
expect { execute }
.to change(job, :partition_id).to(pipeline.partition_id)
diff --git a/spec/services/ci/process_build_service_spec.rb b/spec/services/ci/process_build_service_spec.rb
index 9301098b083..de308bb1a87 100644
--- a/spec/services/ci/process_build_service_spec.rb
+++ b/spec/services/ci/process_build_service_spec.rb
@@ -19,31 +19,25 @@ RSpec.describe Ci::ProcessBuildService, '#execute' do
end
end
- shared_context 'with ci_retry_job_fix disabled' do
- before do
- stub_feature_flags(ci_retry_job_fix: false)
- end
- end
-
context 'for single build' do
let!(:build) { create(:ci_build, *[trait].compact, :created, **conditions, pipeline: pipeline) }
- where(:trait, :conditions, :current_status, :after_status, :retry_after_status, :retry_disabled_after_status) do
- nil | { when: :on_success } | 'success' | 'pending' | 'pending' | 'pending'
- nil | { when: :on_success } | 'skipped' | 'pending' | 'pending' | 'pending'
- nil | { when: :on_success } | 'failed' | 'skipped' | 'skipped' | 'skipped'
- nil | { when: :on_failure } | 'success' | 'skipped' | 'skipped' | 'skipped'
- nil | { when: :on_failure } | 'skipped' | 'skipped' | 'skipped' | 'skipped'
- nil | { when: :on_failure } | 'failed' | 'pending' | 'pending' | 'pending'
- nil | { when: :always } | 'success' | 'pending' | 'pending' | 'pending'
- nil | { when: :always } | 'skipped' | 'pending' | 'pending' | 'pending'
- nil | { when: :always } | 'failed' | 'pending' | 'pending' | 'pending'
- :actionable | { when: :manual } | 'success' | 'manual' | 'pending' | 'manual'
- :actionable | { when: :manual } | 'skipped' | 'manual' | 'pending' | 'manual'
- :actionable | { when: :manual } | 'failed' | 'skipped' | 'skipped' | 'skipped'
- :schedulable | { when: :delayed } | 'success' | 'scheduled' | 'pending' | 'scheduled'
- :schedulable | { when: :delayed } | 'skipped' | 'scheduled' | 'pending' | 'scheduled'
- :schedulable | { when: :delayed } | 'failed' | 'skipped' | 'skipped' | 'skipped'
+ where(:trait, :conditions, :current_status, :after_status, :retry_after_status) do
+ nil | { when: :on_success } | 'success' | 'pending' | 'pending'
+ nil | { when: :on_success } | 'skipped' | 'pending' | 'pending'
+ nil | { when: :on_success } | 'failed' | 'skipped' | 'skipped'
+ nil | { when: :on_failure } | 'success' | 'skipped' | 'skipped'
+ nil | { when: :on_failure } | 'skipped' | 'skipped' | 'skipped'
+ nil | { when: :on_failure } | 'failed' | 'pending' | 'pending'
+ nil | { when: :always } | 'success' | 'pending' | 'pending'
+ nil | { when: :always } | 'skipped' | 'pending' | 'pending'
+ nil | { when: :always } | 'failed' | 'pending' | 'pending'
+ :actionable | { when: :manual } | 'success' | 'manual' | 'pending'
+ :actionable | { when: :manual } | 'skipped' | 'manual' | 'pending'
+ :actionable | { when: :manual } | 'failed' | 'skipped' | 'skipped'
+ :schedulable | { when: :delayed } | 'success' | 'scheduled' | 'pending'
+ :schedulable | { when: :delayed } | 'skipped' | 'scheduled' | 'pending'
+ :schedulable | { when: :delayed } | 'failed' | 'skipped' | 'skipped'
end
with_them do
@@ -57,14 +51,6 @@ RSpec.describe Ci::ProcessBuildService, '#execute' do
it 'updates the job status to retry_after_status' do
expect { subject }.to change { build.status }.to(retry_after_status)
end
-
- context 'when feature flag ci_retry_job_fix is disabled' do
- include_context 'with ci_retry_job_fix disabled'
-
- it "updates the job status to retry_disabled_after_status" do
- expect { subject }.to change { build.status }.to(retry_disabled_after_status)
- end
- end
end
end
end
@@ -84,15 +70,15 @@ RSpec.describe Ci::ProcessBuildService, '#execute' do
let!(:other_build) { create(:ci_build, :created, when: :on_success, pipeline: pipeline) }
- where(:trait, :build_when, :current_status, :after_status, :retry_after_status, :retry_disabled_after_status) do
- nil | :on_success | 'success' | 'pending' | 'pending' | 'pending'
- nil | :on_success | 'skipped' | 'skipped' | 'skipped' | 'skipped'
- nil | :manual | 'success' | 'manual' | 'pending' | 'manual'
- nil | :manual | 'skipped' | 'skipped' | 'skipped' | 'skipped'
- nil | :delayed | 'success' | 'manual' | 'pending' | 'manual'
- nil | :delayed | 'skipped' | 'skipped' | 'skipped' | 'skipped'
- :schedulable | :delayed | 'success' | 'scheduled' | 'pending' | 'scheduled'
- :schedulable | :delayed | 'skipped' | 'skipped' | 'skipped' | 'skipped'
+ where(:trait, :build_when, :current_status, :after_status, :retry_after_status) do
+ nil | :on_success | 'success' | 'pending' | 'pending'
+ nil | :on_success | 'skipped' | 'skipped' | 'skipped'
+ nil | :manual | 'success' | 'manual' | 'pending'
+ nil | :manual | 'skipped' | 'skipped' | 'skipped'
+ nil | :delayed | 'success' | 'manual' | 'pending'
+ nil | :delayed | 'skipped' | 'skipped' | 'skipped'
+ :schedulable | :delayed | 'success' | 'scheduled' | 'pending'
+ :schedulable | :delayed | 'skipped' | 'skipped' | 'skipped'
end
with_them do
@@ -106,14 +92,6 @@ RSpec.describe Ci::ProcessBuildService, '#execute' do
it 'updates the job status to retry_after_status' do
expect { subject }.to change { build.status }.to(retry_after_status)
end
-
- context 'when feature flag ci_retry_job_fix is disabled' do
- include_context 'with ci_retry_job_fix disabled'
-
- it "updates the job status to retry_disabled_after_status" do
- expect { subject }.to change { build.status }.to(retry_disabled_after_status)
- end
- end
end
end
end
diff --git a/spec/services/ci/after_requeue_job_service_spec.rb b/spec/services/ci/reset_skipped_jobs_service_spec.rb
index e6f46fb9ebe..712a21e665b 100644
--- a/spec/services/ci/after_requeue_job_service_spec.rb
+++ b/spec/services/ci/reset_skipped_jobs_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
+RSpec.describe Ci::ResetSkippedJobsService, :sidekiq_inline, feature_category: :continuous_integration do
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:user) { project.first_owner }
@@ -12,9 +12,9 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
subject(:service) { described_class.new(project, user) }
- context 'stage-dag mixed pipeline' do
+ context 'with a stage-dag mixed pipeline' do
let(:config) do
- <<-EOY
+ <<-YAML
stages: [a, b, c]
a1:
@@ -49,7 +49,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
c2:
stage: c
script: exit 0
- EOY
+ YAML
end
let(:pipeline) do
@@ -150,9 +150,9 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
end
end
- context 'stage-dag mixed pipeline with some same-stage needs' do
+ context 'with stage-dag mixed pipeline with some same-stage needs' do
let(:config) do
- <<-EOY
+ <<-YAML
stages: [a, b, c]
a1:
@@ -181,7 +181,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
c2:
stage: c
script: exit 0
- EOY
+ YAML
end
let(:pipeline) do
@@ -239,7 +239,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
context 'with same-stage needs' do
let(:config) do
- <<-EOY
+ <<-YAML
a:
script: exit $(($RANDOM % 2))
@@ -250,7 +250,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
c:
script: exit 0
needs: [b]
- EOY
+ YAML
end
let(:pipeline) do
diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb
index 540e700efa6..c3d80f2cb56 100644
--- a/spec/services/ci/retry_job_service_spec.rb
+++ b/spec/services/ci/retry_job_service_spec.rb
@@ -48,12 +48,6 @@ RSpec.describe Ci::RetryJobService do
end
end
- shared_context 'with ci_retry_job_fix disabled' do
- before do
- stub_feature_flags(ci_retry_job_fix: false)
- end
- end
-
shared_examples_for 'clones the job' do
let(:job) { job_to_clone }
@@ -284,14 +278,6 @@ RSpec.describe Ci::RetryJobService do
with_them do
it_behaves_like 'checks enqueue_immediately?'
-
- context 'when feature flag is disabled' do
- include_context 'with ci_retry_job_fix disabled'
-
- it_behaves_like 'checks enqueue_immediately?' do
- let(:enqueue_immediately) { false }
- end
- end
end
end
end
@@ -384,15 +370,6 @@ RSpec.describe Ci::RetryJobService do
expect(subject).to be_success
expect(new_job.status).to eq after_status
end
-
- context 'when feature flag is disabled' do
- include_context 'with ci_retry_job_fix disabled'
-
- it 'enqueues the new job' do
- expect(subject).to be_success
- expect(new_job).to be_pending
- end
- end
end
end
@@ -435,15 +412,6 @@ RSpec.describe Ci::RetryJobService do
expect(subject).to be_success
expect(new_job.status).to eq after_status
end
-
- context 'when feature flag is disabled' do
- include_context 'with ci_retry_job_fix disabled'
-
- it 'enqueues the new job' do
- expect(subject).to be_success
- expect(new_job).to be_pending
- end
- end
end
end
@@ -487,19 +455,6 @@ RSpec.describe Ci::RetryJobService do
end
it_behaves_like 'checks enqueue_immediately?'
-
- context 'when feature flag is disabled' do
- include_context 'with ci_retry_job_fix disabled'
-
- it 'enqueues the new job' do
- expect(subject).to be_success
- expect(new_job).to be_pending
- end
-
- it_behaves_like 'checks enqueue_immediately?' do
- let(:enqueue_immediately) { false }
- end
- end
end
end
end
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 77345096537..07518c35fab 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -349,7 +349,10 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
it 'marks source bridge as pending' do
expect { service.execute(pipeline) }.to change { bridge.reload.status }.to('pending')
- .and not_change { bridge.reload.user }
+ end
+
+ it 'assigns the current user to the source bridge' do
+ expect { service.execute(pipeline) }.to change { bridge.reload.user }.to(user)
end
end
end
diff --git a/spec/services/ci/runners/assign_runner_service_spec.rb b/spec/services/ci/runners/assign_runner_service_spec.rb
index 08bb99830fb..92f6db2bdfb 100644
--- a/spec/services/ci/runners/assign_runner_service_spec.rb
+++ b/spec/services/ci/runners/assign_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::AssignRunnerService, '#execute' do
+RSpec.describe ::Ci::Runners::AssignRunnerService, '#execute', feature_category: :runner_fleet do
subject(:execute) { described_class.new(runner, project, user).execute }
let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
diff --git a/spec/services/ci/runners/bulk_delete_runners_service_spec.rb b/spec/services/ci/runners/bulk_delete_runners_service_spec.rb
index fa8af1100df..5e697565972 100644
--- a/spec/services/ci/runners/bulk_delete_runners_service_spec.rb
+++ b/spec/services/ci/runners/bulk_delete_runners_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::BulkDeleteRunnersService, '#execute' do
+RSpec.describe ::Ci::Runners::BulkDeleteRunnersService, '#execute', feature_category: :runner_fleet do
subject(:execute) { described_class.new(**service_args).execute }
let_it_be(:admin_user) { create(:user, :admin) }
diff --git a/spec/services/ci/runners/process_runner_version_update_service_spec.rb b/spec/services/ci/runners/process_runner_version_update_service_spec.rb
index b885138fc7a..d2a7e87b2d5 100644
--- a/spec/services/ci/runners/process_runner_version_update_service_spec.rb
+++ b/spec/services/ci/runners/process_runner_version_update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateService do
+RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateService, feature_category: :runner_fleet do
subject(:service) { described_class.new(version) }
let(:version) { '1.0.0' }
diff --git a/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb b/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
index 1690190320a..39082b5c0f4 100644
--- a/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
+++ b/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute' do
+RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute', feature_category: :runner_fleet do
include RunnerReleasesHelper
subject(:execute) { described_class.new.execute }
diff --git a/spec/services/ci/runners/register_runner_service_spec.rb b/spec/services/ci/runners/register_runner_service_spec.rb
index 2d1b109072f..47d399cb19a 100644
--- a/spec/services/ci/runners/register_runner_service_spec.rb
+++ b/spec/services/ci/runners/register_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
+RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute', feature_category: :runner_fleet do
let(:registration_token) { 'abcdefg123456' }
let(:token) {}
let(:args) { {} }
diff --git a/spec/services/ci/runners/reset_registration_token_service_spec.rb b/spec/services/ci/runners/reset_registration_token_service_spec.rb
index 79059712032..c8115236034 100644
--- a/spec/services/ci/runners/reset_registration_token_service_spec.rb
+++ b/spec/services/ci/runners/reset_registration_token_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::ResetRegistrationTokenService, '#execute' do
+RSpec.describe ::Ci::Runners::ResetRegistrationTokenService, '#execute', feature_category: :runner_fleet do
subject(:execute) { described_class.new(scope, current_user).execute }
let_it_be(:user) { build(:user) }
diff --git a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
index e5cba80d567..9921f9322bd 100644
--- a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
+++ b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute' do
+RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute', feature_category: :runner_fleet do
subject(:execute) { described_class.new(runner: runner, current_user: user, project_ids: project_ids).execute }
let_it_be(:owner_project) { create(:project) }
diff --git a/spec/services/ci/runners/unassign_runner_service_spec.rb b/spec/services/ci/runners/unassign_runner_service_spec.rb
index cf710cf6893..e91d4249473 100644
--- a/spec/services/ci/runners/unassign_runner_service_spec.rb
+++ b/spec/services/ci/runners/unassign_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::UnassignRunnerService, '#execute' do
+RSpec.describe ::Ci::Runners::UnassignRunnerService, '#execute', feature_category: :runner_fleet do
let_it_be(:project) { create(:project) }
let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
diff --git a/spec/services/ci/runners/unregister_runner_service_spec.rb b/spec/services/ci/runners/unregister_runner_service_spec.rb
index 77fc299e4e1..fb779e1a673 100644
--- a/spec/services/ci/runners/unregister_runner_service_spec.rb
+++ b/spec/services/ci/runners/unregister_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::UnregisterRunnerService, '#execute' do
+RSpec.describe ::Ci::Runners::UnregisterRunnerService, '#execute', feature_category: :runner_fleet do
subject(:execute) { described_class.new(runner, 'some_token').execute }
let(:runner) { create(:ci_runner) }
diff --git a/spec/services/ci/runners/update_runner_service_spec.rb b/spec/services/ci/runners/update_runner_service_spec.rb
index 1f953ac4cbb..86875df70a2 100644
--- a/spec/services/ci/runners/update_runner_service_spec.rb
+++ b/spec/services/ci/runners/update_runner_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Runners::UpdateRunnerService, '#execute' do
+RSpec.describe Ci::Runners::UpdateRunnerService, '#execute', feature_category: :runner_fleet do
subject(:execute) { described_class.new(runner).execute(params) }
let(:runner) { create(:ci_runner) }
diff --git a/spec/services/ci/test_failure_history_service_spec.rb b/spec/services/ci/test_failure_history_service_spec.rb
index c19df6e217b..10f6c6f5007 100644
--- a/spec/services/ci/test_failure_history_service_spec.rb
+++ b/spec/services/ci/test_failure_history_service_spec.rb
@@ -3,16 +3,19 @@
require 'spec_helper'
RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
- describe '#execute' do
- let(:project) { create(:project) }
- let(:pipeline) { create(:ci_empty_pipeline, status: :created, project: project) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let_it_be_with_reload(:pipeline) do
+ create(:ci_pipeline, status: :created, project: project, ref: project.default_branch)
+ end
+ describe '#execute' do
subject(:execute_service) { described_class.new(pipeline).execute }
context 'when pipeline has failed builds with test reports' do
- before do
+ let_it_be(:job) do
# The test report has 2 unit test failures
- create(:ci_build, :failed, :test_reports, pipeline: pipeline, project: project)
+ create(:ci_build, :failed, :test_reports, pipeline: pipeline)
end
it 'creates unit test failures records' do
@@ -22,6 +25,14 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
expect(Ci::UnitTestFailure.count).to eq(2)
end
+ it 'assigns partition_id to Ci::UnitTestFailure' do
+ execute_service
+
+ unit_test_failure_partition_ids = Ci::UnitTestFailure.distinct.pluck(:partition_id)
+
+ expect(unit_test_failure_partition_ids).to match_array([job.partition_id])
+ end
+
context 'when pipeline is not for the default branch' do
before do
pipeline.update_column(:ref, 'new-feature')
@@ -67,7 +78,7 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
# This other test report has 1 unique unit test failure which brings us to 3 total failures across all builds
# thus exceeding the limit of 2 for MAX_TRACKABLE_FAILURES
- create(:ci_build, :failed, :test_reports_with_duplicate_failed_test_names, pipeline: pipeline, project: project)
+ create(:ci_build, :failed, :test_reports_with_duplicate_failed_test_names, pipeline: pipeline)
end
it 'does not persist data' do
@@ -82,7 +93,7 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
context 'when test failure data have duplicates within the same payload (happens when the JUnit report has duplicate unit test names but have different failures)' do
before do
# The test report has 2 unit test failures but with the same unit test keys
- create(:ci_build, :failed, :test_reports_with_duplicate_failed_test_names, pipeline: pipeline, project: project)
+ create(:ci_build, :failed, :test_reports_with_duplicate_failed_test_names, pipeline: pipeline)
end
it 'does not fail but does not persist duplicate data' do
@@ -95,8 +106,8 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
context 'when pipeline has no failed builds with test reports' do
before do
- create(:ci_build, :test_reports, pipeline: pipeline, project: project)
- create(:ci_build, :failed, pipeline: pipeline, project: project)
+ create(:ci_build, :test_reports, pipeline: pipeline)
+ create(:ci_build, :failed, pipeline: pipeline)
end
it 'does not persist data' do
@@ -109,14 +120,10 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
end
describe '#should_track_failures?' do
- let(:project) { create(:project, :repository) }
- let(:pipeline) { create(:ci_empty_pipeline, status: :created, project: project, ref: project.default_branch) }
-
subject { described_class.new(pipeline).should_track_failures? }
- before do
- create(:ci_build, :test_reports, :failed, pipeline: pipeline, project: project)
- create(:ci_build, :test_reports, :failed, pipeline: pipeline, project: project)
+ let_it_be(:jobs) do
+ create_list(:ci_build, 2, :test_reports, :failed, pipeline: pipeline)
end
context 'when feature flag is enabled and pipeline ref is the default branch' do
diff --git a/spec/services/ci/track_failed_build_service_spec.rb b/spec/services/ci/track_failed_build_service_spec.rb
index d83e56f0669..676769d2fc7 100644
--- a/spec/services/ci/track_failed_build_service_spec.rb
+++ b/spec/services/ci/track_failed_build_service_spec.rb
@@ -21,19 +21,22 @@ RSpec.describe Ci::TrackFailedBuildService do
expect(response.success?).to be true
+ context = {
+ schema: described_class::SCHEMA_URL,
+ data: {
+ build_id: build.id,
+ build_name: build.name,
+ build_artifact_types: ["sast"],
+ exit_code: exit_code,
+ failure_reason: failure_reason,
+ project: project.id
+ }
+ }
+
expect_snowplow_event(
category: 'ci::build',
action: 'failed',
- context: [{
- schema: described_class::SCHEMA_URL,
- data: {
- build_id: build.id,
- build_name: build.name,
- build_artifact_types: ["sast"],
- exit_code: exit_code,
- failure_reason: failure_reason
- }
- }],
+ context: [context],
user: user,
project: project.id)
end
diff --git a/spec/services/ci/unlock_artifacts_service_spec.rb b/spec/services/ci/unlock_artifacts_service_spec.rb
index f21afc7fe9e..c15e1cb2b5d 100644
--- a/spec/services/ci/unlock_artifacts_service_spec.rb
+++ b/spec/services/ci/unlock_artifacts_service_spec.rb
@@ -5,11 +5,11 @@ require 'spec_helper'
RSpec.describe Ci::UnlockArtifactsService do
using RSpec::Parameterized::TableSyntax
- where(:tag, :ci_update_unlocked_job_artifacts) do
- false | false
- false | true
- true | false
- true | true
+ where(:tag) do
+ [
+ [false],
+ [true]
+ ]
end
with_them do
@@ -31,7 +31,6 @@ RSpec.describe Ci::UnlockArtifactsService do
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
- stub_feature_flags(ci_update_unlocked_job_artifacts: ci_update_unlocked_job_artifacts)
end
describe '#execute' do
@@ -69,17 +68,11 @@ RSpec.describe Ci::UnlockArtifactsService do
end
it 'unlocks job artifact records' do
- pending unless ci_update_unlocked_job_artifacts
-
expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(2)
end
it 'unlocks pipeline artifact records' do
- if ci_update_unlocked_job_artifacts
- expect { execute }.to change { ::Ci::PipelineArtifact.artifact_unlocked.count }.from(0).to(1)
- else
- expect { execute }.not_to change { ::Ci::PipelineArtifact.artifact_unlocked.count }
- end
+ expect { execute }.to change { ::Ci::PipelineArtifact.artifact_unlocked.count }.from(0).to(1)
end
end
@@ -111,17 +104,11 @@ RSpec.describe Ci::UnlockArtifactsService do
end
it 'unlocks job artifact records' do
- pending unless ci_update_unlocked_job_artifacts
-
expect { execute }.to change { ::Ci::JobArtifact.artifact_unlocked.count }.from(0).to(8)
end
it 'unlocks pipeline artifact records' do
- if ci_update_unlocked_job_artifacts
- expect { execute }.to change { ::Ci::PipelineArtifact.artifact_unlocked.count }.from(0).to(1)
- else
- expect { execute }.not_to change { ::Ci::PipelineArtifact.artifact_unlocked.count }
- end
+ expect { execute }.to change { ::Ci::PipelineArtifact.artifact_unlocked.count }.from(0).to(1)
end
end
end
diff --git a/spec/services/clusters/agents/filter_authorizations_service_spec.rb b/spec/services/clusters/agents/filter_authorizations_service_spec.rb
new file mode 100644
index 00000000000..62cff405d0c
--- /dev/null
+++ b/spec/services/clusters/agents/filter_authorizations_service_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Agents::FilterAuthorizationsService, feature_category: :continuous_integration do
+ describe '#execute' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ let(:agent_authorizations_without_env) do
+ [
+ build(:agent_project_authorization, project: project, agent: build(:cluster_agent, project: project)),
+ build(:agent_group_authorization, group: group, agent: build(:cluster_agent, project: project)),
+ ::Clusters::Agents::ImplicitAuthorization.new(agent: build(:cluster_agent, project: project))
+ ]
+ end
+
+ let(:filter_params) { {} }
+
+ subject(:execute_filter) { described_class.new(agent_authorizations, filter_params).execute }
+
+ context 'when there are no filters' do
+ let(:agent_authorizations) { agent_authorizations_without_env }
+
+ it 'returns the authorizations as is' do
+ expect(execute_filter).to eq agent_authorizations
+ end
+ end
+
+ context 'when filtering by environment' do
+ let(:agent_authorizations_with_env) do
+ [
+ build(
+ :agent_project_authorization,
+ project: project,
+ agent: build(:cluster_agent, project: project),
+ environments: ['staging', 'review/*', 'production']
+ ),
+ build(
+ :agent_group_authorization,
+ group: group,
+ agent: build(:cluster_agent, project: project),
+ environments: ['staging', 'review/*', 'production']
+ )
+ ]
+ end
+
+ let(:agent_authorizations_with_different_env) do
+ [
+ build(
+ :agent_project_authorization,
+ project: project,
+ agent: build(:cluster_agent, project: project),
+ environments: ['staging']
+ ),
+ build(
+ :agent_group_authorization,
+ group: group,
+ agent: build(:cluster_agent, project: project),
+ environments: ['staging']
+ )
+ ]
+ end
+
+ let(:agent_authorizations) do
+ (
+ agent_authorizations_without_env +
+ agent_authorizations_with_env +
+ agent_authorizations_with_different_env
+ )
+ end
+
+ let(:filter_params) { { environment: 'production' } }
+
+ it 'returns the authorizations with the given environment AND authorizations without any environment' do
+ expected_authorizations = agent_authorizations_with_env + agent_authorizations_without_env
+
+ expect(execute_filter).to match_array expected_authorizations
+ end
+
+ context 'when environment filter has a wildcard' do
+ let(:filter_params) { { environment: 'review/123' } }
+
+ it 'returns the authorizations with matching environments AND authorizations without any environment' do
+ expected_authorizations = agent_authorizations_with_env + agent_authorizations_without_env
+
+ expect(execute_filter).to match_array expected_authorizations
+ end
+ end
+
+ context 'when environment filter is nil' do
+ let(:filter_params) { { environment: nil } }
+
+ it 'returns the authorizations without any environment' do
+ expect(execute_filter).to match_array agent_authorizations_without_env
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/agents/refresh_authorization_service_spec.rb b/spec/services/clusters/agents/refresh_authorization_service_spec.rb
index 09bec7ae0e8..fa38bc202e7 100644
--- a/spec/services/clusters/agents/refresh_authorization_service_spec.rb
+++ b/spec/services/clusters/agents/refresh_authorization_service_spec.rb
@@ -113,6 +113,16 @@ RSpec.describe Clusters::Agents::RefreshAuthorizationService do
expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' })
end
+ context 'project does not belong to a group, and is in the same namespace as the agent' do
+ let(:root_ancestor) { create(:namespace) }
+ let(:added_project) { create(:project, namespace: root_ancestor) }
+
+ it 'creates an authorization record for the project' do
+ expect(subject).to be_truthy
+ expect(agent.authorized_projects).to contain_exactly(added_project)
+ end
+ end
+
context 'project does not belong to a group, and is authorizing itself' do
let(:root_ancestor) { create(:namespace) }
let(:added_project) { project }
diff --git a/spec/services/clusters/applications/install_service_spec.rb b/spec/services/clusters/applications/install_service_spec.rb
deleted file mode 100644
index d34b4dd943c..00000000000
--- a/spec/services/clusters/applications/install_service_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::InstallService do
- describe '#execute' do
- let(:application) { create(:clusters_applications_helm, :scheduled) }
- let!(:install_command) { application.install_command }
- let(:service) { described_class.new(application) }
- let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::API) }
-
- before do
- allow(service).to receive(:install_command).and_return(install_command)
- allow(service).to receive(:helm_api).and_return(helm_client)
- end
-
- context 'when there are no errors' do
- before do
- expect(helm_client).to receive(:install).with(install_command)
- allow(ClusterWaitForAppInstallationWorker).to receive(:perform_in).and_return(nil)
- end
-
- it 'make the application installing' do
- expect(application.cluster).not_to be_nil
- service.execute
-
- expect(application).to be_installing
- end
-
- it 'schedule async installation status check' do
- expect(ClusterWaitForAppInstallationWorker).to receive(:perform_in).once
-
- service.execute
- end
- end
-
- context 'when k8s cluster communication fails' do
- let(:error) { Kubeclient::HttpError.new(500, 'system failure', nil) }
-
- before do
- expect(helm_client).to receive(:install).with(install_command).and_raise(error)
- end
-
- include_examples 'logs kubernetes errors' do
- let(:error_name) { 'Kubeclient::HttpError' }
- let(:error_message) { 'system failure' }
- let(:error_code) { 500 }
- end
-
- it 'make the application errored' do
- service.execute
-
- expect(application).to be_errored
- expect(application.status_reason).to match('Kubernetes error: 500')
- end
- end
-
- context 'a non kubernetes error happens' do
- let(:application) { create(:clusters_applications_helm, :scheduled) }
- let(:error) { StandardError.new('something bad happened') }
-
- before do
- expect(helm_client).to receive(:install).with(install_command).and_raise(error)
- end
-
- include_examples 'logs kubernetes errors' do
- let(:error_name) { 'StandardError' }
- let(:error_message) { 'something bad happened' }
- let(:error_code) { nil }
- end
-
- it 'make the application errored' do
- service.execute
-
- expect(application).to be_errored
- expect(application.status_reason).to eq('Failed to install.')
- end
- end
- end
-end
diff --git a/spec/services/clusters/applications/prometheus_config_service_spec.rb b/spec/services/clusters/applications/prometheus_config_service_spec.rb
deleted file mode 100644
index 7399f250248..00000000000
--- a/spec/services/clusters/applications/prometheus_config_service_spec.rb
+++ /dev/null
@@ -1,162 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::PrometheusConfigService do
- include Gitlab::Routing.url_helpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:production) { create(:environment, project: project) }
- let_it_be(:cluster) { create(:cluster, :provided_by_user, projects: [project]) }
-
- let(:application) do
- create(:clusters_applications_prometheus, :installed, cluster: cluster)
- end
-
- subject { described_class.new(project, cluster, application).execute(input) }
-
- describe '#execute' do
- let(:input) do
- YAML.load_file(Rails.root.join('vendor/prometheus/values.yaml'))
- end
-
- context 'with alerts' do
- let!(:alert) do
- create(:prometheus_alert, project: project, environment: production)
- end
-
- it 'enables alertmanager' do
- expect(subject.dig('alertmanager', 'enabled')).to eq(true)
- end
-
- describe 'alertmanagerFiles' do
- let(:alertmanager) do
- subject.dig('alertmanagerFiles', 'alertmanager.yml')
- end
-
- it 'contains receivers and route' do
- expect(alertmanager.keys).to contain_exactly('receivers', 'route')
- end
-
- describe 'receivers' do
- let(:receiver) { alertmanager.dig('receivers', 0) }
- let(:webhook_config) { receiver.dig('webhook_configs', 0) }
-
- let(:notify_url) do
- notify_project_prometheus_alerts_url(project, format: :json)
- end
-
- it 'sets receiver' do
- expect(receiver['name']).to eq('gitlab')
- end
-
- it 'sets webhook_config' do
- expect(webhook_config).to eq(
- 'url' => notify_url,
- 'send_resolved' => true,
- 'http_config' => {
- 'bearer_token' => application.alert_manager_token
- }
- )
- end
- end
-
- describe 'route' do
- let(:route) { alertmanager.fetch('route') }
-
- it 'sets route' do
- expect(route).to eq(
- 'receiver' => 'gitlab',
- 'group_wait' => '30s',
- 'group_interval' => '5m',
- 'repeat_interval' => '4h'
- )
- end
- end
- end
-
- describe 'serverFiles' do
- let(:groups) { subject.dig('serverFiles', 'alerts', 'groups') }
-
- it 'sets the alerts' do
- rules = groups.dig(0, 'rules')
- expect(rules.size).to eq(1)
-
- expect(rules.first['alert']).to eq(alert.title)
- end
-
- context 'with parameterized queries' do
- let!(:alert) do
- create(:prometheus_alert,
- project: project,
- environment: production,
- prometheus_metric: metric,
- operator: PrometheusAlert.operators['gt'],
- threshold: 0)
- end
-
- let(:metric) do
- create(:prometheus_metric, query: query, project: project)
- end
-
- let(:query) { 'up{environment="{{ci_environment_slug}}"}' }
-
- it 'substitutes query variables' do
- expect(Gitlab::Prometheus::QueryVariables)
- .to receive(:call)
- .with(production, start_time: nil, end_time: nil)
- .and_call_original
-
- expr = groups.dig(0, 'rules', 0, 'expr')
- expect(expr).to eq("up{environment=\"#{production.slug}\"} > 0.0")
- end
- end
-
- context 'with multiple environments' do
- let(:staging) { create(:environment, project: project) }
-
- before do
- create(:prometheus_alert, project: project, environment: production)
- create(:prometheus_alert, project: project, environment: staging)
- end
-
- it 'sets alerts for multiple environment' do
- env_names = groups.map { |group| group['name'] }
- expect(env_names).to contain_exactly(
- "#{production.name}.rules",
- "#{staging.name}.rules"
- )
- end
-
- it 'substitutes query variables once per environment' do
- allow(Gitlab::Prometheus::QueryVariables).to receive(:call).and_call_original
-
- expect(Gitlab::Prometheus::QueryVariables)
- .to receive(:call)
- .with(production, start_time: nil, end_time: nil)
-
- expect(Gitlab::Prometheus::QueryVariables)
- .to receive(:call)
- .with(staging, start_time: nil, end_time: nil)
-
- subject
- end
- end
- end
- end
-
- context 'without alerts' do
- it 'disables alertmanager' do
- expect(subject.dig('alertmanager', 'enabled')).to eq(false)
- end
-
- it 'removes alertmanagerFiles' do
- expect(subject).not_to include('alertmanagerFiles')
- end
-
- it 'removes alerts' do
- expect(subject.dig('serverFiles', 'alerts')).to eq({})
- end
- end
- end
-end
diff --git a/spec/services/clusters/applications/upgrade_service_spec.rb b/spec/services/clusters/applications/upgrade_service_spec.rb
deleted file mode 100644
index 22fbb7ca6e3..00000000000
--- a/spec/services/clusters/applications/upgrade_service_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::UpgradeService do
- describe '#execute' do
- let(:application) { create(:clusters_applications_helm, :scheduled) }
- let!(:install_command) { application.install_command }
- let(:service) { described_class.new(application) }
- let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::API) }
-
- before do
- allow(service).to receive(:install_command).and_return(install_command)
- allow(service).to receive(:helm_api).and_return(helm_client)
- end
-
- context 'when there are no errors' do
- before do
- expect(helm_client).to receive(:update).with(install_command)
- allow(ClusterWaitForAppInstallationWorker).to receive(:perform_in).and_return(nil)
- end
-
- it 'make the application updating' do
- expect(application.cluster).not_to be_nil
- service.execute
-
- expect(application).to be_updating
- end
-
- it 'schedule async installation status check' do
- expect(ClusterWaitForAppInstallationWorker).to receive(:perform_in).once
-
- service.execute
- end
- end
-
- context 'when kubernetes cluster communication fails' do
- let(:error) { Kubeclient::HttpError.new(500, 'system failure', nil) }
-
- before do
- expect(helm_client).to receive(:update).with(install_command).and_raise(error)
- end
-
- include_examples 'logs kubernetes errors' do
- let(:error_name) { 'Kubeclient::HttpError' }
- let(:error_message) { 'system failure' }
- let(:error_code) { 500 }
- end
-
- it 'make the application errored' do
- service.execute
-
- expect(application).to be_update_errored
- expect(application.status_reason).to eq(_('Kubernetes error: %{error_code}') % { error_code: 500 })
- end
- end
-
- context 'a non kubernetes error happens' do
- let(:application) { create(:clusters_applications_helm, :scheduled) }
- let(:error) { StandardError.new('something bad happened') }
-
- before do
- expect(helm_client).to receive(:update).with(install_command).and_raise(error)
- end
-
- include_examples 'logs kubernetes errors' do
- let(:error_name) { 'StandardError' }
- let(:error_message) { 'something bad happened' }
- let(:error_code) { nil }
- end
-
- it 'make the application errored' do
- service.execute
-
- expect(application).to be_update_errored
- expect(application.status_reason).to eq(_('Failed to upgrade.'))
- end
- end
- end
-end
diff --git a/spec/services/database/consistency_check_service_spec.rb b/spec/services/database/consistency_check_service_spec.rb
index 6695e4b5e9f..d7dee50f7c2 100644
--- a/spec/services/database/consistency_check_service_spec.rb
+++ b/spec/services/database/consistency_check_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Database::ConsistencyCheckService do
+RSpec.describe Database::ConsistencyCheckService, feature_category: :database do
let(:batch_size) { 5 }
let(:max_batches) { 2 }
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index c952bcddd9a..31a3abda8c7 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe Deployments::UpdateEnvironmentService do
let(:external_url) { 'javascript:alert("hello")' }
it 'fails to update the tier due to validation error' do
- expect { subject.execute }.not_to change { environment.tier }
+ expect { subject.execute }.not_to change { environment.reload.tier }
end
it 'tracks an exception' do
diff --git a/spec/services/environments/create_for_build_service_spec.rb b/spec/services/environments/create_for_build_service_spec.rb
index 721822f355b..c7aadb20c01 100644
--- a/spec/services/environments/create_for_build_service_spec.rb
+++ b/spec/services/environments/create_for_build_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Environments::CreateForBuildService do
let(:merge_request) {}
describe '#execute' do
- subject { service.execute(job, merge_request: merge_request) }
+ subject { service.execute(job) }
shared_examples_for 'returning a correct environment' do
let(:expected_auto_stop_in_seconds) do
@@ -187,10 +187,11 @@ RSpec.describe Environments::CreateForBuildService do
end
context 'when merge_request is provided' do
+ let(:pipeline) { create(:ci_pipeline, project: project, merge_request: merge_request) }
let(:environment_name) { 'development' }
let(:attributes) { { environment: environment_name, options: { environment: { name: environment_name } } } }
let(:merge_request) { create(:merge_request, source_project: project) }
- let(:seed) { described_class.new(job, merge_request: merge_request) }
+ let(:seed) { described_class.new(job) }
context 'and environment does not exist' do
let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
@@ -216,7 +217,8 @@ RSpec.describe Environments::CreateForBuildService do
end
context 'when a pipeline contains a deployment job' do
- let!(:job) { build(:ci_build, :start_review_app, project: project) }
+ let(:pipeline) { create(:ci_pipeline, project: project, merge_request: merge_request) }
+ let!(:job) { build(:ci_build, :start_review_app, project: project, pipeline: pipeline) }
context 'and the environment does not exist' do
it 'creates the environment specified by the job' do
diff --git a/spec/services/environments/stop_service_spec.rb b/spec/services/environments/stop_service_spec.rb
index 4f766b73710..5f983a2151a 100644
--- a/spec/services/environments/stop_service_spec.rb
+++ b/spec/services/environments/stop_service_spec.rb
@@ -204,6 +204,8 @@ RSpec.describe Environments::StopService do
context 'and merge request has associated created_environments' do
let!(:environment1) { create(:environment, project: project, merge_request: merge_request) }
let!(:environment2) { create(:environment, project: project, merge_request: merge_request) }
+ let!(:environment3) { create(:environment, project: project) }
+ let!(:environment3_deployment) { create(:deployment, environment: environment3, sha: pipeline.sha) }
before do
subject
@@ -215,8 +217,7 @@ RSpec.describe Environments::StopService do
end
it 'does not affect environments that are not associated to the merge request' do
- expect(pipeline.environments_in_self_and_project_descendants.first.merge_request).to be_nil
- expect(pipeline.environments_in_self_and_project_descendants.first).to be_available
+ expect(environment3.reload).to be_available
end
end
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index c3ae062a4b2..e60954a19ed 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -72,12 +72,13 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:category) { described_class.name }
let(:action) { 'created' }
- let(:label) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
+ let(:label) { described_class::MR_EVENT_LABEL }
let(:namespace) { project.namespace }
let(:project) { merge_request.project }
let(:user) { merge_request.author }
+ let(:property) { described_class::MR_EVENT_PROPERTY }
let(:context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'merge_requests_users').to_context]
+ [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: property).to_context]
end
end
end
@@ -101,12 +102,13 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:category) { described_class.name }
let(:action) { 'closed' }
- let(:label) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
+ let(:label) { described_class::MR_EVENT_LABEL }
let(:namespace) { project.namespace }
let(:project) { merge_request.project }
let(:user) { merge_request.author }
+ let(:property) { described_class::MR_EVENT_PROPERTY }
let(:context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'merge_requests_users').to_context]
+ [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: property).to_context]
end
end
end
@@ -130,12 +132,13 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:category) { described_class.name }
let(:action) { 'merged' }
- let(:label) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
+ let(:label) { described_class::MR_EVENT_LABEL }
let(:namespace) { project.namespace }
let(:project) { merge_request.project }
let(:user) { merge_request.author }
+ let(:property) { described_class::MR_EVENT_PROPERTY }
let(:context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'merge_requests_users').to_context]
+ [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: property).to_context]
end
end
end
@@ -318,10 +321,7 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:namespace) { project.namespace }
let(:feature_flag_name) { :route_hll_to_snowplow }
let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_project_repo' }
- let(:context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll,
- event: 'action_active_users_project_repo').to_context]
- end
+ let(:property) { 'project_action' }
end
end
@@ -348,10 +348,7 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:namespace) { project.namespace }
let(:feature_flag_name) { :route_hll_to_snowplow }
let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_project_repo' }
- let(:context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll,
- event: 'action_active_users_project_repo').to_context]
- end
+ let(:property) { 'project_action' }
end
end
@@ -408,41 +405,28 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION }
end
- it 'records correct create payload with Snowplow event' do
- service.save_designs(author, create: [design])
-
- expect_snowplow_event(
- category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
- action: 'create',
- namespace: design.project.namespace,
- user: author,
- project: design.project,
- label: 'design_users'
- )
- end
-
- it 'records correct update payload with Snowplow event' do
- service.save_designs(author, update: [design])
+ describe 'Snowplow tracking' do
+ let(:project) { design.project }
+ let(:namespace) { project.namespace }
+ let(:category) { described_class.name }
+ let(:property) { Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s }
+ let(:label) { ::EventCreateService::DEGIGN_EVENT_LABEL }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
- expect_snowplow_event(
- category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
- action: 'update',
- namespace: design.project.namespace,
- user: author,
- project: design.project,
- label: 'design_users'
- )
- end
+ context 'for create event' do
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ subject(:design_service) { service.save_designs(author, create: [design]) }
- context 'when FF is disabled' do
- before do
- stub_feature_flags(route_hll_to_snowplow_phase2: false)
+ let(:action) { 'create' }
+ end
end
- it 'doesnt emit snowwplow events', :snowplow do
- subject
+ context 'for update event' do
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ subject(:design_service) { service.save_designs(author, update: [design]) }
- expect_no_snowplow_event
+ let(:action) { 'update' }
+ end
end
end
end
@@ -469,29 +453,17 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION }
end
- it 'records correct payload with Snowplow event' do
- service.destroy_designs([design], author)
-
- expect_snowplow_event(
- category: Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s,
- action: 'destroy',
- namespace: design.project.namespace,
- user: author,
- project: design.project,
- label: 'design_users'
- )
- end
-
- context 'when FF is disabled' do
- before do
- stub_feature_flags(route_hll_to_snowplow_phase2: false)
- end
-
- it 'doesnt emit snowplow events', :snowplow do
- subject
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ subject(:design_service) { service.destroy_designs([design], author) }
- expect_no_snowplow_event
- end
+ let(:project) { design.project }
+ let(:namespace) { project.namespace }
+ let(:category) { described_class.name }
+ let(:action) { 'destroy' }
+ let(:user) { author }
+ let(:property) { Gitlab::UsageDataCounters::TrackUniqueEvents::DESIGN_ACTION.to_s }
+ let(:label) { ::EventCreateService::DEGIGN_EVENT_LABEL }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
end
end
end
@@ -519,12 +491,13 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:note) { create(:diff_note_on_merge_request) }
let(:category) { described_class.name }
let(:action) { 'commented' }
- let(:label) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
+ let(:property) { described_class::MR_EVENT_PROPERTY }
+ let(:label) { described_class::MR_EVENT_LABEL }
let(:namespace) { project.namespace }
let(:project) { note.project }
let(:user) { author }
let(:context) do
- [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'merge_requests_users').to_context]
+ [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: property).to_context]
end
end
end
diff --git a/spec/services/feature_flags/hook_service_spec.rb b/spec/services/feature_flags/hook_service_spec.rb
index 19c935e43f3..f3edaca52a9 100644
--- a/spec/services/feature_flags/hook_service_spec.rb
+++ b/spec/services/feature_flags/hook_service_spec.rb
@@ -14,14 +14,14 @@ RSpec.describe FeatureFlags::HookService do
subject(:service) { described_class.new(feature_flag, user) }
- describe 'HOOK_NAME' do
- specify { expect(described_class::HOOK_NAME).to eq(:feature_flag_hooks) }
- end
-
before do
allow(Gitlab::DataBuilder::FeatureFlag).to receive(:build).with(feature_flag, user).once.and_return(hook_data)
end
+ describe 'HOOK_NAME' do
+ specify { expect(described_class::HOOK_NAME).to eq(:feature_flag_hooks) }
+ end
+
it 'calls feature_flag.project.execute_hooks' do
expect(feature_flag.project).to receive(:execute_hooks).with(hook_data, described_class::HOOK_NAME)
diff --git a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb b/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
index b83037f80cd..ef77958fa60 100644
--- a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
+++ b/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::FetchGoogleIpListService,
- :use_clean_rails_memory_store_caching, :clean_gitlab_redis_rate_limiting do
+RSpec.describe GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
+:clean_gitlab_redis_rate_limiting, feature_category: :continuous_integration do
include StubRequests
let(:google_cloud_ips) { File.read(Rails.root.join('spec/fixtures/cdn/google_cloud.json')) }
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index f2dbb69f855..2791203f395 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -8,8 +8,8 @@ RSpec.describe Groups::DestroyService do
let!(:nested_group) { create(:group, parent: group) }
let!(:project) { create(:project, :repository, :legacy_storage, namespace: group) }
let!(:notification_setting) { create(:notification_setting, source: group) }
- let(:gitlab_shell) { Gitlab::Shell.new }
let(:remove_path) { group.path + "+#{group.id}+deleted" }
+ let(:removed_repo) { Gitlab::Git::Repository.new(project.repository_storage, remove_path, nil, nil) }
before do
group.add_member(user, Gitlab::Access::OWNER)
@@ -70,8 +70,11 @@ RSpec.describe Groups::DestroyService do
end
it 'verifies that paths have been deleted' do
- expect(TestEnv.storage_dir_exists?(project.repository_storage, group.path)).to be_falsey
- expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_falsey
+ Gitlab::GitalyClient::NamespaceService.allow do
+ expect(Gitlab::GitalyClient::NamespaceService.new(project.repository_storage)
+ .exists?(group.path)).to be_falsey
+ end
+ expect(removed_repo).not_to exist
end
end
end
@@ -98,8 +101,11 @@ RSpec.describe Groups::DestroyService do
end
it 'verifies original paths and projects still exist' do
- expect(TestEnv.storage_dir_exists?(project.repository_storage, group.path)).to be_truthy
- expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_falsey
+ Gitlab::GitalyClient::NamespaceService.allow do
+ expect(Gitlab::GitalyClient::NamespaceService.new(project.repository_storage)
+ .exists?(group.path)).to be_truthy
+ end
+ expect(removed_repo).not_to exist
expect(Project.unscoped.count).to eq(1)
expect(Group.unscoped.count).to eq(2)
end
@@ -150,7 +156,7 @@ RSpec.describe Groups::DestroyService do
let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: group) }
it 'removes repository' do
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
+ expect(project.repository.raw).not_to exist
end
end
@@ -158,7 +164,7 @@ RSpec.describe Groups::DestroyService do
let!(:project) { create(:project, :empty_repo, namespace: group) }
it 'removes repository' do
- expect(gitlab_shell.repository_exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
+ expect(project.repository.raw).not_to exist
end
end
end
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index 66b50704939..d41acbcc2de 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -148,6 +148,14 @@ RSpec.describe Groups::ImportExport::ImportService do
action: 'create',
label: 'import_group_from_file'
)
+
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
+ )
end
it 'removes import file' do
@@ -235,6 +243,14 @@ RSpec.describe Groups::ImportExport::ImportService do
)
service.execute
+
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
+ )
end
end
end
@@ -275,6 +291,14 @@ RSpec.describe Groups::ImportExport::ImportService do
action: 'create',
label: 'import_group_from_file'
)
+
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
+ )
end
it 'removes import file' do
@@ -352,6 +376,24 @@ RSpec.describe Groups::ImportExport::ImportService do
expect(service.execute).to be_truthy
end
+ it 'tracks the event' do
+ service.execute
+
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_group_from_file'
+ )
+
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
+ )
+ end
+
it 'logs the import success' do
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
diff --git a/spec/services/import/bitbucket_server_service_spec.rb b/spec/services/import/bitbucket_server_service_spec.rb
index 0b9fe10e95a..555812ca9cf 100644
--- a/spec/services/import/bitbucket_server_service_spec.rb
+++ b/spec/services/import/bitbucket_server_service_spec.rb
@@ -31,6 +31,25 @@ RSpec.describe Import::BitbucketServerService do
allow(subject).to receive(:authorized?).and_return(true)
end
+ context 'execute' do
+ before do
+ allow(subject).to receive(:authorized?).and_return(true)
+ allow(client).to receive(:repo).with(project_key, repo_slug).and_return(double(repo))
+ end
+
+ it 'tracks an access level event' do
+ subject.execute(credentials)
+
+ expect_snowplow_event(
+ category: 'Import::BitbucketServerService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { import_type: 'bitbucket', user_role: 'Owner' }
+ )
+ end
+ end
+
context 'when no repo is found' do
before do
allow(subject).to receive(:authorized?).and_return(true)
diff --git a/spec/services/import/github/gists_import_service_spec.rb b/spec/services/import/github/gists_import_service_spec.rb
new file mode 100644
index 00000000000..c5d73e6479d
--- /dev/null
+++ b/spec/services/import/github/gists_import_service_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::Github::GistsImportService, feature_category: :importer do
+ subject(:import) { described_class.new(user, params) }
+
+ let_it_be(:user) { create(:user) }
+ let(:params) { { github_access_token: 'token' } }
+ let(:import_status) { instance_double('Gitlab::GithubGistsImport::Status') }
+
+ describe '#execute', :aggregate_failures do
+ before do
+ allow(Gitlab::GithubGistsImport::Status).to receive(:new).and_return(import_status)
+ end
+
+ context 'when import in progress' do
+ let(:expected_result) do
+ {
+ status: :error,
+ http_status: 422,
+ message: 'Import already in progress'
+ }
+ end
+
+ it 'returns error' do
+ expect(import_status).to receive(:started?).and_return(true)
+ expect(import.execute).to eq(expected_result)
+ end
+ end
+
+ context 'when import was not started' do
+ it 'returns success' do
+ encrypted_token = Gitlab::CryptoHelper.aes256_gcm_encrypt(params[:github_access_token])
+ expect(import_status).to receive(:started?).and_return(false)
+ expect(Gitlab::CryptoHelper)
+ .to receive(:aes256_gcm_encrypt).with(params[:github_access_token])
+ .and_return(encrypted_token)
+ expect(Gitlab::GithubGistsImport::StartImportWorker)
+ .to receive(:perform_async).with(user.id, encrypted_token)
+ expect(import_status).to receive(:start!)
+
+ expect(import.execute).to eq({ status: :success })
+ end
+ end
+ end
+end
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 38d84009f08..d1b372c5e87 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -82,9 +82,16 @@ RSpec.describe Import::GithubService do
end
context 'when there is no repository size limit defined' do
- it 'skips the check and succeeds' do
+      it 'skips the check, succeeds, and tracks an access level event' do
expect(subject.execute(access_params, :github)).to include(status: :success)
expect(settings).to have_received(:write).with(nil)
+ expect_snowplow_event(
+ category: 'Import::GithubService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { import_type: 'github', user_role: 'Owner' }
+ )
end
end
@@ -98,6 +105,13 @@ RSpec.describe Import::GithubService do
it 'succeeds when the repository is smaller than the limit' do
expect(subject.execute(access_params, :github)).to include(status: :success)
expect(settings).to have_received(:write).with(nil)
+ expect_snowplow_event(
+ category: 'Import::GithubService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { import_type: 'github', user_role: 'Not a member' }
+ )
end
it 'returns error when the repository is larger than the limit' do
@@ -118,6 +132,13 @@ RSpec.describe Import::GithubService do
it 'succeeds when the repository is smaller than the limit' do
expect(subject.execute(access_params, :github)).to include(status: :success)
expect(settings).to have_received(:write).with(nil)
+ expect_snowplow_event(
+ category: 'Import::GithubService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { import_type: 'github', user_role: 'Owner' }
+ )
end
it 'returns error when the repository is larger than the limit' do
diff --git a/spec/services/incident_management/incidents/create_service_spec.rb b/spec/services/incident_management/incidents/create_service_spec.rb
index 851b21e1227..7db762b9c5b 100644
--- a/spec/services/incident_management/incidents/create_service_spec.rb
+++ b/spec/services/incident_management/incidents/create_service_spec.rb
@@ -66,6 +66,26 @@ RSpec.describe IncidentManagement::Incidents::CreateService do
end
end
end
+
+ context 'with an alert' do
+ subject(:create_incident) { described_class.new(project, user, title: title, description: description, alert: alert).execute }
+
+ context 'when the alert is valid' do
+ let(:alert) { create(:alert_management_alert, project: project) }
+
+ it 'associates the alert with the incident' do
+ expect(create_incident[:issue].reload.alert_management_alerts).to match_array([alert])
+ end
+ end
+
+ context 'when the alert is not valid' do
+ let(:alert) { create(:alert_management_alert, :with_validation_errors, project: project) }
+
+ it 'does not associate the alert with the incident' do
+ expect(create_incident[:issue].reload.alert_management_alerts).to be_empty
+ end
+ end
+ end
end
context 'when incident has no title' do
@@ -89,10 +109,6 @@ RSpec.describe IncidentManagement::Incidents::CreateService do
subject(:create_incident) { described_class.new(project, user, title: title, description: description, alert: alert).execute }
- it 'associates the alert with the incident' do
- expect(create_incident[:issue].alert_management_alert).to eq(alert)
- end
-
context 'the alert prevents the issue from saving' do
let(:alert) { create(:alert_management_alert, :with_validation_errors, project: project) }
diff --git a/spec/services/incident_management/link_alerts/create_service_spec.rb b/spec/services/incident_management/link_alerts/create_service_spec.rb
new file mode 100644
index 00000000000..fab28771174
--- /dev/null
+++ b/spec/services/incident_management/link_alerts/create_service_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::LinkAlerts::CreateService, feature_category: :incident_management do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:another_project) { create(:project) }
+ let_it_be(:linked_alert) { create(:alert_management_alert, project: project) }
+ let_it_be(:alert1) { create(:alert_management_alert, project: project) }
+ let_it_be(:alert2) { create(:alert_management_alert, project: project) }
+ let_it_be(:external_alert) { create(:alert_management_alert, project: another_project) }
+ let_it_be(:incident) { create(:incident, project: project, alert_management_alerts: [linked_alert]) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:another_developer) { create(:user) }
+
+ before_all do
+ project.add_guest(guest)
+ project.add_developer(developer)
+ project.add_developer(another_developer)
+
+ another_project.add_guest(guest)
+ another_project.add_developer(developer)
+ end
+
+ describe '#execute' do
+ subject(:execute) { described_class.new(incident, current_user, alert_references).execute }
+
+ let(:alert_references) { [alert1.to_reference, alert2.details_url] }
+
+ context 'when current user is a guest' do
+ let(:current_user) { guest }
+
+ it 'responds with error', :aggregate_failures do
+ response = execute
+
+ expect(response).to be_error
+ expect(response.message).to eq('You have insufficient permissions to manage alerts for this project')
+ end
+
+ it 'does not link alerts to the incident' do
+ expect { execute }.not_to change { incident.reload.alert_management_alerts.to_a }
+ end
+ end
+
+ context 'when current user is a developer' do
+ let(:current_user) { developer }
+
+ it 'responds with success', :aggregate_failures do
+ response = execute
+
+ expect(response).to be_success
+ expect(response.payload[:incident]).to eq(incident)
+ end
+
+ it 'links alerts to the incident' do
+ expect { execute }
+ .to change { incident.reload.alert_management_alerts.to_a }
+ .from([linked_alert])
+ .to match_array([linked_alert, alert1, alert2])
+ end
+
+ context 'when linking an already linked alert' do
+ let(:alert_references) { [linked_alert.details_url] }
+
+ it 'does not change incident alerts list' do
+ expect { execute }.not_to change { incident.reload.alert_management_alerts.to_a }
+ end
+ end
+
+ context 'when linking an alert from another project' do
+ let(:alert_references) { [external_alert.details_url] }
+
+ it 'links an external alert to the incident' do
+ expect { execute }
+ .to change { incident.reload.alert_management_alerts.to_a }
+ .from([linked_alert])
+ .to match_array([linked_alert, external_alert])
+ end
+ end
+ end
+
+ context 'when current user does not have permission to read alerts on external project' do
+ let(:current_user) { another_developer }
+
+ context 'when linking alerts from current and external projects' do
+ let(:alert_references) { [alert1.details_url, external_alert.details_url] }
+
+ it 'links only alerts the current user can read' do
+ expect { execute }
+ .to change { incident.reload.alert_management_alerts.to_a }
+ .from([linked_alert])
+ .to match_array([linked_alert, alert1])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/incident_management/link_alerts/destroy_service_spec.rb b/spec/services/incident_management/link_alerts/destroy_service_spec.rb
new file mode 100644
index 00000000000..13885ab7d5d
--- /dev/null
+++ b/spec/services/incident_management/link_alerts/destroy_service_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::LinkAlerts::DestroyService, feature_category: :incident_management do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:another_project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project) }
+ let_it_be(:another_incident) { create(:incident, project: project) }
+ let_it_be(:internal_alert) { create(:alert_management_alert, project: project, issue: incident) }
+ let_it_be(:external_alert) { create(:alert_management_alert, project: another_project, issue: incident) }
+ let_it_be(:unrelated_alert) { create(:alert_management_alert, project: project, issue: another_incident) }
+
+ before_all do
+ project.add_guest(guest)
+ project.add_developer(developer)
+ end
+
+ describe '#execute' do
+ subject(:execute) { described_class.new(incident, current_user, alert).execute }
+
+ let(:alert) { internal_alert }
+
+ context 'when current user is a guest' do
+ let(:current_user) { guest }
+
+ it 'responds with error', :aggregate_failures do
+ response = execute
+
+ expect(response).to be_error
+ expect(response.message).to eq('You have insufficient permissions to manage alerts for this project')
+ end
+
+ it 'does not unlink alert from the incident' do
+ expect { execute }.not_to change { incident.reload.alert_management_alerts.to_a }
+ end
+ end
+
+ context 'when current user is a developer' do
+ let(:current_user) { developer }
+
+ it 'responds with success', :aggregate_failures do
+ response = execute
+
+ expect(response).to be_success
+ expect(response.payload[:incident]).to eq(incident)
+ end
+
+ context 'when unlinking internal alert' do
+ let(:alert) { internal_alert }
+
+ it 'unlinks the alert' do
+ expect { execute }
+ .to change { incident.reload.alert_management_alerts.to_a }
+ .to match_array([external_alert])
+ end
+ end
+
+ context 'when unlinking external alert' do
+ let(:alert) { external_alert }
+
+ it 'unlinks the alert' do
+ expect { execute }
+ .to change { incident.reload.alert_management_alerts.to_a }
+ .to match_array([internal_alert])
+ end
+ end
+
+ context 'when unlinking an alert not related to the incident' do
+ let(:alert) { unrelated_alert }
+
+ it "does not change the incident's alerts" do
+ expect { execute }.not_to change { incident.reload.alert_management_alerts.to_a }
+ end
+
+ it "does not change another incident's alerts" do
+ expect { execute }.not_to change { another_incident.reload.alert_management_alerts.to_a }
+ end
+
+ it "does not change the alert's incident" do
+ expect { execute }.not_to change { unrelated_alert.reload.issue }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
index 572b1a20166..2fda789cf56 100644
--- a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService do
let(:webhook_payload) { Gitlab::Json.parse(fixture_file('pager_duty/webhook_incident_trigger.json')) }
let(:parsed_payload) { ::PagerDuty::WebhookPayloadParser.call(webhook_payload) }
- let(:incident_payload) { parsed_payload.first['incident'] }
+ let(:incident_payload) { parsed_payload['incident'] }
subject(:execute) { described_class.new(project, incident_payload).execute }
@@ -41,14 +41,14 @@ RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService do
expect(execute.payload[:issue].description).to eq(
<<~MARKDOWN.chomp
- **Incident:** [My new incident](https://webdemo.pagerduty.com/incidents/PRORDTY)#{markdown_line_break}
- **Incident number:** 33#{markdown_line_break}
+ **Incident:** [[FILTERED]](https://gitlab-1.pagerduty.com/incidents/Q1XZUF87W1HB5A)#{markdown_line_break}
+ **Incident number:** 2#{markdown_line_break}
**Urgency:** high#{markdown_line_break}
**Status:** triggered#{markdown_line_break}
- **Incident key:** #{markdown_line_break}
- **Created at:** 26 September 2017, 3:14PM (UTC)#{markdown_line_break}
- **Assignees:** [Laura Haley](https://webdemo.pagerduty.com/users/P553OPV)#{markdown_line_break}
- **Impacted services:** [Production XDB Cluster](https://webdemo.pagerduty.com/services/PN49J75)
+ **Incident key:** [FILTERED]#{markdown_line_break}
+ **Created at:** 30 November 2022, 8:46AM (UTC)#{markdown_line_break}
+ **Assignees:** [Rajendra Kadam](https://gitlab-1.pagerduty.com/users/PIN0B5C)#{markdown_line_break}
+ **Impacted service:** [Test service](https://gitlab-1.pagerduty.com/services/PK6IKMT)
MARKDOWN
)
end
diff --git a/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb b/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
index 8b6eb21c25d..e2aba0b61af 100644
--- a/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe IncidentManagement::PagerDuty::ProcessWebhookService do
end
it 'processes issues' do
- incident_payload = ::PagerDuty::WebhookPayloadParser.call(webhook_payload).first['incident']
+ incident_payload = ::PagerDuty::WebhookPayloadParser.call(webhook_payload)['incident']
expect(::IncidentManagement::PagerDuty::ProcessIncidentWorker)
.to receive(:perform_async)
diff --git a/spec/services/incident_management/timeline_events/create_service_spec.rb b/spec/services/incident_management/timeline_events/create_service_spec.rb
index b10862a78b5..a3810879c65 100644
--- a/spec/services/incident_management/timeline_events/create_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/create_service_spec.rb
@@ -55,6 +55,15 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
end
it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_created
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { project.namespace.reload }
+ let(:category) { described_class.to_s }
+ let(:user) { current_user }
+ let(:action) { 'incident_management_timeline_event_created' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
end
subject(:execute) { service.execute }
@@ -276,6 +285,15 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_created
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { project.namespace.reload }
+ let(:category) { described_class.to_s }
+ let(:user) { current_user }
+ let(:action) { 'incident_management_timeline_event_created' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
+
it 'successfully creates a database record', :aggregate_failures do
expect { execute }.to change { ::IncidentManagement::TimelineEvent.count }.by(1)
end
diff --git a/spec/services/incident_management/timeline_events/destroy_service_spec.rb b/spec/services/incident_management/timeline_events/destroy_service_spec.rb
index e1b258960ae..f90ff72a2bf 100644
--- a/spec/services/incident_management/timeline_events/destroy_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/destroy_service_spec.rb
@@ -65,6 +65,15 @@ RSpec.describe IncidentManagement::TimelineEvents::DestroyService do
end
it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_deleted
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { project.namespace.reload }
+ let(:category) { described_class.to_s }
+ let(:user) { current_user }
+ let(:action) { 'incident_management_timeline_event_deleted' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
end
end
end
diff --git a/spec/services/incident_management/timeline_events/update_service_spec.rb b/spec/services/incident_management/timeline_events/update_service_spec.rb
index 2373a73e108..ff802109715 100644
--- a/spec/services/incident_management/timeline_events/update_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/update_service_spec.rb
@@ -2,10 +2,20 @@
require 'spec_helper'
-RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
+RSpec.describe IncidentManagement::TimelineEvents::UpdateService, feature_category: :incident_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:incident) { create(:incident, project: project) }
+ let_it_be(:tag1) { create(:incident_management_timeline_event_tag, project: project, name: 'Tag 1') }
+ let_it_be(:tag2) { create(:incident_management_timeline_event_tag, project: project, name: 'Tag 2') }
+ let_it_be(:tag3) { create(:incident_management_timeline_event_tag, project: project, name: 'Tag 3') }
+
+ let!(:tag_link1) do
+ create(:incident_management_timeline_event_tag_link,
+ timeline_event: timeline_event,
+ timeline_event_tag: tag3
+ )
+ end
let!(:timeline_event) { create(:incident_management_timeline_event, project: project, incident: incident) }
let(:occurred_at) { 1.minute.ago }
@@ -13,6 +23,24 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
let(:current_user) { user }
describe '#execute' do
+ shared_examples 'successful tag response' do
+ it_behaves_like 'successful response'
+
+ it 'adds the new tag' do
+ expect { execute }.to change { timeline_event.timeline_event_tags.count }.by(1)
+ end
+
+ it 'adds the new tag link' do
+ expect { execute }.to change { IncidentManagement::TimelineEventTagLink.count }.by(1)
+ end
+
+ it 'returns the new tag in response' do
+ timeline_event = execute.payload[:timeline_event]
+
+ expect(timeline_event.timeline_event_tags.pluck_names).to contain_exactly(tag1.name, tag3.name)
+ end
+ end
+
shared_examples 'successful response' do
it 'responds with success', :aggregate_failures do
expect(execute).to be_success
@@ -20,6 +48,14 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
end
it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_edited
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { project.namespace.reload }
+ let(:category) { described_class.to_s }
+ let(:action) { 'incident_management_timeline_event_edited' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
end
shared_examples 'error response' do |message|
@@ -67,7 +103,7 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
it_behaves_like 'passing the correct was_changed value', :occurred_at_and_note
context 'when note is nil' do
- let(:params) { { occurred_at: occurred_at } }
+ let(:params) { { occurred_at: occurred_at, timeline_event_tag_names: [tag3.name, tag2.name] } }
it_behaves_like 'successful response'
it_behaves_like 'passing the correct was_changed value', :occurred_at
@@ -79,18 +115,30 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
it 'updates occurred_at' do
expect { execute }.to change { timeline_event.occurred_at }.to(params[:occurred_at])
end
+
+ it 'updates the tags' do
+ expect { execute }.to change { timeline_event.timeline_event_tags.count }.by(1)
+ end
end
context 'when note is blank' do
- let(:params) { { note: '', occurred_at: occurred_at } }
+ let(:params) { { note: '', occurred_at: occurred_at, timeline_event_tag_names: [tag3.name, tag2.name] } }
it_behaves_like 'error response', "Timeline text can't be blank"
+
+      it 'does not add the tags as it rolls back the transaction' do
+ expect { execute }.not_to change { timeline_event.timeline_event_tags.count }
+ end
end
context 'when note is more than 280 characters long' do
- let(:params) { { note: 'n' * 281, occurred_at: occurred_at } }
+ let(:params) { { note: 'n' * 281, occurred_at: occurred_at, timeline_event_tag_names: [tag3.name, tag2.name] } }
it_behaves_like 'error response', 'Timeline text is too long (maximum is 280 characters)'
+
+      it 'does not add the tags as it rolls back the transaction' do
+ expect { execute }.not_to change { timeline_event.timeline_event_tags.count }
+ end
end
context 'when occurred_at is nil' do
@@ -109,9 +157,13 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
end
context 'when occurred_at is blank' do
- let(:params) { { note: 'Updated note', occurred_at: '' } }
+ let(:params) { { note: 'Updated note', occurred_at: '', timeline_event_tag_names: [tag3.name, tag2.name] } }
it_behaves_like 'error response', "Occurred at can't be blank"
+
+      it 'does not add the tags as it rolls back the transaction' do
+ expect { execute }.not_to change { timeline_event.timeline_event_tags.count }
+ end
end
context 'when both occurred_at and note is nil' do
@@ -142,6 +194,112 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
it_behaves_like 'error response',
'You have insufficient permissions to manage timeline events for this incident'
end
+
+ context 'when timeline event tags are passed' do
+ context 'when predefined tags are passed' do
+ let(:params) do
+ {
+ note: 'Updated note',
+ occurred_at: occurred_at,
+ timeline_event_tag_names: ['start time', 'end time']
+ }
+ end
+
+ it 'returns the new tag in response' do
+ timeline_event = execute.payload[:timeline_event]
+
+ expect(timeline_event.timeline_event_tags.pluck_names).to contain_exactly('Start time', 'End time')
+ end
+
+ it 'creates the predefined tags on the project' do
+ execute
+
+ expect(project.incident_management_timeline_event_tags.pluck_names).to include('Start time', 'End time')
+ end
+ end
+
+ context 'when they exist' do
+ let(:params) do
+ {
+ note: 'Updated note',
+ occurred_at: occurred_at,
+ timeline_event_tag_names: [tag3.name, tag1.name]
+ }
+ end
+
+ it_behaves_like 'successful tag response'
+
+ context 'when tag name is of random case' do
+ let(:params) do
+ {
+ note: 'Updated note',
+ occurred_at: occurred_at,
+ timeline_event_tag_names: ['tAg 3', 'TaG 1']
+ }
+ end
+
+ it_behaves_like 'successful tag response'
+ end
+
+ context 'when tag is removed' do
+ let(:params) { { note: 'Updated note', occurred_at: occurred_at, timeline_event_tag_names: [tag2.name] } }
+
+ it_behaves_like 'successful response'
+
+ it 'adds the new tag and removes the old tag' do
+          # Since it adds a tag (+1) and removes the old tag (-1), the net change in count is 0
+ expect { execute }.to change { timeline_event.timeline_event_tags.count }.by(0)
+ end
+
+ it 'adds the new tag link and removes the old tag link' do
+          # Since it adds a tag link (+1) and removes the old tag link (-1), the net change in count is 0
+ expect { execute }.to change { IncidentManagement::TimelineEventTagLink.count }.by(0)
+ end
+
+ it 'returns the new tag and does not contain the old tag in response' do
+ timeline_event = execute.payload[:timeline_event]
+
+ expect(timeline_event.timeline_event_tags.pluck_names).to contain_exactly(tag2.name)
+ end
+ end
+
+ context 'when all assigned tags are removed' do
+ let(:params) { { note: 'Updated note', occurred_at: occurred_at, timeline_event_tag_names: [] } }
+
+ it_behaves_like 'successful response'
+
+ it 'removes all the assigned tags' do
+ expect { execute }.to change { timeline_event.timeline_event_tags.count }.by(-1)
+ end
+
+ it 'removes all the assigned tag links' do
+ expect { execute }.to change { IncidentManagement::TimelineEventTagLink.count }.by(-1)
+ end
+
+ it 'does not contain any tags in response' do
+ timeline_event = execute.payload[:timeline_event]
+
+ expect(timeline_event.timeline_event_tags.pluck_names).to be_empty
+ end
+ end
+ end
+
+ context 'when they do not exist' do
+ let(:params) do
+ {
+ note: 'Updated note 2',
+ occurred_at: occurred_at,
+ timeline_event_tag_names: ['non existing tag']
+ }
+ end
+
+ it_behaves_like 'error response', "Following tags don't exist: [\"non existing tag\"]"
+
+ it 'does not update the note' do
+ expect { execute }.not_to change { timeline_event.reload.note }
+ end
+ end
+ end
end
context 'when user does not have permissions' do
diff --git a/spec/services/issuable/discussions_list_service_spec.rb b/spec/services/issuable/discussions_list_service_spec.rb
index 2ce47f42a72..ecdd8d031c9 100644
--- a/spec/services/issuable/discussions_list_service_spec.rb
+++ b/spec/services/issuable/discussions_list_service_spec.rb
@@ -17,6 +17,19 @@ RSpec.describe Issuable::DiscussionsListService do
let_it_be(:issuable) { create(:issue, project: project) }
it_behaves_like 'listing issuable discussions', :guest, 1, 7
+
+ context 'without notes widget' do
+ let_it_be(:issuable) { create(:work_item, :issue, project: project) }
+
+ before do
+ stub_const('WorkItems::Type::BASE_TYPES', { issue: { name: 'NoNotesWidget', enum_value: 0 } })
+ stub_const('WorkItems::Type::WIDGETS_FOR_TYPE', { issue: [::WorkItems::Widgets::Description] })
+ end
+
+ it "returns no notes" do
+ expect(discussions_service.execute).to be_empty
+ end
+ end
end
describe 'fetching notes for merge requests' do
diff --git a/spec/services/issue_links/create_service_spec.rb b/spec/services/issue_links/create_service_spec.rb
index 9cb5980716a..88e8470658d 100644
--- a/spec/services/issue_links/create_service_spec.rb
+++ b/spec/services/issue_links/create_service_spec.rb
@@ -41,6 +41,14 @@ RSpec.describe IssueLinks::CreateService do
it_behaves_like 'an incident management tracked event', :incident_management_incident_relate do
let(:current_user) { user }
end
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { issue.namespace }
+ let(:category) { described_class.to_s }
+ let(:action) { 'incident_management_incident_relate' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
end
end
end
diff --git a/spec/services/issue_links/destroy_service_spec.rb b/spec/services/issue_links/destroy_service_spec.rb
index a478a2c1448..ecb53b5cd31 100644
--- a/spec/services/issue_links/destroy_service_spec.rb
+++ b/spec/services/issue_links/destroy_service_spec.rb
@@ -25,6 +25,14 @@ RSpec.describe IssueLinks::DestroyService do
it_behaves_like 'an incident management tracked event', :incident_management_incident_unrelate do
let(:current_user) { user }
end
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { issue_b.namespace }
+ let(:category) { described_class.to_s }
+ let(:action) { 'incident_management_incident_unrelate' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
end
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index ef92b6984d5..e6ad755f911 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -99,6 +99,14 @@ RSpec.describe Issues::CloseService do
it_behaves_like 'an incident management tracked event', :incident_management_incident_closed
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { issue.namespace }
+ let(:category) { described_class.to_s }
+ let(:action) { 'incident_management_incident_closed' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
+
it 'creates a new escalation resolved escalation status', :aggregate_failures do
expect { service.execute(issue) }.to change { IncidentManagement::IssuableEscalationStatus.where(issue: issue).count }.by(1)
@@ -346,31 +354,26 @@ RSpec.describe Issues::CloseService do
context 'when there is an associated Alert Management Alert' do
context 'when alert can be resolved' do
- let!(:alert) { create(:alert_management_alert, issue: issue, project: project) }
-
it 'resolves an alert and sends a system note' do
- expect_any_instance_of(SystemNoteService) do |notes_service|
- expect(notes_service).to receive(:change_alert_status).with(
- alert,
- current_user,
- " by closing issue #{issue.to_reference(project)}"
- )
- end
+ alert = create(:alert_management_alert, issue: issue, project: project)
+
+ expect(SystemNoteService).to receive(:change_alert_status)
+ .with(alert, User.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
close_issue
- expect(alert.reload.resolved?).to eq(true)
+ expect(alert.reload).to be_resolved
end
end
context 'when alert cannot be resolved' do
- let!(:alert) { create(:alert_management_alert, :with_validation_errors, issue: issue, project: project) }
-
before do
allow(Gitlab::AppLogger).to receive(:warn).and_call_original
end
it 'writes a warning into the log' do
+ alert = create(:alert_management_alert, :with_validation_errors, issue: issue, project: project)
+
close_issue
expect(Gitlab::AppLogger).to have_received(:warn).with(
@@ -383,6 +386,23 @@ RSpec.describe Issues::CloseService do
end
end
+ context 'when there are several associated Alert Management Alerts' do
+ context 'when alerts can be resolved' do
+        it 'resolves the alerts and sends system notes', :aggregate_failures do
+ alerts = create_list(:alert_management_alert, 2, issue: issue, project: project)
+
+ alerts.each do |alert|
+ expect(SystemNoteService).to receive(:change_alert_status)
+ .with(alert, User.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}")
+ end
+
+ close_issue
+
+ expect(alerts.map(&:reload)).to all(be_resolved)
+ end
+ end
+ end
+
it 'deletes milestone issue counters cache' do
issue.update!(milestone: create(:milestone, project: project))
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 5ddf91e167e..7ab2046b6be 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -9,21 +9,22 @@ RSpec.describe Issues::CreateService do
let_it_be_with_reload(:project) { create(:project, :public, group: group) }
let_it_be(:user) { create(:user) }
+ let(:opts) { { title: 'title' } }
let(:spam_params) { double }
+ let(:service) { described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params) }
it_behaves_like 'rate limited service' do
let(:key) { :issues_create }
let(:key_scope) { %i[project current_user external_author] }
let(:application_limit_key) { :issues_create_limit }
let(:created_model) { Issue }
- let(:service) { described_class.new(project: project, current_user: user, params: { title: 'title' }, spam_params: double) }
end
describe '#execute' do
let_it_be(:assignee) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
- let(:result) { described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute }
+ let(:result) { service.execute }
let(:issue) { result[:issue] }
before do
@@ -54,6 +55,7 @@ RSpec.describe Issues::CreateService do
let(:opts) do
{ title: 'Awesome issue',
+ issue_type: :task,
description: 'please fix',
assignee_ids: [assignee.id],
label_ids: labels.map(&:id),
@@ -118,10 +120,26 @@ RSpec.describe Issues::CreateService do
expect(issue.labels).to match_array(labels)
expect(issue.milestone).to eq(milestone)
expect(issue.due_date).to eq(Date.tomorrow)
- expect(issue.work_item_type.base_type).to eq('issue')
+ expect(issue.work_item_type.base_type).to eq('task')
expect(issue.issue_customer_relations_contacts).to be_empty
end
+ context 'when the work item type is not allowed to create' do
+ before do
+ allow_next_instance_of(::Issues::BuildService) do |instance|
+ allow(instance).to receive(:create_issue_type_allowed?).twice.and_return(false)
+ end
+ end
+
+      it 'ignores the type and creates a default issue' do
+ expect(result).to be_success
+ expect(issue).to be_persisted
+ expect(issue).to be_a(::Issue)
+ expect(issue.work_item_type.base_type).to eq('issue')
+ expect(issue.issue_type).to eq('issue')
+ end
+ end
+
it 'calls NewIssueWorker with correct arguments' do
expect(NewIssueWorker).to receive(:perform_async).with(Integer, user.id, 'Issue')
@@ -405,7 +423,8 @@ RSpec.describe Issues::CreateService do
iid: { current: kind_of(Integer), previous: nil },
project_id: { current: project.id, previous: nil },
title: { current: opts[:title], previous: nil },
- updated_at: { current: kind_of(Time), previous: nil }
+ updated_at: { current: kind_of(Time), previous: nil },
+ time_estimate: { current: 0, previous: nil }
},
object_attributes: include(
opts.merge(
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 655c5085fdc..324b2aa9fe2 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -508,4 +508,25 @@ RSpec.describe Issues::MoveService do
end
end
end
+
+ context 'copying email participants' do
+ let!(:participant1) { create(:issue_email_participant, email: 'user1@example.com', issue: old_issue) }
+ let!(:participant2) { create(:issue_email_participant, email: 'user2@example.com', issue: old_issue) }
+ let!(:participant3) { create(:issue_email_participant, email: 'other_project_customer@example.com') }
+
+ include_context 'user can move issue'
+
+ subject(:new_issue) do
+ move_service.execute(old_issue, new_project)
+ end
+
+ it 'copies moved issue email participants' do
+ new_issue
+
+ expect(participant1.reload.issue).to eq(old_issue)
+ expect(participant2.reload.issue).to eq(old_issue)
+ expect(new_issue.issue_email_participants.pluck(:email))
+ .to match_array([participant1.email, participant2.email])
+ end
+ end
end
diff --git a/spec/services/issues/reopen_service_spec.rb b/spec/services/issues/reopen_service_spec.rb
index 6013826f9b1..529b3ff266b 100644
--- a/spec/services/issues/reopen_service_spec.rb
+++ b/spec/services/issues/reopen_service_spec.rb
@@ -74,6 +74,14 @@ RSpec.describe Issues::ReopenService do
it_behaves_like 'an incident management tracked event', :incident_management_incident_reopened
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { issue.namespace }
+ let(:category) { described_class.to_s }
+ let(:action) { 'incident_management_incident_reopened' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
+
it 'creates a timeline event' do
expect(IncidentManagement::TimelineEvents::CreateService)
.to receive(:reopen_incident)
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index f1ee62fd589..70fc6ffc38f 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe Issues::UpdateService, :mailer do
description: 'Also please fix',
assignee_ids: [user2.id],
state_event: 'close',
- label_ids: [label.id],
+ label_ids: [label&.id],
due_date: Date.tomorrow,
discussion_locked: true,
severity: 'low',
@@ -189,6 +189,27 @@ RSpec.describe Issues::UpdateService, :mailer do
subject { update_issue(confidential: true) }
it_behaves_like 'an incident management tracked event', :incident_management_incident_change_confidential
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { issue.namespace }
+ let(:category) { described_class.to_s }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ let(:action) { 'incident_management_incident_change_confidential' }
+ let(:opts) do
+ {
+ title: 'New title',
+ description: 'Also please fix',
+ assignee_ids: [user2.id],
+ state_event: 'close',
+ due_date: Date.tomorrow,
+ discussion_locked: true,
+ severity: 'low',
+ milestone_id: milestone.id,
+ add_contacts: [contact.email]
+ }
+ end
+ end
end
end
@@ -673,6 +694,14 @@ RSpec.describe Issues::UpdateService, :mailer do
let(:current_user) { user }
it_behaves_like 'an incident management tracked event', :incident_management_incident_assigned
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { issue.namespace }
+ let(:category) { described_class.to_s }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ let(:action) { "incident_management_incident_assigned" }
+ end
end
end
diff --git a/spec/services/issues/zoom_link_service_spec.rb b/spec/services/issues/zoom_link_service_spec.rb
index d662d9fa978..ad1f91ab5e6 100644
--- a/spec/services/issues/zoom_link_service_spec.rb
+++ b/spec/services/issues/zoom_link_service_spec.rb
@@ -95,6 +95,14 @@ RSpec.describe Issues::ZoomLinkService do
let(:current_user) { user }
it_behaves_like 'an incident management tracked event', :incident_management_incident_zoom_meeting
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { issue.namespace }
+ let(:category) { described_class.to_s }
+ let(:action) { 'incident_management_incident_zoom_meeting' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
end
context 'with insufficient issue update permissions' do
diff --git a/spec/services/jira_connect/create_asymmetric_jwt_service_spec.rb b/spec/services/jira_connect/create_asymmetric_jwt_service_spec.rb
index f5359e5b643..bb96e327307 100644
--- a/spec/services/jira_connect/create_asymmetric_jwt_service_spec.rb
+++ b/spec/services/jira_connect/create_asymmetric_jwt_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe JiraConnect::CreateAsymmetricJwtService do
+RSpec.describe JiraConnect::CreateAsymmetricJwtService, feature_category: :integrations do
describe '#execute' do
let_it_be(:jira_connect_installation) { create(:jira_connect_installation) }
@@ -19,27 +19,40 @@ RSpec.describe JiraConnect::CreateAsymmetricJwtService do
let(:public_key_id) { Atlassian::Jwt.decode(jwt_token, nil, false, algorithm: 'RS256').last['kid'] }
let(:public_key_cdn) { 'https://gitlab.com/-/jira_connect/public_keys/' }
+ let(:event_url) { 'https://gitlab.test/-/jira_connect/events/installed' }
let(:jwt_verification_claims) do
{
aud: 'https://gitlab.test/-/jira_connect',
iss: jira_connect_installation.client_key,
- qsh: Atlassian::Jwt.create_query_string_hash('https://gitlab.test/-/jira_connect/events/installed', 'POST', 'https://gitlab.test/-/jira_connect')
+ qsh: Atlassian::Jwt.create_query_string_hash(event_url, 'POST', 'https://gitlab.test/-/jira_connect')
}
end
subject(:jwt_token) { service.execute }
+ shared_examples 'produces a valid JWT' do
+ it 'produces a valid JWT' do
+ public_key = OpenSSL::PKey.read(JiraConnect::PublicKey.find(public_key_id).key)
+ options = jwt_verification_claims.except(:qsh).merge({ verify_aud: true, verify_iss: true,
+ algorithm: 'RS256' })
+
+ decoded_token = Atlassian::Jwt.decode(jwt_token, public_key, true, options).first
+
+ expect(decoded_token).to eq(jwt_verification_claims.stringify_keys)
+ end
+ end
+
it 'stores the public key' do
expect { JiraConnect::PublicKey.find(public_key_id) }.not_to raise_error
end
- it 'is produces a valid JWT' do
- public_key = OpenSSL::PKey.read(JiraConnect::PublicKey.find(public_key_id).key)
- options = jwt_verification_claims.except(:qsh).merge({ verify_aud: true, verify_iss: true, algorithm: 'RS256' })
+ it_behaves_like 'produces a valid JWT'
- decoded_token = Atlassian::Jwt.decode(jwt_token, public_key, true, options).first
+ context 'with uninstalled event option' do
+ let(:service) { described_class.new(jira_connect_installation, event: :uninstalled) }
+ let(:event_url) { 'https://gitlab.test/-/jira_connect/events/uninstalled' }
- expect(decoded_token).to eq(jwt_verification_claims.stringify_keys)
+ it_behaves_like 'produces a valid JWT'
end
end
end
diff --git a/spec/services/jira_connect_installations/proxy_lifecycle_event_service_spec.rb b/spec/services/jira_connect_installations/proxy_lifecycle_event_service_spec.rb
new file mode 100644
index 00000000000..c621388a734
--- /dev/null
+++ b/spec/services/jira_connect_installations/proxy_lifecycle_event_service_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnectInstallations::ProxyLifecycleEventService, feature_category: :integrations do
+ describe '.execute' do
+ let(:installation) { create(:jira_connect_installation) }
+
+ it 'creates an instance and calls execute' do
+ expect_next_instance_of(described_class, installation, 'installed', 'https://test.gitlab.com') do |update_service|
+ expect(update_service).to receive(:execute)
+ end
+
+ described_class.execute(installation, 'installed', 'https://test.gitlab.com')
+ end
+ end
+
+ describe '.new' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: nil) }
+
+ let(:event) { :installed }
+
+ subject(:service) { described_class.new(installation, event, 'https://test.gitlab.com') }
+
+ it 'creates an internal duplicate of the installation and sets the instance_url' do
+ expect(service.instance_variable_get(:@installation).instance_url).to eq('https://test.gitlab.com')
+ end
+
+ context 'with unknown event' do
+ let(:event) { 'test' }
+
+ it 'raises an error' do
+ expect { service }.to raise_error(ArgumentError, 'Unknown event \'test\'')
+ end
+ end
+ end
+
+ describe '#execute' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'https://old_instance_url.example.com') }
+
+ let(:service) { described_class.new(installation, event_type, 'https://gitlab.example.com') }
+ let(:service_instance_installation) { service.instance_variable_get(:@installation) }
+
+ before do
+ allow_next_instance_of(JiraConnect::CreateAsymmetricJwtService) do |create_asymmetric_jwt_service|
+ allow(create_asymmetric_jwt_service).to receive(:execute).and_return('123456')
+ end
+
+ stub_request(:post, hook_url)
+ end
+
+ subject(:execute_service) { service.execute }
+
+ shared_examples 'sends the event hook' do
+ it 'returns a ServiceResponse' do
+ expect(execute_service).to be_kind_of(ServiceResponse)
+ expect(execute_service[:status]).to eq(:success)
+ end
+
+ it 'sends an installed event to the instance' do
+ execute_service
+
+ expect(WebMock).to have_requested(:post, hook_url).with(body: expected_request_body)
+ end
+
+ it 'creates the JWT token with the event and installation' do
+ expect_next_instance_of(
+ JiraConnect::CreateAsymmetricJwtService,
+ service_instance_installation,
+ event: event_type
+ ) do |create_asymmetric_jwt_service|
+ expect(create_asymmetric_jwt_service).to receive(:execute).and_return('123456')
+ end
+
+ expect(execute_service[:status]).to eq(:success)
+ end
+
+ context 'and the instance responds with an error' do
+ before do
+ stub_request(:post, hook_url).to_return(
+ status: 422,
+ body: 'Error message',
+ headers: {}
+ )
+ end
+
+ it 'returns an error ServiceResponse', :aggregate_failures do
+ expect(execute_service).to be_kind_of(ServiceResponse)
+ expect(execute_service[:status]).to eq(:error)
+ expect(execute_service[:message]).to eq({ type: :response_error, code: 422 })
+ end
+
+ it 'logs the error response' do
+ expect(Gitlab::IntegrationsLogger).to receive(:info).with(
+ integration: 'JiraConnect',
+ message: 'Proxy lifecycle event received error response',
+ event_type: event_type,
+ status_code: 422,
+ body: 'Error message'
+ )
+
+ execute_service
+ end
+ end
+
+ context 'and the request raises an error' do
+ before do
+ allow(Gitlab::HTTP).to receive(:post).and_raise(Errno::ECONNREFUSED, 'error message')
+ end
+
+ it 'returns an error ServiceResponse', :aggregate_failures do
+ expect(execute_service).to be_kind_of(ServiceResponse)
+ expect(execute_service[:status]).to eq(:error)
+ expect(execute_service[:message]).to eq(
+ {
+ type: :network_error,
+ message: 'Connection refused - error message'
+ }
+ )
+ end
+ end
+ end
+
+ context 'when installed event' do
+ let(:event_type) { :installed }
+ let(:hook_url) { 'https://gitlab.example.com/-/jira_connect/events/installed' }
+ let(:expected_request_body) do
+ {
+ clientKey: installation.client_key,
+ sharedSecret: installation.shared_secret,
+ baseUrl: installation.base_url,
+ jwt: '123456',
+ eventType: 'installed'
+ }
+ end
+
+ it_behaves_like 'sends the event hook'
+ end
+
+ context 'when uninstalled event' do
+ let(:event_type) { :uninstalled }
+ let(:hook_url) { 'https://gitlab.example.com/-/jira_connect/events/uninstalled' }
+ let(:expected_request_body) do
+ {
+ clientKey: installation.client_key,
+ jwt: '123456',
+ eventType: 'uninstalled'
+ }
+ end
+
+ it_behaves_like 'sends the event hook'
+ end
+ end
+end
diff --git a/spec/services/jira_connect_installations/update_service_spec.rb b/spec/services/jira_connect_installations/update_service_spec.rb
new file mode 100644
index 00000000000..ec5bb5d6d6a
--- /dev/null
+++ b/spec/services/jira_connect_installations/update_service_spec.rb
@@ -0,0 +1,186 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnectInstallations::UpdateService, feature_category: :integrations do
+ describe '.execute' do
+ it 'creates an instance and calls execute' do
+ expect_next_instance_of(described_class, 'param1', 'param2') do |update_service|
+ expect(update_service).to receive(:execute)
+ end
+
+ described_class.execute('param1', 'param2')
+ end
+ end
+
+ describe '#execute' do
+ let_it_be_with_reload(:installation) { create(:jira_connect_installation) }
+ let(:update_params) { { client_key: 'new_client_key' } }
+
+ subject(:execute_service) { described_class.new(installation, update_params).execute }
+
+ it 'returns a ServiceResponse' do
+ expect(execute_service).to be_kind_of(ServiceResponse)
+ expect(execute_service[:status]).to eq(:success)
+ end
+
+ it 'updates the installation' do
+ expect { execute_service }.to change { installation.client_key }.to('new_client_key')
+ end
+
+ it 'returns a successful result' do
+ expect(execute_service.success?).to eq(true)
+ end
+
+ context 'and model validation fails' do
+ let(:update_params) { { instance_url: 'invalid' } }
+
+ it 'returns an error result' do
+ expect(execute_service.error?).to eq(true)
+ expect(execute_service.message).to eq(installation.errors)
+ end
+ end
+
+ context 'and the installation has an instance_url' do
+ let_it_be_with_reload(:installation) { create(:jira_connect_installation, instance_url: 'https://other_gitlab.example.com') }
+
+ it 'sends an installed event to the instance', :aggregate_failures do
+ expect_next_instance_of(JiraConnectInstallations::ProxyLifecycleEventService, installation, :installed,
+'https://other_gitlab.example.com') do |proxy_lifecycle_events_service|
+ expect(proxy_lifecycle_events_service).to receive(:execute).and_return(ServiceResponse.new(status: :success))
+ end
+
+ expect(JiraConnect::SendUninstalledHookWorker).not_to receive(:perform_async)
+
+ expect { execute_service }.not_to change { installation.instance_url }
+ end
+
+ context 'and instance_url gets updated' do
+ let(:update_params) { { instance_url: 'https://gitlab.example.com' } }
+
+ before do
+ stub_request(:post, 'https://other_gitlab.example.com/-/jira_connect/events/uninstalled')
+ end
+
+ it 'starts an async worker to send an uninstalled event to the previous instance' do
+ expect(JiraConnect::SendUninstalledHookWorker).to receive(:perform_async).with(installation.id, 'https://other_gitlab.example.com')
+
+ expect(JiraConnectInstallations::ProxyLifecycleEventService)
+ .to receive(:execute).with(installation, :installed, 'https://gitlab.example.com')
+ .and_return(ServiceResponse.new(status: :success))
+
+ execute_service
+
+ expect(installation.instance_url).to eq(update_params[:instance_url])
+ end
+
+ context 'and the new instance_url is empty' do
+ let(:update_params) { { instance_url: nil } }
+
+ it 'starts an async worker to send an uninstalled event to the previous instance' do
+ expect(JiraConnect::SendUninstalledHookWorker).to receive(:perform_async).with(installation.id, 'https://other_gitlab.example.com')
+
+ execute_service
+
+ expect(installation.instance_url).to eq(nil)
+ end
+
+ it 'does not send an installed event' do
+ expect(JiraConnectInstallations::ProxyLifecycleEventService).not_to receive(:new)
+
+ execute_service
+ end
+ end
+ end
+ end
+
+ context 'and instance_url is updated' do
+ let(:update_params) { { instance_url: 'https://gitlab.example.com' } }
+
+ it 'sends an installed event to the instance and updates instance_url' do
+ expect_next_instance_of(JiraConnectInstallations::ProxyLifecycleEventService, installation, :installed,
+'https://gitlab.example.com') do |proxy_lifecycle_events_service|
+ expect(proxy_lifecycle_events_service).to receive(:execute).and_return(ServiceResponse.new(status: :success))
+ end
+
+ expect(JiraConnect::SendUninstalledHookWorker).not_to receive(:perform_async)
+
+ execute_service
+
+ expect(installation.instance_url).to eq(update_params[:instance_url])
+ end
+
+ context 'and the instance installation cannot be created' do
+ before do
+ allow_next_instance_of(
+ JiraConnectInstallations::ProxyLifecycleEventService,
+ installation,
+ :installed,
+ 'https://gitlab.example.com'
+ ) do |proxy_lifecycle_events_service|
+ allow(proxy_lifecycle_events_service).to receive(:execute).and_return(
+ ServiceResponse.error(
+ message: {
+ type: :response_error,
+ code: '422'
+ }
+ )
+ )
+ end
+ end
+
+ it 'does not change instance_url' do
+ expect { execute_service }.not_to change { installation.instance_url }
+ end
+
+ it 'returns an error message' do
+ expect(execute_service[:status]).to eq(:error)
+ expect(execute_service[:message]).to eq(
+ {
+ instance_url: ["Could not be installed on the instance. Error response code 422"]
+ }
+ )
+ end
+
+ context 'and the installation had a previous instance_url' do
+ let(:installation) { build(:jira_connect_installation, instance_url: 'https://other_gitlab.example.com') }
+
+ it 'does not send the uninstalled hook to the previous instance_url' do
+ expect(JiraConnect::SendUninstalledHookWorker).not_to receive(:perform_async)
+
+ execute_service
+ end
+ end
+
+ context 'when failure because of a network error' do
+ before do
+ allow_next_instance_of(
+ JiraConnectInstallations::ProxyLifecycleEventService,
+ installation,
+ :installed,
+ 'https://gitlab.example.com'
+ ) do |proxy_lifecycle_events_service|
+ allow(proxy_lifecycle_events_service).to receive(:execute).and_return(
+ ServiceResponse.error(
+ message: {
+ type: :network_error,
+ message: 'Connection refused - error message'
+ }
+ )
+ )
+ end
+ end
+
+ it 'returns an error message' do
+ expect(execute_service[:status]).to eq(:error)
+ expect(execute_service[:message]).to eq(
+ {
+ instance_url: ["Could not be installed on the instance. Network error"]
+ }
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/markup/rendering_service_spec.rb b/spec/services/markup/rendering_service_spec.rb
index d54bc71f0a4..99ab87f2072 100644
--- a/spec/services/markup/rendering_service_spec.rb
+++ b/spec/services/markup/rendering_service_spec.rb
@@ -75,25 +75,6 @@ RSpec.describe Markup::RenderingService do
is_expected.to eq(expected_html)
end
-
- context 'when renderer returns an error' do
- before do
- allow(Banzai).to receive(:render).and_raise(StandardError, "An error")
- end
-
- it 'returns html (rendered by ActionView:TextHelper)' do
- is_expected.to eq('<p>Noël</p>')
- end
-
- it 'logs the error' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- instance_of(StandardError),
- project_id: context[:project].id, file_name: 'foo.md'
- )
-
- subject
- end
- end
end
context 'when file is asciidoc file' do
@@ -130,37 +111,5 @@ RSpec.describe Markup::RenderingService do
is_expected.to eq(expected_html)
end
end
-
- context 'when rendering takes too long' do
- let(:file_name) { 'foo.bar' }
-
- before do
- stub_const("Markup::RenderingService::RENDER_TIMEOUT", 0.1)
- allow(Gitlab::OtherMarkup).to receive(:render) do
- sleep(0.2)
- text
- end
- end
-
- it 'times out' do
- expect(Gitlab::RenderTimeout).to receive(:timeout).and_call_original
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- instance_of(Timeout::Error),
- project_id: context[:project].id, file_name: file_name
- )
-
- is_expected.to eq("<p>#{text}</p>")
- end
-
- context 'when markup_rendering_timeout is disabled' do
- it 'waits until the execution completes' do
- stub_feature_flags(markup_rendering_timeout: false)
-
- expect(Gitlab::RenderTimeout).not_to receive(:timeout)
-
- is_expected.to eq(text)
- end
- end
- end
end
end
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
index 2155b4ffad1..f477b2166d9 100644
--- a/spec/services/merge_requests/after_create_service_spec.rb
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe MergeRequests::AfterCreateService do
it 'calls the merge request activity counter' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.to receive(:track_create_mr_action)
- .with(user: merge_request.author)
+ .with(user: merge_request.author, merge_request: merge_request)
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.to receive(:track_mr_including_ci_config)
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index da6492aca95..1d6427900b9 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -91,6 +91,10 @@ RSpec.describe MergeRequests::ApprovalService do
it_behaves_like 'triggers GraphQL subscription mergeRequestReviewersUpdated' do
let(:action) { service.execute(merge_request) }
end
+
+ it_behaves_like 'triggers GraphQL subscription mergeRequestApprovalStateUpdated' do
+ let(:action) { service.execute(merge_request) }
+ end
end
context 'user cannot update the merge request' do
@@ -109,6 +113,10 @@ RSpec.describe MergeRequests::ApprovalService do
it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
let(:action) { service.execute(merge_request) }
end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestApprovalStateUpdated' do
+ let(:action) { service.execute(merge_request) }
+ end
end
end
end
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index 4f27ff30da7..79c779678a4 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -25,7 +25,9 @@ RSpec.describe MergeRequests::BuildService do
safe_message: 'Initial commit',
gitaly_commit?: false,
id: 'f00ba6',
- parent_ids: ['f00ba5'])
+ parent_ids: ['f00ba5'],
+ author_email: 'tom@example.com',
+ author_name: 'Tom Example')
end
let(:commit_2) do
@@ -34,7 +36,9 @@ RSpec.describe MergeRequests::BuildService do
safe_message: "Closes #1234 Second commit\n\nCreate the app",
gitaly_commit?: false,
id: 'f00ba7',
- parent_ids: ['f00ba6'])
+ parent_ids: ['f00ba6'],
+ author_email: 'alice@example.com',
+ author_name: 'Alice Example')
end
let(:commit_3) do
@@ -43,7 +47,9 @@ RSpec.describe MergeRequests::BuildService do
safe_message: 'This is a bad commit message!',
gitaly_commit?: false,
id: 'f00ba8',
- parent_ids: ['f00ba7'])
+ parent_ids: ['f00ba7'],
+ author_email: 'jo@example.com',
+ author_name: 'Jo Example')
end
let(:commits) { nil }
@@ -742,4 +748,91 @@ RSpec.describe MergeRequests::BuildService do
end
end
end
+
+ describe '#replace_variables_in_description' do
+ context 'when the merge request description is blank' do
+ let(:description) { nil }
+
+ it 'does not update the description' do
+ expect(merge_request.description).to eq(nil)
+ end
+ end
+
+ context 'when the merge request description contains template variables' do
+ let(:description) { <<~MSG.rstrip }
+ source_branch:%{source_branch}
+ target_branch:%{target_branch}
+ title:%{title}
+ issues:%{issues}
+ description:%{description}
+ first_commit:%{first_commit}
+ first_multiline_commit:%{first_multiline_commit}
+ url:%{url}
+ approved_by:%{approved_by}
+ merged_by:%{merged_by}
+ co_authored_by:%{co_authored_by}
+ all_commits:%{all_commits}
+ MSG
+
+ context 'when there are multiple commits in the diff' do
+ let(:commits) { Commit.decorate([commit_1, commit_2, commit_3], project) }
+
+ before do
+ stub_compare
+ end
+
+ it 'replaces the variables in the description' do
+ expect(merge_request.description).to eq <<~MSG.rstrip
+ source_branch:feature-branch
+ target_branch:master
+ title:
+ issues:
+ description:
+ first_commit:Initial commit
+ first_multiline_commit:Closes #1234 Second commit
+
+ Create the app
+ url:
+ approved_by:
+ merged_by:
+ co_authored_by:Co-authored-by: Jo Example <jo@example.com>
+ Co-authored-by: Alice Example <alice@example.com>
+ Co-authored-by: Tom Example <tom@example.com>
+ all_commits:* This is a bad commit message!
+
+ * Closes #1234 Second commit
+
+ Create the app
+
+ * Initial commit
+ MSG
+ end
+ end
+
+ context 'when there are no commits in the diff' do
+ let(:commits) { [] }
+
+ before do
+ stub_compare
+ end
+
+ it 'replaces the variables in the description' do
+ expect(merge_request.description).to eq <<~MSG.rstrip
+ source_branch:feature-branch
+ target_branch:master
+ title:
+ issues:
+ description:
+ first_commit:
+ first_multiline_commit:
+ url:
+ approved_by:
+ merged_by:
+ co_authored_by:
+ all_commits:
+ MSG
+ end
+ end
+ end
+ end
end
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index dc96b5c0e5e..7984fff3031 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -223,5 +223,26 @@ RSpec.describe MergeRequests::CreatePipelineService, :clean_gitlab_redis_cache d
expect(response.payload).to be_nil
end
end
+
+ context 'when merge request pipeline creates a dynamic environment' do
+ let(:config) do
+ {
+ review_app: {
+ script: 'echo',
+ only: ['merge_requests'],
+ environment: { name: "review/$CI_COMMIT_REF_NAME" }
+ }
+ }
+ end
+
+ it 'associates merge request with the environment' do
+ expect { response }.to change { Ci::Pipeline.count }.by(1)
+
+ environment = Environment.find_by_name('review/feature')
+ expect(response).to be_success
+ expect(environment).to be_present
+ expect(environment.merge_request).to eq(merge_request)
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/remove_approval_service_spec.rb b/spec/services/merge_requests/remove_approval_service_spec.rb
index 7b38f0d1c45..fd8240935e8 100644
--- a/spec/services/merge_requests/remove_approval_service_spec.rb
+++ b/spec/services/merge_requests/remove_approval_service_spec.rb
@@ -53,6 +53,10 @@ RSpec.describe MergeRequests::RemoveApprovalService do
it_behaves_like 'triggers GraphQL subscription mergeRequestReviewersUpdated' do
let(:action) { execute! }
end
+
+ it_behaves_like 'triggers GraphQL subscription mergeRequestApprovalStateUpdated' do
+ let(:action) { execute! }
+ end
end
context 'with a user who has not approved' do
@@ -77,6 +81,10 @@ RSpec.describe MergeRequests::RemoveApprovalService do
it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
let(:action) { execute! }
end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestApprovalStateUpdated' do
+ let(:action) { execute! }
+ end
end
end
end
diff --git a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
index 8002b2ebc86..ff3b295d185 100644
--- a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
@@ -31,13 +31,17 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
end
describe '#create!' do
- subject { repository.create!(experiment, 1234) }
+ subject { repository.create!(experiment, 1234, [{ key: 'hello', value: 'world' }]) }
it 'creates the candidate' do
expect(subject.start_time).to eq(1234)
expect(subject.iid).not_to be_nil
expect(subject.end_time).to be_nil
end
+
+ it 'creates the candidate with a tag' do
+ expect(subject.metadata.length).to eq(1)
+ end
end
describe '#update' do
@@ -118,6 +122,32 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
end
end
+ describe '#add_tag!' do
+ let(:props) { { name: 'abc', value: 'def' } }
+
+ subject { repository.add_tag!(candidate, props[:name], props[:value]) }
+
+ it 'adds a new tag' do
+ expect { subject }.to change { candidate.reload.metadata.size }.by(1)
+ end
+
+ context 'when name is missing' do
+ let(:props) { { value: 1234 } }
+
+ it 'raises RecordInvalid' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
+ context 'when tag was already added' do
+ it 'raises RecordInvalid' do
+ repository.add_tag!(candidate, 'new', props[:value])
+
+ expect { repository.add_tag!(candidate, 'new', props[:value]) }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+
describe "#add_params" do
let(:params) do
[{ key: 'model_class', value: 'LogisticRegression' }, { 'key': 'pythonEnv', value: '3.10' }]
@@ -196,4 +226,50 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
end
end
end
+
+ describe "#add_tags" do
+ let(:tags) do
+ [{ key: 'gitlab.tag1', value: 'hello' }, { 'key': 'gitlab.tag2', value: 'world' }]
+ end
+
+ subject { repository.add_tags(candidate, tags) }
+
+ it 'adds the tags' do
+ expect { subject }.to change { candidate.reload.metadata.size }.by(2)
+ end
+
+ context 'when a tag is missing its key' do
+ let(:tags) { [{ value: 'hello' }] }
+
+ it 'raises an error and does not add the tag' do
+ expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
+ end
+ end
+
+ context 'when a tag is missing its value' do
+ let(:tags) { [{ key: 'gitlab.tag1' }] }
+
+ it 'raises an error and does not add the tag' do
+ expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
+ end
+ end
+
+ context 'when a tag is repeated' do
+ let(:params) do
+ [
+ { 'key': 'gitlab.tag1', value: 'hello' },
+ { 'key': 'gitlab.tag2', value: 'world' },
+ { 'key': 'gitlab.tag1', value: 'gitlab' }
+ ]
+ end
+
+ before do
+ repository.add_tag!(candidate, 'gitlab.tag2', '0')
+ end
+
+ it 'does not raise an error and adds only the first of each kind' do
+ expect { subject }.to change { candidate.reload.metadata.size }.by(1)
+ end
+ end
+ end
end
diff --git a/spec/services/ml/experiment_tracking/experiment_repository_spec.rb b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
index 80e1fa025d1..c3c716b831a 100644
--- a/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
@@ -59,10 +59,11 @@ RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository do
describe '#create!' do
let(:name) { 'hello' }
+ let(:tags) { nil }
- subject { repository.create!(name) }
+ subject { repository.create!(name, tags) }
- it 'creates the candidate' do
+ it 'creates the experiment' do
expect { subject }.to change { repository.all.size }.by(1)
end
@@ -74,6 +75,14 @@ RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository do
end
end
+ context 'when tags are present' do
+ let(:tags) { [{ key: 'hello', value: 'world' }] }
+
+ it 'creates the experiment with a tag' do
+ expect(subject.metadata.length).to eq(1)
+ end
+ end
+
context 'when name is missing' do
let(:name) { nil }
@@ -82,4 +91,30 @@ RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository do
end
end
end
+
+ describe '#add_tag!' do
+ let(:props) { { name: 'abc', value: 'def' } }
+
+ subject { repository.add_tag!(experiment, props[:name], props[:value]) }
+
+ it 'adds a new tag' do
+ expect { subject }.to change { experiment.reload.metadata.size }.by(1)
+ end
+
+ context 'when name is missing' do
+ let(:props) { { value: 1234 } }
+
+ it 'raises RecordInvalid' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
+ context 'when tag was already added' do
+ it 'raises RecordInvalid' do
+ repository.add_tag!(experiment, 'new', props[:value])
+
+ expect { repository.add_tag!(experiment, 'new', props[:value]) }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 4922e72b7a4..2f1c5a5b0f3 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -102,6 +102,14 @@ RSpec.describe Notes::CreateService do
it_behaves_like 'an incident management tracked event', :incident_management_incident_comment do
let(:current_user) { user }
end
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { issue.namespace }
+ let(:category) { described_class.to_s }
+ let(:action) { 'incident_management_incident_comment' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ end
end
describe 'event tracking', :snowplow do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 7857bd2263f..1ca14cd430b 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -361,8 +361,14 @@ RSpec.describe NotificationService, :mailer do
subject(:notification_service) { notification.access_token_revoked(user, pat.name) }
- it 'sends email to the token owner' do
- expect { notification_service }.to have_enqueued_email(user, pat.name, mail: "access_token_revoked_email")
+ it 'sends email to the token owner without a source' do
+ expect { notification_service }.to have_enqueued_email(user, pat.name, nil, mail: "access_token_revoked_email")
+ end
+
+ it 'sends email to the token owner with a source' do
+ expect do
+ notification.access_token_revoked(user, pat.name, 'secret_detection')
+ end.to have_enqueued_email(user, pat.name, 'secret_detection', mail: "access_token_revoked_email")
end
context 'when user is not allowed to receive notifications' do
diff --git a/spec/services/packages/debian/process_changes_service_spec.rb b/spec/services/packages/debian/process_changes_service_spec.rb
index a45dd68cd6e..27b49a13d52 100644
--- a/spec/services/packages/debian/process_changes_service_spec.rb
+++ b/spec/services/packages/debian/process_changes_service_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Packages::Debian::ProcessChangesService do
end
context 'marked as pending_destruction' do
- it 'creates a package' do
+ it 'does not re-use the existing package' do
existing_package.pending_destruction!
expect { subject.execute }
@@ -73,7 +73,7 @@ RSpec.describe Packages::Debian::ProcessChangesService do
end
end
- it 'remove the package file', :aggregate_failures do
+ it 're-raises the error', :aggregate_failures do
expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
expect { subject.execute }
.to not_change { Packages::Package.count }
diff --git a/spec/services/packages/debian/process_package_file_service_spec.rb b/spec/services/packages/debian/process_package_file_service_spec.rb
new file mode 100644
index 00000000000..571861f42cf
--- /dev/null
+++ b/spec/services/packages/debian/process_package_file_service_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ProcessPackageFileService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file, codename: 'unstable') }
+
+ let!(:incoming) { create(:debian_incoming, project: distribution.project) }
+
+ let(:distribution_name) { distribution.codename }
+ let(:debian_file_metadatum) { package_file.debian_file_metadatum }
+
+ subject { described_class.new(package_file, user, distribution_name, component_name) }
+
+ RSpec.shared_context 'with Debian package file' do |file_name|
+ let(:package_file) { incoming.package_files.with_file_name(file_name).first }
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:case_name, :expected_file_type, :file_name, :component_name) do
+ 'with a deb' | 'deb' | 'libsample0_1.2.3~alpha2_amd64.deb' | 'main'
+ 'with an udeb' | 'udeb' | 'sample-udeb_1.2.3~alpha2_amd64.udeb' | 'contrib'
+ end
+
+ with_them do
+ include_context 'with Debian package file', params[:file_name] do
+ it 'creates package and updates package file', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker)
+ .to receive(:perform_async).with(:project, distribution.id)
+ expect { subject.execute }
+ .to change(Packages::Package, :count).from(1).to(2)
+ .and not_change(Packages::PackageFile, :count)
+ .and change(incoming.package_files, :count).from(7).to(6)
+ .and change(debian_file_metadatum, :file_type).from('unknown').to(expected_file_type)
+ .and change(debian_file_metadatum, :component).from(nil).to(component_name)
+
+ created_package = Packages::Package.last
+ expect(created_package.name).to eq 'sample'
+ expect(created_package.version).to eq '1.2.3~alpha2'
+ expect(created_package.creator).to eq user
+ end
+
+ context 'with existing package' do
+ let_it_be_with_reload(:existing_package) do
+ create(:debian_package, name: 'sample', version: '1.2.3~alpha2', project: distribution.project)
+ end
+
+ before do
+ existing_package.update!(debian_distribution: distribution)
+ end
+
+ it 'does not create a package and assigns the package_file to the existing package' do
+ expect(::Packages::Debian::GenerateDistributionWorker)
+ .to receive(:perform_async).with(:project, distribution.id)
+ expect { subject.execute }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and change(incoming.package_files, :count).from(7).to(6)
+ .and change(package_file, :package).from(incoming).to(existing_package)
+ .and change(debian_file_metadatum, :file_type).from('unknown').to(expected_file_type.to_s)
+ .and change(debian_file_metadatum, :component).from(nil).to(component_name)
+ end
+
+ context 'when marked as pending_destruction' do
+ it 'does not re-use the existing package' do
+ existing_package.pending_destruction!
+
+ expect { subject.execute }
+ .to change(Packages::Package, :count).by(1)
+ .and not_change(Packages::PackageFile, :count)
+ end
+ end
+ end
+ end
+ end
+
+ context 'without a distribution' do
+ let(:package_file) { incoming.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
+ let(:component_name) { 'main' }
+
+ before do
+ distribution.destroy!
+ end
+
+ it 'raises ActiveRecord::RecordNotFound', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
+ expect { subject.execute }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and not_change(incoming.package_files, :count)
+ .and raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with package file without Debian metadata' do
+ let!(:package_file) { create(:debian_package_file, without_loaded_metadatum: true) }
+ let(:component_name) { 'main' }
+
+ it 'raises ArgumentError', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
+ expect { subject.execute }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and not_change(incoming.package_files, :count)
+ .and raise_error(ArgumentError, 'package file without Debian metadata')
+ end
+ end
+
+ context 'with already processed package file' do
+ let_it_be(:package_file) { create(:debian_package_file) }
+
+ let(:component_name) { 'main' }
+
+ it 'raises ArgumentError', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
+ expect { subject.execute }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and not_change(incoming.package_files, :count)
+ .and raise_error(ArgumentError, 'already processed package file')
+ end
+ end
+
+ context 'with invalid package file type' do
+ let(:package_file) { incoming.package_files.with_file_name('sample_1.2.3~alpha2.tar.xz').first }
+ let(:component_name) { 'main' }
+
+ it 'raises ArgumentError', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
+ expect { subject.execute }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and not_change(incoming.package_files, :count)
+ .and raise_error(ArgumentError, 'invalid package file type: source')
+ end
+ end
+
+ context 'when creating package fails' do
+ let(:package_file) { incoming.package_files.with_file_name('libsample0_1.2.3~alpha2_amd64.deb').first }
+ let(:component_name) { 'main' }
+
+ before do
+ allow_next_instance_of(::Packages::Debian::FindOrCreatePackageService) do |find_or_create_package_service|
+ allow(find_or_create_package_service)
+ .to receive(:execute).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+ end
+ end
+
+ it 're-raises the error', :aggregate_failures do
+ expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async)
+ expect { subject.execute }
+ .to not_change(Packages::Package, :count)
+ .and not_change(Packages::PackageFile, :count)
+ .and not_change(incoming.package_files, :count)
+ .and raise_error(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+ end
+ end
+ end
+end
diff --git a/spec/services/pages_domains/retry_acme_order_service_spec.rb b/spec/services/pages_domains/retry_acme_order_service_spec.rb
index 601de24e766..3152e05f2f1 100644
--- a/spec/services/pages_domains/retry_acme_order_service_spec.rb
+++ b/spec/services/pages_domains/retry_acme_order_service_spec.rb
@@ -2,21 +2,37 @@
require 'spec_helper'
-RSpec.describe PagesDomains::RetryAcmeOrderService do
- let(:domain) { create(:pages_domain, auto_ssl_enabled: true, auto_ssl_failed: true) }
+RSpec.describe PagesDomains::RetryAcmeOrderService, feature_category: :pages do
+ let_it_be(:project) { create(:project) }
+
+ let(:domain) { create(:pages_domain, project: project, auto_ssl_enabled: true, auto_ssl_failed: true) }
let(:service) { described_class.new(domain) }
it 'clears auto_ssl_failed' do
- expect do
- service.execute
- end.to change { domain.auto_ssl_failed }.from(true).to(false)
+ expect { service.execute }
+ .to change { domain.auto_ssl_failed }
+ .from(true).to(false)
+ .and publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: domain.domain
+ )
end
- it 'schedules renewal worker' do
+ it 'schedules renewal worker and publish PagesDomainUpdatedEvent event' do
expect(PagesDomainSslRenewalWorker).to receive(:perform_async).with(domain.id).and_return(nil).once
- service.execute
+ expect { service.execute }
+ .to publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: domain.domain
+ )
end
it "doesn't schedule renewal worker if Let's Encrypt integration is not enabled" do
@@ -24,7 +40,8 @@ RSpec.describe PagesDomains::RetryAcmeOrderService do
expect(PagesDomainSslRenewalWorker).not_to receive(:new)
- service.execute
+ expect { service.execute }
+ .to not_publish_event(PagesDomains::PagesDomainUpdatedEvent)
end
it "doesn't schedule renewal worker if auto ssl has not failed yet" do
@@ -32,6 +49,7 @@ RSpec.describe PagesDomains::RetryAcmeOrderService do
expect(PagesDomainSslRenewalWorker).not_to receive(:new)
- service.execute
+ expect { service.execute }
+ .to not_publish_event(PagesDomains::PagesDomainUpdatedEvent)
end
end
diff --git a/spec/services/personal_access_tokens/revoke_service_spec.rb b/spec/services/personal_access_tokens/revoke_service_spec.rb
index f16b6f00a0a..562d6017405 100644
--- a/spec/services/personal_access_tokens/revoke_service_spec.rb
+++ b/spec/services/personal_access_tokens/revoke_service_spec.rb
@@ -8,7 +8,12 @@ RSpec.describe PersonalAccessTokens::RevokeService do
it { expect(service.token.revoked?).to be true }
it 'logs the event' do
- expect(Gitlab::AppLogger).to receive(:info).with(/PAT REVOCATION: revoked_by: '#{current_user.username}', revoked_for: '#{token.user.username}', token_id: '\d+'/)
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ class: described_class.to_s,
+ message: 'PAT Revoked',
+ revoked_by: revoked_by,
+ revoked_for: token.user.username,
+ token_id: token.id)
subject
end
@@ -29,7 +34,9 @@ RSpec.describe PersonalAccessTokens::RevokeService do
let_it_be(:current_user) { create(:admin) }
let_it_be(:token) { create(:personal_access_token) }
- it_behaves_like 'a successfully revoked token'
+ it_behaves_like 'a successfully revoked token' do
+ let(:revoked_by) { current_user.username }
+ end
end
context 'when admin mode is disabled' do
@@ -52,7 +59,38 @@ RSpec.describe PersonalAccessTokens::RevokeService do
context 'token belongs to current_user' do
let_it_be(:token) { create(:personal_access_token, user: current_user) }
- it_behaves_like 'a successfully revoked token'
+ it_behaves_like 'a successfully revoked token' do
+ let(:revoked_by) { current_user.username }
+ end
+ end
+ end
+
+ context 'with a source parameter' do
+ let(:service) { described_class.new(nil, token: token, source: source) }
+
+ let_it_be(:current_user) { nil }
+
+ context 'when source is valid' do
+ let_it_be(:source) { 'secret_detection' }
+ let_it_be(:token) { create(:personal_access_token) }
+
+ it_behaves_like 'a successfully revoked token' do
+ let(:revoked_by) { 'secret_detection' }
+ end
+ end
+
+ context 'when source is invalid' do
+ let_it_be(:source) { 'external_request' }
+ let_it_be(:token) { create(:personal_access_token) }
+
+ it_behaves_like 'an unsuccessfully revoked token'
+ end
+
+ context 'when source is missing' do
+ let_it_be(:source) { nil }
+ let_it_be(:token) { create(:personal_access_token) }
+
+ it_behaves_like 'an unsuccessfully revoked token'
end
end
end
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index edf4bbe0f7f..72bb0adbf56 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -9,6 +9,16 @@ RSpec.describe Projects::AfterRenameService do
let!(:full_path_before_rename) { project.full_path }
let!(:path_after_rename) { "#{project.path}-renamed" }
let!(:full_path_after_rename) { "#{project.full_path}-renamed" }
+ let!(:repo_before_rename) { project.repository.raw }
+ let!(:wiki_repo_before_rename) { project.wiki.repository.raw }
+
+ let(:repo_after_rename) do
+ Gitlab::Git::Repository.new(project.repository_storage, "#{full_path_after_rename}.git", nil, nil)
+ end
+
+ let(:wiki_repo_after_rename) do
+ Gitlab::Git::Repository.new(project.repository_storage, "#{full_path_after_rename}.wiki.git", nil, nil)
+ end
describe '#execute' do
context 'using legacy storage' do
@@ -35,13 +45,15 @@ RSpec.describe Projects::AfterRenameService do
.to receive(:rename_project)
.with(path_before_rename, path_after_rename, project.namespace.full_path)
- expect_repository_exist("#{full_path_before_rename}.git")
- expect_repository_exist("#{full_path_before_rename}.wiki.git")
+ expect(repo_before_rename).to exist
+ expect(wiki_repo_before_rename).to exist
service_execute
- expect_repository_exist("#{full_path_after_rename}.git")
- expect_repository_exist("#{full_path_after_rename}.wiki.git")
+ expect(repo_before_rename).not_to exist
+ expect(wiki_repo_before_rename).not_to exist
+ expect(repo_after_rename).to exist
+ expect(wiki_repo_after_rename).to exist
end
context 'container registry with images' do
@@ -212,13 +224,4 @@ RSpec.describe Projects::AfterRenameService do
described_class.new(project, path_before: path_before_rename, full_path_before: full_path_before_rename).execute
end
-
- def expect_repository_exist(full_path_with_extension)
- expect(
- TestEnv.storage_dir_exists?(
- project.repository_storage,
- full_path_with_extension
- )
- ).to be_truthy
- end
end
diff --git a/spec/services/projects/container_repository/destroy_service_spec.rb b/spec/services/projects/container_repository/destroy_service_spec.rb
index 20e75d94e05..0ec0aecaa04 100644
--- a/spec/services/projects/container_repository/destroy_service_spec.rb
+++ b/spec/services/projects/container_repository/destroy_service_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Projects::ContainerRepository::DestroyService do
let!(:repository) { create(:container_repository, :root, project: project) }
it 'does not delete a repository' do
- expect { subject.execute(repository) }.not_to change { ContainerRepository.all.count }
+ expect { subject.execute(repository) }.not_to change { ContainerRepository.count }
end
end
@@ -29,23 +29,62 @@ RSpec.describe Projects::ContainerRepository::DestroyService do
let!(:repository) { create(:container_repository, :root, project: project) }
before do
- stub_container_registry_tags(repository: :any, tags: [])
+ stub_container_registry_tags(repository: :any, tags: %w[latest stable])
end
it 'deletes the repository' do
- expect(repository).to receive(:delete_tags!).and_call_original
- expect { described_class.new(project, user).execute(repository) }.to change { ContainerRepository.all.count }.by(-1)
+ expect_cleanup_tags_service_with(container_repository: repository, return_status: :success)
+ expect { subject.execute(repository) }.to change { ContainerRepository.count }.by(-1)
end
- context 'when destroy fails' do
- it 'set delete_status' do
+ it 'sends disable_timeout = true as part of the params by default' do
+ expect_cleanup_tags_service_with(container_repository: repository, return_status: :success, disable_timeout: true)
+ expect { subject.execute(repository) }.to change { ContainerRepository.count }.by(-1)
+ end
+
+ it 'sends disable_timeout = false as part of the params if it is set to false' do
+ expect_cleanup_tags_service_with(container_repository: repository, return_status: :success, disable_timeout: false)
+ expect { subject.execute(repository, disable_timeout: false) }.to change { ContainerRepository.count }.by(-1)
+ end
+
+ context 'when deleting the tags fails' do
+ it 'sets status as delete_failed' do
+ expect_cleanup_tags_service_with(container_repository: repository, return_status: :error)
+ allow(Gitlab::AppLogger).to receive(:error).and_call_original
+
+ subject.execute(repository)
+
+ expect(repository).to be_delete_failed
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with("Container repository with ID: #{repository.id} and path: #{repository.path} failed with message: error in deleting tags")
+ end
+ end
+
+ context 'when destroying the repository fails' do
+ it 'sets status as delete_failed' do
+ expect_cleanup_tags_service_with(container_repository: repository, return_status: :success)
allow(repository).to receive(:destroy).and_return(false)
+ allow(repository.errors).to receive(:full_messages).and_return(['Error 1', 'Error 2'])
+ allow(Gitlab::AppLogger).to receive(:error).and_call_original
subject.execute(repository)
expect(repository).to be_delete_failed
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with("Container repository with ID: #{repository.id} and path: #{repository.path} failed with message: Error 1. Error 2")
end
end
+
+ def expect_cleanup_tags_service_with(container_repository:, return_status:, disable_timeout: true)
+ delete_tags_service = instance_double(Projects::ContainerRepository::CleanupTagsService)
+
+ expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new).with(
+ container_repository: container_repository,
+ params: described_class::CLEANUP_TAGS_SERVICE_PARAMS.merge('disable_timeout' => disable_timeout)
+ ).and_return(delete_tags_service)
+
+ expect(delete_tags_service).to receive(:execute).and_return(status: return_status)
+ end
end
end
end
diff --git a/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
index 59827ea035e..b06a5709bd5 100644
--- a/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
@@ -49,6 +49,9 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService do
it_behaves_like 'when regex matching everything is specified',
delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]]
+ it_behaves_like 'when regex matching everything is specified and latest is not kept',
+ delete_expectations: [%w[latest A], %w[Ba Bb], %w[C D], %w[E]]
+
it_behaves_like 'when delete regex matching specific tags is used'
it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex'
@@ -97,6 +100,19 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService do
is_expected.to eq(response)
end
+
+ context 'when disable_timeout is set to true' do
+ let(:params) do
+ { 'name_regex_delete' => '.*', 'disable_timeout' => true }
+ end
+
+ it 'does not check if it timed out' do
+ expect(service).not_to receive(:timeout?)
+ end
+
+ it_behaves_like 'when regex matching everything is specified',
+ delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]]
+ end
end
end
diff --git a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
index 8d8907119f0..f03912dba80 100644
--- a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
@@ -24,6 +24,10 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService do
context 'with tags to delete' do
let(:timeout) { 10 }
+ before do
+ stub_application_setting(container_registry_delete_tags_service_timeout: timeout)
+ end
+
it_behaves_like 'deleting tags'
it 'succeeds when tag delete returns 404' do
@@ -48,10 +52,6 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService do
end
end
- before do
- stub_application_setting(container_registry_delete_tags_service_timeout: timeout)
- end
-
context 'with timeout' do
context 'set to a valid value' do
before do
diff --git a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
index 2d034d577ac..7227834b131 100644
--- a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
@@ -51,6 +51,16 @@ RSpec.describe Projects::ContainerRepository::ThirdParty::CleanupTagsService, :c
},
supports_caching: true
+ it_behaves_like 'when regex matching everything is specified and latest is not kept',
+ delete_expectations: [%w[A Ba Bb C D E latest]],
+ service_response_extra: {
+ before_truncate_size: 7,
+ after_truncate_size: 7,
+ before_delete_size: 7,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
it_behaves_like 'when delete regex matching specific tags is used',
service_response_extra: {
before_truncate_size: 2,
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 9c8aeb5cf7b..f42ab198a04 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -145,6 +145,20 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
end
+
+ context 'when the passed in namespace is for a bot user' do
+ let(:bot_user) { create(:user, :project_bot) }
+ let(:opts) do
+ { name: project_name, namespace_id: bot_user.namespace.id }
+ end
+
+ it 'raises an error' do
+ project = create_project(bot_user, opts)
+
+ expect(project.errors.errors.length).to eq 1
+ expect(project.errors.messages[:namespace].first).to eq("is not valid")
+ end
+ end
end
describe 'after create actions' do
@@ -908,27 +922,6 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
- it_behaves_like 'measurable service' do
- before do
- opts.merge!(
- current_user: user,
- path: 'foo'
- )
- end
-
- let(:base_log_data) do
- {
- class: Projects::CreateService.name,
- current_user: user.name,
- project_full_path: "#{user.namespace.full_path}/#{opts[:path]}"
- }
- end
-
- after do
- create_project(user, opts)
- end
- end
-
context 'with specialized project_authorization workers' do
let_it_be(:other_user) { create(:user) }
let_it_be(:group) { create(:group) }
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index f7f02769f6a..ff2de45661f 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publisher do
include ProjectForksHelper
+ include BatchDestroyDependentAssociationsHelper
let_it_be(:user) { create(:user) }
@@ -331,8 +332,8 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
context 'when image repository deletion succeeds' do
it 'removes tags' do
- expect_any_instance_of(ContainerRepository)
- .to receive(:delete_tags!).and_return(true)
+ expect_any_instance_of(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:execute).and_return({ status: :success })
destroy_project(project, user)
end
@@ -340,8 +341,8 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
context 'when image repository deletion fails' do
it 'raises an exception' do
- expect_any_instance_of(ContainerRepository)
- .to receive(:delete_tags!).and_raise(RuntimeError)
+ expect_any_instance_of(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:execute).and_raise(RuntimeError)
expect(destroy_project(project, user)).to be false
end
@@ -548,6 +549,30 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
end
end
+ context 'associations destroyed in batches' do
+ let!(:merge_request) { create(:merge_request, source_project: project) }
+ let!(:issue) { create(:issue, project: project) }
+ let!(:label) { create(:label, project: project) }
+
+ it 'destroys the associations marked as `dependent: :destroy`, in batches' do
+ query_recorder = ActiveRecord::QueryRecorder.new do
+ destroy_project(project, user, {})
+ end
+
+ expect(project.merge_requests).to be_empty
+ expect(project.issues).to be_empty
+ expect(project.labels).to be_empty
+
+ expected_queries = [
+ delete_in_batches_regexps(:merge_requests, :target_project_id, project, [merge_request]),
+ delete_in_batches_regexps(:issues, :project_id, project, [issue]),
+ delete_in_batches_regexps(:labels, :project_id, project, [label])
+ ].flatten
+
+ expect(query_recorder.log).to include(*expected_queries)
+ end
+ end
+
def destroy_project(project, user, params = {})
described_class.new(project, user, params).public_send(async ? :async_execute : :execute)
end
diff --git a/spec/services/projects/download_service_spec.rb b/spec/services/projects/download_service_spec.rb
index 7d4fce814f5..f158b11a9fa 100644
--- a/spec/services/projects/download_service_spec.rb
+++ b/spec/services/projects/download_service_spec.rb
@@ -21,8 +21,8 @@ RSpec.describe Projects::DownloadService do
context 'for URLs that are on the whitelist' do
before do
# `ssrf_filter` resolves the hostname. See https://github.com/carrierwaveuploader/carrierwave/commit/91714adda998bc9e8decf5b1f5d260d808761304
- stub_request(:get, %r{http://[\d\.]+/rails_sample.jpg}).to_return(body: File.read(Rails.root + 'spec/fixtures/rails_sample.jpg'))
- stub_request(:get, %r{http://[\d\.]+/doc_sample.txt}).to_return(body: File.read(Rails.root + 'spec/fixtures/doc_sample.txt'))
+ stub_request(:get, %r{http://[\d.]+/rails_sample.jpg}).to_return(body: File.read(Rails.root + 'spec/fixtures/rails_sample.jpg'))
+ stub_request(:get, %r{http://[\d.]+/doc_sample.txt}).to_return(body: File.read(Rails.root + 'spec/fixtures/doc_sample.txt'))
end
context 'an image file' do
diff --git a/spec/services/projects/import_export/export_service_spec.rb b/spec/services/projects/import_export/export_service_spec.rb
index 285687505e9..2c1ebe27014 100644
--- a/spec/services/projects/import_export/export_service_spec.rb
+++ b/spec/services/projects/import_export/export_service_spec.rb
@@ -216,24 +216,9 @@ RSpec.describe Projects::ImportExport::ExportService do
it 'fails' do
expected_message =
"User with ID: %s does not have required permissions for Project: %s with ID: %s" %
- [another_user.id, project.name, project.id]
+ [another_user.id, project.name, project.id]
expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
end
end
-
- it_behaves_like 'measurable service' do
- let(:base_log_data) do
- {
- class: described_class.name,
- current_user: user.name,
- project_full_path: project.full_path,
- file_path: shared.export_path
- }
- end
-
- after do
- service.execute(after_export_strategy)
- end
- end
end
end
diff --git a/spec/services/projects/import_export/parallel_export_service_spec.rb b/spec/services/projects/import_export/parallel_export_service_spec.rb
new file mode 100644
index 00000000000..b9f2867077c
--- /dev/null
+++ b/spec/services/projects/import_export/parallel_export_service_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ImportExport::ParallelExportService, feature_category: :importers do
+ let_it_be(:user) { create(:user) }
+
+ let(:export_job) { create(:project_export_job) }
+ let(:after_export_strategy) { Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy.new }
+ let(:project) { export_job.project }
+
+ before do
+ allow_next_instance_of(Gitlab::ImportExport::Project::ExportedRelationsMerger) do |saver|
+ allow(saver).to receive(:save).and_return(true)
+ end
+
+ allow_next_instance_of(Gitlab::ImportExport::VersionSaver) do |saver|
+ allow(saver).to receive(:save).and_return(true)
+ end
+ end
+
+ describe '#execute' do
+ subject(:service) { described_class.new(export_job, user, after_export_strategy) }
+
+ it 'creates a project export archive file' do
+ expect(Gitlab::ImportExport::Saver).to receive(:save)
+ .with(exportable: project, shared: project.import_export_shared)
+
+ service.execute
+ end
+
+ it 'logs export progress' do
+ allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)
+
+ logger = service.instance_variable_get(:@logger)
+ messages = [
+ 'Parallel project export started',
+ 'Parallel project export - Gitlab::ImportExport::VersionSaver saver started',
+ 'Parallel project export - Gitlab::ImportExport::Project::ExportedRelationsMerger saver started',
+ 'Parallel project export finished successfully'
+ ]
+ messages.each do |message|
+ expect(logger).to receive(:info).ordered.with(hash_including(message: message))
+ end
+
+ service.execute
+ end
+
+ it 'executes the after export strategy on export success' do
+ allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)
+
+ expect(after_export_strategy).to receive(:execute)
+
+ service.execute
+ end
+
+ it 'ensures files are cleaned up' do
+ shared = project.import_export_shared
+ FileUtils.mkdir_p(shared.archive_path)
+ FileUtils.mkdir_p(shared.export_path)
+
+ allow(Gitlab::ImportExport::Saver).to receive(:save).and_raise(StandardError)
+
+ expect { service.execute }.to raise_error(StandardError)
+
+ expect(File.exist?(shared.export_path)).to eq(false)
+ expect(File.exist?(shared.archive_path)).to eq(false)
+ end
+
+ context 'when export fails' do
+ it 'notifies the user of the error' do
+ allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(false)
+
+ allow(project.import_export_shared).to receive(:errors).and_return(['Error'])
+
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:project_not_exported).with(project, user, ['Error'])
+ end
+
+ service.execute
+ end
+ end
+
+ context 'when the after export strategy fails' do
+ it 'notifies the user of the error' do
+ allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)
+ allow(after_export_strategy).to receive(:execute).and_return(false)
+ allow(project.import_export_shared).to receive(:errors).and_return(['Error'])
+
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:project_not_exported).with(project, user, ['Error'])
+ end
+
+ service.execute
+ end
+ end
+ end
+end
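A minimal usage sketch of the new service covered by this spec, built only from the arguments shown above (any setup beyond that is assumed):

# Hypothetical caller; export_job, user and the strategy are as constructed in the spec.
strategy = Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy.new
Projects::ImportExport::ParallelExportService.new(export_job, user, strategy).execute
# On success the after-export strategy runs; on failure NotificationService#project_not_exported is invoked.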
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index b3f8980a7bd..bb11b2e617e 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -419,25 +419,5 @@ RSpec.describe Projects::ImportService do
end
end
end
-
- it_behaves_like 'measurable service' do
- let(:base_log_data) do
- {
- class: described_class.name,
- current_user: user.name,
- project_full_path: project.full_path,
- import_type: project.import_type,
- file_path: project.import_source
- }
- end
-
- before do
- project.import_type = 'github'
- end
-
- after do
- subject.execute
- end
- end
end
end
diff --git a/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb
index d472d6493c3..80b3c4d0403 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
]
end
- subject { described_class.new(project, remote_uri: remote_uri) }
+ subject(:service) { described_class.new(project, remote_uri: remote_uri) }
before do
allow(project).to receive(:lfs_enabled?).and_return(true)
@@ -48,19 +48,24 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
allow(Gitlab::HTTP).to receive(:post).and_return(response)
end
- describe '#execute' do
- let(:download_objects) { subject.execute(new_oids) }
-
+ describe '#each_link' do
it 'retrieves each download link of every non existent lfs object' do
- download_objects.each do |lfs_download_object|
- expect(lfs_download_object.link).to eq "#{import_url}/gitlab-lfs/objects/#{lfs_download_object.oid}"
- end
+ links = []
+ service.each_link(new_oids) { |lfs_download_object| links << lfs_download_object.link }
+
+ expect(links).to contain_exactly(
+ "#{import_url}/gitlab-lfs/objects/oid1",
+ "#{import_url}/gitlab-lfs/objects/oid2"
+ )
end
it 'stores headers' do
- download_objects.each do |lfs_download_object|
- expect(lfs_download_object.headers).to eq(headers)
+ expected_headers = []
+ service.each_link(new_oids) do |lfs_download_object|
+ expected_headers << lfs_download_object.headers
end
+
+ expect(expected_headers).to contain_exactly(headers, headers)
end
context 'when lfs objects size is larger than the batch size' do
@@ -97,10 +102,13 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
stub_successful_request([data[4]])
end
- it 'retreives them in batches' do
- subject.execute(new_oids).each do |lfs_download_object|
+ it 'retrieves them in batches' do
+ checksum = 0
+ service.each_link(new_oids) do |lfs_download_object|
expect(lfs_download_object.link).to eq "#{import_url}/gitlab-lfs/objects/#{lfs_download_object.oid}"
+ checksum += 1
end
+ expect(checksum).to eq new_oids.size
end
end
@@ -127,9 +135,12 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
an_instance_of(error_class), project_id: project.id, batch_size: 5, oids_count: 5
)
- subject.execute(new_oids).each do |lfs_download_object|
+ checksum = 0
+ service.each_link(new_oids) do |lfs_download_object|
expect(lfs_download_object.link).to eq "#{import_url}/gitlab-lfs/objects/#{lfs_download_object.oid}"
+ checksum += 1
end
+ expect(checksum).to eq new_oids.size
end
end
@@ -153,7 +164,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
an_instance_of(error_class), project_id: project.id, batch_size: 2, oids_count: 5
)
- expect { subject.execute(new_oids) }.to raise_error(described_class::DownloadLinksError)
+ expect { service.each_link(new_oids) }.to raise_error(described_class::DownloadLinksError)
end
end
end
@@ -165,21 +176,23 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
let(:import_url) { 'http://user:password@www.gitlab.com/demo/repo.git' }
it 'adds credentials to the download_link' do
- result = subject.execute(new_oids)
-
- result.each do |lfs_download_object|
+ checksum = 0
+ service.each_link(new_oids) do |lfs_download_object|
expect(lfs_download_object.link.starts_with?('http://user:password@')).to be_truthy
+ checksum += 1
end
+ expect(checksum).to eq new_oids.size
end
end
context 'when lfs_endpoint does not have any credentials' do
it 'does not add any credentials' do
- result = subject.execute(new_oids)
-
- result.each do |lfs_download_object|
+ checksum = 0
+ service.each_link(new_oids) do |lfs_download_object|
expect(lfs_download_object.link.starts_with?('http://user:password@')).to be_falsey
+ checksum += 1
end
+ expect(checksum).to eq new_oids.size
end
end
end
@@ -189,17 +202,18 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
let(:lfs_endpoint) { "#{import_url_with_credentials}/info/lfs/objects/batch" }
it 'downloads without any credentials' do
- result = subject.execute(new_oids)
-
- result.each do |lfs_download_object|
+ checksum = 0
+ service.each_link(new_oids) do |lfs_download_object|
expect(lfs_download_object.link.starts_with?('http://user:password@')).to be_falsey
+ checksum += 1
end
+ expect(checksum).to eq new_oids.size
end
end
end
end
- describe '#get_download_links' do
+ describe '#download_links_for' do
context 'if request fails' do
before do
request_timeout_net_response = Net::HTTPRequestTimeout.new('', '', '')
@@ -208,7 +222,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
end
it 'raises an error' do
- expect { subject.send(:get_download_links, new_oids) }.to raise_error(described_class::DownloadLinksError)
+ expect { subject.send(:download_links_for, new_oids) }.to raise_error(described_class::DownloadLinksError)
end
end
@@ -218,7 +232,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
allow(response).to receive(:body).and_return(body)
allow(Gitlab::HTTP).to receive(:post).and_return(response)
- expect { subject.send(:get_download_links, new_oids) }.to raise_error(described_class::DownloadLinksError)
+ expect { subject.send(:download_links_for, new_oids) }.to raise_error(described_class::DownloadLinksError)
end
end
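A short usage sketch of the renamed streaming API exercised above, assumed from the spec only: `#each_link` now yields `LfsDownloadObject`s batch by batch instead of returning a full collection.

# Hypothetical caller mirroring the spec above; `download` stands in for whatever consumes each object.
service = Projects::LfsPointers::LfsDownloadLinkListService.new(project, remote_uri: remote_uri)
service.each_link(new_oids) do |lfs_download_object|
  # Each yielded object exposes the resolved link and request headers.
  download(lfs_download_object.link, headers: lfs_download_object.headers)
end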
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index 6c7164c5e06..c815ad38843 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -108,7 +108,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
end
end
- context 'when file download fails' do
+ context 'when file downloading response code is not success' do
before do
allow(Gitlab::HTTP).to receive(:get).and_return(code: 500, 'success?' => false)
end
@@ -122,6 +122,20 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
end
end
+ context 'when the file download request times out a few times' do
+ before do
+ allow(Gitlab::HTTP).to receive(:get).and_raise(Net::OpenTimeout)
+ end
+
+ it_behaves_like 'no lfs object is created'
+
+ it 'retries fetching the LFS object 3 times before raising an exception' do
+ subject.execute
+
+ expect(Gitlab::HTTP).to have_received(:get).exactly(3).times
+ end
+ end
+
context 'when file download returns a redirect' do
let(:redirect_link) { 'http://external-link' }
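The new timeout spec above asserts three `Gitlab::HTTP.get` attempts before the error propagates; a hedged sketch of the kind of retry loop that behaviour implies (the real service implementation may differ):

# Illustrative only; the method name and structure are assumptions.
def fetch_with_retries(url, attempts: 3)
  tries = 0
  begin
    tries += 1
    Gitlab::HTTP.get(url)
  rescue Net::OpenTimeout
    retry if tries < attempts
    raise
  end
end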
diff --git a/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
index b36b0b8d6b2..32b86ade81e 100644
--- a/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_import_service_spec.rb
@@ -5,7 +5,12 @@ RSpec.describe Projects::LfsPointers::LfsImportService do
let(:project) { create(:project) }
let(:user) { project.creator }
let(:import_url) { 'http://www.gitlab.com/demo/repo.git' }
- let(:oid_download_links) { { 'oid1' => "#{import_url}/gitlab-lfs/objects/oid1", 'oid2' => "#{import_url}/gitlab-lfs/objects/oid2" } }
+ let(:oid_download_links) do
+ [
+ { 'oid1' => "#{import_url}/gitlab-lfs/objects/oid1" },
+ { 'oid2' => "#{import_url}/gitlab-lfs/objects/oid2" }
+ ]
+ end
subject { described_class.new(project, user) }
@@ -17,7 +22,8 @@ RSpec.describe Projects::LfsPointers::LfsImportService do
it 'downloads lfs objects' do
service = double
expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
- expect(instance).to receive(:execute).and_return(oid_download_links)
+ expect(instance).to receive(:each_list_item)
+ .and_yield(oid_download_links[0]).and_yield(oid_download_links[1])
end
expect(Projects::LfsPointers::LfsDownloadService).to receive(:new).and_return(service).twice
expect(service).to receive(:execute).twice
@@ -30,7 +36,7 @@ RSpec.describe Projects::LfsPointers::LfsImportService do
context 'when no downloadable lfs object links' do
it 'does not call LfsDownloadService' do
expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
- expect(instance).to receive(:execute).and_return({})
+ expect(instance).to receive(:each_list_item)
end
expect(Projects::LfsPointers::LfsDownloadService).not_to receive(:new)
@@ -44,7 +50,7 @@ RSpec.describe Projects::LfsPointers::LfsImportService do
it 'returns error' do
error_message = "error message"
expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
- expect(instance).to receive(:execute).and_raise(StandardError, error_message)
+ expect(instance).to receive(:each_list_item).and_raise(StandardError, error_message)
end
result = subject.execute
diff --git a/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb
index adcc2b85706..59eb1ed7a29 100644
--- a/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb
@@ -9,7 +9,13 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
let!(:lfs_objects_project) { create_list(:lfs_objects_project, 2, project: project) }
let!(:existing_lfs_objects) { LfsObject.pluck(:oid, :size).to_h }
let(:oids) { { 'oid1' => 123, 'oid2' => 125 } }
- let(:oid_download_links) { { 'oid1' => "#{import_url}/gitlab-lfs/objects/oid1", 'oid2' => "#{import_url}/gitlab-lfs/objects/oid2" } }
+ let(:oid_download_links) do
+ [
+ { 'oid1' => "#{import_url}/gitlab-lfs/objects/oid1" },
+ { 'oid2' => "#{import_url}/gitlab-lfs/objects/oid2" }
+ ]
+ end
+
let(:all_oids) { existing_lfs_objects.merge(oids) }
let(:remote_uri) { URI.parse(lfs_endpoint) }
@@ -21,17 +27,24 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
allow_any_instance_of(Projects::LfsPointers::LfsListService).to receive(:execute).and_return(all_oids)
end
- describe '#execute' do
+ describe '#each_list_item' do
context 'when no lfs pointer is linked' do
before do
- allow_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).and_return(oid_download_links)
- expect(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:new).with(project, remote_uri: URI.parse(default_endpoint)).and_call_original
+ allow_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService)
+ .to receive(:each_link).with(oids)
+ .and_yield(oid_download_links[0])
+ .and_yield(oid_download_links[1])
end
it 'retrieves all lfs pointers in the project repository' do
+ expect(Projects::LfsPointers::LfsDownloadLinkListService)
+ .to receive(:new).with(project, remote_uri: URI.parse(default_endpoint))
+ .and_call_original
expect_any_instance_of(Projects::LfsPointers::LfsListService).to receive(:execute)
- subject.execute
+ checksum = 0
+ subject.each_list_item { |lfs_object| checksum += 1 }
+ expect(checksum).to eq 2
end
context 'when no LFS objects exist' do
@@ -40,17 +53,23 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
end
it 'retrieves all LFS objects' do
- expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(all_oids)
+ expect(Projects::LfsPointers::LfsDownloadLinkListService)
+ .to receive(:new).with(project, remote_uri: URI.parse(default_endpoint)).and_call_original
+ expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService)
+ .to receive(:each_link).with(all_oids)
- subject.execute
+ subject.each_list_item {}
end
end
context 'when some LFS objects already exist' do
it 'retrieves the download links of non-existent objects' do
- expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:execute).with(oids)
+ expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService)
+ .to receive(:each_link).with(oids)
- subject.execute
+ checksum = 0
+ subject.each_list_item { |lfs_object| checksum += 1 }
+ expect(checksum).to eq 2
end
end
end
@@ -62,16 +81,15 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
context 'when url points to the same import url host' do
let(:lfs_endpoint) { "#{import_url}/different_endpoint" }
- let(:service) { double }
-
- before do
- allow(service).to receive(:execute)
- end
+ let(:service) { instance_double(Projects::LfsPointers::LfsDownloadLinkListService, each_link: nil) }
it 'downloads lfs object using the new endpoint' do
- expect(Projects::LfsPointers::LfsDownloadLinkListService).to receive(:new).with(project, remote_uri: remote_uri).and_return(service)
+ expect(Projects::LfsPointers::LfsDownloadLinkListService)
+ .to receive(:new)
+ .with(project, remote_uri: remote_uri)
+ .and_return(service)
- subject.execute
+ subject.each_list_item {}
end
context 'when import url has credentials' do
@@ -79,10 +97,14 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
it 'adds the credentials to the new endpoint' do
expect(Projects::LfsPointers::LfsDownloadLinkListService)
- .to receive(:new).with(project, remote_uri: URI.parse("http://user:password@www.gitlab.com/demo/repo.git/different_endpoint"))
+ .to receive(:new)
+ .with(
+ project,
+ remote_uri: URI.parse("http://user:password@www.gitlab.com/demo/repo.git/different_endpoint")
+ )
.and_return(service)
- subject.execute
+ subject.each_list_item {}
end
context 'when url has its own credentials' do
@@ -93,7 +115,7 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
.to receive(:new).with(project, remote_uri: remote_uri)
.and_return(service)
- subject.execute
+ subject.each_list_item {}
end
end
end
@@ -105,7 +127,7 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
it 'disables lfs from the project' do
expect(project.lfs_enabled?).to be_truthy
- subject.execute
+ subject.each_list_item {}
expect(project.lfs_enabled?).to be_falsey
end
@@ -113,7 +135,7 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
it 'does not download anything' do
expect_any_instance_of(Projects::LfsPointers::LfsListService).not_to receive(:execute)
- subject.execute
+ subject.each_list_item {}
end
end
end
diff --git a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
index 6a715312097..a3cff345f68 100644
--- a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
+++ b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
@@ -28,6 +28,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
end
let(:now) { Time.zone.now }
+ let(:statistics) { project.statistics }
around do |example|
freeze_time { example.run }
@@ -45,7 +46,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
end
it 'increments the counter attribute by the total size of the current batch of artifacts' do
- expect { service.execute }.to change { project.statistics.get_counter_value(:build_artifacts_size) }.to(3)
+ expect { service.execute }.to change { statistics.counter(:build_artifacts_size).get }.to(3)
end
it 'updates the last_job_artifact_id to the ID of the last artifact from the batch' do
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 8f505c31c5a..4d75786a4c3 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -263,7 +263,7 @@ RSpec.describe Projects::TransferService do
end
context 'when transfer fails' do
- let!(:original_path) { project_path(project) }
+ let!(:original_path) { project.repository.relative_path }
def attempt_project_transfer(&block)
expect do
@@ -277,21 +277,11 @@ RSpec.describe Projects::TransferService do
expect_any_instance_of(Labels::TransferService).to receive(:execute).and_raise(ActiveRecord::StatementInvalid, "PG ERROR")
end
- def project_path(project)
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- project.repository.path_to_repo
- end
- end
-
- def current_path
- project_path(project)
- end
-
it 'rolls back repo location' do
attempt_project_transfer
expect(project.repository.raw.exists?).to be(true)
- expect(original_path).to eq current_path
+ expect(original_path).to eq project.repository.relative_path
end
it 'rolls back project full path in gitaly' do
@@ -462,6 +452,22 @@ RSpec.describe Projects::TransferService do
end
end
+ context 'when the target namespace belongs to a bot user', :enable_admin_mode do
+ let(:bot) { create(:user, :project_bot) }
+ let(:target) { bot.namespace }
+ let(:executor) { create(:user, :admin) }
+
+ it 'does not allow project transfer' do
+ namespace = project.namespace
+
+ transfer_result = execute_transfer
+
+ expect(transfer_result).to eq false
+ expect(project.namespace).to eq(namespace)
+ expect(project.errors[:new_namespace]).to include("You don't have permission to transfer projects into that namespace.")
+ end
+ end
+
context 'when user does not own the project' do
let(:project) { create(:project, :repository, :legacy_storage) }
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index a69db3b9970..d908a169898 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -320,10 +320,11 @@ RSpec.describe Projects::UpdatePagesService do
end
context 'when retrying the job' do
+ let(:stage) { create(:ci_stage, position: 1_000_000, name: 'deploy', pipeline: pipeline) }
let!(:older_deploy_job) do
create(:generic_commit_status, :failed, pipeline: pipeline,
ref: build.ref,
- stage: 'deploy',
+ ci_stage: stage,
name: 'pages:deploy')
end
@@ -337,13 +338,15 @@ RSpec.describe Projects::UpdatePagesService do
expect(execute).to eq(:success)
expect(older_deploy_job.reload).to be_retried
+ expect(deploy_status.ci_stage).to eq(stage)
+ expect(deploy_status.stage_idx).to eq(stage.position)
end
end
private
def deploy_status
- GenericCommitStatus.find_by(name: 'pages:deploy')
+ GenericCommitStatus.where(name: 'pages:deploy').last
end
def execute
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 7d8951bf111..3cda6bc2627 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -1,5 +1,4 @@
# frozen_string_literal: true
-
require 'spec_helper'
RSpec.describe Projects::UpdateService do
@@ -303,6 +302,25 @@ RSpec.describe Projects::UpdateService do
expect(project.default_branch).to eq 'master'
expect(project.previous_default_branch).to be_nil
end
+
+ context 'when repository has an ambiguous branch named "HEAD"' do
+ before do
+ allow(project.repository.raw).to receive(:write_ref).and_return(false)
+ allow(project.repository).to receive(:branch_names) { %w[fix master main HEAD] }
+ end
+
+ it 'returns an error to the user' do
+ result = update_project(project, admin, default_branch: 'fix')
+
+ expect(result).to include(status: :error)
+ expect(result[:message]).to include("Could not set the default branch. Do you have a branch named 'HEAD' in your repository?")
+
+ project.reload
+
+ expect(project.default_branch).to eq 'master'
+ expect(project.previous_default_branch).to be_nil
+ end
+ end
end
context 'when we update project but not enabling a wiki' do
@@ -422,25 +440,6 @@ RSpec.describe Projects::UpdateService do
expect(feature.feature_flags_access_level).not_to eq(ProjectFeature::DISABLED)
expect(feature.environments_access_level).not_to eq(ProjectFeature::DISABLED)
end
-
- context 'when split_operations_visibility_permissions feature is disabled' do
- before do
- stub_feature_flags(split_operations_visibility_permissions: false)
- end
-
- it 'syncs the changes to the related fields' do
- result = update_project(project, user, project_feature_attributes: feature_params)
-
- expect(result).to eq({ status: :success })
- feature = project.project_feature
-
- expect(feature.operations_access_level).to eq(ProjectFeature::DISABLED)
- expect(feature.monitor_access_level).to eq(ProjectFeature::DISABLED)
- expect(feature.infrastructure_access_level).to eq(ProjectFeature::DISABLED)
- expect(feature.feature_flags_access_level).to eq(ProjectFeature::DISABLED)
- expect(feature.environments_access_level).to eq(ProjectFeature::DISABLED)
- end
- end
end
context 'when updating a project that contains container images' do
diff --git a/spec/services/protected_branches/api_service_spec.rb b/spec/services/protected_branches/api_service_spec.rb
index 94484f5a7b9..c98e253267b 100644
--- a/spec/services/protected_branches/api_service_spec.rb
+++ b/spec/services/protected_branches/api_service_spec.rb
@@ -3,32 +3,55 @@
require 'spec_helper'
RSpec.describe ProtectedBranches::ApiService do
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user, maintainer_projects: [project]) }
-
- it 'creates a protected branch with prefilled defaults' do
- expect(::ProtectedBranches::CreateService).to receive(:new).with(
- project, user, hash_including(
- push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
- merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }]
- )
- ).and_call_original
-
- expect(described_class.new(project, user, { name: 'new name' }).create).to be_valid
+ shared_examples 'execute with entity' do
+ it 'creates a protected branch with prefilled defaults' do
+ expect(::ProtectedBranches::CreateService).to receive(:new).with(
+ entity, user, hash_including(
+ push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
+ merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }]
+ )
+ ).and_call_original
+
+ expect(described_class.new(entity, user, { name: 'new name' }).create).to be_valid
+ end
+
+ it 'updates a protected branch without prefilled defaults' do
+ expect(::ProtectedBranches::UpdateService).to receive(:new).with(
+ entity, user, hash_including(
+ push_access_levels_attributes: [],
+ merge_access_levels_attributes: []
+ )
+ ).and_call_original
+
+ expect do
+ expect(described_class.new(entity, user, { name: 'new name' }).update(protected_branch)).to be_valid
+ end.not_to change { protected_branch.reload.allow_force_push }
+ end
+ end
+
+ context 'with entity project' do
+ let_it_be_with_reload(:entity) { create(:project) }
+ let_it_be_with_reload(:protected_branch) { create(:protected_branch, project: entity, allow_force_push: true) }
+ let(:user) { entity.first_owner }
+
+ it_behaves_like 'execute with entity'
end
- it 'updates a protected branch without prefilled defaults' do
- protected_branch = create(:protected_branch, project: project, allow_force_push: true)
+ context 'with entity group' do
+ let_it_be_with_reload(:entity) { create(:group) }
+ let_it_be_with_reload(:user) { create(:user) }
+ let_it_be_with_reload(:protected_branch) do
+ create(:protected_branch, group: entity, project: nil, allow_force_push: true)
+ end
- expect(::ProtectedBranches::UpdateService).to receive(:new).with(
- project, user, hash_including(
- push_access_levels_attributes: [],
- merge_access_levels_attributes: []
- )
- ).and_call_original
+ before do
+ allow(Ability).to receive(:allowed?).with(user, :update_protected_branch, protected_branch).and_return(true)
+ allow(Ability)
+ .to receive(:allowed?)
+ .with(user, :create_protected_branch, instance_of(ProtectedBranch))
+ .and_return(true)
+ end
- expect do
- expect(described_class.new(project, user, { name: 'new name' }).update(protected_branch)).to be_valid
- end.not_to change { protected_branch.reload.allow_force_push }
+ it_behaves_like 'execute with entity'
end
end
diff --git a/spec/services/protected_branches/cache_service_spec.rb b/spec/services/protected_branches/cache_service_spec.rb
index d7a3258160b..ea434922661 100644
--- a/spec/services/protected_branches/cache_service_spec.rb
+++ b/spec/services/protected_branches/cache_service_spec.rb
@@ -4,122 +4,150 @@
require 'spec_helper'
RSpec.describe ProtectedBranches::CacheService, :clean_gitlab_redis_cache do
- subject(:service) { described_class.new(project, user) }
+ shared_examples 'execute with entity' do
+ subject(:service) { described_class.new(entity, user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { project.first_owner }
+ let(:immediate_expiration) { 0 }
- let(:immediate_expiration) { 0 }
-
- describe '#fetch' do
- it 'caches the value' do
- expect(service.fetch('main') { true }).to eq(true)
- expect(service.fetch('not-found') { false }).to eq(false)
-
- # Uses cached values
- expect(service.fetch('main') { false }).to eq(true)
- expect(service.fetch('not-found') { true }).to eq(false)
- end
-
- it 'sets expiry on the key' do
- stub_const("#{described_class.name}::CACHE_EXPIRE_IN", immediate_expiration)
-
- expect(service.fetch('main') { true }).to eq(true)
- expect(service.fetch('not-found') { false }).to eq(false)
-
- expect(service.fetch('main') { false }).to eq(false)
- expect(service.fetch('not-found') { true }).to eq(true)
- end
-
- it 'does not set an expiry on the key after the hash is already created' do
- expect(service.fetch('main') { true }).to eq(true)
+ describe '#fetch' do
+ it 'caches the value' do
+ expect(service.fetch('main') { true }).to eq(true)
+ expect(service.fetch('not-found') { false }).to eq(false)
- stub_const("#{described_class.name}::CACHE_EXPIRE_IN", immediate_expiration)
+ # Uses cached values
+ expect(service.fetch('main') { false }).to eq(true)
+ expect(service.fetch('not-found') { true }).to eq(false)
+ end
- expect(service.fetch('not-found') { false }).to eq(false)
+ it 'sets expiry on the key' do
+ stub_const("#{described_class.name}::CACHE_EXPIRE_IN", immediate_expiration)
- expect(service.fetch('main') { false }).to eq(true)
- expect(service.fetch('not-found') { true }).to eq(false)
- end
+ expect(service.fetch('main') { true }).to eq(true)
+ expect(service.fetch('not-found') { false }).to eq(false)
- context 'when CACHE_LIMIT is exceeded' do
- before do
- stub_const("#{described_class.name}::CACHE_LIMIT", 2)
+ expect(service.fetch('main') { false }).to eq(false)
+ expect(service.fetch('not-found') { true }).to eq(true)
end
- it 'recreates cache' do
+ it 'does not set an expiry on the key after the hash is already created' do
expect(service.fetch('main') { true }).to eq(true)
+
+ stub_const("#{described_class.name}::CACHE_EXPIRE_IN", immediate_expiration)
+
expect(service.fetch('not-found') { false }).to eq(false)
- # Uses cached values
expect(service.fetch('main') { false }).to eq(true)
expect(service.fetch('not-found') { true }).to eq(false)
+ end
- # Overflow
- expect(service.fetch('new-branch') { true }).to eq(true)
+ context 'when CACHE_LIMIT is exceeded' do
+ before do
+ stub_const("#{described_class.name}::CACHE_LIMIT", 2)
+ end
- # Refreshes values
- expect(service.fetch('main') { false }).to eq(false)
- expect(service.fetch('not-found') { true }).to eq(true)
- end
- end
+ it 'recreates cache' do
+ expect(service.fetch('main') { true }).to eq(true)
+ expect(service.fetch('not-found') { false }).to eq(false)
+
+ # Uses cached values
+ expect(service.fetch('main') { false }).to eq(true)
+ expect(service.fetch('not-found') { true }).to eq(false)
- context 'when dry_run is on' do
- it 'does not use cached value' do
- expect(service.fetch('main', dry_run: true) { true }).to eq(true)
- expect(service.fetch('main', dry_run: true) { false }).to eq(false)
+ # Overflow
+ expect(service.fetch('new-branch') { true }).to eq(true)
+
+ # Refreshes values
+ expect(service.fetch('main') { false }).to eq(false)
+ expect(service.fetch('not-found') { true }).to eq(true)
+ end
end
- context 'when cache mismatch' do
- it 'logs an error' do
+ context 'when dry_run is on' do
+ it 'does not use cached value' do
expect(service.fetch('main', dry_run: true) { true }).to eq(true)
+ expect(service.fetch('main', dry_run: true) { false }).to eq(false)
+ end
+
+ context 'when cache mismatch' do
+ it 'logs an error' do
+ expect(service.fetch('main', dry_run: true) { true }).to eq(true)
+
+ expect(Gitlab::AppLogger).to receive(:error).with(
+ {
+ 'class' => described_class.name,
+ 'message' => /Cache mismatch/,
+ 'record_class' => entity.class.name,
+ 'record_id' => entity.id,
+ 'record_path' => entity.full_path
+ }
+ )
+
+ expect(service.fetch('main', dry_run: true) { false }).to eq(false)
+ end
+ end
- expect(Gitlab::AppLogger).to receive(:error).with(
- {
- 'class' => described_class.name,
- 'message' => /Cache mismatch/,
- 'project_id' => project.id,
- 'project_path' => project.full_path
- }
- )
+ context 'when cache matches' do
+ it 'does not log an error' do
+ expect(service.fetch('main', dry_run: true) { true }).to eq(true)
- expect(service.fetch('main', dry_run: true) { false }).to eq(false)
+ expect(Gitlab::AppLogger).not_to receive(:error)
+
+ expect(service.fetch('main', dry_run: true) { true }).to eq(true)
+ end
end
end
+ end
- context 'when cache matches' do
- it 'does not log an error' do
- expect(service.fetch('main', dry_run: true) { true }).to eq(true)
+ describe '#refresh' do
+ it 'clears cached values' do
+ expect(service.fetch('main') { true }).to eq(true)
+ expect(service.fetch('not-found') { false }).to eq(false)
- expect(Gitlab::AppLogger).not_to receive(:error)
+ service.refresh
- expect(service.fetch('main', dry_run: true) { true }).to eq(true)
+ # Recreates cache
+ expect(service.fetch('main') { false }).to eq(false)
+ expect(service.fetch('not-found') { true }).to eq(true)
+ end
+ end
+
+ describe 'metrics' do
+ it 'records hit ratio metrics' do
+ expect_next_instance_of(Gitlab::Cache::Metrics) do |metrics|
+ expect(metrics).to receive(:increment_cache_miss).once
+ expect(metrics).to receive(:increment_cache_hit).exactly(4).times
end
+
+ 5.times { service.fetch('main') { true } }
end
end
end
- describe '#refresh' do
- it 'clears cached values' do
- expect(service.fetch('main') { true }).to eq(true)
- expect(service.fetch('not-found') { false }).to eq(false)
+ context 'with entity project' do
+ let_it_be_with_reload(:entity) { create(:project) }
+ let(:user) { entity.first_owner }
- service.refresh
+ it_behaves_like 'execute with entity'
+ end
+
+ context 'with entity group' do
+ let_it_be_with_reload(:entity) { create(:group) }
+ let_it_be_with_reload(:user) { create(:user) }
- # Recreates cache
- expect(service.fetch('main') { false }).to eq(false)
- expect(service.fetch('not-found') { true }).to eq(true)
+ before do
+ entity.add_owner(user)
end
- end
- describe 'metrics' do
- it 'records hit ratio metrics' do
- expect_next_instance_of(Gitlab::Cache::Metrics) do |metrics|
- expect(metrics).to receive(:increment_cache_miss).once
- expect(metrics).to receive(:increment_cache_hit).exactly(4).times
+ context 'when feature flag enabled' do
+ it_behaves_like 'execute with entity'
+ end
+
+ context 'when feature flag disabled' do
+ before do
+ stub_feature_flags(group_protected_branches: false)
end
- 5.times { service.fetch('main') { true } }
+ it_behaves_like 'execute with entity'
end
end
end
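The cache spec above drives the service entirely through `#fetch` with a block; a short usage sketch under the new shared examples, where the entity may be a project or a group (caller and lookup are hypothetical):

cache = ProtectedBranches::CacheService.new(entity, user)
# The block is only evaluated on a cache miss; its result is memoized per branch name.
protected = cache.fetch('main') { expensive_protected_branch_lookup('main') }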
diff --git a/spec/services/protected_branches/create_service_spec.rb b/spec/services/protected_branches/create_service_spec.rb
index b42524e761c..9c8fe769ed8 100644
--- a/spec/services/protected_branches/create_service_spec.rb
+++ b/spec/services/protected_branches/create_service_spec.rb
@@ -3,70 +3,75 @@
require 'spec_helper'
RSpec.describe ProtectedBranches::CreateService do
- let_it_be_with_reload(:project) { create(:project) }
-
- let(:user) { project.first_owner }
- let(:params) do
- {
- name: name,
- merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
- push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }]
- }
- end
-
- subject(:service) { described_class.new(project, user, params) }
-
- describe '#execute' do
- let(:name) { 'master' }
-
- it 'creates a new protected branch' do
- expect { service.execute }.to change(ProtectedBranch, :count).by(1)
- expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
- expect(project.protected_branches.last.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
+ shared_examples 'execute with entity' do
+ let(:params) do
+ {
+ name: name,
+ merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
+ push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }]
+ }
end
- it 'refreshes the cache' do
- expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
- expect(cache_service).to receive(:refresh)
- end
+ subject(:service) { described_class.new(entity, user, params) }
- service.execute
- end
-
- context 'when protecting a branch with a name that contains HTML tags' do
- let(:name) { 'foo<b>bar<\b>' }
+ describe '#execute' do
+ let(:name) { 'master' }
it 'creates a new protected branch' do
expect { service.execute }.to change(ProtectedBranch, :count).by(1)
- expect(project.protected_branches.last.name).to eq(name)
+ expect(entity.protected_branches.last.push_access_levels.map(&:access_level)).to match_array([Gitlab::Access::MAINTAINER])
+ expect(entity.protected_branches.last.merge_access_levels.map(&:access_level)).to match_array([Gitlab::Access::MAINTAINER])
end
- end
- context 'when user does not have permission' do
- let(:user) { create(:user) }
+ it 'refreshes the cache' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
+ end
- before do
- project.add_developer(user)
+ service.execute
end
- it 'creates a new protected branch if we skip authorization step' do
- expect { service.execute(skip_authorization: true) }.to change(ProtectedBranch, :count).by(1)
+ context 'when protecting a branch with a name that contains HTML tags' do
+ let(:name) { 'foo<b>bar<\b>' }
+
+ it 'creates a new protected branch' do
+ expect { service.execute }.to change(ProtectedBranch, :count).by(1)
+ expect(entity.protected_branches.last.name).to eq(name)
+ end
end
- it 'raises Gitlab::Access:AccessDeniedError' do
- expect { service.execute }.to raise_error(Gitlab::Access::AccessDeniedError)
+ context 'when a policy restricts rule creation' do
+ it "prevents creation of the protected branch rule" do
+ disallow(:create_protected_branch, an_instance_of(ProtectedBranch))
+
+ expect do
+ service.execute
+ end.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+
+ it 'creates a new protected branch if we skip authorization step' do
+ expect { service.execute(skip_authorization: true) }.to change(ProtectedBranch, :count).by(1)
+ end
end
end
+ end
- context 'when a policy restricts rule creation' do
- it "prevents creation of the protected branch rule" do
- disallow(:create_protected_branch, an_instance_of(ProtectedBranch))
+ context 'with entity project' do
+ let_it_be_with_reload(:entity) { create(:project) }
+ let(:user) { entity.first_owner }
- expect do
- service.execute
- end.to raise_error(Gitlab::Access::AccessDeniedError)
- end
+ it_behaves_like 'execute with entity'
+ end
+
+ context 'with entity group' do
+ let_it_be_with_reload(:entity) { create(:group) }
+ let_it_be_with_reload(:user) { create(:user) }
+
+ before do
+ allow(Ability).to receive(:allowed?).with(user, :create_protected_branch, instance_of(ProtectedBranch)).and_return(true)
end
+
+ it_behaves_like 'execute with entity'
end
def disallow(ability, protected_branch)
diff --git a/spec/services/protected_branches/destroy_service_spec.rb b/spec/services/protected_branches/destroy_service_spec.rb
index 123deeea005..421d4aae5bb 100644
--- a/spec/services/protected_branches/destroy_service_spec.rb
+++ b/spec/services/protected_branches/destroy_service_spec.rb
@@ -3,37 +3,54 @@
require 'spec_helper'
RSpec.describe ProtectedBranches::DestroyService do
- let_it_be_with_reload(:project) { create(:project) }
+ shared_examples 'execute with entity' do
+ subject(:service) { described_class.new(entity, user) }
- let!(:protected_branch) { create(:protected_branch, project: project) }
- let(:user) { project.first_owner }
+ describe '#execute' do
+ it 'destroys a protected branch' do
+ service.execute(protected_branch)
- subject(:service) { described_class.new(project, user) }
-
- describe '#execute' do
- it 'destroys a protected branch' do
- service.execute(protected_branch)
+ expect(protected_branch).to be_destroyed
+ end
- expect(protected_branch).to be_destroyed
- end
+ it 'refreshes the cache' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
+ end
- it 'refreshes the cache' do
- expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
- expect(cache_service).to receive(:refresh)
+ service.execute(protected_branch)
end
- service.execute(protected_branch)
+ context 'when a policy restricts rule deletion' do
+ it "prevents deletion of the protected branch rule" do
+ disallow(:destroy_protected_branch, protected_branch)
+
+ expect do
+ service.execute(protected_branch)
+ end.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
end
+ end
- context 'when a policy restricts rule deletion' do
- it "prevents deletion of the protected branch rule" do
- disallow(:destroy_protected_branch, protected_branch)
+ context 'with entity project' do
+ let_it_be_with_reload(:entity) { create(:project) }
+ let!(:protected_branch) { create(:protected_branch, project: entity) }
+ let(:user) { entity.first_owner }
- expect do
- service.execute(protected_branch)
- end.to raise_error(Gitlab::Access::AccessDeniedError)
- end
+ it_behaves_like 'execute with entity'
+ end
+
+ context 'with entity group' do
+ let_it_be_with_reload(:entity) { create(:group) }
+ let_it_be_with_reload(:user) { create(:user) }
+ let!(:protected_branch) { create(:protected_branch, group: entity, project: nil) }
+
+ before do
+ allow(Ability).to receive(:allowed?).with(user, :destroy_protected_branch, protected_branch).and_return(true)
end
+
+ it_behaves_like 'execute with entity'
end
def disallow(ability, protected_branch)
diff --git a/spec/services/protected_branches/update_service_spec.rb b/spec/services/protected_branches/update_service_spec.rb
index 2ff6c3c489a..c70cc032a6a 100644
--- a/spec/services/protected_branches/update_service_spec.rb
+++ b/spec/services/protected_branches/update_service_spec.rb
@@ -3,54 +3,64 @@
require 'spec_helper'
RSpec.describe ProtectedBranches::UpdateService do
- let_it_be_with_reload(:project) { create(:project) }
+ shared_examples 'execute with entity' do
+ let(:params) { { name: new_name } }
- let!(:protected_branch) { create(:protected_branch, project: project) }
- let(:user) { project.first_owner }
- let(:params) { { name: new_name } }
+ subject(:service) { described_class.new(entity, user, params) }
- subject(:service) { described_class.new(project, user, params) }
+ describe '#execute' do
+ let(:new_name) { 'new protected branch name' }
+ let(:result) { service.execute(protected_branch) }
- describe '#execute' do
- let(:new_name) { 'new protected branch name' }
- let(:result) { service.execute(protected_branch) }
+ it 'updates a protected branch' do
+ expect(result.reload.name).to eq(params[:name])
+ end
- it 'updates a protected branch' do
- expect(result.reload.name).to eq(params[:name])
- end
+ it 'refreshes the cache' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
+ end
- it 'refreshes the cache' do
- expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
- expect(cache_service).to receive(:refresh)
+ result
end
- result
- end
-
- context 'when updating name of a protected branch to one that contains HTML tags' do
- let(:new_name) { 'foo<b>bar<\b>' }
- let(:result) { service.execute(protected_branch) }
+ context 'when updating name of a protected branch to one that contains HTML tags' do
+ let(:new_name) { 'foo<b>bar<\b>' }
+ let(:result) { service.execute(protected_branch) }
- it 'updates a protected branch' do
- expect(result.reload.name).to eq(new_name)
+ it 'updates a protected branch' do
+ expect(result.reload.name).to eq(new_name)
+ end
end
- end
- context 'without admin_project permissions' do
- let(:user) { create(:user) }
+ context 'when a policy restricts rule update' do
+ it "prevents update of the protected branch rule" do
+ disallow(:update_protected_branch, protected_branch)
- it "raises error" do
- expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError)
+ expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
end
end
+ end
- context 'when a policy restricts rule update' do
- it "prevents update of the protected branch rule" do
- disallow(:update_protected_branch, protected_branch)
+ context 'with entity project' do
+ let_it_be_with_reload(:entity) { create(:project) }
+ let!(:protected_branch) { create(:protected_branch, project: entity) }
+ let(:user) { entity.first_owner }
- expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError)
- end
+ it_behaves_like 'execute with entity'
+ end
+
+ context 'with entity group' do
+ let_it_be_with_reload(:entity) { create(:group) }
+ let_it_be_with_reload(:user) { create(:user) }
+ let!(:protected_branch) { create(:protected_branch, group: entity, project: nil) }
+
+ before do
+ allow(Ability).to receive(:allowed?).with(user, :update_protected_branch, protected_branch).and_return(true)
end
+
+ it_behaves_like 'execute with entity'
end
def disallow(ability, protected_branch)
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index f9c16c84121..8eccb9e41bb 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -567,7 +567,13 @@ RSpec.describe QuickActions::InterpretService do
it 'returns the duplicate message' do
_, _, message = service.execute(content, issuable)
- expect(message).to eq("Marked this issue as a duplicate of #{issue_duplicate.to_reference(project)}.")
+ expect(message).to eq("Closed this issue. Marked as related to, and a duplicate of, #{issue_duplicate.to_reference(project)}.")
+ end
+
+ it 'includes duplicate reference' do
+ _, explanations = service.explain(content, issuable)
+
+ expect(explanations).to eq(["Closes this issue. Marks as related to, and a duplicate of, #{issue_duplicate.to_reference(project)}."])
end
end
@@ -2491,6 +2497,16 @@ RSpec.describe QuickActions::InterpretService do
expect(message).to eq('One or more contacts were successfully removed.')
end
end
+
+ context 'when using an alias' do
+ it 'returns the correct execution message' do
+ content = "/labels ~#{bug.title}"
+
+ _, _, message = service.execute(content, issue)
+
+ expect(message).to eq("Added ~\"Bug\" label.")
+ end
+ end
end
describe '#explain' do
diff --git a/spec/services/repositories/housekeeping_service_spec.rb b/spec/services/repositories/housekeeping_service_spec.rb
index fbd9affb33c..57245136fbe 100644
--- a/spec/services/repositories/housekeeping_service_spec.rb
+++ b/spec/services/repositories/housekeeping_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Repositories::HousekeepingService do
+RSpec.describe Repositories::HousekeepingService, feature_category: :source_code_management do
it_behaves_like 'housekeeps repository' do
let_it_be(:resource) { create(:project, :repository) }
end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index 26def474b88..90e80a45515 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe SearchService do
+RSpec.describe SearchService, feature_category: :global_search do
let_it_be(:user) { create(:user) }
let_it_be(:accessible_group) { create(:group, :private) }
diff --git a/spec/services/security/merge_reports_service_spec.rb b/spec/services/security/merge_reports_service_spec.rb
index 8415ed8a22f..249f4da5f34 100644
--- a/spec/services/security/merge_reports_service_spec.rb
+++ b/spec/services/security/merge_reports_service_spec.rb
@@ -187,25 +187,25 @@ RSpec.describe Security::MergeReportsService, '#execute' do
it 'deduplicates (except cwe and wasc) and sorts the vulnerabilities by severity (desc) then by compare key' do
expect(merged_report.findings).to(
eq([
- finding_cwe_2,
- finding_wasc_2,
- finding_cwe_1,
- finding_id_2_loc_2,
- finding_id_2_loc_1,
- finding_wasc_1,
- finding_id_1
- ])
+ finding_cwe_2,
+ finding_wasc_2,
+ finding_cwe_1,
+ finding_id_2_loc_2,
+ finding_id_2_loc_1,
+ finding_wasc_1,
+ finding_id_1
+ ])
)
end
it 'deduplicates scanned resources' do
expect(merged_report.scanned_resources).to(
eq([
- scanned_resource,
- scanned_resource_1,
- scanned_resource_2,
- scanned_resource_3
- ])
+ scanned_resource,
+ scanned_resource_1,
+ scanned_resource_2,
+ scanned_resource_3
+ ])
)
end
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index 5dbf5edb776..37231307156 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -340,7 +340,7 @@ RSpec.describe ServicePing::SubmitService do
end
end
- def stub_response(url: service_ping_payload_url, body:, status: 201)
+ def stub_response(body:, url: service_ping_payload_url, status: 201)
stub_full_request(url, method: :post)
.to_return(
headers: { 'Content-Type' => 'application/json' },
diff --git a/spec/services/timelogs/create_service_spec.rb b/spec/services/timelogs/create_service_spec.rb
index b5ed4a005c7..73860619bcc 100644
--- a/spec/services/timelogs/create_service_spec.rb
+++ b/spec/services/timelogs/create_service_spec.rb
@@ -2,16 +2,12 @@
require 'spec_helper'
-RSpec.describe Timelogs::CreateService do
+RSpec.describe Timelogs::CreateService, feature_category: :team_planning do
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :public) }
- let_it_be(:time_spent) { 3600 }
- let_it_be(:spent_at) { "2022-07-08" }
- let_it_be(:summary) { "Test summary" }
let(:issuable) { nil }
let(:users_container) { project }
- let(:service) { described_class.new(issuable, time_spent, spent_at, summary, user) }
describe '#execute' do
subject { service.execute }
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 774a6ddcfb3..c4ed34a693e 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -209,6 +209,15 @@ RSpec.describe TodoService do
it_behaves_like 'an incident management tracked event', :incident_management_incident_todo do
let(:current_user) { john_doe }
end
+
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:namespace) { project.namespace }
+ let(:category) { described_class.to_s }
+ let(:action) { 'incident_management_incident_todo' }
+ let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
+ let(:user) { john_doe }
+ end
end
end
@@ -1250,6 +1259,96 @@ RSpec.describe TodoService do
end
end
+ describe '#create_member_access_request' do
+ context 'snowplow event tracking' do
+ it 'does not track a snowplow event when todos are for a project access request', :snowplow do
+ user = create(:user)
+ project = create(:project)
+ requester = create(:project_member, project: project, user: assignee)
+ project.add_owner(user)
+
+ expect_no_snowplow_event
+
+ service.create_member_access_request(requester)
+ end
+ end
+
+ context 'when the group has more than 10 owners' do
+ it 'creates todos for 10 recently active group owners' do
+ group = create(:group, :public)
+
+ users = create_list(:user, 12, :with_sign_ins)
+ users.each do |user|
+ group.add_owner(user)
+ end
+ ten_most_recently_active_group_owners = users.sort_by(&:last_sign_in_at).last(10)
+ excluded_group_owners = users - ten_most_recently_active_group_owners
+
+ requester = create(:group_member, group: group, user: assignee)
+
+ service.create_member_access_request(requester)
+
+ ten_most_recently_active_group_owners.each do |owner|
+ expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 1
+ end
+
+ excluded_group_owners.each do |owner|
+ expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 0
+ end
+ end
+ end
+
+ context 'when the group has fewer than 10 owners' do
+ it 'creates todos for all group owners' do
+ group = create(:group, :public)
+
+ users = create_list(:user, 4, :with_sign_ins)
+ users.each do |user|
+ group.add_owner(user)
+ end
+
+ requester = create(:group_member, user: assignee, group: group)
+ requester.requested_at = Time.now.utc
+ requester.save!
+
+ service.create_member_access_request(requester)
+
+ users.each do |owner|
+ expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 1
+ end
+ end
+ end
+
+ context 'when multiple access requests are raised' do
+ it 'creates todos for 10 recently active group owners for multiple requests' do
+ group = create(:group, :public)
+
+ users = create_list(:user, 12, :with_sign_ins)
+ users.each do |user|
+ group.add_owner(user)
+ end
+ ten_most_recently_active_group_owners = users.sort_by(&:last_sign_in_at).last(10)
+ excluded_group_owners = users - ten_most_recently_active_group_owners
+
+ requester1 = create(:group_member, group: group, user: assignee)
+ requester2 = create(:group_member, group: group, user: non_member)
+
+ service.create_member_access_request(requester1)
+ service.create_member_access_request(requester2)
+
+ ten_most_recently_active_group_owners.each do |owner|
+ expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 1
+ expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: non_member).count).to eq 1
+ end
+
+ excluded_group_owners.each do |owner|
+ expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 0
+ expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: non_member).count).to eq 0
+ end
+ end
+ end
+ end
+
def should_create_todo(attributes = {})
attributes.reverse_merge!(
project: project,
diff --git a/spec/services/users/assigned_issues_count_service_spec.rb b/spec/services/users/assigned_issues_count_service_spec.rb
new file mode 100644
index 00000000000..afa6a0af3dd
--- /dev/null
+++ b/spec/services/users/assigned_issues_count_service_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::AssignedIssuesCountService, :use_clean_rails_memory_store_caching,
+ feature_category: :project_management do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:max_limit) { 10 }
+
+ let(:current_user) { user }
+
+ subject { described_class.new(current_user: current_user, max_limit: max_limit) }
+
+ it_behaves_like 'a counter caching service'
+
+ context 'when user has assigned open issues from archived and closed projects' do
+ before do
+ project = create(:project, :public)
+ archived_project = create(:project, :public, :archived)
+
+ create(:issue, project: project, author: user, assignees: [user])
+ create(:issue, :closed, project: project, author: user, assignees: [user])
+ create(:issue, project: archived_project, author: user, assignees: [user])
+ end
+
+ it 'counts all assigned open issues excluding those from closed or archived projects' do
+ expect(subject.count).to eq(1)
+ end
+ end
+
+ context 'when the number of assigned open issues exceeds max_limit' do
+ let_it_be(:banned_user) { create(:user, :banned) }
+ let_it_be(:project) { create(:project).tap { |p| p.add_developer(user) } }
+
+ context 'when user is admin', :enable_admin_mode do
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:issues) { create_list(:issue, max_limit + 1, project: project, assignees: [admin]) }
+ let_it_be(:banned_issue) { create(:issue, project: project, assignees: [admin], author: banned_user) }
+
+ let(:current_user) { admin }
+
+ it 'returns the max_limit count' do
+ expect(subject.count).to eq max_limit
+ end
+ end
+
+ context 'when user is non-admin' do
+ let_it_be(:issues) { create_list(:issue, max_limit + 1, project: project, assignees: [user]) }
+ let_it_be(:closed_issue) { create(:issue, :closed, project: project, assignees: [user]) }
+ let_it_be(:banned_issue) { create(:issue, project: project, assignees: [user], author: banned_user) }
+
+ it 'returns the max_limit count' do
+ expect(subject.count).to eq max_limit
+ end
+ end
+ end
+end
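The new spec exercises a counter that ignores closed issues, issues in archived projects, and issues authored by banned users (for non-admins), and is capped at max_limit. A rough sketch of such a capped counter, with scope names assumed for illustration only:

    # Hypothetical sketch; `assigned_open_issues` and `in_non_archived_projects`
    # are assumed scopes, not the service's real implementation.
    def uncached_count
      current_user.assigned_open_issues
                  .in_non_archived_projects
                  .limit(max_limit)
                  .count
    end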
diff --git a/spec/services/users/keys_count_service_spec.rb b/spec/services/users/keys_count_service_spec.rb
index aff267cce5e..607d2946b2c 100644
--- a/spec/services/users/keys_count_service_spec.rb
+++ b/spec/services/users/keys_count_service_spec.rb
@@ -17,6 +17,12 @@ RSpec.describe Users::KeysCountService, :use_clean_rails_memory_store_caching do
it 'returns the number of SSH keys as an Integer' do
expect(subject.count).to eq(1)
end
+
+ it 'does not count signing keys' do
+ create(:key, usage_type: :signing, user: user)
+
+ expect(subject.count).to eq(1)
+ end
end
describe '#uncached_count' do
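The added example checks that signing-only keys do not contribute to the SSH key count. A small sketch of a count restricted to auth-capable keys, assuming the usage_type values shown in the spec:

    # Hypothetical sketch; excludes keys whose usage_type is signing-only.
    def uncached_count
      user.keys.where.not(usage_type: :signing).count
    end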
diff --git a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
index 6082c7bd10e..827d6f652a4 100644
--- a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Users::MigrateRecordsToGhostUserService do
+ include BatchDestroyDependentAssociationsHelper
+
let!(:user) { create(:user) }
let(:service) { described_class.new(user, admin, execution_tracker) }
let(:execution_tracker) { instance_double(::Gitlab::Utils::ExecutionTracker, over_limit?: false) }
@@ -125,6 +127,12 @@ RSpec.describe Users::MigrateRecordsToGhostUserService do
let(:created_record) { create(:review, author: user) }
end
end
+
+ context 'for releases' do
+ include_examples 'migrating records to the ghost user', Release, [:author] do
+ let(:created_record) { create(:release, author: user) }
+ end
+ end
end
context 'on post-migrate cleanups' do
@@ -150,12 +158,6 @@ RSpec.describe Users::MigrateRecordsToGhostUserService do
def nullify_in_batches_regexp(table, column, user, batch_size: 100)
%r{^UPDATE "#{table}" SET "#{column}" = NULL WHERE "#{table}"."id" IN \(SELECT "#{table}"."id" FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id} LIMIT #{batch_size}\)}
end
-
- def delete_in_batches_regexps(table, column, user, items, batch_size: 1000)
- select_query = %r{^SELECT "#{table}".* FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id}.*ORDER BY "#{table}"."id" ASC LIMIT #{batch_size}}
-
- [select_query] + items.map { |item| %r{^DELETE FROM "#{table}" WHERE "#{table}"."id" = #{item.id}} }
- end
# rubocop:enable Layout/LineLength
it 'nullifies related associations in batches' do
diff --git a/spec/services/users/registrations_build_service_spec.rb b/spec/services/users/registrations_build_service_spec.rb
index bc3718dbdb2..fa53a4cc604 100644
--- a/spec/services/users/registrations_build_service_spec.rb
+++ b/spec/services/users/registrations_build_service_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Users::RegistrationsBuildService do
context 'when automatic user confirmation is not enabled' do
before do
- stub_application_setting(send_user_confirmation_email: true)
+ stub_application_setting_enum('email_confirmation_setting', 'hard')
end
context 'when skip_confirmation is true' do
@@ -44,7 +44,7 @@ RSpec.describe Users::RegistrationsBuildService do
context 'when automatic user confirmation is enabled' do
before do
- stub_application_setting(send_user_confirmation_email: false)
+ stub_application_setting_enum('email_confirmation_setting', 'off')
end
context 'when skip_confirmation is true' do
diff --git a/spec/services/web_hooks/log_execution_service_spec.rb b/spec/services/web_hooks/log_execution_service_spec.rb
index fd97d01fa9f..8a845f60ad2 100644
--- a/spec/services/web_hooks/log_execution_service_spec.rb
+++ b/spec/services/web_hooks/log_execution_service_spec.rb
@@ -42,14 +42,6 @@ RSpec.describe WebHooks::LogExecutionService do
service.execute
end
- it 'does not update the last failure when the feature flag is disabled' do
- stub_feature_flags(web_hooks_disable_failed: false)
-
- expect(project_hook).not_to receive(:update_last_failure)
-
- service.execute
- end
-
context 'obtaining an exclusive lease' do
let(:lease_key) { "web_hooks:update_hook_failure_state:#{project_hook.id}" }
@@ -136,19 +128,6 @@ RSpec.describe WebHooks::LogExecutionService do
expect { service.execute }.not_to change(project_hook, :recent_failures)
end
-
- context 'when the web_hooks_disable_failed FF is disabled' do
- before do
- # Hook will only be executed if the flag is disabled.
- stub_feature_flags(web_hooks_disable_failed: false)
- end
-
- it 'does not allow the failure count to overflow' do
- project_hook.update!(recent_failures: 32767)
-
- expect { service.execute }.not_to change(project_hook, :recent_failures)
- end
- end
end
context 'when response_category is :error' do
@@ -165,6 +144,24 @@ RSpec.describe WebHooks::LogExecutionService do
end
end
+ context 'with url_variables' do
+ before do
+ project_hook.update!(
+ url: 'http://example1.test/{foo}-{bar}',
+ url_variables: { 'foo' => 'supers3cret', 'bar' => 'token' }
+ )
+ end
+
+ let(:data) { super().merge(response_headers: { 'X-Token-Id' => 'supers3cret-token', 'X-Request' => 'PUBLIC-token' }) }
+ let(:expected_headers) { { 'X-Token-Id' => '{foo}-{bar}', 'X-Request' => 'PUBLIC-{bar}' } }
+
+ it 'logs the data and masks response headers' do
+ expect { service.execute }.to change(::WebHookLog, :count).by(1)
+
+ expect(WebHookLog.recent.first.response_headers).to eq(expected_headers)
+ end
+ end
+
context 'with X-Gitlab-Token' do
let(:request_headers) { { 'X-Gitlab-Token' => project_hook.token } }
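The new url_variables context asserts that secret variable values are replaced by their {name} placeholders before response headers are logged. A minimal sketch of that masking, with the method name assumed for illustration:

    # Hypothetical sketch of the asserted masking behaviour.
    def mask_response_headers(headers, url_variables)
      headers.transform_values do |value|
        url_variables.reduce(value) do |masked, (name, secret)|
          masked.gsub(secret, "{#{name}}")
        end
      end
    end

    # e.g. mask_response_headers({ 'X-Token-Id' => 'supers3cret-token' },
    #                            { 'foo' => 'supers3cret', 'bar' => 'token' })
    #      # => { 'X-Token-Id' => '{foo}-{bar}' }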
diff --git a/spec/services/work_items/create_and_link_service_spec.rb b/spec/services/work_items/create_and_link_service_spec.rb
index e259a22d388..00372d460e1 100644
--- a/spec/services/work_items/create_and_link_service_spec.rb
+++ b/spec/services/work_items/create_and_link_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::CreateAndLinkService do
+RSpec.describe WorkItems::CreateAndLinkService, feature_category: :portfolio_management do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
@@ -94,7 +94,7 @@ RSpec.describe WorkItems::CreateAndLinkService do
end
it 'returns a link creation error message' do
- expect(service_result.errors).to contain_exactly(/only Issue and Incident can be parent of Task./)
+ expect(service_result.errors).to contain_exactly(/is not allowed to add this type of parent/)
end
end
end
diff --git a/spec/services/work_items/create_service_spec.rb b/spec/services/work_items/create_service_spec.rb
index 1bd7e15db67..a952486ee64 100644
--- a/spec/services/work_items/create_service_spec.rb
+++ b/spec/services/work_items/create_service_spec.rb
@@ -159,7 +159,7 @@ RSpec.describe WorkItems::CreateService do
{
title: 'Awesome work_item',
description: 'please fix',
- work_item_type: create(:work_item_type, :task)
+ work_item_type: WorkItems::Type.default_by_type(:task)
}
end
@@ -176,7 +176,7 @@ RSpec.describe WorkItems::CreateService do
let_it_be(:parent) { create(:work_item, :task, project: project) }
it_behaves_like 'fails creating work item and returns errors' do
- let(:error_message) { 'only Issue and Incident can be parent of Task.' }
+ let(:error_message) { 'is not allowed to add this type of parent' }
end
end
end
diff --git a/spec/services/work_items/parent_links/create_service_spec.rb b/spec/services/work_items/parent_links/create_service_spec.rb
index 0ba41373544..2f2e830845a 100644
--- a/spec/services/work_items/parent_links/create_service_spec.rb
+++ b/spec/services/work_items/parent_links/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::ParentLinks::CreateService do
+RSpec.describe WorkItems::ParentLinks::CreateService, feature_category: :portfolio_management do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be(:guest) { create(:user) }
@@ -117,7 +117,7 @@ RSpec.describe WorkItems::ParentLinks::CreateService do
end
it 'returns error status' do
- error = "#{issue.to_reference} cannot be added: only Task can be assigned as a child in hierarchy.. " \
+ error = "#{issue.to_reference} cannot be added: is not allowed to add this type of parent. " \
"#{other_project_task.to_reference} cannot be added: parent must be in the same project as child."
is_expected.to eq(service_error(error, http_status: 422))
@@ -139,7 +139,7 @@ RSpec.describe WorkItems::ParentLinks::CreateService do
let(:params) { { target_issuable: task1 } }
it 'returns error status' do
- error = "#{task1.to_reference} cannot be added: only Issue and Incident can be parent of Task."
+ error = "#{task1.to_reference} cannot be added: is not allowed to add this type of parent"
is_expected.to eq(service_error(error, http_status: 422))
end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 68efb4c220b..87665bcad2c 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -284,7 +284,7 @@ RSpec.describe WorkItems::UpdateService do
it 'returns error status' do
expect(subject[:status]).to be(:error)
expect(subject[:message])
- .to match("#{child_work_item.to_reference} cannot be added: only Task can be assigned as a child in hierarchy.")
+ .to match("#{child_work_item.to_reference} cannot be added: is not allowed to add this type of parent")
end
it 'does not update work item attributes' do
diff --git a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
index 1b8c4c5f15f..5a5bb8a1674 100644
--- a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
+RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_category: :portfolio_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
@@ -81,7 +81,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
it_behaves_like 'raises a WidgetError' do
let(:message) do
- "#{child_issue.to_reference} cannot be added: only Task can be assigned as a child in hierarchy."
+ "#{child_issue.to_reference} cannot be added: is not allowed to add this type of parent"
end
end
end
@@ -136,7 +136,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
it_behaves_like 'raises a WidgetError' do
let(:message) do
- "#{work_item.to_reference} cannot be added: only Issue and Incident can be parent of Task."
+ "#{work_item.to_reference} cannot be added: is not allowed to add this type of parent"
end
end
end