Diffstat (limited to 'spec/services')
-rw-r--r--  spec/services/alert_management/create_alert_issue_service_spec.rb | 27
-rw-r--r--  spec/services/alert_management/process_prometheus_alert_service_spec.rb | 16
-rw-r--r--  spec/services/boards/issues/list_service_spec.rb | 78
-rw-r--r--  spec/services/boards/list_service_spec.rb | 24
-rw-r--r--  spec/services/boards/lists/list_service_spec.rb | 22
-rw-r--r--  spec/services/bulk_import_service_spec.rb | 17
-rw-r--r--  spec/services/ci/create_pipeline_service/environment_spec.rb | 48
-rw-r--r--  spec/services/ci/create_pipeline_service/rules_spec.rb | 20
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 2
-rw-r--r--  spec/services/ci/expire_pipeline_cache_service_spec.rb | 41
-rw-r--r--  spec/services/ci/pipeline_processing/shared_processing_service.rb | 20
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_both.yml | 6
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_test.yml | 6
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_test_manual_review_deploy.yml | 22
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml | 23
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml | 9
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml | 13
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds_deploy_needs_both.yml | 13
-rw-r--r--  spec/services/ci/process_pipeline_service_spec.rb | 53
-rw-r--r--  spec/services/ci/register_job_service_spec.rb | 933
-rw-r--r--  spec/services/ci/update_build_queue_service_spec.rb | 58
-rw-r--r--  spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb | 2
-rw-r--r--  spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb | 43
-rw-r--r--  spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb | 46
-rw-r--r--  spec/services/dependency_proxy/head_manifest_service_spec.rb | 9
-rw-r--r--  spec/services/dependency_proxy/pull_manifest_service_spec.rb | 6
-rw-r--r--  spec/services/deployments/update_environment_service_spec.rb | 65
-rw-r--r--  spec/services/groups/destroy_service_spec.rb | 6
-rw-r--r--  spec/services/groups/group_links/create_service_spec.rb | 56
-rw-r--r--  spec/services/groups/group_links/destroy_service_spec.rb | 4
-rw-r--r--  spec/services/groups/group_links/update_service_spec.rb | 8
-rw-r--r--  spec/services/groups/import_export/import_service_spec.rb | 28
-rw-r--r--  spec/services/import/github_service_spec.rb | 64
-rw-r--r--  spec/services/issuable/bulk_update_service_spec.rb | 45
-rw-r--r--  spec/services/issuable/process_assignees_spec.rb | 71
-rw-r--r--  spec/services/issues/clone_service_spec.rb | 6
-rw-r--r--  spec/services/issues/create_service_spec.rb | 6
-rw-r--r--  spec/services/issues/move_service_spec.rb | 6
-rw-r--r--  spec/services/labels/promote_service_spec.rb | 66
-rw-r--r--  spec/services/members/invite_service_spec.rb | 173
-rw-r--r--  spec/services/merge_requests/after_create_service_spec.rb | 26
-rw-r--r--  spec/services/merge_requests/build_service_spec.rb | 123
-rw-r--r--  spec/services/merge_requests/merge_service_spec.rb | 13
-rw-r--r--  spec/services/merge_requests/post_merge_service_spec.rb | 134
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 27
-rw-r--r--  spec/services/merge_requests/retarget_chain_service_spec.rb | 154
-rw-r--r--  spec/services/merge_requests/update_service_spec.rb | 179
-rw-r--r--  spec/services/namespaces/in_product_marketing_emails_service_spec.rb | 39
-rw-r--r--  spec/services/notes/build_service_spec.rb | 140
-rw-r--r--  spec/services/notes/update_service_spec.rb | 34
-rw-r--r--  spec/services/notification_service_spec.rb | 172
-rw-r--r--  spec/services/onboarding_progress_service_spec.rb | 54
-rw-r--r--  spec/services/packages/composer/create_package_service_spec.rb | 8
-rw-r--r--  spec/services/packages/create_event_service_spec.rb | 24
-rw-r--r--  spec/services/packages/create_temporary_package_service_spec.rb | 44
-rw-r--r--  spec/services/packages/debian/find_or_create_incoming_service_spec.rb (renamed from spec/services/packages/debian/get_or_create_incoming_service_spec.rb) | 2
-rw-r--r--  spec/services/packages/debian/find_or_create_package_service_spec.rb | 54
-rw-r--r--  spec/services/packages/maven/metadata/append_package_file_service_spec.rb | 59
-rw-r--r--  spec/services/packages/maven/metadata/create_versions_xml_service_spec.rb | 277
-rw-r--r--  spec/services/packages/maven/metadata/sync_service_spec.rb | 154
-rw-r--r--  spec/services/packages/npm/create_package_service_spec.rb | 51
-rw-r--r--  spec/services/packages/nuget/create_package_service_spec.rb | 37
-rw-r--r--  spec/services/packages/nuget/update_package_from_metadata_service_spec.rb | 5
-rw-r--r--  spec/services/packages/rubygems/dependency_resolver_service_spec.rb | 100
-rw-r--r--  spec/services/pages/legacy_storage_lease_spec.rb | 8
-rw-r--r--  spec/services/projects/alerting/notify_service_spec.rb | 8
-rw-r--r--  spec/services/projects/branches_by_mode_service_spec.rb | 4
-rw-r--r--  spec/services/projects/create_service_spec.rb | 43
-rw-r--r--  spec/services/projects/destroy_service_spec.rb | 503
-rw-r--r--  spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb | 4
-rw-r--r--  spec/services/projects/update_pages_configuration_service_spec.rb | 17
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb | 35
-rw-r--r--  spec/services/projects/update_service_spec.rb | 2
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 94
-rw-r--r--  spec/services/repositories/changelog_service_spec.rb | 118
-rw-r--r--  spec/services/snippets/schedule_bulk_repository_shard_moves_service_spec.rb | 4
-rw-r--r--  spec/services/system_hooks_service_spec.rb | 3
-rw-r--r--  spec/services/system_note_service_spec.rb | 13
-rw-r--r--  spec/services/system_notes/alert_management_service_spec.rb | 13
-rw-r--r--  spec/services/system_notes/merge_requests_service_spec.rb | 2
-rw-r--r--  spec/services/users/dismiss_user_callout_service_spec.rb | 27
-rw-r--r--  spec/services/users/refresh_authorized_projects_service_spec.rb | 8
82 files changed, 3599 insertions(+), 1378 deletions(-)
diff --git a/spec/services/alert_management/create_alert_issue_service_spec.rb b/spec/services/alert_management/create_alert_issue_service_spec.rb
index 2834322be7b..695e90ebd92 100644
--- a/spec/services/alert_management/create_alert_issue_service_spec.rb
+++ b/spec/services/alert_management/create_alert_issue_service_spec.rb
@@ -118,9 +118,36 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
context 'when the alert is generic' do
let(:alert) { generic_alert }
let(:issue) { subject.payload[:issue] }
+ let(:default_alert_title) { described_class::DEFAULT_ALERT_TITLE }
it_behaves_like 'creating an alert issue'
it_behaves_like 'setting an issue attributes'
+
+ context 'when alert title matches the default title exactly' do
+ before do
+ generic_alert.update!(title: default_alert_title)
+ end
+
+ it 'updates issue title with the IID' do
+ execute
+
+ expect(created_issue.title).to eq("New: Incident #{created_issue.iid}")
+ end
+ end
+
+ context 'when the alert title contains the default title' do
+ let(:non_default_alert_title) { "Not #{default_alert_title}" }
+
+ before do
+ generic_alert.update!(title: non_default_alert_title)
+ end
+
+ it 'does not change issue title' do
+ execute
+
+ expect(created_issue.title).to eq(non_default_alert_title)
+ end
+ end
end
context 'when issue cannot be created' do
diff --git a/spec/services/alert_management/process_prometheus_alert_service_spec.rb b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
index 288a33b71cd..9bd71ea6f64 100644
--- a/spec/services/alert_management/process_prometheus_alert_service_spec.rb
+++ b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
describe '#execute' do
let(:service) { described_class.new(project, payload) }
+ let(:source) { 'Prometheus' }
let(:auto_close_incident) { true }
let(:create_issue) { true }
let(:send_email) { true }
@@ -31,7 +32,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
subject(:execute) { service.execute }
context 'when alert payload is valid' do
- let(:parsed_payload) { Gitlab::AlertManagement::Payload.parse(project, payload, monitoring_tool: 'Prometheus') }
+ let(:parsed_payload) { Gitlab::AlertManagement::Payload.parse(project, payload, monitoring_tool: source) }
let(:fingerprint) { parsed_payload.gitlab_fingerprint }
let(:payload) do
{
@@ -112,9 +113,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
it_behaves_like 'Alert Notification Service sends notification email'
it_behaves_like 'processes incident issues'
- it 'creates a system note corresponding to alert creation' do
- expect { subject }.to change(Note, :count).by(1)
- end
+ it_behaves_like 'creates single system note based on the source of the alert'
context 'when auto-alert creation is disabled' do
let(:create_issue) { false }
@@ -158,17 +157,20 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
context 'when Prometheus alert status is resolved' do
let(:status) { 'resolved' }
- let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint) }
+ let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint, monitoring_tool: source) }
context 'when auto_resolve_incident set to true' do
context 'when status can be changed' do
it_behaves_like 'Alert Notification Service sends notification email'
it_behaves_like 'does not process incident issues'
- it 'resolves an existing alert' do
+ it 'resolves an existing alert without error' do
+ expect(Gitlab::AppLogger).not_to receive(:warn)
expect { execute }.to change { alert.reload.resolved? }.to(true)
end
+ it_behaves_like 'creates status-change system note for an auto-resolved alert'
+
context 'existing issue' do
let!(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint) }
@@ -215,6 +217,8 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
it 'does not resolve an existing alert' do
expect { execute }.not_to change { alert.reload.resolved? }
end
+
+ it_behaves_like 'creates single system note based on the source of the alert'
end
context 'when emails are disabled' do
diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb
index 29b49db42f9..2fd544ab949 100644
--- a/spec/services/boards/issues/list_service_spec.rb
+++ b/spec/services/boards/issues/list_service_spec.rb
@@ -4,40 +4,41 @@ require 'spec_helper'
RSpec.describe Boards::Issues::ListService do
describe '#execute' do
- context 'when parent is a project' do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:board) { create(:board, project: project) }
-
- let(:m1) { create(:milestone, project: project) }
- let(:m2) { create(:milestone, project: project) }
-
- let(:bug) { create(:label, project: project, name: 'Bug') }
- let(:development) { create(:label, project: project, name: 'Development') }
- let(:testing) { create(:label, project: project, name: 'Testing') }
- let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
- let(:p2) { create(:label, title: 'P2', project: project, priority: 2) }
- let(:p3) { create(:label, title: 'P3', project: project, priority: 3) }
-
- let!(:backlog) { create(:backlog_list, board: board) }
- let!(:list1) { create(:list, board: board, label: development, position: 0) }
- let!(:list2) { create(:list, board: board, label: testing, position: 1) }
- let!(:closed) { create(:closed_list, board: board) }
+ let_it_be(:user) { create(:user) }
- let!(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) }
- let!(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2]) }
- let!(:reopened_issue1) { create(:issue, :opened, project: project, title: 'Reopened Issue 1' ) }
-
- let!(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, development]) }
- let!(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) }
- let!(:list1_issue3) { create(:labeled_issue, project: project, milestone: m1, labels: [development, p1]) }
- let!(:list2_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [testing]) }
-
- let!(:closed_issue1) { create(:labeled_issue, :closed, project: project, labels: [bug], closed_at: 1.day.ago) }
- let!(:closed_issue2) { create(:labeled_issue, :closed, project: project, labels: [p3], closed_at: 2.days.ago) }
- let!(:closed_issue3) { create(:issue, :closed, project: project, closed_at: 1.week.ago) }
- let!(:closed_issue4) { create(:labeled_issue, :closed, project: project, labels: [p1], closed_at: 1.year.ago) }
- let!(:closed_issue5) { create(:labeled_issue, :closed, project: project, labels: [development], closed_at: 2.years.ago) }
+ context 'when parent is a project' do
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be(:board) { create(:board, project: project) }
+
+ let_it_be(:m1) { create(:milestone, project: project) }
+ let_it_be(:m2) { create(:milestone, project: project) }
+
+ let_it_be(:bug) { create(:label, project: project, name: 'Bug') }
+ let_it_be(:development) { create(:label, project: project, name: 'Development') }
+ let_it_be(:testing) { create(:label, project: project, name: 'Testing') }
+ let_it_be(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
+ let_it_be(:p2) { create(:label, title: 'P2', project: project, priority: 2) }
+ let_it_be(:p3) { create(:label, title: 'P3', project: project, priority: 3) }
+
+ let_it_be(:backlog) { create(:backlog_list, board: board) }
+ let_it_be(:list1) { create(:list, board: board, label: development, position: 0) }
+ let_it_be(:list2) { create(:list, board: board, label: testing, position: 1) }
+ let_it_be(:closed) { create(:closed_list, board: board) }
+
+ let_it_be(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) }
+ let_it_be(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2]) }
+ let_it_be(:reopened_issue1) { create(:issue, :opened, project: project, title: 'Reopened Issue 1' ) }
+
+ let_it_be(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, development]) }
+ let_it_be(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) }
+ let_it_be(:list1_issue3) { create(:labeled_issue, project: project, milestone: m1, labels: [development, p1]) }
+ let_it_be(:list2_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [testing]) }
+
+ let_it_be(:closed_issue1) { create(:labeled_issue, :closed, project: project, labels: [bug], closed_at: 1.day.ago) }
+ let_it_be(:closed_issue2) { create(:labeled_issue, :closed, project: project, labels: [p3], closed_at: 2.days.ago) }
+ let_it_be(:closed_issue3) { create(:issue, :closed, project: project, closed_at: 1.week.ago) }
+ let_it_be(:closed_issue4) { create(:labeled_issue, :closed, project: project, labels: [p1], closed_at: 1.year.ago) }
+ let_it_be(:closed_issue5) { create(:labeled_issue, :closed, project: project, labels: [development], closed_at: 2.years.ago) }
let(:parent) { project }
@@ -48,14 +49,16 @@ RSpec.describe Boards::Issues::ListService do
it_behaves_like 'issues list service'
context 'when project is archived' do
- let(:project) { create(:project, :archived) }
+ before do
+ project.update!(archived: true)
+ end
it_behaves_like 'issues list service'
end
end
+ # rubocop: disable RSpec/MultipleMemoizedHelpers
context 'when parent is a group' do
- let(:user) { create(:user) }
let(:project) { create(:project, :empty_repo, namespace: group) }
let(:project1) { create(:project, :empty_repo, namespace: group) }
let(:project_archived) { create(:project, :empty_repo, :archived, namespace: group) }
@@ -104,7 +107,7 @@ RSpec.describe Boards::Issues::ListService do
group.add_developer(user)
end
- context 'and group has no parent' do
+ context 'when the group has no parent' do
let(:parent) { group }
let(:group) { create(:group) }
let(:board) { create(:board, group: group) }
@@ -112,7 +115,7 @@ RSpec.describe Boards::Issues::ListService do
it_behaves_like 'issues list service'
end
- context 'and group is an ancestor' do
+ context 'when the group is an ancestor' do
let(:parent) { create(:group) }
let(:group) { create(:group, parent: parent) }
let!(:backlog) { create(:backlog_list, board: board) }
@@ -125,5 +128,6 @@ RSpec.describe Boards::Issues::ListService do
it_behaves_like 'issues list service'
end
end
+ # rubocop: enable RSpec/MultipleMemoizedHelpers
end
end
diff --git a/spec/services/boards/list_service_spec.rb b/spec/services/boards/list_service_spec.rb
deleted file mode 100644
index 7c94332a78d..00000000000
--- a/spec/services/boards/list_service_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Boards::ListService do
- describe '#execute' do
- context 'when board parent is a project' do
- let(:parent) { create(:project) }
-
- subject(:service) { described_class.new(parent, double) }
-
- it_behaves_like 'boards list service'
- it_behaves_like 'multiple boards list service'
- end
-
- context 'when board parent is a group' do
- let(:parent) { create(:group) }
-
- subject(:service) { described_class.new(parent, double) }
-
- it_behaves_like 'boards list service'
- end
- end
-end
diff --git a/spec/services/boards/lists/list_service_spec.rb b/spec/services/boards/lists/list_service_spec.rb
index dfe65f3d241..21619abf6aa 100644
--- a/spec/services/boards/lists/list_service_spec.rb
+++ b/spec/services/boards/lists/list_service_spec.rb
@@ -8,6 +8,26 @@ RSpec.describe Boards::Lists::ListService do
describe '#execute' do
let(:service) { described_class.new(parent, user) }
+ shared_examples 'hidden lists' do
+ let!(:list) { create(:list, board: board, label: label) }
+
+ context 'when hide_backlog_list is true' do
+ it 'hides backlog list' do
+ board.update!(hide_backlog_list: true)
+
+ expect(service.execute(board)).to match_array([board.closed_list, list])
+ end
+ end
+
+ context 'when hide_closed_list is true' do
+ it 'hides closed list' do
+ board.update!(hide_closed_list: true)
+
+ expect(service.execute(board)).to match_array([board.backlog_list, list])
+ end
+ end
+ end
+
context 'when board parent is a project' do
let(:project) { create(:project) }
let(:board) { create(:board, project: project) }
@@ -16,6 +36,7 @@ RSpec.describe Boards::Lists::ListService do
let(:parent) { project }
it_behaves_like 'lists list service'
+ it_behaves_like 'hidden lists'
end
context 'when board parent is a group' do
@@ -26,6 +47,7 @@ RSpec.describe Boards::Lists::ListService do
let(:parent) { group }
it_behaves_like 'lists list service'
+ it_behaves_like 'hidden lists'
end
end
end
diff --git a/spec/services/bulk_import_service_spec.rb b/spec/services/bulk_import_service_spec.rb
index e4a50b9d523..1b60a5cb0f8 100644
--- a/spec/services/bulk_import_service_spec.rb
+++ b/spec/services/bulk_import_service_spec.rb
@@ -48,5 +48,22 @@ RSpec.describe BulkImportService do
subject.execute
end
+
+ it 'returns success ServiceResponse' do
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_success
+ end
+
+ it 'returns ServiceResponse with error if validation fails' do
+ params[0][:source_full_path] = nil
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message).to eq("Validation failed: Source full path can't be blank")
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service/environment_spec.rb b/spec/services/ci/create_pipeline_service/environment_spec.rb
new file mode 100644
index 00000000000..0ed63012325
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/environment_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:developer) { create(:user) }
+ let(:service) { described_class.new(project, user, ref: 'master') }
+ let(:user) { developer }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ describe '#execute' do
+ subject { service.execute(:push) }
+
+ context 'with deployment tier' do
+ before do
+ config = YAML.dump(
+ deploy: {
+ script: 'ls',
+ environment: { name: "review/$CI_COMMIT_REF_NAME", deployment_tier: tier }
+ })
+
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ let(:tier) { 'development' }
+
+ it 'creates the environment with the expected tier' do
+ is_expected.to be_created_successfully
+
+ expect(Environment.find_by_name("review/master")).to be_development
+ end
+
+ context 'when tier is testing' do
+ let(:tier) { 'testing' }
+
+ it 'creates the environment with the expected tier' do
+ is_expected.to be_created_successfully
+
+ expect(Environment.find_by_name("review/master")).to be_testing
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index 04ecac6a85a..e97e74c1515 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -174,33 +174,19 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
it 'overrides VAR1' do
- variables = job.scoped_variables_hash
+ variables = job.scoped_variables.to_hash
expect(variables['VAR1']).to eq('overridden var 1')
expect(variables['VAR2']).to eq('my var 2')
expect(variables['VAR3']).to be_nil
end
-
- context 'when FF ci_rules_variables is disabled' do
- before do
- stub_feature_flags(ci_rules_variables: false)
- end
-
- it 'does not affect variables' do
- variables = job.scoped_variables_hash
-
- expect(variables['VAR1']).to eq('my var 1')
- expect(variables['VAR2']).to eq('my var 2')
- expect(variables['VAR3']).to be_nil
- end
- end
end
context 'when matching to the second rule' do
let(:ref) { 'refs/heads/feature' }
it 'overrides VAR2 and adds VAR3' do
- variables = job.scoped_variables_hash
+ variables = job.scoped_variables.to_hash
expect(variables['VAR1']).to eq('my var 1')
expect(variables['VAR2']).to eq('overridden var 2')
@@ -212,7 +198,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/wip' }
it 'does not affect vars' do
- variables = job.scoped_variables_hash
+ variables = job.scoped_variables.to_hash
expect(variables['VAR1']).to eq('my var 1')
expect(variables['VAR2']).to eq('my var 2')
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 1005985b3e4..9fafc57a770 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe Ci::CreatePipelineService do
describe 'recording a conversion event' do
it 'schedules a record conversion event worker' do
- expect(Experiments::RecordConversionEventWorker).to receive(:perform_async).with(:ci_syntax_templates, user.id)
+ expect(Experiments::RecordConversionEventWorker).to receive(:perform_async).with(:ci_syntax_templates_b, user.id)
pipeline
end
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index 8df5d0bc159..3dbf2dbb8f1 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -13,10 +13,14 @@ RSpec.describe Ci::ExpirePipelineCacheService do
pipelines_path = "/#{project.full_path}/-/pipelines.json"
new_mr_pipelines_path = "/#{project.full_path}/-/merge_requests/new.json"
pipeline_path = "/#{project.full_path}/-/pipelines/#{pipeline.id}.json"
+ graphql_pipeline_path = "/api/graphql:pipelines/id/#{pipeline.id}"
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(pipelines_path)
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(new_mr_pipelines_path)
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(pipeline_path)
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
+ expect(store).to receive(:touch).with(pipelines_path)
+ expect(store).to receive(:touch).with(new_mr_pipelines_path)
+ expect(store).to receive(:touch).with(pipeline_path)
+ expect(store).to receive(:touch).with(graphql_pipeline_path)
+ end
subject.execute(pipeline)
end
@@ -59,5 +63,36 @@ RSpec.describe Ci::ExpirePipelineCacheService do
expect(Project.find(project_with_repo.id).pipeline_status.has_status?).to be_falsey
end
end
+
+ context 'when the pipeline is triggered by another pipeline' do
+ let(:source) { create(:ci_sources_pipeline, pipeline: pipeline) }
+
+ it 'updates the cache of dependent pipeline' do
+ dependent_pipeline_path = "/#{source.source_project.full_path}/-/pipelines/#{source.source_pipeline.id}.json"
+
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
+ allow(store).to receive(:touch)
+ expect(store).to receive(:touch).with(dependent_pipeline_path)
+ end
+
+ subject.execute(pipeline)
+ end
+ end
+
+ context 'when the pipeline triggered another pipeline' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+ let(:source) { create(:ci_sources_pipeline, source_job: build) }
+
+ it 'updates the cache of dependent pipeline' do
+ dependent_pipeline_path = "/#{source.project.full_path}/-/pipelines/#{source.pipeline.id}.json"
+
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
+ allow(store).to receive(:touch)
+ expect(store).to receive(:touch).with(dependent_pipeline_path)
+ end
+
+ subject.execute(pipeline)
+ end
+ end
end
end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index bbd7422b435..215f33a42a3 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -843,12 +843,26 @@ RSpec.shared_examples 'Pipeline Processing Service' do
create(:ci_build_need, build: deploy, name: 'linux:build')
end
- it 'makes deploy DAG to be waiting for optional manual to finish' do
+ it 'makes deploy DAG to be skipped' do
expect(process_pipeline).to be_truthy
- expect(stages).to eq(%w(skipped created))
+ expect(stages).to eq(%w(skipped skipped))
expect(all_builds.manual).to contain_exactly(linux_build)
- expect(all_builds.created).to contain_exactly(deploy)
+ expect(all_builds.skipped).to contain_exactly(deploy)
+ end
+
+ context 'when FF ci_fix_pipeline_status_for_dag_needs_manual is disabled' do
+ before do
+ stub_feature_flags(ci_fix_pipeline_status_for_dag_needs_manual: false)
+ end
+
+ it 'makes deploy DAG to be waiting for optional manual to finish' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(skipped created))
+ expect(all_builds.manual).to contain_exactly(linux_build)
+ expect(all_builds.created).to contain_exactly(deploy)
+ end
end
end
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_both.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_both.yml
index 60f803bc3d0..96377b00c85 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_both.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_both.yml
@@ -30,12 +30,12 @@ transitions:
- event: success
jobs: [build]
expect:
- pipeline: running
+ pipeline: success
stages:
build: success
test: skipped
- deploy: created
+ deploy: skipped
jobs:
build: success
test: manual
- deploy: created
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_test.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_test.yml
index 4e4b2f22224..69640630ef4 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_test.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_test.yml
@@ -30,12 +30,12 @@ transitions:
- event: success
jobs: [build]
expect:
- pipeline: running
+ pipeline: success
stages:
build: success
test: skipped
- deploy: created
+ deploy: skipped
jobs:
build: success
test: manual
- deploy: created
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_test_manual_review_deploy.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_test_manual_review_deploy.yml
index fef28dcfbbe..8de484d6793 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_build_test_manual_review_deploy.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_test_manual_review_deploy.yml
@@ -54,29 +54,29 @@ transitions:
stages:
build: success
test: pending
- review: created
- deploy: created
+ review: skipped
+ deploy: skipped
jobs:
build: success
test: pending
release_test: manual
- review: created
- staging: created
- production: created
+ review: skipped
+ staging: skipped
+ production: skipped
- event: success
jobs: [test]
expect:
- pipeline: running
+ pipeline: success
stages:
build: success
test: success
- review: created
- deploy: created
+ review: skipped
+ deploy: skipped
jobs:
build: success
test: success
release_test: manual
- review: created
- staging: created
- production: created
+ review: skipped
+ staging: skipped
+ production: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml
index d8ca563b141..b8fcdd1566a 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml
@@ -12,13 +12,13 @@ config:
init:
expect:
- pipeline: created
+ pipeline: skipped
stages:
test: skipped
- deploy: created
+ deploy: skipped
jobs:
test: manual
- deploy: created
+ deploy: skipped
transitions:
- event: enqueue
@@ -27,10 +27,10 @@ transitions:
pipeline: pending
stages:
test: pending
- deploy: created
+ deploy: skipped
jobs:
test: pending
- deploy: created
+ deploy: skipped
- event: run
jobs: [test]
@@ -38,21 +38,18 @@ transitions:
pipeline: running
stages:
test: running
- deploy: created
+ deploy: skipped
jobs:
test: running
- deploy: created
+ deploy: skipped
- event: drop
jobs: [test]
expect:
- pipeline: running
+ pipeline: success
stages:
test: success
- deploy: pending
+ deploy: skipped
jobs:
test: failed
- deploy: pending
-
-# TOOD: should we run deploy?
-# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml
index ba0a20f49a7..a4a98bf4629 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml
@@ -13,15 +13,12 @@ config:
init:
expect:
- pipeline: created
+ pipeline: pending
stages:
test: skipped
- deploy: created
+ deploy: pending
jobs:
test: manual
- deploy: created
+ deploy: pending
transitions: []
-
-# TODO: should we run `deploy`?
-# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
index d375c6a49e0..81aad4940b6 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
@@ -13,13 +13,13 @@ config:
init:
expect:
- pipeline: created
+ pipeline: skipped
stages:
test: skipped
- deploy: created
+ deploy: skipped
jobs:
test: manual
- deploy: created
+ deploy: skipped
transitions:
- event: enqueue
@@ -28,10 +28,10 @@ transitions:
pipeline: pending
stages:
test: pending
- deploy: created
+ deploy: skipped
jobs:
test: pending
- deploy: created
+ deploy: skipped
- event: drop
jobs: [test]
@@ -43,6 +43,3 @@ transitions:
jobs:
test: failed
deploy: skipped
-
-# TODO: should we run `deploy`?
-# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds_deploy_needs_both.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds_deploy_needs_both.yml
index 34073b92ccc..a5bb103d1a5 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds_deploy_needs_both.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds_deploy_needs_both.yml
@@ -19,24 +19,21 @@ init:
pipeline: pending
stages:
test: pending
- deploy: created
+ deploy: skipped
jobs:
test1: pending
test2: manual
- deploy: created
+ deploy: skipped
transitions:
- event: success
jobs: [test1]
expect:
- pipeline: running
+ pipeline: success
stages:
test: success
- deploy: created
+ deploy: skipped
jobs:
test1: success
test2: manual
- deploy: created
-
-# TODO: should deploy run?
-# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
+ deploy: skipped
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index d316c9a262b..e02536fd07f 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -43,42 +43,59 @@ RSpec.describe Ci::ProcessPipelineService do
let!(:build) { create_build('build') }
let!(:test) { create_build('test') }
- it 'returns unique statuses' do
- subject.execute
+ context 'when FF ci_remove_update_retried_from_process_pipeline is enabled' do
+ it 'does not update older builds as retried' do
+ subject.execute
- expect(all_builds.latest).to contain_exactly(build, test)
- expect(all_builds.retried).to contain_exactly(build_retried)
+ expect(all_builds.latest).to contain_exactly(build, build_retried, test)
+ expect(all_builds.retried).to be_empty
+ end
end
- context 'counter ci_legacy_update_jobs_as_retried_total' do
- let(:counter) { double(increment: true) }
-
+ context 'when FF ci_remove_update_retried_from_process_pipeline is disabled' do
before do
- allow(Gitlab::Metrics).to receive(:counter).and_call_original
- allow(Gitlab::Metrics).to receive(:counter)
- .with(:ci_legacy_update_jobs_as_retried_total, anything)
- .and_return(counter)
+ stub_feature_flags(ci_remove_update_retried_from_process_pipeline: false)
end
- it 'increments the counter' do
- expect(counter).to receive(:increment)
-
+ it 'returns unique statuses' do
subject.execute
+
+ expect(all_builds.latest).to contain_exactly(build, test)
+ expect(all_builds.retried).to contain_exactly(build_retried)
end
- context 'when the previous build has already retried column true' do
+ context 'counter ci_legacy_update_jobs_as_retried_total' do
+ let(:counter) { double(increment: true) }
+
before do
- build_retried.update_columns(retried: true)
+ allow(Gitlab::Metrics).to receive(:counter).and_call_original
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:ci_legacy_update_jobs_as_retried_total, anything)
+ .and_return(counter)
end
- it 'does not increment the counter' do
- expect(counter).not_to receive(:increment)
+ it 'increments the counter' do
+ expect(counter).to receive(:increment)
subject.execute
end
+
+ context 'when the previous build has already retried column true' do
+ before do
+ build_retried.update_columns(retried: true)
+ end
+
+ it 'does not increment the counter' do
+ expect(counter).not_to receive(:increment)
+
+ subject.execute
+ end
+ end
end
end
+ private
+
def create_build(name, **opts)
create(:ci_build, :created, pipeline: pipeline, name: name, **opts)
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 88770c8095b..9187dd4f300 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -13,573 +13,656 @@ module Ci
let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
describe '#execute' do
- context 'runner follow tag list' do
- it "picks build with the same tag" do
- pending_job.update!(tag_list: ["linux"])
- specific_runner.update!(tag_list: ["linux"])
- expect(execute(specific_runner)).to eq(pending_job)
- end
-
- it "does not pick build with different tag" do
- pending_job.update!(tag_list: ["linux"])
- specific_runner.update!(tag_list: ["win32"])
- expect(execute(specific_runner)).to be_falsey
- end
+ shared_examples 'handles runner assignment' do
+ context 'runner follow tag list' do
+ it "picks build with the same tag" do
+ pending_job.update!(tag_list: ["linux"])
+ specific_runner.update!(tag_list: ["linux"])
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
- it "picks build without tag" do
- expect(execute(specific_runner)).to eq(pending_job)
- end
+ it "does not pick build with different tag" do
+ pending_job.update!(tag_list: ["linux"])
+ specific_runner.update!(tag_list: ["win32"])
+ expect(execute(specific_runner)).to be_falsey
+ end
- it "does not pick build with tag" do
- pending_job.update!(tag_list: ["linux"])
- expect(execute(specific_runner)).to be_falsey
- end
+ it "picks build without tag" do
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
- it "pick build without tag" do
- specific_runner.update!(tag_list: ["win32"])
- expect(execute(specific_runner)).to eq(pending_job)
- end
- end
+ it "does not pick build with tag" do
+ pending_job.update!(tag_list: ["linux"])
+ expect(execute(specific_runner)).to be_falsey
+ end
- context 'deleted projects' do
- before do
- project.update!(pending_delete: true)
+ it "pick build without tag" do
+ specific_runner.update!(tag_list: ["win32"])
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
end
- context 'for shared runners' do
+ context 'deleted projects' do
before do
- project.update!(shared_runners_enabled: true)
+ project.update!(pending_delete: true)
end
- it 'does not pick a build' do
- expect(execute(shared_runner)).to be_nil
+ context 'for shared runners' do
+ before do
+ project.update!(shared_runners_enabled: true)
+ end
+
+ it 'does not pick a build' do
+ expect(execute(shared_runner)).to be_nil
+ end
end
- end
- context 'for specific runner' do
- it 'does not pick a build' do
- expect(execute(specific_runner)).to be_nil
+ context 'for specific runner' do
+ it 'does not pick a build' do
+ expect(execute(specific_runner)).to be_nil
+ end
end
end
- end
- context 'allow shared runners' do
- before do
- project.update!(shared_runners_enabled: true)
- end
+ context 'allow shared runners' do
+ before do
+ project.update!(shared_runners_enabled: true)
+ end
+
+ context 'for multiple builds' do
+ let!(:project2) { create :project, shared_runners_enabled: true }
+ let!(:pipeline2) { create :ci_pipeline, project: project2 }
+ let!(:project3) { create :project, shared_runners_enabled: true }
+ let!(:pipeline3) { create :ci_pipeline, project: project3 }
+ let!(:build1_project1) { pending_job }
+ let!(:build2_project1) { FactoryBot.create :ci_build, pipeline: pipeline }
+ let!(:build3_project1) { FactoryBot.create :ci_build, pipeline: pipeline }
+ let!(:build1_project2) { FactoryBot.create :ci_build, pipeline: pipeline2 }
+ let!(:build2_project2) { FactoryBot.create :ci_build, pipeline: pipeline2 }
+ let!(:build1_project3) { FactoryBot.create :ci_build, pipeline: pipeline3 }
+
+ it 'prefers projects without builds first' do
+ # it gets for one build from each of the projects
+ expect(execute(shared_runner)).to eq(build1_project1)
+ expect(execute(shared_runner)).to eq(build1_project2)
+ expect(execute(shared_runner)).to eq(build1_project3)
+
+ # then it gets a second build from each of the projects
+ expect(execute(shared_runner)).to eq(build2_project1)
+ expect(execute(shared_runner)).to eq(build2_project2)
+
+ # in the end the third build
+ expect(execute(shared_runner)).to eq(build3_project1)
+ end
- context 'for multiple builds' do
- let!(:project2) { create :project, shared_runners_enabled: true }
- let!(:pipeline2) { create :ci_pipeline, project: project2 }
- let!(:project3) { create :project, shared_runners_enabled: true }
- let!(:pipeline3) { create :ci_pipeline, project: project3 }
- let!(:build1_project1) { pending_job }
- let!(:build2_project1) { FactoryBot.create :ci_build, pipeline: pipeline }
- let!(:build3_project1) { FactoryBot.create :ci_build, pipeline: pipeline }
- let!(:build1_project2) { FactoryBot.create :ci_build, pipeline: pipeline2 }
- let!(:build2_project2) { FactoryBot.create :ci_build, pipeline: pipeline2 }
- let!(:build1_project3) { FactoryBot.create :ci_build, pipeline: pipeline3 }
-
- it 'prefers projects without builds first' do
- # it gets for one build from each of the projects
- expect(execute(shared_runner)).to eq(build1_project1)
- expect(execute(shared_runner)).to eq(build1_project2)
- expect(execute(shared_runner)).to eq(build1_project3)
-
- # then it gets a second build from each of the projects
- expect(execute(shared_runner)).to eq(build2_project1)
- expect(execute(shared_runner)).to eq(build2_project2)
-
- # in the end the third build
- expect(execute(shared_runner)).to eq(build3_project1)
- end
-
- it 'equalises number of running builds' do
- # after finishing the first build for project 1, get a second build from the same project
- expect(execute(shared_runner)).to eq(build1_project1)
- build1_project1.reload.success
- expect(execute(shared_runner)).to eq(build2_project1)
-
- expect(execute(shared_runner)).to eq(build1_project2)
- build1_project2.reload.success
- expect(execute(shared_runner)).to eq(build2_project2)
- expect(execute(shared_runner)).to eq(build1_project3)
- expect(execute(shared_runner)).to eq(build3_project1)
+ it 'equalises number of running builds' do
+ # after finishing the first build for project 1, get a second build from the same project
+ expect(execute(shared_runner)).to eq(build1_project1)
+ build1_project1.reload.success
+ expect(execute(shared_runner)).to eq(build2_project1)
+
+ expect(execute(shared_runner)).to eq(build1_project2)
+ build1_project2.reload.success
+ expect(execute(shared_runner)).to eq(build2_project2)
+ expect(execute(shared_runner)).to eq(build1_project3)
+ expect(execute(shared_runner)).to eq(build3_project1)
+ end
end
- end
- context 'shared runner' do
- let(:response) { described_class.new(shared_runner).execute }
- let(:build) { response.build }
+ context 'shared runner' do
+ let(:response) { described_class.new(shared_runner).execute }
+ let(:build) { response.build }
- it { expect(build).to be_kind_of(Build) }
- it { expect(build).to be_valid }
- it { expect(build).to be_running }
- it { expect(build.runner).to eq(shared_runner) }
- it { expect(Gitlab::Json.parse(response.build_json)['id']).to eq(build.id) }
- end
+ it { expect(build).to be_kind_of(Build) }
+ it { expect(build).to be_valid }
+ it { expect(build).to be_running }
+ it { expect(build.runner).to eq(shared_runner) }
+ it { expect(Gitlab::Json.parse(response.build_json)['id']).to eq(build.id) }
+ end
- context 'specific runner' do
- let(:build) { execute(specific_runner) }
+ context 'specific runner' do
+ let(:build) { execute(specific_runner) }
- it { expect(build).to be_kind_of(Build) }
- it { expect(build).to be_valid }
- it { expect(build).to be_running }
- it { expect(build.runner).to eq(specific_runner) }
+ it { expect(build).to be_kind_of(Build) }
+ it { expect(build).to be_valid }
+ it { expect(build).to be_running }
+ it { expect(build.runner).to eq(specific_runner) }
+ end
end
- end
- context 'disallow shared runners' do
- before do
- project.update!(shared_runners_enabled: false)
- end
+ context 'disallow shared runners' do
+ before do
+ project.update!(shared_runners_enabled: false)
+ end
- context 'shared runner' do
- let(:build) { execute(shared_runner) }
+ context 'shared runner' do
+ let(:build) { execute(shared_runner) }
- it { expect(build).to be_nil }
- end
+ it { expect(build).to be_nil }
+ end
- context 'specific runner' do
- let(:build) { execute(specific_runner) }
+ context 'specific runner' do
+ let(:build) { execute(specific_runner) }
- it { expect(build).to be_kind_of(Build) }
- it { expect(build).to be_valid }
- it { expect(build).to be_running }
- it { expect(build.runner).to eq(specific_runner) }
+ it { expect(build).to be_kind_of(Build) }
+ it { expect(build).to be_valid }
+ it { expect(build).to be_running }
+ it { expect(build.runner).to eq(specific_runner) }
+ end
end
- end
- context 'disallow when builds are disabled' do
- before do
- project.update!(shared_runners_enabled: true, group_runners_enabled: true)
- project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
- end
+ context 'disallow when builds are disabled' do
+ before do
+ project.update!(shared_runners_enabled: true, group_runners_enabled: true)
+ project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
+ end
- context 'and uses shared runner' do
- let(:build) { execute(shared_runner) }
+ context 'and uses shared runner' do
+ let(:build) { execute(shared_runner) }
- it { expect(build).to be_nil }
- end
+ it { expect(build).to be_nil }
+ end
- context 'and uses group runner' do
- let(:build) { execute(group_runner) }
+ context 'and uses group runner' do
+ let(:build) { execute(group_runner) }
- it { expect(build).to be_nil }
- end
+ it { expect(build).to be_nil }
+ end
- context 'and uses project runner' do
- let(:build) { execute(specific_runner) }
+ context 'and uses project runner' do
+ let(:build) { execute(specific_runner) }
- it { expect(build).to be_nil }
+ it { expect(build).to be_nil }
+ end
end
- end
- context 'allow group runners' do
- before do
- project.update!(group_runners_enabled: true)
- end
+ context 'allow group runners' do
+ before do
+ project.update!(group_runners_enabled: true)
+ end
- context 'for multiple builds' do
- let!(:project2) { create(:project, group_runners_enabled: true, group: group) }
- let!(:pipeline2) { create(:ci_pipeline, project: project2) }
- let!(:project3) { create(:project, group_runners_enabled: true, group: group) }
- let!(:pipeline3) { create(:ci_pipeline, project: project3) }
+ context 'for multiple builds' do
+ let!(:project2) { create(:project, group_runners_enabled: true, group: group) }
+ let!(:pipeline2) { create(:ci_pipeline, project: project2) }
+ let!(:project3) { create(:project, group_runners_enabled: true, group: group) }
+ let!(:pipeline3) { create(:ci_pipeline, project: project3) }
- let!(:build1_project1) { pending_job }
- let!(:build2_project1) { create(:ci_build, pipeline: pipeline) }
- let!(:build3_project1) { create(:ci_build, pipeline: pipeline) }
- let!(:build1_project2) { create(:ci_build, pipeline: pipeline2) }
- let!(:build2_project2) { create(:ci_build, pipeline: pipeline2) }
- let!(:build1_project3) { create(:ci_build, pipeline: pipeline3) }
+ let!(:build1_project1) { pending_job }
+ let!(:build2_project1) { create(:ci_build, pipeline: pipeline) }
+ let!(:build3_project1) { create(:ci_build, pipeline: pipeline) }
+ let!(:build1_project2) { create(:ci_build, pipeline: pipeline2) }
+ let!(:build2_project2) { create(:ci_build, pipeline: pipeline2) }
+ let!(:build1_project3) { create(:ci_build, pipeline: pipeline3) }
- # these shouldn't influence the scheduling
- let!(:unrelated_group) { create(:group) }
- let!(:unrelated_project) { create(:project, group_runners_enabled: true, group: unrelated_group) }
- let!(:unrelated_pipeline) { create(:ci_pipeline, project: unrelated_project) }
- let!(:build1_unrelated_project) { create(:ci_build, pipeline: unrelated_pipeline) }
- let!(:unrelated_group_runner) { create(:ci_runner, :group, groups: [unrelated_group]) }
+ # these shouldn't influence the scheduling
+ let!(:unrelated_group) { create(:group) }
+ let!(:unrelated_project) { create(:project, group_runners_enabled: true, group: unrelated_group) }
+ let!(:unrelated_pipeline) { create(:ci_pipeline, project: unrelated_project) }
+ let!(:build1_unrelated_project) { create(:ci_build, pipeline: unrelated_pipeline) }
+ let!(:unrelated_group_runner) { create(:ci_runner, :group, groups: [unrelated_group]) }
- it 'does not consider builds from other group runners' do
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 6
- execute(group_runner)
+ it 'does not consider builds from other group runners' do
+ expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 6
+ execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 5
- execute(group_runner)
+ expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 5
+ execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 4
- execute(group_runner)
+ expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 4
+ execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 3
- execute(group_runner)
+ expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 3
+ execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 2
- execute(group_runner)
+ expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 2
+ execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 1
- execute(group_runner)
+ expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 1
+ execute(group_runner)
- expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 0
- expect(execute(group_runner)).to be_nil
+ expect(described_class.new(group_runner).send(:builds_for_group_runner).count).to eq 0
+ expect(execute(group_runner)).to be_nil
+ end
end
- end
- context 'group runner' do
- let(:build) { execute(group_runner) }
+ context 'group runner' do
+ let(:build) { execute(group_runner) }
- it { expect(build).to be_kind_of(Build) }
- it { expect(build).to be_valid }
- it { expect(build).to be_running }
- it { expect(build.runner).to eq(group_runner) }
+ it { expect(build).to be_kind_of(Build) }
+ it { expect(build).to be_valid }
+ it { expect(build).to be_running }
+ it { expect(build.runner).to eq(group_runner) }
+ end
end
- end
- context 'disallow group runners' do
- before do
- project.update!(group_runners_enabled: false)
- end
+ context 'disallow group runners' do
+ before do
+ project.update!(group_runners_enabled: false)
+ end
- context 'group runner' do
- let(:build) { execute(group_runner) }
+ context 'group runner' do
+ let(:build) { execute(group_runner) }
- it { expect(build).to be_nil }
+ it { expect(build).to be_nil }
+ end
end
- end
- context 'when first build is stalled' do
- before do
- allow_any_instance_of(Ci::RegisterJobService).to receive(:assign_runner!).and_call_original
- allow_any_instance_of(Ci::RegisterJobService).to receive(:assign_runner!)
- .with(pending_job, anything).and_raise(ActiveRecord::StaleObjectError)
- end
+ context 'when first build is stalled' do
+ before do
+ allow_any_instance_of(Ci::RegisterJobService).to receive(:assign_runner!).and_call_original
+ allow_any_instance_of(Ci::RegisterJobService).to receive(:assign_runner!)
+ .with(pending_job, anything).and_raise(ActiveRecord::StaleObjectError)
+ end
- subject { described_class.new(specific_runner).execute }
+ subject { described_class.new(specific_runner).execute }
- context 'with multiple builds are in queue' do
- let!(:other_build) { create :ci_build, pipeline: pipeline }
+ context 'with multiple builds are in queue' do
+ let!(:other_build) { create :ci_build, pipeline: pipeline }
- before do
- allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
- .and_return(Ci::Build.where(id: [pending_job, other_build]))
- end
+ before do
+ allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
+ .and_return(Ci::Build.where(id: [pending_job, other_build]))
+ end
- it "receives second build from the queue" do
- expect(subject).to be_valid
- expect(subject.build).to eq(other_build)
+ it "receives second build from the queue" do
+ expect(subject).to be_valid
+ expect(subject.build).to eq(other_build)
+ end
end
- end
- context 'when single build is in queue' do
- before do
- allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
- .and_return(Ci::Build.where(id: pending_job))
- end
+ context 'when single build is in queue' do
+ before do
+ allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
+ .and_return(Ci::Build.where(id: pending_job))
+ end
- it "does not receive any valid result" do
- expect(subject).not_to be_valid
+ it "does not receive any valid result" do
+ expect(subject).not_to be_valid
+ end
end
- end
- context 'when there is no build in queue' do
- before do
- allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
- .and_return(Ci::Build.none)
- end
+ context 'when there is no build in queue' do
+ before do
+ allow_any_instance_of(Ci::RegisterJobService).to receive(:builds_for_project_runner)
+ .and_return(Ci::Build.none)
+ end
- it "does not receive builds but result is valid" do
- expect(subject).to be_valid
- expect(subject.build).to be_nil
+ it "does not receive builds but result is valid" do
+ expect(subject).to be_valid
+ expect(subject.build).to be_nil
+ end
end
end
- end
- context 'when access_level of runner is not_protected' do
- let!(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
+ context 'when access_level of runner is not_protected' do
+ let!(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
- context 'when a job is protected' do
- let!(:pending_job) { create(:ci_build, :protected, pipeline: pipeline) }
+ context 'when a job is protected' do
+ let!(:pending_job) { create(:ci_build, :protected, pipeline: pipeline) }
- it 'picks the job' do
- expect(execute(specific_runner)).to eq(pending_job)
+ it 'picks the job' do
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
end
- end
- context 'when a job is unprotected' do
- let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
+ context 'when a job is unprotected' do
+ let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
- it 'picks the job' do
- expect(execute(specific_runner)).to eq(pending_job)
+ it 'picks the job' do
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
end
- end
- context 'when protected attribute of a job is nil' do
- let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
+ context 'when protected attribute of a job is nil' do
+ let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
- before do
- pending_job.update_attribute(:protected, nil)
- end
+ before do
+ pending_job.update_attribute(:protected, nil)
+ end
- it 'picks the job' do
- expect(execute(specific_runner)).to eq(pending_job)
+ it 'picks the job' do
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
end
end
- end
- context 'when access_level of runner is ref_protected' do
- let!(:specific_runner) { create(:ci_runner, :project, :ref_protected, projects: [project]) }
+ context 'when access_level of runner is ref_protected' do
+ let!(:specific_runner) { create(:ci_runner, :project, :ref_protected, projects: [project]) }
- context 'when a job is protected' do
- let!(:pending_job) { create(:ci_build, :protected, pipeline: pipeline) }
+ context 'when a job is protected' do
+ let!(:pending_job) { create(:ci_build, :protected, pipeline: pipeline) }
- it 'picks the job' do
- expect(execute(specific_runner)).to eq(pending_job)
+ it 'picks the job' do
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
end
- end
- context 'when a job is unprotected' do
- let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
+ context 'when a job is unprotected' do
+ let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
- it 'does not pick the job' do
- expect(execute(specific_runner)).to be_nil
+ it 'does not pick the job' do
+ expect(execute(specific_runner)).to be_nil
+ end
end
- end
- context 'when protected attribute of a job is nil' do
- let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
+ context 'when protected attribute of a job is nil' do
+ let!(:pending_job) { create(:ci_build, pipeline: pipeline) }
- before do
- pending_job.update_attribute(:protected, nil)
- end
+ before do
+ pending_job.update_attribute(:protected, nil)
+ end
- it 'does not pick the job' do
- expect(execute(specific_runner)).to be_nil
+ it 'does not pick the job' do
+ expect(execute(specific_runner)).to be_nil
+ end
end
end
- end
- context 'runner feature set is verified' do
- let(:options) { { artifacts: { reports: { junit: "junit.xml" } } } }
- let!(:pending_job) { create(:ci_build, :pending, pipeline: pipeline, options: options) }
+ context 'runner feature set is verified' do
+ let(:options) { { artifacts: { reports: { junit: "junit.xml" } } } }
+ let!(:pending_job) { create(:ci_build, :pending, pipeline: pipeline, options: options) }
- subject { execute(specific_runner, params) }
+ subject { execute(specific_runner, params) }
- context 'when feature is missing by runner' do
- let(:params) { {} }
+ context 'when feature is missing by runner' do
+ let(:params) { {} }
- it 'does not pick the build and drops the build' do
- expect(subject).to be_nil
- expect(pending_job.reload).to be_failed
- expect(pending_job).to be_runner_unsupported
+ it 'does not pick the build and drops the build' do
+ expect(subject).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job).to be_runner_unsupported
+ end
end
- end
- context 'when feature is supported by runner' do
- let(:params) do
- { info: { features: { upload_multiple_artifacts: true } } }
- end
+ context 'when feature is supported by runner' do
+ let(:params) do
+ { info: { features: { upload_multiple_artifacts: true } } }
+ end
- it 'does pick job' do
- expect(subject).not_to be_nil
+ it 'does pick job' do
+ expect(subject).not_to be_nil
+ end
end
end
- end
- context 'when "dependencies" keyword is specified' do
- shared_examples 'not pick' do
- it 'does not pick the build and drops the build' do
- expect(subject).to be_nil
- expect(pending_job.reload).to be_failed
- expect(pending_job).to be_missing_dependency_failure
+ context 'when "dependencies" keyword is specified' do
+ shared_examples 'not pick' do
+ it 'does not pick the build and drops the build' do
+ expect(subject).to be_nil
+ expect(pending_job.reload).to be_failed
+ expect(pending_job).to be_missing_dependency_failure
+ end
end
- end
- shared_examples 'validation is active' do
- context 'when depended job has not been completed yet' do
- let!(:pre_stage_job) { create(:ci_build, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ shared_examples 'validation is active' do
+ context 'when depended job has not been completed yet' do
+ let!(:pre_stage_job) { create(:ci_build, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
- it { expect(subject).to eq(pending_job) }
- end
+ it { expect(subject).to eq(pending_job) }
+ end
- context 'when artifacts of depended job has been expired' do
- let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ context 'when artifacts of depended job has been expired' do
+ let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
- it_behaves_like 'not pick'
- end
+ it_behaves_like 'not pick'
+ end
- context 'when artifacts of depended job has been erased' do
- let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
+ context 'when artifacts of depended job has been erased' do
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
- before do
- pre_stage_job.erase
+ before do
+ pre_stage_job.erase
+ end
+
+ it_behaves_like 'not pick'
end
- it_behaves_like 'not pick'
+ context 'when job object is staled' do
+ let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ before do
+ allow_any_instance_of(Ci::Build).to receive(:drop!)
+ .and_raise(ActiveRecord::StaleObjectError.new(pending_job, :drop!))
+ end
+
+ it 'does not drop nor pick' do
+ expect(subject).to be_nil
+ end
+ end
end
- context 'when job object is staled' do
- let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ shared_examples 'validation is not active' do
+ context 'when depended job has not been completed yet' do
+ let!(:pre_stage_job) { create(:ci_build, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
- before do
- allow_any_instance_of(Ci::Build).to receive(:drop!)
- .and_raise(ActiveRecord::StaleObjectError.new(pending_job, :drop!))
+ it { expect(subject).to eq(pending_job) }
end
- it 'does not drop nor pick' do
- expect(subject).to be_nil
+ context 'when artifacts of depended job has been expired' do
+ let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+
+ it { expect(subject).to eq(pending_job) }
end
- end
- end
- shared_examples 'validation is not active' do
- context 'when depended job has not been completed yet' do
- let!(:pre_stage_job) { create(:ci_build, :manual, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ context 'when artifacts of depended job has been erased' do
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
- it { expect(subject).to eq(pending_job) }
+ before do
+ pre_stage_job.erase
+ end
+
+ it { expect(subject).to eq(pending_job) }
+ end
end
- context 'when artifacts of depended job has been expired' do
- let!(:pre_stage_job) { create(:ci_build, :success, :expired, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ before do
+ stub_feature_flags(ci_validate_build_dependencies_override: false)
+ end
+
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
- it { expect(subject).to eq(pending_job) }
+ let!(:pending_job) do
+ create(:ci_build, :pending,
+ pipeline: pipeline, stage_idx: 1,
+ options: { script: ["bash"], dependencies: ['test'] })
end
- context 'when artifacts of depended job has been erased' do
- let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0, erased_at: 1.minute.ago) }
+ subject { execute(specific_runner) }
+ context 'when validates for dependencies is enabled' do
before do
- pre_stage_job.erase
+ stub_feature_flags(ci_validate_build_dependencies_override: false)
end
- it { expect(subject).to eq(pending_job) }
+ it_behaves_like 'validation is active'
+
+ context 'when the main feature flag is enabled for a specific project' do
+ before do
+ stub_feature_flags(ci_validate_build_dependencies: pipeline.project)
+ end
+
+ it_behaves_like 'validation is active'
+ end
+
+ context 'when the main feature flag is enabled for a different project' do
+ before do
+ stub_feature_flags(ci_validate_build_dependencies: create(:project))
+ end
+
+ it_behaves_like 'validation is not active'
+ end
end
- end
- before do
- stub_feature_flags(ci_validate_build_dependencies_override: false)
+ context 'when validates for dependencies is disabled' do
+ before do
+ stub_feature_flags(ci_validate_build_dependencies_override: true)
+ end
+
+ it_behaves_like 'validation is not active'
+ end
end
- let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
+ context 'when build is degenerated' do
+ let!(:pending_job) { create(:ci_build, :pending, :degenerated, pipeline: pipeline) }
+
+ subject { execute(specific_runner, {}) }
+
+ it 'does not pick the build and drops the build' do
+ expect(subject).to be_nil
- let!(:pending_job) do
- create(:ci_build, :pending,
- pipeline: pipeline, stage_idx: 1,
- options: { script: ["bash"], dependencies: ['test'] })
+ pending_job.reload
+ expect(pending_job).to be_failed
+ expect(pending_job).to be_archived_failure
+ end
end
- subject { execute(specific_runner) }
+ context 'when build has data integrity problem' do
+ let!(:pending_job) do
+ create(:ci_build, :pending, pipeline: pipeline)
+ end
- context 'when validates for dependencies is enabled' do
before do
- stub_feature_flags(ci_validate_build_dependencies_override: false)
+ pending_job.update_columns(options: "string")
end
- it_behaves_like 'validation is active'
+ subject { execute(specific_runner, {}) }
- context 'when the main feature flag is enabled for a specific project' do
- before do
- stub_feature_flags(ci_validate_build_dependencies: pipeline.project)
- end
+ it 'does drop the build and logs both failures' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(anything, a_hash_including(build_id: pending_job.id))
+ .twice
+ .and_call_original
- it_behaves_like 'validation is active'
- end
-
- context 'when the main feature flag is enabled for a different project' do
- before do
- stub_feature_flags(ci_validate_build_dependencies: create(:project))
- end
+ expect(subject).to be_nil
- it_behaves_like 'validation is not active'
+ pending_job.reload
+ expect(pending_job).to be_failed
+ expect(pending_job).to be_data_integrity_failure
end
end
- context 'when validates for dependencies is disabled' do
+ context 'when build fails to be run!' do
+ let!(:pending_job) do
+ create(:ci_build, :pending, pipeline: pipeline)
+ end
+
before do
- stub_feature_flags(ci_validate_build_dependencies_override: true)
+ expect_any_instance_of(Ci::Build).to receive(:run!)
+ .and_raise(RuntimeError, 'scheduler error')
end
- it_behaves_like 'validation is not active'
+ subject { execute(specific_runner, {}) }
+
+ it 'does drop the build and logs failure' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(anything, a_hash_including(build_id: pending_job.id))
+ .once
+ .and_call_original
+
+ expect(subject).to be_nil
+
+ pending_job.reload
+ expect(pending_job).to be_failed
+ expect(pending_job).to be_scheduler_failure
+ end
end
- end
- context 'when build is degenerated' do
- let!(:pending_job) { create(:ci_build, :pending, :degenerated, pipeline: pipeline) }
+ context 'when an exception is raised during a persistent ref creation' do
+ before do
+ allow_any_instance_of(Ci::PersistentRef).to receive(:exist?) { false }
+ allow_any_instance_of(Ci::PersistentRef).to receive(:create_ref) { raise ArgumentError }
+ end
- subject { execute(specific_runner, {}) }
+ subject { execute(specific_runner, {}) }
- it 'does not pick the build and drops the build' do
- expect(subject).to be_nil
+ it 'picks the build' do
+ expect(subject).to eq(pending_job)
- pending_job.reload
- expect(pending_job).to be_failed
- expect(pending_job).to be_archived_failure
+ pending_job.reload
+ expect(pending_job).to be_running
+ end
end
- end
- context 'when build has data integrity problem' do
- let!(:pending_job) do
- create(:ci_build, :pending, pipeline: pipeline)
- end
+ context 'when only some builds can be matched by runner' do
+ let!(:specific_runner) { create(:ci_runner, :project, projects: [project], tag_list: %w[matching]) }
+ let!(:pending_job) { create(:ci_build, pipeline: pipeline, tag_list: %w[matching]) }
- before do
- pending_job.update_columns(options: "string")
+ before do
+ # create additional matching and non-matching jobs
+ create_list(:ci_build, 2, pipeline: pipeline, tag_list: %w[matching])
+ create(:ci_build, pipeline: pipeline, tag_list: %w[non-matching])
+ end
+
+ it "observes queue size of only matching jobs" do
+ # pending_job + 2 x matching ones
+ expect(Gitlab::Ci::Queue::Metrics.queue_size_total).to receive(:observe).with({}, 3)
+
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
end
- subject { execute(specific_runner, {}) }
+ context 'when ci_register_job_temporary_lock is enabled' do
+ before do
+ stub_feature_flags(ci_register_job_temporary_lock: true)
- it 'does drop the build and logs both failures' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(anything, a_hash_including(build_id: pending_job.id))
- .twice
- .and_call_original
+ allow(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
+ end
- expect(subject).to be_nil
+ context 'when a build is temporarily locked' do
+ let(:service) { described_class.new(specific_runner) }
- pending_job.reload
- expect(pending_job).to be_failed
- expect(pending_job).to be_data_integrity_failure
- end
- end
+ before do
+ service.send(:acquire_temporary_lock, pending_job.id)
+ end
- context 'when build fails to be run!' do
- let!(:pending_job) do
- create(:ci_build, :pending, pipeline: pipeline)
- end
+ it 'skips this build and marks queue as invalid' do
+ expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
+ .with(operation: :queue_iteration)
+ expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
+ .with(operation: :build_temporary_locked)
- before do
- expect_any_instance_of(Ci::Build).to receive(:run!)
- .and_raise(RuntimeError, 'scheduler error')
- end
+ expect(service.execute).not_to be_valid
+ end
- subject { execute(specific_runner, {}) }
+ context 'when there is another build in queue' do
+ let!(:next_pending_job) { create(:ci_build, pipeline: pipeline) }
- it 'does drop the build and logs failure' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(anything, a_hash_including(build_id: pending_job.id))
- .once
- .and_call_original
+ it 'skips this build and picks another build' do
+ expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
+ .with(operation: :queue_iteration).twice
+ expect(Gitlab::Ci::Queue::Metrics.queue_operations_total).to receive(:increment)
+ .with(operation: :build_temporary_locked)
- expect(subject).to be_nil
+ result = service.execute
- pending_job.reload
- expect(pending_job).to be_failed
- expect(pending_job).to be_scheduler_failure
+ expect(result.build).to eq(next_pending_job)
+ expect(result).to be_valid
+ end
+ end
+ end
end
end
- context 'when an exception is raised during a persistent ref creation' do
+ context 'when ci_register_job_service_one_by_one is enabled' do
before do
- allow_any_instance_of(Ci::PersistentRef).to receive(:exist?) { false }
- allow_any_instance_of(Ci::PersistentRef).to receive(:create_ref) { raise ArgumentError }
+ stub_feature_flags(ci_register_job_service_one_by_one: true)
end
- subject { execute(specific_runner, {}) }
+ it 'picks builds one-by-one' do
+ expect(Ci::Build).to receive(:find).with(pending_job.id).and_call_original
- it 'picks the build' do
- expect(subject).to eq(pending_job)
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
+
+ include_examples 'handles runner assignment'
+ end
- pending_job.reload
- expect(pending_job).to be_running
+ context 'when ci_register_job_service_one_by_one is disabled' do
+ before do
+ stub_feature_flags(ci_register_job_service_one_by_one: false)
end
+
+ include_examples 'handles runner assignment'
end
end
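
Note on the restructuring above: the pre-existing assertions are wrapped into a shared example group ('handles runner assignment') and replayed under both states of the ci_register_job_service_one_by_one feature flag. A minimal sketch of that RSpec pattern, with a placeholder example body (only the group name and flag name come from the diff):

RSpec.shared_examples 'handles runner assignment' do
  it 'hands a pending build to the runner' do
    expect(execute(specific_runner)).to eq(pending_job)   # placeholder assertion
  end
end

[true, false].each do |enabled|
  context "when ci_register_job_service_one_by_one is #{enabled ? 'enabled' : 'disabled'}" do
    before do
      stub_feature_flags(ci_register_job_service_one_by_one: enabled)
    end

    include_examples 'handles runner assignment'
  end
end
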
@@ -590,22 +673,14 @@ module Ci
before do
allow(Time).to receive(:now).and_return(current_time)
-
- # Stub defaults for any metrics other than the ones we're testing
- allow(Gitlab::Metrics).to receive(:counter)
- .with(any_args)
- .and_return(Gitlab::Metrics::NullMetric.instance)
- allow(Gitlab::Metrics).to receive(:histogram)
- .with(any_args)
- .and_return(Gitlab::Metrics::NullMetric.instance)
-
# Stub tested metrics
- allow(Gitlab::Metrics).to receive(:counter)
- .with(:job_register_attempts_total, anything)
- .and_return(attempt_counter)
- allow(Gitlab::Metrics).to receive(:histogram)
- .with(:job_queue_duration_seconds, anything, anything, anything)
- .and_return(job_queue_duration_seconds)
+ allow(Gitlab::Ci::Queue::Metrics)
+ .to receive(:attempt_counter)
+ .and_return(attempt_counter)
+
+ allow(Gitlab::Ci::Queue::Metrics)
+ .to receive(:job_queue_duration_seconds)
+ .and_return(job_queue_duration_seconds)
project.update!(shared_runners_enabled: true)
pending_job.update!(created_at: current_time - 3600, queued_at: current_time - 1800)
@@ -655,7 +730,7 @@ module Ci
context 'when shared runner is used' do
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 tag2)) }
let(:expected_shared_runner) { true }
- let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
+ let(:expected_shard) { ::Gitlab::Ci::Queue::Metrics::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { 0 }
let(:expected_jobs_running_for_project_third_job) { 2 }
@@ -694,7 +769,7 @@ module Ci
context 'when specific runner is used' do
let(:runner) { create(:ci_runner, :project, projects: [project], tag_list: %w(tag1 metrics_shard::shard_tag tag2)) }
let(:expected_shared_runner) { false }
- let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
+ let(:expected_shard) { ::Gitlab::Ci::Queue::Metrics::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { '+Inf' }
let(:expected_jobs_running_for_project_third_job) { '+Inf' }
@@ -715,6 +790,46 @@ module Ci
end
end
+ context 'when max queue depth is reached' do
+ let!(:pending_job) { create(:ci_build, :pending, :degenerated, pipeline: pipeline) }
+ let!(:pending_job_2) { create(:ci_build, :pending, :degenerated, pipeline: pipeline) }
+ let!(:pending_job_3) { create(:ci_build, :pending, pipeline: pipeline) }
+
+ before do
+ stub_const("#{described_class}::MAX_QUEUE_DEPTH", 2)
+ end
+
+ context 'when feature is enabled' do
+ before do
+ stub_feature_flags(gitlab_ci_builds_queue_limit: true)
+ end
+
+ it 'returns 409 conflict' do
+ expect(Ci::Build.pending.unstarted.count).to eq 3
+
+ result = described_class.new(specific_runner).execute
+
+ expect(result).not_to be_valid
+ expect(result.build).to be_nil
+ end
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(gitlab_ci_builds_queue_limit: false)
+ end
+
+ it 'returns a valid result' do
+ expect(Ci::Build.pending.unstarted.count).to eq 3
+
+ result = described_class.new(specific_runner).execute
+
+ expect(result).to be_valid
+ expect(result.build).to eq pending_job_3
+ end
+ end
+ end
+
def execute(runner, params = {})
described_class.new(runner).execute(params).build
end
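
The 'max queue depth' contexts above stub MAX_QUEUE_DEPTH to 2, create three pending builds, and expect an invalid result (no build assigned) while the gitlab_ci_builds_queue_limit flag is enabled. A rough sketch of the guard those expectations imply; the constant name comes from the stub_const call, but the predicate below is an assumption, not the service's actual code:

# Illustrative only: bail out early when the pending queue is deeper than the cap.
def max_queue_depth_reached?(builds)
  return false unless Feature.enabled?(:gitlab_ci_builds_queue_limit)

  # Inspect at most MAX_QUEUE_DEPTH + 1 rows so the check stays cheap.
  builds.limit(MAX_QUEUE_DEPTH + 1).count > MAX_QUEUE_DEPTH
end
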
diff --git a/spec/services/ci/update_build_queue_service_spec.rb b/spec/services/ci/update_build_queue_service_spec.rb
index ebccfdc5140..2d9f80a249d 100644
--- a/spec/services/ci/update_build_queue_service_spec.rb
+++ b/spec/services/ci/update_build_queue_service_spec.rb
@@ -26,6 +26,25 @@ RSpec.describe Ci::UpdateBuildQueueService do
end
it_behaves_like 'refreshes runner'
+
+ it 'avoids running redundant queries' do
+ expect(Ci::Runner).not_to receive(:owned_or_instance_wide)
+
+ subject.execute(build)
+ end
+
+ context 'when feature flag ci_reduce_queries_when_ticking_runner_queue is disabled' do
+ before do
+ stub_feature_flags(ci_reduce_queries_when_ticking_runner_queue: false)
+ stub_feature_flags(ci_runners_short_circuit_assignable_for: false)
+ end
+
+ it 'runs redundant queries using `owned_or_instance_wide` scope' do
+ expect(Ci::Runner).to receive(:owned_or_instance_wide).and_call_original
+
+ subject.execute(build)
+ end
+ end
end
end
@@ -97,4 +116,43 @@ RSpec.describe Ci::UpdateBuildQueueService do
it_behaves_like 'does not refresh runner'
end
end
+
+ context 'avoids N+1 queries', :request_store do
+ let!(:build) { create(:ci_build, pipeline: pipeline, tag_list: %w[a b]) }
+ let!(:project_runner) { create(:ci_runner, :project, :online, projects: [project], tag_list: %w[a b c]) }
+
+ context 'when ci_preload_runner_tags and ci_reduce_queries_when_ticking_runner_queue are enabled' do
+ before do
+ stub_feature_flags(
+ ci_reduce_queries_when_ticking_runner_queue: true,
+ ci_preload_runner_tags: true
+ )
+ end
+
+ it 'does execute the same amount of queries regardless of number of runners' do
+ control_count = ActiveRecord::QueryRecorder.new { subject.execute(build) }.count
+
+ create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d])
+
+ expect { subject.execute(build) }.not_to exceed_all_query_limit(control_count)
+ end
+ end
+
+ context 'when ci_preload_runner_tags and ci_reduce_queries_when_ticking_runner_queue are disabled' do
+ before do
+ stub_feature_flags(
+ ci_reduce_queries_when_ticking_runner_queue: false,
+ ci_preload_runner_tags: false
+ )
+ end
+
+ it 'does execute more queries for more runners' do
+ control_count = ActiveRecord::QueryRecorder.new { subject.execute(build) }.count
+
+ create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d])
+
+ expect { subject.execute(build) }.to exceed_all_query_limit(control_count)
+ end
+ end
+ end
end
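
The two 'avoids N+1 queries' contexts above follow the usual QueryRecorder guard: capture a control query count with a single runner, add ten more runners, and assert the count does not grow when the preloading flags are on (and does grow when they are off). The bare shape of that guard, using only names already present in the diff:

it 'does not scale queries with the number of runners' do
  control_count = ActiveRecord::QueryRecorder.new { subject.execute(build) }.count

  # Adding more candidate runners must not add more queries when tags are preloaded.
  create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d])

  expect { subject.execute(build) }.not_to exceed_all_query_limit(control_count)
end
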
diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
index 90956e7b4ea..98963f57341 100644
--- a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
@@ -39,6 +39,8 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute'
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace)
stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME, namespace: namespace)
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace)
+ stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME, namespace: namespace)
+ stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CILIUM_ROLE_BINDING_NAME, namespace: namespace)
stub_kubeclient_get_secret(
api_url,
diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
index a4f018aec0c..11045dfe950 100644
--- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
@@ -147,6 +147,8 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace)
stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME, namespace: namespace)
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace)
+ stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME, namespace: namespace)
+ stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CILIUM_ROLE_BINDING_NAME, namespace: namespace)
end
it 'creates a namespace object' do
@@ -243,6 +245,47 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
)
)
end
+
+ it 'creates a role granting cilium permissions to the service account' do
+ subject
+
+ expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/roles/#{Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME}").with(
+ body: hash_including(
+ metadata: {
+ name: Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME,
+ namespace: namespace
+ },
+ rules: [{
+ apiGroups: %w(cilium.io),
+ resources: %w(ciliumnetworkpolicies),
+ verbs: %w(get list create update patch)
+ }]
+ )
+ )
+ end
+
+ it 'creates a role binding granting cilium permissions to the service account' do
+ subject
+
+ expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{Clusters::Kubernetes::GITLAB_CILIUM_ROLE_BINDING_NAME}").with(
+ body: hash_including(
+ metadata: {
+ name: Clusters::Kubernetes::GITLAB_CILIUM_ROLE_BINDING_NAME,
+ namespace: namespace
+ },
+ roleRef: {
+ apiGroup: 'rbac.authorization.k8s.io',
+ kind: 'Role',
+ name: Clusters::Kubernetes::GITLAB_CILIUM_ROLE_NAME
+ },
+ subjects: [{
+ kind: 'ServiceAccount',
+ name: service_account_name,
+ namespace: namespace
+ }]
+ )
+ )
+ end
end
end
end
diff --git a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
index c375e5a2fa3..40a2f954786 100644
--- a/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/find_or_create_manifest_service_spec.rb
@@ -10,7 +10,12 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
let(:manifest) { dependency_proxy_manifest.file.read }
let(:group) { dependency_proxy_manifest.group }
let(:token) { Digest::SHA256.hexdigest('123') }
- let(:headers) { { 'docker-content-digest' => dependency_proxy_manifest.digest } }
+ let(:headers) do
+ {
+ 'docker-content-digest' => dependency_proxy_manifest.digest,
+ 'content-type' => dependency_proxy_manifest.content_type
+ }
+ end
describe '#execute' do
subject { described_class.new(group, image, tag, token).execute }
@@ -18,22 +23,37 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
context 'when no manifest exists' do
let_it_be(:image) { 'new-image' }
- before do
- stub_manifest_head(image, tag, digest: dependency_proxy_manifest.digest)
- stub_manifest_download(image, tag, headers: headers)
+ shared_examples 'downloading the manifest' do
+ it 'downloads manifest from remote registry if there is no cached one', :aggregate_failures do
+ expect { subject }.to change { group.dependency_proxy_manifests.count }.by(1)
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
+ expect(subject[:manifest]).to be_persisted
+ end
end
- it 'downloads manifest from remote registry if there is no cached one', :aggregate_failures do
- expect { subject }.to change { group.dependency_proxy_manifests.count }.by(1)
- expect(subject[:status]).to eq(:success)
- expect(subject[:manifest]).to be_a(DependencyProxy::Manifest)
- expect(subject[:manifest]).to be_persisted
+ context 'successful head request' do
+ before do
+ stub_manifest_head(image, tag, headers: headers)
+ stub_manifest_download(image, tag, headers: headers)
+ end
+
+ it_behaves_like 'downloading the manifest'
+ end
+
+ context 'failed head request' do
+ before do
+ stub_manifest_head(image, tag, status: :error)
+ stub_manifest_download(image, tag, headers: headers)
+ end
+
+ it_behaves_like 'downloading the manifest'
end
end
context 'when manifest exists' do
before do
- stub_manifest_head(image, tag, digest: dependency_proxy_manifest.digest)
+ stub_manifest_head(image, tag, headers: headers)
end
shared_examples 'using the cached manifest' do
@@ -48,15 +68,17 @@ RSpec.describe DependencyProxy::FindOrCreateManifestService do
context 'when digest is stale' do
let(:digest) { 'new-digest' }
+ let(:content_type) { 'new-content-type' }
before do
- stub_manifest_head(image, tag, digest: digest)
- stub_manifest_download(image, tag, headers: { 'docker-content-digest' => digest })
+ stub_manifest_head(image, tag, headers: { 'docker-content-digest' => digest, 'content-type' => content_type })
+ stub_manifest_download(image, tag, headers: { 'docker-content-digest' => digest, 'content-type' => content_type })
end
it 'downloads the new manifest and updates the existing record', :aggregate_failures do
expect(subject[:status]).to eq(:success)
expect(subject[:manifest]).to eq(dependency_proxy_manifest)
+ expect(subject[:manifest].content_type).to eq(content_type)
expect(subject[:manifest].digest).to eq(digest)
end
end
diff --git a/spec/services/dependency_proxy/head_manifest_service_spec.rb b/spec/services/dependency_proxy/head_manifest_service_spec.rb
index 7c7ebe4d181..9c1e4d650f8 100644
--- a/spec/services/dependency_proxy/head_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/head_manifest_service_spec.rb
@@ -8,12 +8,19 @@ RSpec.describe DependencyProxy::HeadManifestService do
let(:tag) { 'latest' }
let(:token) { Digest::SHA256.hexdigest('123') }
let(:digest) { '12345' }
+ let(:content_type) { 'foo' }
+ let(:headers) do
+ {
+ 'docker-content-digest' => digest,
+ 'content-type' => content_type
+ }
+ end
subject { described_class.new(image, tag, token).execute }
context 'remote request is successful' do
before do
- stub_manifest_head(image, tag, digest: digest)
+ stub_manifest_head(image, tag, headers: headers)
end
it { expect(subject[:status]).to eq(:success) }
diff --git a/spec/services/dependency_proxy/pull_manifest_service_spec.rb b/spec/services/dependency_proxy/pull_manifest_service_spec.rb
index b760839d1fb..b3053174cc0 100644
--- a/spec/services/dependency_proxy/pull_manifest_service_spec.rb
+++ b/spec/services/dependency_proxy/pull_manifest_service_spec.rb
@@ -9,7 +9,10 @@ RSpec.describe DependencyProxy::PullManifestService do
let(:token) { Digest::SHA256.hexdigest('123') }
let(:manifest) { { foo: 'bar' }.to_json }
let(:digest) { '12345' }
- let(:headers) { { 'docker-content-digest' => digest } }
+ let(:content_type) { 'foo' }
+ let(:headers) do
+ { 'docker-content-digest' => digest, 'content-type' => content_type }
+ end
subject { described_class.new(image, tag, token).execute_with_manifest(&method(:check_response)) }
@@ -25,6 +28,7 @@ RSpec.describe DependencyProxy::PullManifestService do
expect(response[:status]).to eq(:success)
expect(response[:file].read).to eq(manifest)
expect(response[:digest]).to eq(digest)
+ expect(response[:content_type]).to eq(content_type)
end
subject
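
Each of the three dependency-proxy specs above now stubs and asserts a content-type header next to docker-content-digest, so the head/pull services are expected to expose the manifest's media type as well as its digest. A hedged sketch of the response shape those assertions describe (the hash keys come from the specs; the HTTP call and variable names are assumptions):

# Illustrative only: surface both digest and media type from the registry response.
response = Gitlab::HTTP.head(manifest_url, headers: auth_headers)

{
  status: :success,
  digest: response.headers['docker-content-digest'],
  content_type: response.headers['content-type']
}
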
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 92488c62315..ae5e8445554 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Deployments::UpdateEnvironmentService do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
- let(:options) { { name: 'production' } }
+ let(:options) { { name: environment_name } }
let(:pipeline) do
create(
:ci_pipeline,
@@ -20,13 +20,14 @@ RSpec.describe Deployments::UpdateEnvironmentService do
pipeline: pipeline,
ref: 'master',
tag: false,
- environment: 'production',
+ environment: environment_name,
options: { environment: options },
project: project)
end
let(:deployment) { job.deployment }
let(:environment) { deployment.environment }
+ let(:environment_name) { 'production' }
subject(:service) { described_class.new(deployment) }
@@ -131,6 +132,66 @@ RSpec.describe Deployments::UpdateEnvironmentService do
end
end
end
+
+ context 'when deployment tier is specified' do
+ let(:environment_name) { 'customer-portal' }
+ let(:options) { { name: environment_name, deployment_tier: 'production' } }
+
+ context 'when tier has already been set' do
+ before do
+ environment.update_column(:tier, Environment.tiers[:other])
+ end
+
+ it 'overwrites the guessed tier by the specified deployment tier' do
+ expect { subject.execute }
+ .to change { environment.reset.tier }.from('other').to('production')
+ end
+ end
+
+ context 'when tier has not been set' do
+ before do
+ environment.update_column(:tier, nil)
+ end
+
+ it 'sets the specified deployment tier' do
+ expect { subject.execute }
+ .to change { environment.reset.tier }.from(nil).to('production')
+ end
+
+ context 'when deployment was created by an external CD system' do
+ before do
+ deployment.update_column(:deployable_id, nil)
+ end
+
+ it 'guesses the deployment tier' do
+ expect { subject.execute }
+ .to change { environment.reset.tier }.from(nil).to('other')
+ end
+ end
+
+ context 'when environment_tier feature flag is disabled' do
+ before do
+ stub_feature_flags(environment_tier: false)
+ end
+
+ it 'does not set the specified deployment tier' do
+ expect { subject.execute }.not_to change { environment.reset.tier }
+ end
+ end
+ end
+ end
+
+ context 'when deployment tier is not specified' do
+ let(:environment_name) { 'customer-portal' }
+ let(:options) { { name: environment_name } }
+
+ it 'guesses the deployment tier' do
+ environment.update_column(:tier, nil)
+
+ expect { subject.execute }
+ .to change { environment.reset.tier }.from(nil).to('other')
+ end
+ end
end
describe '#expanded_environment_url' do
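
The tier contexts above encode a precedence rule: a deployment_tier given in the job's environment options overrides any previously stored tier, guessing from the environment name only happens when no tier is available (for example when the deployment was created by an external CD system and has no deployable job to read options from), and nothing changes while the environment_tier flag is disabled. A compact sketch of that rule; the method and dig path below are assumptions read off the spec, not the service itself:

# Illustrative only: resolve the tier an environment should end up with.
def expected_tier(environment, deployment)
  specified = deployment.deployable&.options&.dig(:environment, :deployment_tier)

  specified || environment.tier || guess_tier_from(environment.name) # e.g. 'customer-portal' => 'other'
end
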
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index 2f9bb72939a..a5fce315d91 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -229,10 +229,10 @@ RSpec.describe Groups::DestroyService do
# will still be executed for the nested group as they fall under the same hierarchy
# and hence we need to account for this scenario.
expect(UserProjectAccessChangedService)
- .to receive(:new).with(shared_with_group.user_ids_for_project_authorizations).and_call_original
+ .to receive(:new).with(shared_with_group.users_ids_of_direct_members).and_call_original
expect(UserProjectAccessChangedService)
- .not_to receive(:new).with(shared_group.user_ids_for_project_authorizations)
+ .not_to receive(:new).with(shared_group.users_ids_of_direct_members)
destroy_group(shared_group, user, false)
end
@@ -246,7 +246,7 @@ RSpec.describe Groups::DestroyService do
it 'makes use of a specific service to update project authorizations' do
expect(UserProjectAccessChangedService)
- .to receive(:new).with(shared_with_group.user_ids_for_project_authorizations).and_call_original
+ .to receive(:new).with(shared_with_group.users_ids_of_direct_members).and_call_original
destroy_group(shared_with_group, user, false)
end
diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb
index fb88433d8f6..df994b9f2a3 100644
--- a/spec/services/groups/group_links/create_service_spec.rb
+++ b/spec/services/groups/group_links/create_service_spec.rb
@@ -74,46 +74,56 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
end
end
- context 'group hierarchies' do
+ context 'project authorizations based on group hierarchies' do
before do
group_parent.add_owner(parent_group_user)
group.add_owner(group_user)
group_child.add_owner(child_group_user)
end
- context 'group user' do
- let(:user) { group_user }
+ context 'project authorizations refresh' do
+ it 'is executed only for the direct members of the group' do
+ expect(UserProjectAccessChangedService).to receive(:new).with(contain_exactly(group_user.id)).and_call_original
- it 'create proper authorizations' do
subject.execute(shared_group)
-
- expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
- expect(Ability.allowed?(user, :read_project, project)).to be_truthy
- expect(Ability.allowed?(user, :read_project, project_child)).to be_truthy
end
end
- context 'parent group user' do
- let(:user) { parent_group_user }
+ context 'project authorizations' do
+ context 'group user' do
+ let(:user) { group_user }
- it 'create proper authorizations' do
- subject.execute(shared_group)
+ it 'create proper authorizations' do
+ subject.execute(shared_group)
- expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
- expect(Ability.allowed?(user, :read_project, project)).to be_falsey
- expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project)).to be_truthy
+ expect(Ability.allowed?(user, :read_project, project_child)).to be_truthy
+ end
end
- end
- context 'child group user' do
- let(:user) { child_group_user }
+ context 'parent group user' do
+ let(:user) { parent_group_user }
- it 'create proper authorizations' do
- subject.execute(shared_group)
+ it 'create proper authorizations' do
+ subject.execute(shared_group)
+
+ expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey
+ end
+ end
+
+ context 'child group user' do
+ let(:user) { child_group_user }
+
+ it 'create proper authorizations' do
+ subject.execute(shared_group)
- expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
- expect(Ability.allowed?(user, :read_project, project)).to be_falsey
- expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project_parent)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project)).to be_falsey
+ expect(Ability.allowed?(user, :read_project, project_child)).to be_falsey
+ end
end
end
end
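
The reorganised contexts above split two concerns: the authorization refresh itself is now expected to run only for the shared-with group's direct members (UserProjectAccessChangedService receives exactly group_user.id), while the Ability checks still cover parent- and child-group users. The narrower refresh those expectations describe, assuming the same users_ids_of_direct_members scope used elsewhere in this diff:

# Illustrative only: refresh project authorizations for direct members of the shared-with group.
UserProjectAccessChangedService
  .new(shared_with_group.users_ids_of_direct_members)
  .execute
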
diff --git a/spec/services/groups/group_links/destroy_service_spec.rb b/spec/services/groups/group_links/destroy_service_spec.rb
index 22fe8a1d58b..97fe23e9147 100644
--- a/spec/services/groups/group_links/destroy_service_spec.rb
+++ b/spec/services/groups/group_links/destroy_service_spec.rb
@@ -47,8 +47,8 @@ RSpec.describe Groups::GroupLinks::DestroyService, '#execute' do
it 'updates project authorization once per group' do
expect(GroupGroupLink).to receive(:delete).and_call_original
- expect(group).to receive(:refresh_members_authorized_projects).once
- expect(another_group).to receive(:refresh_members_authorized_projects).once
+ expect(group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true).once
+ expect(another_group).to receive(:refresh_members_authorized_projects).with(direct_members_only: true).once
subject.execute(links)
end
diff --git a/spec/services/groups/group_links/update_service_spec.rb b/spec/services/groups/group_links/update_service_spec.rb
index e4ff83d7926..436cdf89a0f 100644
--- a/spec/services/groups/group_links/update_service_spec.rb
+++ b/spec/services/groups/group_links/update_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Groups::GroupLinks::UpdateService, '#execute' do
let_it_be(:group) { create(:group, :private) }
let_it_be(:shared_group) { create(:group, :private) }
let_it_be(:project) { create(:project, group: shared_group) }
- let(:group_member) { create(:user) }
+ let(:group_member_user) { create(:user) }
let!(:link) { create(:group_group_link, shared_group: shared_group, shared_with_group: group) }
let(:expiry_date) { 1.month.from_now.to_date }
@@ -20,7 +20,7 @@ RSpec.describe Groups::GroupLinks::UpdateService, '#execute' do
subject { described_class.new(link).execute(group_link_params) }
before do
- group.add_developer(group_member)
+ group.add_developer(group_member_user)
end
it 'updates existing link' do
@@ -36,11 +36,11 @@ RSpec.describe Groups::GroupLinks::UpdateService, '#execute' do
end
it 'updates project permissions' do
- expect { subject }.to change { group_member.can?(:create_release, project) }.from(true).to(false)
+ expect { subject }.to change { group_member_user.can?(:create_release, project) }.from(true).to(false)
end
it 'executes UserProjectAccessChangedService' do
- expect_next_instance_of(UserProjectAccessChangedService) do |service|
+ expect_next_instance_of(UserProjectAccessChangedService, [group_member_user.id]) do |service|
expect(service).to receive(:execute)
end
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index 0c7765dcd38..ad5c4364deb 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Groups::ImportExport::ImportService do
end
context 'with group_import_ndjson feature flag disabled' do
- let(:user) { create(:admin) }
+ let(:user) { create(:user) }
let(:group) { create(:group) }
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
@@ -63,6 +63,8 @@ RSpec.describe Groups::ImportExport::ImportService do
before do
stub_feature_flags(group_import_ndjson: false)
+ group.add_owner(user)
+
ImportExportUpload.create!(group: group, import_file: import_file)
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
@@ -95,7 +97,7 @@ RSpec.describe Groups::ImportExport::ImportService do
end
context 'when importing a ndjson export' do
- let(:user) { create(:admin) }
+ let(:user) { create(:user) }
let(:group) { create(:group) }
let(:service) { described_class.new(group: group, user: user) }
let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
@@ -115,6 +117,10 @@ RSpec.describe Groups::ImportExport::ImportService do
end
context 'when user has correct permissions' do
+ before do
+ group.add_owner(user)
+ end
+
it 'imports group structure successfully' do
expect(subject).to be_truthy
end
@@ -147,8 +153,6 @@ RSpec.describe Groups::ImportExport::ImportService do
end
context 'when user does not have correct permissions' do
- let(:user) { create(:user) }
-
it 'logs the error and raises an exception' do
expect(import_logger).to receive(:error).with(
group_id: group.id,
@@ -188,6 +192,10 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when there are errors with the sub-relations' do
let(:import_file) { fixture_file_upload('spec/fixtures/group_export_invalid_subrelations.tar.gz') }
+ before do
+ group.add_owner(user)
+ end
+
it 'successfully imports the group' do
expect(subject).to be_truthy
end
@@ -207,7 +215,7 @@ RSpec.describe Groups::ImportExport::ImportService do
end
context 'when importing a json export' do
- let(:user) { create(:admin) }
+ let(:user) { create(:user) }
let(:group) { create(:group) }
let(:service) { described_class.new(group: group, user: user) }
let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
@@ -227,6 +235,10 @@ RSpec.describe Groups::ImportExport::ImportService do
end
context 'when user has correct permissions' do
+ before do
+ group.add_owner(user)
+ end
+
it 'imports group structure successfully' do
expect(subject).to be_truthy
end
@@ -259,8 +271,6 @@ RSpec.describe Groups::ImportExport::ImportService do
end
context 'when user does not have correct permissions' do
- let(:user) { create(:user) }
-
it 'logs the error and raises an exception' do
expect(import_logger).to receive(:error).with(
group_id: group.id,
@@ -300,6 +310,10 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when there are errors with the sub-relations' do
let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export_invalid_subrelations.tar.gz') }
+ before do
+ group.add_owner(user)
+ end
+
it 'successfully imports the group' do
expect(subject).to be_truthy
end
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 408d7767254..776df01d399 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -54,6 +54,62 @@ RSpec.describe Import::GithubService do
expect { subject.execute(access_params, :github) }.to raise_error(exception)
end
+
+ context 'repository size validation' do
+ let(:repository_double) { double(name: 'repository', size: 99) }
+
+ before do
+ expect(client).to receive(:repository).and_return(repository_double)
+
+ allow_next_instance_of(Gitlab::LegacyGithubImport::ProjectCreator) do |creator|
+ allow(creator).to receive(:execute).and_return(double(persisted?: true))
+ end
+ end
+
+ context 'when there is no repository size limit defined' do
+ it 'skips the check and succeeds' do
+ expect(subject.execute(access_params, :github)).to include(status: :success)
+ end
+ end
+
+ context 'when the target namespace repository size limit is defined' do
+ let_it_be(:group) { create(:group, repository_size_limit: 100) }
+
+ before do
+ params[:target_namespace] = group.full_path
+ end
+
+ it 'succeeds when the repository is smaller than the limit' do
+ expect(subject.execute(access_params, :github)).to include(status: :success)
+ end
+
+ it 'returns error when the repository is larger than the limit' do
+ allow(repository_double).to receive(:size).and_return(101)
+
+ expect(subject.execute(access_params, :github)).to include(size_limit_error)
+ end
+ end
+
+ context 'when target namespace repository limit is not defined' do
+ let_it_be(:group) { create(:group) }
+
+ before do
+ stub_application_setting(repository_size_limit: 100)
+ end
+
+ context 'when application size limit is defined' do
+ it 'succeeds when the repository is smaller than the limit' do
+ expect(subject.execute(access_params, :github)).to include(status: :success)
+ end
+
+ it 'returns error when the repository is larger than the limit' do
+ allow(repository_double).to receive(:size).and_return(101)
+
+ expect(subject.execute(access_params, :github)).to include(size_limit_error)
+ end
+ end
+ end
+ end
end
context 'when remove_legacy_github_client feature flag is enabled' do
@@ -71,4 +127,12 @@ RSpec.describe Import::GithubService do
include_examples 'handles errors', Gitlab::LegacyGithubImport::Client
end
+
+ def size_limit_error
+ {
+ status: :error,
+ http_status: :unprocessable_entity,
+ message: '"repository" size (101 Bytes) is larger than the limit of 100 Bytes.'
+ }
+ end
end
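
The 'repository size validation' contexts above fix both the source of the limit (the target namespace's repository_size_limit when set, otherwise the instance-wide application setting) and the 422 payload returned when the GitHub repository exceeds it. A hedged sketch of that check; the precedence and message format come from the spec, while the helper names are assumptions:

# Illustrative only: reject imports whose source repository exceeds the configured limit.
def validate_repository_size!(repository, target_namespace)
  limit = target_namespace&.repository_size_limit || Gitlab::CurrentSettings.repository_size_limit

  return if limit.to_i == 0 || repository.size <= limit

  error(
    format('"%s" size (%s) is larger than the limit of %s.',
           repository.name,
           ActiveSupport::NumberHelper.number_to_human_size(repository.size),
           ActiveSupport::NumberHelper.number_to_human_size(limit)),
    :unprocessable_entity
  )
end
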
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index 79543fe9f5d..c749f282cd3 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -31,23 +31,6 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
- shared_examples 'updates iterations' do
- it 'succeeds' do
- result = bulk_update(issuables, sprint_id: iteration.id)
-
- expect(result.success?).to be_truthy
- expect(result.payload[:count]).to eq(issuables.count)
- end
-
- it 'updates the issuables iteration' do
- bulk_update(issuables, sprint_id: iteration.id)
-
- issuables.each do |issuable|
- expect(issuable.reload.iteration).to eq(iteration)
- end
- end
- end
-
shared_examples 'updating labels' do
def create_issue_with_labels(labels)
create(:labeled_issue, project: project, labels: labels)
@@ -250,21 +233,6 @@ RSpec.describe Issuable::BulkUpdateService do
it_behaves_like 'updates milestones'
end
- describe 'updating iterations' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
- let_it_be(:issuables) { [create(:issue, project: project)] }
- let_it_be(:iteration) { create(:iteration, group: group) }
-
- let(:parent) { project }
-
- before do
- group.add_reporter(user)
- end
-
- it_behaves_like 'updates iterations'
- end
-
describe 'updating labels' do
let(:bug) { create(:label, project: project) }
let(:regression) { create(:label, project: project) }
@@ -347,19 +315,6 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
- describe 'updating iterations' do
- let_it_be(:iteration) { create(:iteration, group: group) }
- let_it_be(:project) { create(:project, :repository, group: group) }
-
- context 'when issues' do
- let_it_be(:issue1) { create(:issue, project: project) }
- let_it_be(:issue2) { create(:issue, project: project) }
- let_it_be(:issuables) { [issue1, issue2] }
-
- it_behaves_like 'updates iterations'
- end
- end
-
describe 'updating labels' do
let(:project) { create(:project, :repository, group: group) }
let(:bug) { create(:group_label, group: group) }
diff --git a/spec/services/issuable/process_assignees_spec.rb b/spec/services/issuable/process_assignees_spec.rb
new file mode 100644
index 00000000000..876c84957cc
--- /dev/null
+++ b/spec/services/issuable/process_assignees_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issuable::ProcessAssignees do
+ describe '#execute' do
+ it 'returns assignee_ids when assignee_ids are specified' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: %w(1 3 9),
+ extra_assignee_ids: %w(2 5 12))
+ result = process.execute
+
+ expect(result.sort).to eq(%w(5 7 9).sort)
+ end
+
+ it 'combines other ids when assignee_ids is empty' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: [],
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12))
+ result = process.execute
+
+ expect(result.sort).to eq(%w(1 2 3 5 6 12).sort)
+ end
+
+ it 'combines other ids when assignee_ids is nil' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: nil,
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12))
+ result = process.execute
+
+ expect(result.sort).to eq(%w(1 2 3 5 6 12).sort)
+ end
+
+ it 'combines other ids when assignee_ids and add_assignee_ids are nil' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: nil,
+ add_assignee_ids: nil,
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12))
+ result = process.execute
+
+ expect(result.sort).to eq(%w(1 2 3 5 12).sort)
+ end
+
+ it 'combines other ids when assignee_ids and remove_assignee_ids are nil' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: nil,
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: nil,
+ existing_assignee_ids: %w(1 3 11),
+ extra_assignee_ids: %w(2 5 12))
+ result = process.execute
+
+ expect(result.sort).to eq(%w(1 2 4 3 5 6 11 12).sort)
+ end
+
+ it 'combines ids when only add_assignee_ids and remove_assignee_ids are passed' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: nil,
+ add_assignee_ids: %w(2 4 6),
+ remove_assignee_ids: %w(4 7 11))
+ result = process.execute
+
+ expect(result.sort).to eq(%w(2 6).sort)
+ end
+ end
+end
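
The new spec above fixes the contract for Issuable::ProcessAssignees#execute: a present assignee_ids list wins outright (even over remove_assignee_ids), otherwise the result is the union of existing, extra and add ids minus the remove ids, with nil arguments treated as empty. A minimal stand-in that satisfies every example above (illustrative; not the shipped class):

# Plain-Ruby sketch of the behaviour specified above.
class ProcessAssigneesSketch
  def initialize(assignee_ids:, add_assignee_ids: nil, remove_assignee_ids: nil,
                 existing_assignee_ids: nil, extra_assignee_ids: nil)
    @assignee_ids = assignee_ids
    @add_assignee_ids = add_assignee_ids
    @remove_assignee_ids = remove_assignee_ids
    @existing_assignee_ids = existing_assignee_ids
    @extra_assignee_ids = extra_assignee_ids
  end

  def execute
    # An explicitly supplied, non-empty assignee list replaces everything else.
    return @assignee_ids unless @assignee_ids.nil? || @assignee_ids.empty?

    updated = Array(@existing_assignee_ids) | Array(@extra_assignee_ids)
    updated |= Array(@add_assignee_ids)
    updated - Array(@remove_assignee_ids)
  end
end

ProcessAssigneesSketch.new(assignee_ids: nil,
                           add_assignee_ids: %w(2 4 6),
                           remove_assignee_ids: %w(4 7 11)).execute # => ["2", "6"]
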
diff --git a/spec/services/issues/clone_service_spec.rb b/spec/services/issues/clone_service_spec.rb
index 512a60b1382..9ceb4ffeec5 100644
--- a/spec/services/issues/clone_service_spec.rb
+++ b/spec/services/issues/clone_service_spec.rb
@@ -280,6 +280,12 @@ RSpec.describe Issues::CloneService do
expect(new_issue.designs.first.notes.size).to eq(1)
end
end
+
+ context 'issue relative position' do
+ let(:subject) { clone_service.execute(old_issue, new_project) }
+
+ it_behaves_like 'copy or reset relative position'
+ end
end
describe 'clone permissions' do
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index e42e9722297..d548e5ee74a 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -286,6 +286,12 @@ RSpec.describe Issues::CreateService do
issue
end
+
+ it 'schedules a namespace onboarding create action worker' do
+ expect(Namespaces::OnboardingIssueCreatedWorker).to receive(:perform_async).with(project.namespace.id)
+
+ issue
+ end
end
context 'issue create service' do
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 9b8d21bb8eb..eb124f07900 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -244,6 +244,12 @@ RSpec.describe Issues::MoveService do
expect(new_issue.designs.first.notes.size).to eq(1)
end
end
+
+ context 'issue relative position' do
+ let(:subject) { move_service.execute(old_issue, new_project) }
+
+ it_behaves_like 'copy or reset relative position'
+ end
end
describe 'move permissions' do
diff --git a/spec/services/labels/promote_service_spec.rb b/spec/services/labels/promote_service_spec.rb
index 15d53857f33..81c24b26c9f 100644
--- a/spec/services/labels/promote_service_spec.rb
+++ b/spec/services/labels/promote_service_spec.rb
@@ -4,9 +4,9 @@ require 'spec_helper'
RSpec.describe Labels::PromoteService do
describe '#execute' do
- let!(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
- context 'project without group' do
+ context 'without a group' do
let!(:project_1) { create(:project) }
let!(:project_label_1_1) { create(:label, project: project_1) }
@@ -18,40 +18,40 @@ RSpec.describe Labels::PromoteService do
end
end
- context 'project with group' do
- let!(:promoted_label_name) { "Promoted Label" }
- let!(:untouched_label_name) { "Untouched Label" }
- let!(:promoted_description) { "Promoted Description" }
- let!(:promoted_color) { "#0000FF" }
- let!(:label_2_1_priority) { 1 }
- let!(:label_3_1_priority) { 2 }
+ context 'with a group' do
+ let_it_be(:promoted_label_name) { "Promoted Label" }
+ let_it_be(:untouched_label_name) { "Untouched Label" }
+ let_it_be(:promoted_description) { "Promoted Description" }
+ let_it_be(:promoted_color) { "#0000FF" }
+ let_it_be(:label_2_1_priority) { 1 }
+ let_it_be(:label_3_1_priority) { 2 }
- let!(:group_1) { create(:group) }
- let!(:group_2) { create(:group) }
+ let_it_be(:group_1) { create(:group) }
+ let_it_be(:group_2) { create(:group) }
- let!(:project_1) { create(:project, namespace: group_1) }
- let!(:project_2) { create(:project, namespace: group_1) }
- let!(:project_3) { create(:project, namespace: group_1) }
- let!(:project_4) { create(:project, namespace: group_2) }
+ let_it_be(:project_1) { create(:project, :repository, namespace: group_1) }
+ let_it_be(:project_2) { create(:project, :repository, namespace: group_1) }
+ let_it_be(:project_3) { create(:project, :repository, namespace: group_1) }
+ let_it_be(:project_4) { create(:project, :repository, namespace: group_2) }
# Labels/issues can't be lazily created so we might as well eager initialize
# all other objects too since we use them inside
- let!(:project_label_1_1) { create(:label, project: project_1, name: promoted_label_name, color: promoted_color, description: promoted_description) }
- let!(:project_label_1_2) { create(:label, project: project_1, name: untouched_label_name) }
- let!(:project_label_2_1) { create(:label, project: project_2, priority: label_2_1_priority, name: promoted_label_name, color: "#FF0000") }
- let!(:project_label_3_1) { create(:label, project: project_3, priority: label_3_1_priority, name: promoted_label_name) }
- let!(:project_label_3_2) { create(:label, project: project_3, priority: 1, name: untouched_label_name) }
- let!(:project_label_4_1) { create(:label, project: project_4, name: promoted_label_name) }
+ let_it_be(:project_label_1_1) { create(:label, project: project_1, name: promoted_label_name, color: promoted_color, description: promoted_description) }
+ let_it_be(:project_label_1_2) { create(:label, project: project_1, name: untouched_label_name) }
+ let_it_be(:project_label_2_1) { create(:label, project: project_2, priority: label_2_1_priority, name: promoted_label_name, color: "#FF0000") }
+ let_it_be(:project_label_3_1) { create(:label, project: project_3, priority: label_3_1_priority, name: promoted_label_name) }
+ let_it_be(:project_label_3_2) { create(:label, project: project_3, priority: 1, name: untouched_label_name) }
+ let_it_be(:project_label_4_1) { create(:label, project: project_4, name: promoted_label_name) }
- let!(:issue_1_1) { create(:labeled_issue, project: project_1, labels: [project_label_1_1, project_label_1_2]) }
- let!(:issue_1_2) { create(:labeled_issue, project: project_1, labels: [project_label_1_2]) }
- let!(:issue_2_1) { create(:labeled_issue, project: project_2, labels: [project_label_2_1]) }
- let!(:issue_4_1) { create(:labeled_issue, project: project_4, labels: [project_label_4_1]) }
+ let_it_be(:issue_1_1) { create(:labeled_issue, project: project_1, labels: [project_label_1_1, project_label_1_2]) }
+ let_it_be(:issue_1_2) { create(:labeled_issue, project: project_1, labels: [project_label_1_2]) }
+ let_it_be(:issue_2_1) { create(:labeled_issue, project: project_2, labels: [project_label_2_1]) }
+ let_it_be(:issue_4_1) { create(:labeled_issue, project: project_4, labels: [project_label_4_1]) }
- let!(:merge_3_1) { create(:labeled_merge_request, source_project: project_3, target_project: project_3, labels: [project_label_3_1, project_label_3_2]) }
+ let_it_be(:merge_3_1) { create(:labeled_merge_request, source_project: project_3, target_project: project_3, labels: [project_label_3_1, project_label_3_2]) }
- let!(:issue_board_2_1) { create(:board, project: project_2) }
- let!(:issue_board_list_2_1) { create(:list, board: issue_board_2_1, label: project_label_2_1) }
+ let_it_be(:issue_board_2_1) { create(:board, project: project_2) }
+ let_it_be(:issue_board_list_2_1) { create(:list, board: issue_board_2_1, label: project_label_2_1) }
let(:new_label) { group_1.labels.find_by(title: promoted_label_name) }
@@ -82,8 +82,8 @@ RSpec.describe Labels::PromoteService do
expect { service.execute(project_label_1_1) }.to change { Subscription.count }.from(4).to(3)
- expect(new_label.subscribed?(user)).to be_truthy
- expect(new_label.subscribed?(user2)).to be_truthy
+ expect(new_label).to be_subscribed(user)
+ expect(new_label).to be_subscribed(user2)
end
it 'recreates priorities' do
@@ -165,12 +165,12 @@ RSpec.describe Labels::PromoteService do
service.execute(project_label_1_1)
Label.reflect_on_all_associations.each do |association|
- expect(project_label_1_1.send(association.name).any?).to be_falsey
+ expect(project_label_1_1.send(association.name).reset).not_to be_any
end
end
end
- context 'if there is an existing identical group label' do
+ context 'when there is an existing identical group label' do
let!(:existing_group_label) { create(:group_label, group: group_1, title: project_label_1_1.title ) }
it 'uses the existing group label' do
@@ -187,7 +187,7 @@ RSpec.describe Labels::PromoteService do
it_behaves_like 'promoting a project label to a group label'
end
- context 'if there is no existing identical group label' do
+ context 'when there is no existing identical group label' do
let(:existing_group_label) { nil }
it 'recreates the label as a group label' do
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index 08cdf0d3ae1..cced93896a5 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -2,76 +2,155 @@
require 'spec_helper'
-RSpec.describe Members::InviteService do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let(:project_user) { create(:user) }
-
- before do
- project.add_maintainer(user)
+RSpec.describe Members::InviteService, :aggregate_failures do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.owner }
+ let_it_be(:project_user) { create(:user) }
+ let(:params) { {} }
+ let(:base_params) { { access_level: Gitlab::Access::GUEST } }
+
+ subject(:result) { described_class.new(user, base_params.merge(params)).execute(project) }
+
+ context 'when email is previously unused by current members' do
+ let(:params) { { email: 'email@example.org' } }
+
+ it 'successfully creates a member' do
+ expect { result }.to change(ProjectMember, :count).by(1)
+ expect(result[:status]).to eq(:success)
+ end
end
- it 'adds an existing user to members' do
- params = { email: project_user.email.to_s, access_level: Gitlab::Access::GUEST }
- result = described_class.new(user, params).execute(project)
+ context 'when emails are passed as an array' do
+ let(:params) { { email: %w[email@example.org email2@example.org] } }
- expect(result[:status]).to eq(:success)
- expect(project.users).to include project_user
+ it 'successfully creates members' do
+ expect { result }.to change(ProjectMember, :count).by(2)
+ expect(result[:status]).to eq(:success)
+ end
end
- it 'creates a new user for an unknown email address' do
- params = { email: 'email@example.org', access_level: Gitlab::Access::GUEST }
- result = described_class.new(user, params).execute(project)
+ context 'when emails are passed as an empty string' do
+ let(:params) { { email: '' } }
- expect(result[:status]).to eq(:success)
+ it 'returns an error' do
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Email cannot be blank')
+ end
end
- it 'limits the number of emails to 100' do
- emails = Array.new(101).map { |n| "email#{n}@example.com" }
- params = { email: emails, access_level: Gitlab::Access::GUEST }
+ context 'when email param is not included' do
+ it 'returns an error' do
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Email cannot be blank')
+ end
+ end
- result = described_class.new(user, params).execute(project)
+ context 'when email is not a valid email' do
+ let(:params) { { email: '_bogus_' } }
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Too many users specified (limit is 100)')
+ it 'returns an error' do
+ expect { result }.not_to change(ProjectMember, :count)
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]['_bogus_']).to eq("Invite email is invalid")
+ end
end
- it 'does not invite an invalid email' do
- params = { email: project_user.id.to_s, access_level: Gitlab::Access::GUEST }
- result = described_class.new(user, params).execute(project)
+ context 'when duplicate email addresses are passed' do
+ let(:params) { { email: 'email@example.org,email@example.org' } }
+
+ it 'only creates one member per unique address' do
+ expect { result }.to change(ProjectMember, :count).by(1)
+ expect(result[:status]).to eq(:success)
+ end
+ end
- expect(result[:status]).to eq(:error)
- expect(result[:message][project_user.id.to_s]).to eq("Invite email is invalid")
- expect(project.users).not_to include project_user
+ context 'when observing email limits' do
+ let_it_be(:emails) { Array(1..101).map { |n| "email#{n}@example.com" } }
+
+ context 'when over the allowed default limit of emails' do
+ let(:params) { { email: emails } }
+
+ it 'limits the number of emails to 100' do
+ expect { result }.not_to change(ProjectMember, :count)
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Too many users specified (limit is 100)')
+ end
+ end
+
+ context 'when over the allowed custom limit of emails' do
+ let(:params) { { email: 'email@example.org,email2@example.org', limit: 1 } }
+
+ it 'limits the number of emails to the limit supplied' do
+ expect { result }.not_to change(ProjectMember, :count)
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Too many users specified (limit is 1)')
+ end
+ end
+
+ context 'when limit allowed is disabled via limit param' do
+ let(:params) { { email: emails, limit: -1 } }
+
+ it 'does not limit number of emails' do
+ expect { result }.to change(ProjectMember, :count).by(101)
+ expect(result[:status]).to eq(:success)
+ end
+ end
end
- it 'does not invite to an invalid access level' do
- params = { email: project_user.email, access_level: -1 }
- result = described_class.new(user, params).execute(project)
+ context 'when email belongs to an existing user' do
+ let(:params) { { email: project_user.email } }
- expect(result[:status]).to eq(:error)
- expect(result[:message][project_user.email]).to eq("Access level is not included in the list")
+ it 'adds an existing user to members' do
+ expect { result }.to change(ProjectMember, :count).by(1)
+ expect(result[:status]).to eq(:success)
+ expect(project.users).to include project_user
+ end
end
- it 'does not add a member with an existing invite' do
- invited_member = create(:project_member, :invited, project: project)
+ context 'when access level is not valid' do
+ let(:params) { { email: project_user.email, access_level: -1 } }
- params = { email: invited_member.invite_email,
- access_level: Gitlab::Access::GUEST }
- result = described_class.new(user, params).execute(project)
+ it 'returns an error' do
+ expect { result }.not_to change(ProjectMember, :count)
+ expect(result[:status]).to eq(:error)
+ expect(result[:message][project_user.email]).to eq("Access level is not included in the list")
+ end
+ end
+
+ context 'when invite already exists for an included email' do
+ let!(:invited_member) { create(:project_member, :invited, project: project) }
+ let(:params) { { email: "#{invited_member.invite_email},#{project_user.email}" } }
- expect(result[:status]).to eq(:error)
- expect(result[:message][invited_member.invite_email]).to eq("Member already invited to #{project.name}")
+ it 'adds new email and returns an error for the already invited email' do
+ expect { result }.to change(ProjectMember, :count).by(1)
+ expect(result[:status]).to eq(:error)
+ expect(result[:message][invited_member.invite_email]).to eq("Member already invited to #{project.name}")
+ expect(project.users).to include project_user
+ end
end
- it 'does not add a member with an access_request' do
- requested_member = create(:project_member, :access_request, project: project)
+ context 'when access request already exists for an included email' do
+ let!(:requested_member) { create(:project_member, :access_request, project: project) }
+ let(:params) { { email: "#{requested_member.user.email},#{project_user.email}" } }
+
+    it 'adds new email and returns an error for the email with an existing access request' do
+ expect { result }.to change(ProjectMember, :count).by(1)
+ expect(result[:status]).to eq(:error)
+ expect(result[:message][requested_member.user.email])
+ .to eq("Member cannot be invited because they already requested to join #{project.name}")
+ expect(project.users).to include project_user
+ end
+ end
- params = { email: requested_member.user.email,
- access_level: Gitlab::Access::GUEST }
- result = described_class.new(user, params).execute(project)
+ context 'when email is already a member on the project' do
+ let!(:existing_member) { create(:project_member, :guest, project: project) }
+ let(:params) { { email: "#{existing_member.user.email},#{project_user.email}" } }
- expect(result[:status]).to eq(:error)
- expect(result[:message][requested_member.user.email]).to eq("Member cannot be invited because they already requested to join #{project.name}")
+    it 'adds new email and returns an error for the email of the existing member' do
+ expect { result }.to change(ProjectMember, :count).by(1)
+ expect(result[:status]).to eq(:error)
+ expect(result[:message][existing_member.user.email]).to eq("Already a member of #{project.name}")
+ expect(project.users).to include project_user
+ end
end
end
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
index f21feb70bc5..dce351d8a31 100644
--- a/spec/services/merge_requests/after_create_service_spec.rb
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -32,6 +32,10 @@ RSpec.describe MergeRequests::AfterCreateService do
.to receive(:track_create_mr_action)
.with(user: merge_request.author)
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_mr_including_ci_config)
+ .with(user: merge_request.author, merge_request: merge_request)
+
execute_service
end
@@ -67,5 +71,27 @@ RSpec.describe MergeRequests::AfterCreateService do
it_behaves_like 'records an onboarding progress action', :merge_request_created do
let(:namespace) { merge_request.target_project.namespace }
end
+
+ context 'when merge request is in unchecked state' do
+ before do
+ merge_request.mark_as_unchecked!
+ execute_service
+ end
+
+ it 'does not change its state' do
+ expect(merge_request.reload).to be_unchecked
+ end
+ end
+
+ context 'when merge request is in preparing state' do
+ before do
+ merge_request.mark_as_preparing!
+ execute_service
+ end
+
+ it 'marks the merge request as unchecked' do
+ expect(merge_request.reload).to be_unchecked
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index 22b3456708f..8adf6d69f73 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -19,8 +19,21 @@ RSpec.describe MergeRequests::BuildService do
let(:label_ids) { [] }
let(:merge_request) { service.execute }
let(:compare) { double(:compare, commits: commits) }
- let(:commit_1) { double(:commit_1, sha: 'f00ba7', safe_message: "Initial commit\n\nCreate the app") }
- let(:commit_2) { double(:commit_2, sha: 'f00ba7', safe_message: 'This is a bad commit message!') }
+ let(:commit_1) do
+ double(:commit_1, sha: 'f00ba6', safe_message: 'Initial commit',
+ gitaly_commit?: false, id: 'f00ba6', parent_ids: ['f00ba5'])
+ end
+
+ let(:commit_2) do
+ double(:commit_2, sha: 'f00ba7', safe_message: "Closes #1234 Second commit\n\nCreate the app",
+ gitaly_commit?: false, id: 'f00ba7', parent_ids: ['f00ba6'])
+ end
+
+ let(:commit_3) do
+ double(:commit_3, sha: 'f00ba8', safe_message: 'This is a bad commit message!',
+ gitaly_commit?: false, id: 'f00ba8', parent_ids: ['f00ba7'])
+ end
+
let(:commits) { nil }
let(:params) do
@@ -47,6 +60,7 @@ RSpec.describe MergeRequests::BuildService do
allow(CompareService).to receive_message_chain(:new, :execute).and_return(compare)
allow(project).to receive(:commit).and_return(commit_1)
allow(project).to receive(:commit).and_return(commit_2)
+ allow(project).to receive(:commit).and_return(commit_3)
end
shared_examples 'allows the merge request to be created' do
@@ -137,7 +151,7 @@ RSpec.describe MergeRequests::BuildService do
context 'when target branch is missing' do
let(:target_branch) { nil }
- let(:commits) { Commit.decorate([commit_1], project) }
+ let(:commits) { Commit.decorate([commit_2], project) }
before do
stub_compare
@@ -199,8 +213,8 @@ RSpec.describe MergeRequests::BuildService do
end
context 'one commit in the diff' do
- let(:commits) { Commit.decorate([commit_1], project) }
- let(:commit_description) { commit_1.safe_message.split(/\n+/, 2).last }
+ let(:commits) { Commit.decorate([commit_2], project) }
+ let(:commit_description) { commit_2.safe_message.split(/\n+/, 2).last }
before do
stub_compare
@@ -209,7 +223,7 @@ RSpec.describe MergeRequests::BuildService do
it_behaves_like 'allows the merge request to be created'
it 'uses the title of the commit as the title of the merge request' do
- expect(merge_request.title).to eq(commit_1.safe_message.split("\n").first)
+ expect(merge_request.title).to eq(commit_2.safe_message.split("\n").first)
end
it 'uses the description of the commit as the description of the merge request' do
@@ -225,10 +239,10 @@ RSpec.describe MergeRequests::BuildService do
end
context 'commit has no description' do
- let(:commits) { Commit.decorate([commit_2], project) }
+ let(:commits) { Commit.decorate([commit_3], project) }
it 'uses the title of the commit as the title of the merge request' do
- expect(merge_request.title).to eq(commit_2.safe_message)
+ expect(merge_request.title).to eq(commit_3.safe_message)
end
it 'sets the description to nil' do
@@ -257,7 +271,7 @@ RSpec.describe MergeRequests::BuildService do
end
it 'uses the title of the commit as the title of the merge request' do
- expect(merge_request.title).to eq('Initial commit')
+ expect(merge_request.title).to eq('Closes #1234 Second commit')
end
it 'appends the closing description' do
@@ -310,8 +324,8 @@ RSpec.describe MergeRequests::BuildService do
end
end
- context 'more than one commit in the diff' do
- let(:commits) { Commit.decorate([commit_1, commit_2], project) }
+ context 'no multi-line commit messages in the diff' do
+ let(:commits) { Commit.decorate([commit_1, commit_3], project) }
before do
stub_compare
@@ -365,6 +379,55 @@ RSpec.describe MergeRequests::BuildService do
end
end
end
+ end
+
+ context 'a multi-line commit message in the diff' do
+ let(:commits) { Commit.decorate([commit_1, commit_2, commit_3], project) }
+
+ before do
+ stub_compare
+ end
+
+ it_behaves_like 'allows the merge request to be created'
+
+ it 'uses the first line of the first multi-line commit message as the title' do
+ expect(merge_request.title).to eq('Closes #1234 Second commit')
+ end
+
+ it 'adds the remaining lines of the first multi-line commit message as the description' do
+ expect(merge_request.description).to eq('Create the app')
+ end
+
+ context 'when the source branch matches an issue' do
+ where(:issue_tracker, :source_branch, :title, :closing_message) do
+ :jira | 'FOO-123-fix-issue' | 'Resolve FOO-123 "Fix issue"' | 'Closes FOO-123'
+ :jira | 'fix-issue' | 'Fix issue' | nil
+ :custom_issue_tracker | '123-fix-issue' | 'Resolve #123 "Fix issue"' | 'Closes #123'
+ :custom_issue_tracker | 'fix-issue' | 'Fix issue' | nil
+ :internal | '123-fix-issue' | 'Resolve "A bug"' | 'Closes #123'
+ :internal | 'fix-issue' | 'Fix issue' | nil
+ :internal | '124-fix-issue' | '124 fix issue' | nil
+ end
+
+ with_them do
+ before do
+ if issue_tracker == :internal
+ issue.update!(iid: 123)
+ else
+ create(:"#{issue_tracker}_service", project: project)
+ project.reload
+ end
+ end
+
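+        # Here the title always comes from the first multi-line commit message,
+        # which takes precedence over the branch-derived title, so every row
+        # expects the same MR title; only the closing message varies.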
+ it 'sets the correct title' do
+ expect(merge_request.title).to eq('Closes #1234 Second commit')
+ end
+
+ it 'sets the closing description' do
+ expect(merge_request.description).to eq("Create the app#{closing_message ? "\n\n" + closing_message : ''}")
+ end
+ end
+ end
context 'when the issue is not accessible to user' do
let(:source_branch) { "#{issue.iid}-fix-issue" }
@@ -373,12 +436,12 @@ RSpec.describe MergeRequests::BuildService do
project.team.truncate
end
- it 'uses branch title as the merge request title' do
- expect(merge_request.title).to eq("#{issue.iid} fix issue")
+ it 'uses the first line of the first multi-line commit message as the title' do
+ expect(merge_request.title).to eq('Closes #1234 Second commit')
end
- it 'does not set a description' do
- expect(merge_request.description).to be_nil
+ it 'adds the remaining lines of the first multi-line commit message as the description' do
+ expect(merge_request.description).to eq('Create the app')
end
end
@@ -386,12 +449,12 @@ RSpec.describe MergeRequests::BuildService do
let(:source_branch) { "#{issue.iid}-fix-issue" }
let(:issue_confidential) { true }
- it 'uses the title of the branch as the merge request title' do
- expect(merge_request.title).to eq("#{issue.iid} fix issue")
+ it 'uses the first line of the first multi-line commit message as the title' do
+ expect(merge_request.title).to eq('Closes #1234 Second commit')
end
- it 'does not set a description' do
- expect(merge_request.description).to be_nil
+ it 'adds the remaining lines of the first multi-line commit message as the description' do
+ expect(merge_request.description).to eq('Create the app')
end
end
end
@@ -399,7 +462,7 @@ RSpec.describe MergeRequests::BuildService do
context 'source branch does not exist' do
before do
allow(project).to receive(:commit).with(source_branch).and_return(nil)
- allow(project).to receive(:commit).with(target_branch).and_return(commit_1)
+ allow(project).to receive(:commit).with(target_branch).and_return(commit_2)
end
it_behaves_like 'forbids the merge request from being created' do
@@ -409,7 +472,7 @@ RSpec.describe MergeRequests::BuildService do
context 'target branch does not exist' do
before do
- allow(project).to receive(:commit).with(source_branch).and_return(commit_1)
+ allow(project).to receive(:commit).with(source_branch).and_return(commit_2)
allow(project).to receive(:commit).with(target_branch).and_return(nil)
end
@@ -433,7 +496,7 @@ RSpec.describe MergeRequests::BuildService do
context 'upstream project has disabled merge requests' do
let(:upstream_project) { create(:project, :merge_requests_disabled) }
let(:project) { create(:project, forked_from_project: upstream_project) }
- let(:commits) { Commit.decorate([commit_1], project) }
+ let(:commits) { Commit.decorate([commit_2], project) }
it 'sets target project correctly' do
expect(merge_request.target_project).to eq(project)
@@ -441,8 +504,8 @@ RSpec.describe MergeRequests::BuildService do
end
context 'target_project is set and accessible by current_user' do
- let(:target_project) { create(:project, :public, :repository)}
- let(:commits) { Commit.decorate([commit_1], project) }
+ let(:target_project) { create(:project, :public, :repository) }
+ let(:commits) { Commit.decorate([commit_2], project) }
it 'sets target project correctly' do
expect(merge_request.target_project).to eq(target_project)
@@ -450,8 +513,8 @@ RSpec.describe MergeRequests::BuildService do
end
context 'target_project is set but not accessible by current_user' do
- let(:target_project) { create(:project, :private, :repository)}
- let(:commits) { Commit.decorate([commit_1], project) }
+ let(:target_project) { create(:project, :private, :repository) }
+ let(:commits) { Commit.decorate([commit_2], project) }
it 'sets target project correctly' do
expect(merge_request.target_project).to eq(project)
@@ -469,8 +532,8 @@ RSpec.describe MergeRequests::BuildService do
end
context 'source_project is set and accessible by current_user' do
- let(:source_project) { create(:project, :public, :repository)}
- let(:commits) { Commit.decorate([commit_1], project) }
+ let(:source_project) { create(:project, :public, :repository) }
+ let(:commits) { Commit.decorate([commit_2], project) }
before do
# To create merge requests _from_ a project the user needs at least
@@ -484,8 +547,8 @@ RSpec.describe MergeRequests::BuildService do
end
context 'source_project is set but not accessible by current_user' do
- let(:source_project) { create(:project, :private, :repository)}
- let(:commits) { Commit.decorate([commit_1], project) }
+ let(:source_project) { create(:project, :private, :repository) }
+ let(:commits) { Commit.decorate([commit_2], project) }
it 'sets source project correctly' do
expect(merge_request.source_project).to eq(project)
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 611f12c8146..87e5750ce6e 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -258,9 +258,8 @@ RSpec.describe MergeRequests::MergeService do
end
it 'removes the source branch using the author user' do
- expect(::Branches::DeleteService).to receive(:new)
- .with(merge_request.source_project, merge_request.author)
- .and_call_original
+ expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, merge_request.author.id)
+
service.execute(merge_request)
end
@@ -268,7 +267,8 @@ RSpec.describe MergeRequests::MergeService do
let(:service) { described_class.new(project, user, merge_params.merge('should_remove_source_branch' => false)) }
it 'does not delete the source branch' do
- expect(::Branches::DeleteService).not_to receive(:new)
+ expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
+
service.execute(merge_request)
end
end
@@ -280,9 +280,8 @@ RSpec.describe MergeRequests::MergeService do
end
it 'removes the source branch using the current user' do
- expect(::Branches::DeleteService).to receive(:new)
- .with(merge_request.source_project, user)
- .and_call_original
+ expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, user.id)
+
service.execute(merge_request)
end
end
diff --git a/spec/services/merge_requests/post_merge_service_spec.rb b/spec/services/merge_requests/post_merge_service_spec.rb
index 71329905558..247b053e729 100644
--- a/spec/services/merge_requests/post_merge_service_spec.rb
+++ b/spec/services/merge_requests/post_merge_service_spec.rb
@@ -130,139 +130,5 @@ RSpec.describe MergeRequests::PostMergeService do
expect(deploy_job.reload.canceled?).to be false
end
end
-
- context 'for a merge request chain' do
- before do
- ::MergeRequests::UpdateService
- .new(project, user, force_remove_source_branch: '1')
- .execute(merge_request)
- end
-
- context 'when there is another MR' do
- let!(:another_merge_request) do
- create(:merge_request,
- source_project: source_project,
- source_branch: 'my-awesome-feature',
- target_project: merge_request.source_project,
- target_branch: merge_request.source_branch
- )
- end
-
- shared_examples 'does not retarget merge request' do
- it 'another merge request is unchanged' do
- expect { subject }.not_to change { another_merge_request.reload.target_branch }
- .from(merge_request.source_branch)
- end
- end
-
- shared_examples 'retargets merge request' do
- it 'another merge request is retargeted' do
- expect(SystemNoteService)
- .to receive(:change_branch).once
- .with(another_merge_request, another_merge_request.project, user,
- 'target', 'delete',
- merge_request.source_branch, merge_request.target_branch)
-
- expect { subject }.to change { another_merge_request.reload.target_branch }
- .from(merge_request.source_branch)
- .to(merge_request.target_branch)
- end
-
- context 'when FF retarget_merge_requests is disabled' do
- before do
- stub_feature_flags(retarget_merge_requests: false)
- end
-
- include_examples 'does not retarget merge request'
- end
-
- context 'when source branch is to be kept' do
- before do
- ::MergeRequests::UpdateService
- .new(project, user, force_remove_source_branch: false)
- .execute(merge_request)
- end
-
- include_examples 'does not retarget merge request'
- end
- end
-
- context 'in the same project' do
- let(:source_project) { project }
-
- it_behaves_like 'retargets merge request'
-
- context 'and is closed' do
- before do
- another_merge_request.close
- end
-
- it_behaves_like 'does not retarget merge request'
- end
-
- context 'and is merged' do
- before do
- another_merge_request.mark_as_merged
- end
-
- it_behaves_like 'does not retarget merge request'
- end
- end
-
- context 'in forked project' do
- let!(:source_project) { fork_project(project) }
-
- context 'when user has access to source project' do
- before do
- source_project.add_developer(user)
- end
-
- it_behaves_like 'retargets merge request'
- end
-
- context 'when user does not have access to source project' do
- it_behaves_like 'does not retarget merge request'
- end
- end
-
- context 'and current and another MR is from a fork' do
- let(:project) { create(:project) }
- let(:source_project) { fork_project(project) }
-
- let(:merge_request) do
- create(:merge_request,
- source_project: source_project,
- target_project: project
- )
- end
-
- before do
- source_project.add_developer(user)
- end
-
- it_behaves_like 'does not retarget merge request'
- end
- end
-
- context 'when many merge requests are to be retargeted' do
- let!(:many_merge_requests) do
- create_list(:merge_request, 10, :unique_branches,
- source_project: merge_request.source_project,
- target_project: merge_request.source_project,
- target_branch: merge_request.source_branch
- )
- end
-
- it 'retargets only 4 of them' do
- subject
-
- expect(many_merge_requests.each(&:reload).pluck(:target_branch).tally)
- .to eq(
- merge_request.source_branch => 6,
- merge_request.target_branch => 4
- )
- end
- end
- end
end
end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 747ecbf4fa4..2abe7a23bfe 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -72,6 +72,21 @@ RSpec.describe MergeRequests::RefreshService do
allow(NotificationService).to receive(:new) { notification_service }
end
+ context 'query count' do
+ it 'does not execute a lot of queries' do
+      # The query limit is hardcoded because the number of queries can drop
+      # even when the number of merge requests stays the same (e.g. by
+      # preloading associations). This expectation should also fail if
+      # additional queries are added elsewhere that affect this service.
+ #
+ # The limit is based on the number of queries executed at the current
+ # state of the service. As we reduce the number of queries executed in
+ # this service, the limit should be reduced as well.
+ expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') }
+ .not_to exceed_query_limit(260)
+ end
+ end
+
it 'executes hooks with update action' do
refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
reload_mrs
@@ -155,6 +170,18 @@ RSpec.describe MergeRequests::RefreshService do
.not_to change { @merge_request.reload.merge_request_diff }
end
end
+
+ it 'calls the merge request activity counter' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_mr_including_ci_config)
+ .with(user: @merge_request.author, merge_request: @merge_request)
+
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_mr_including_ci_config)
+ .with(user: @another_merge_request.author, merge_request: @another_merge_request)
+
+ refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
+ end
end
context 'when pipeline exists for the source branch' do
diff --git a/spec/services/merge_requests/retarget_chain_service_spec.rb b/spec/services/merge_requests/retarget_chain_service_spec.rb
new file mode 100644
index 00000000000..3937fbe58c3
--- /dev/null
+++ b/spec/services/merge_requests/retarget_chain_service_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::RetargetChainService do
+ include ProjectForksHelper
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:merge_request, reload: true) { create(:merge_request, assignees: [user]) }
+ let_it_be(:project) { merge_request.project }
+
+ subject { described_class.new(project, user).execute(merge_request) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe '#execute' do
+ context 'when there is another MR' do
+ let!(:another_merge_request) do
+ create(:merge_request,
+ source_project: source_project,
+ source_branch: 'my-awesome-feature',
+ target_project: merge_request.source_project,
+ target_branch: merge_request.source_branch
+ )
+ end
+
+ shared_examples 'does not retarget merge request' do
+ it 'another merge request is unchanged' do
+ expect { subject }.not_to change { another_merge_request.reload.target_branch }
+ .from(merge_request.source_branch)
+ end
+ end
+
+ shared_examples 'retargets merge request' do
+ it 'another merge request is retargeted' do
+ expect(SystemNoteService)
+ .to receive(:change_branch).once
+ .with(another_merge_request, another_merge_request.project, user,
+ 'target', 'delete',
+ merge_request.source_branch, merge_request.target_branch)
+
+ expect { subject }.to change { another_merge_request.reload.target_branch }
+ .from(merge_request.source_branch)
+ .to(merge_request.target_branch)
+ end
+
+ context 'when FF retarget_merge_requests is disabled' do
+ before do
+ stub_feature_flags(retarget_merge_requests: false)
+ end
+
+ include_examples 'does not retarget merge request'
+ end
+ end
+
+ context 'in the same project' do
+ let(:source_project) { project }
+
+ context 'and current is merged' do
+ before do
+ merge_request.mark_as_merged
+ end
+
+ it_behaves_like 'retargets merge request'
+ end
+
+ context 'and current is closed' do
+ before do
+ merge_request.close
+ end
+
+ it_behaves_like 'does not retarget merge request'
+ end
+
+ context 'and another is closed' do
+ before do
+ another_merge_request.close
+ end
+
+ it_behaves_like 'does not retarget merge request'
+ end
+
+ context 'and another is merged' do
+ before do
+ another_merge_request.mark_as_merged
+ end
+
+ it_behaves_like 'does not retarget merge request'
+ end
+ end
+
+ context 'in forked project' do
+ let!(:source_project) { fork_project(project) }
+
+ context 'when user has access to source project' do
+ before do
+ source_project.add_developer(user)
+ merge_request.mark_as_merged
+ end
+
+ it_behaves_like 'retargets merge request'
+ end
+
+ context 'when user does not have access to source project' do
+ it_behaves_like 'does not retarget merge request'
+ end
+ end
+
+ context 'and current and another MR is from a fork' do
+ let(:project) { create(:project) }
+ let(:source_project) { fork_project(project) }
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: source_project,
+ target_project: project
+ )
+ end
+
+ before do
+ source_project.add_developer(user)
+ end
+
+ it_behaves_like 'does not retarget merge request'
+ end
+ end
+
+ context 'when many merge requests are to be retargeted' do
+ let!(:many_merge_requests) do
+ create_list(:merge_request, 10, :unique_branches,
+ source_project: merge_request.source_project,
+ target_project: merge_request.source_project,
+ target_branch: merge_request.source_branch
+ )
+ end
+
+ before do
+ merge_request.mark_as_merged
+ end
+
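+    # The 4/6 split asserted below assumes the service caps retargeting at
+    # four merge requests per run; the remaining MRs keep their original
+    # target branch.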
+ it 'retargets only 4 of them' do
+ subject
+
+ expect(many_merge_requests.each(&:reload).pluck(:target_branch).tally)
+ .to eq(
+ merge_request.source_branch => 6,
+ merge_request.target_branch => 4
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index edb95840604..7a7f684c6d0 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -48,6 +48,8 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
context 'valid params' do
+ let(:locked) { true }
+
let(:opts) do
{
title: 'New title',
@@ -58,7 +60,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
label_ids: [label.id],
target_branch: 'target',
force_remove_source_branch: '1',
- discussion_locked: true
+ discussion_locked: locked
}
end
@@ -117,6 +119,139 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
MergeRequests::UpdateService.new(project, user, opts).execute(draft_merge_request)
end
+
+ context 'when MR is locked' do
+ context 'when locked again' do
+ it 'does not track discussion locking' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_discussion_locked_action)
+
+ opts[:discussion_locked] = true
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+ end
+
+ context 'when unlocked' do
+      it 'tracks discussion unlocking' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_discussion_unlocked_action).once.with(user: user)
+
+ opts[:discussion_locked] = false
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+ end
+ end
+
+ context 'when MR is unlocked' do
+ let(:locked) { false }
+
+ context 'when unlocked again' do
+ it 'does not track discussion unlocking' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_discussion_unlocked_action)
+
+ opts[:discussion_locked] = false
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+ end
+
+ context 'when locked' do
+      it 'tracks discussion locking' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_discussion_locked_action).once.with(user: user)
+
+ opts[:discussion_locked] = true
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+ end
+ end
+
+ it 'tracks time estimate and spend time changes' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_time_estimate_changed_action).once.with(user: user)
+
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_time_spent_changed_action).once.with(user: user)
+
+ opts[:time_estimate] = 86400
+ opts[:spend_time] = {
+ duration: 3600,
+ user_id: user.id,
+ spent_at: Date.parse('2021-02-24')
+ }
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+
+ it 'tracks milestone change' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_milestone_changed_action).once.with(user: user)
+
+ opts[:milestone] = milestone
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+
+    it 'tracks labels change' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_labels_changed_action).once.with(user: user)
+
+ opts[:label_ids] = [label2.id]
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+
+ context 'assignees' do
+ context 'when assignees changed' do
+ it 'tracks assignees changed event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_assignees_changed_action).once.with(user: user)
+
+ opts[:assignees] = [user2]
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+ end
+
+ context 'when assignees did not change' do
+ it 'does not track assignees changed event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_assignees_changed_action)
+
+ opts[:assignees] = merge_request.assignees
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+ end
+ end
+
+ context 'reviewers' do
+ context 'when reviewers changed' do
+ it 'tracks reviewers changed event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_reviewers_changed_action).once.with(user: user)
+
+ opts[:reviewers] = [user2]
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+ end
+
+ context 'when reviewers did not change' do
+ it 'does not track reviewers changed event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_reviewers_changed_action)
+
+ opts[:reviewers] = merge_request.reviewers
+
+ MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ end
+ end
+ end
end
context 'updating milestone' do
@@ -656,6 +791,48 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
end
+ context 'when the draft status is changed' do
+ let!(:non_subscriber) { create(:user) }
+ let!(:subscriber) do
+ create(:user) { |u| merge_request.toggle_subscription(u, project) }
+ end
+
+ before do
+ project.add_developer(non_subscriber)
+ project.add_developer(subscriber)
+ end
+
+ context 'removing draft status' do
+ before do
+ merge_request.update_attribute(:title, 'Draft: New Title')
+ end
+
+ it 'sends notifications for subscribers', :sidekiq_might_not_need_inline do
+ opts = { title: 'New title' }
+
+ perform_enqueued_jobs do
+ @merge_request = described_class.new(project, user, opts).execute(merge_request)
+ end
+
+ should_email(subscriber)
+ should_not_email(non_subscriber)
+ end
+ end
+
+ context 'adding draft status' do
+ it 'does not send notifications', :sidekiq_might_not_need_inline do
+ opts = { title: 'Draft: New title' }
+
+ perform_enqueued_jobs do
+ @merge_request = described_class.new(project, user, opts).execute(merge_request)
+ end
+
+ should_not_email(subscriber)
+ should_not_email(non_subscriber)
+ end
+ end
+ end
+
context 'when the merge request is relabeled' do
let!(:non_subscriber) { create(:user) }
let!(:subscriber) { create(:user) { |u| label.toggle_subscription(u, project) } }
diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
index 7346a5b95ae..2581cab5d1a 100644
--- a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
+++ b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
@@ -3,12 +3,15 @@
require 'spec_helper'
RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
- subject(:execute_service) { described_class.new(track, interval).execute }
+ subject(:execute_service) do
+ travel_to(frozen_time) { described_class.new(track, interval).execute }
+ end
let(:track) { :create }
let(:interval) { 1 }
- let(:previous_action_completed_at) { 2.days.ago.middle_of_day }
+ let(:frozen_time) { Time.current }
+ let(:previous_action_completed_at) { frozen_time - 2.days }
let(:current_action_completed_at) { nil }
let(:experiment_enabled) { true }
let(:user_can_perform_current_track_action) { true }
@@ -39,18 +42,18 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
using RSpec::Parameterized::TableSyntax
where(:track, :interval, :actions_completed) do
- :create | 1 | { created_at: 2.days.ago.middle_of_day }
- :create | 5 | { created_at: 6.days.ago.middle_of_day }
- :create | 10 | { created_at: 11.days.ago.middle_of_day }
- :verify | 1 | { created_at: 2.days.ago.middle_of_day, git_write_at: 2.days.ago.middle_of_day }
- :verify | 5 | { created_at: 6.days.ago.middle_of_day, git_write_at: 6.days.ago.middle_of_day }
- :verify | 10 | { created_at: 11.days.ago.middle_of_day, git_write_at: 11.days.ago.middle_of_day }
- :trial | 1 | { created_at: 2.days.ago.middle_of_day, git_write_at: 2.days.ago.middle_of_day, pipeline_created_at: 2.days.ago.middle_of_day }
- :trial | 5 | { created_at: 6.days.ago.middle_of_day, git_write_at: 6.days.ago.middle_of_day, pipeline_created_at: 6.days.ago.middle_of_day }
- :trial | 10 | { created_at: 11.days.ago.middle_of_day, git_write_at: 11.days.ago.middle_of_day, pipeline_created_at: 11.days.ago.middle_of_day }
- :team | 1 | { created_at: 2.days.ago.middle_of_day, git_write_at: 2.days.ago.middle_of_day, pipeline_created_at: 2.days.ago.middle_of_day, trial_started_at: 2.days.ago.middle_of_day }
- :team | 5 | { created_at: 6.days.ago.middle_of_day, git_write_at: 6.days.ago.middle_of_day, pipeline_created_at: 6.days.ago.middle_of_day, trial_started_at: 6.days.ago.middle_of_day }
- :team | 10 | { created_at: 11.days.ago.middle_of_day, git_write_at: 11.days.ago.middle_of_day, pipeline_created_at: 11.days.ago.middle_of_day, trial_started_at: 11.days.ago.middle_of_day }
+ :create | 1 | { created_at: frozen_time - 2.days }
+ :create | 5 | { created_at: frozen_time - 6.days }
+ :create | 10 | { created_at: frozen_time - 11.days }
+ :verify | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days }
+ :verify | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days }
+ :verify | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days }
+ :trial | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days }
+ :trial | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days }
+ :trial | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days }
+ :team | 1 | { created_at: frozen_time - 2.days, git_write_at: frozen_time - 2.days, pipeline_created_at: frozen_time - 2.days, trial_started_at: frozen_time - 2.days }
+ :team | 5 | { created_at: frozen_time - 6.days, git_write_at: frozen_time - 6.days, pipeline_created_at: frozen_time - 6.days, trial_started_at: frozen_time - 6.days }
+ :team | 10 | { created_at: frozen_time - 11.days, git_write_at: frozen_time - 11.days, pipeline_created_at: frozen_time - 11.days, trial_started_at: frozen_time - 11.days }
end
with_them do
@@ -64,7 +67,7 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
it { is_expected.not_to send_in_product_marketing_email }
context 'when the previous track actions have been completed' do
- let(:current_action_completed_at) { 2.days.ago.middle_of_day }
+ let(:current_action_completed_at) { frozen_time - 2.days }
it { is_expected.to send_in_product_marketing_email(user.id, group.id, :verify, 0) }
end
@@ -76,7 +79,7 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
it { is_expected.not_to send_in_product_marketing_email }
context 'when the previous track action was completed within the intervals range' do
- let(:previous_action_completed_at) { 6.days.ago.middle_of_day }
+ let(:previous_action_completed_at) { frozen_time - 6.days }
it { is_expected.to send_in_product_marketing_email(user.id, group.id, :create, 1) }
end
@@ -113,13 +116,13 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
end
context 'when the previous track action is completed outside the intervals range' do
- let(:previous_action_completed_at) { 3.days.ago }
+ let(:previous_action_completed_at) { frozen_time - 3.days }
it { is_expected.not_to send_in_product_marketing_email }
end
context 'when the current track action is completed' do
- let(:current_action_completed_at) { Time.current }
+ let(:current_action_completed_at) { frozen_time }
it { is_expected.not_to send_in_product_marketing_email }
end
diff --git a/spec/services/notes/build_service_spec.rb b/spec/services/notes/build_service_spec.rb
index 90548cf9a99..deeab66c4e9 100644
--- a/spec/services/notes/build_service_spec.rb
+++ b/spec/services/notes/build_service_spec.rb
@@ -3,29 +3,38 @@
require 'spec_helper'
RSpec.describe Notes::BuildService do
+ include AdminModeHelper
+
let(:note) { create(:discussion_note_on_issue) }
let(:project) { note.project }
let(:author) { note.author }
+ let(:user) { author }
let(:merge_request) { create(:merge_request, source_project: project) }
- let(:mr_note) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project, author: author) }
+ let(:mr_note) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project, author: note.author) }
+ let(:base_params) { { note: 'Test' } }
+ let(:params) { {} }
+
+ subject(:new_note) { described_class.new(project, user, base_params.merge(params)).execute }
describe '#execute' do
context 'when in_reply_to_discussion_id is specified' do
+ let(:params) { { in_reply_to_discussion_id: note.discussion_id } }
+
context 'when a note with that original discussion ID exists' do
it 'sets the note up to be in reply to that note' do
- new_note = described_class.new(project, author, note: 'Test', in_reply_to_discussion_id: note.discussion_id).execute
expect(new_note).to be_valid
expect(new_note.in_reply_to?(note)).to be_truthy
expect(new_note.resolved?).to be_falsey
end
context 'when discussion is resolved' do
+ let(:params) { { in_reply_to_discussion_id: mr_note.discussion_id } }
+
before do
mr_note.resolve!(author)
end
it 'resolves the note' do
- new_note = described_class.new(project, author, note: 'Test', in_reply_to_discussion_id: mr_note.discussion_id).execute
expect(new_note).to be_valid
expect(new_note.resolved?).to be_truthy
end
@@ -34,24 +43,23 @@ RSpec.describe Notes::BuildService do
context 'when a note with that discussion ID exists' do
it 'sets the note up to be in reply to that note' do
- new_note = described_class.new(project, author, note: 'Test', in_reply_to_discussion_id: note.discussion_id).execute
expect(new_note).to be_valid
expect(new_note.in_reply_to?(note)).to be_truthy
end
end
context 'when no note with that discussion ID exists' do
+ let(:params) { { in_reply_to_discussion_id: 'foo' } }
+
it 'sets an error' do
- new_note = described_class.new(project, author, note: 'Test', in_reply_to_discussion_id: 'foo').execute
expect(new_note.errors[:base]).to include('Discussion to reply to cannot be found')
end
end
context 'when user has no access to discussion' do
- it 'sets an error' do
- another_user = create(:user)
- new_note = described_class.new(project, another_user, note: 'Test', in_reply_to_discussion_id: note.discussion_id).execute
+ let(:user) { create(:user) }
+ it 'sets an error' do
expect(new_note.errors[:base]).to include('Discussion to reply to cannot be found')
end
end
@@ -127,34 +135,118 @@ RSpec.describe Notes::BuildService do
context 'when replying to individual note' do
let(:note) { create(:note_on_issue) }
-
- subject { described_class.new(project, author, note: 'Test', in_reply_to_discussion_id: note.discussion_id).execute }
+ let(:params) { { in_reply_to_discussion_id: note.discussion_id } }
it 'sets the note up to be in reply to that note' do
- expect(subject).to be_valid
- expect(subject).to be_a(DiscussionNote)
- expect(subject.discussion_id).to eq(note.discussion_id)
+ expect(new_note).to be_valid
+ expect(new_note).to be_a(DiscussionNote)
+ expect(new_note.discussion_id).to eq(note.discussion_id)
end
context 'when noteable does not support replies' do
let(:note) { create(:note_on_commit) }
it 'builds another individual note' do
- expect(subject).to be_valid
- expect(subject).to be_a(Note)
- expect(subject.discussion_id).not_to eq(note.discussion_id)
+ expect(new_note).to be_valid
+ expect(new_note).to be_a(Note)
+ expect(new_note.discussion_id).not_to eq(note.discussion_id)
+ end
+ end
+ end
+
+ context 'confidential comments' do
+ before do
+ project.add_reporter(author)
+ end
+
+ context 'when replying to a confidential comment' do
+ let(:note) { create(:note_on_issue, confidential: true) }
+ let(:params) { { in_reply_to_discussion_id: note.discussion_id, confidential: false } }
+
+ context 'when the user can read confidential comments' do
+          it 'ignores the `confidential` param and marks the reply as confidential' do
+ expect(new_note.confidential).to be_truthy
+ end
+ end
+
+ context 'when the user cannot read confidential comments' do
+ let(:user) { create(:user) }
+
+ it 'returns `Discussion to reply to cannot be found` error' do
+ expect(new_note.errors.first).to include("Discussion to reply to cannot be found")
+ end
+ end
+ end
+
+ context 'when replying to a public comment' do
+ let(:note) { create(:note_on_issue, confidential: false) }
+ let(:params) { { in_reply_to_discussion_id: note.discussion_id, confidential: true } }
+
+        it 'ignores the `confidential` param and keeps the reply non-confidential' do
+ expect(new_note.confidential).to be_falsey
+ end
+ end
+
+ context 'when creating a new comment' do
+ context 'when the `confidential` note flag is set to `true`' do
+ context 'when the user is allowed (reporter)' do
+ let(:params) { { confidential: true, noteable: merge_request } }
+
+ it 'note `confidential` flag is set to `true`' do
+ expect(new_note.confidential).to be_truthy
+ end
+ end
+
+ context 'when the user is allowed (issuable author)' do
+ let(:user) { create(:user) }
+ let(:issue) { create(:issue, author: user) }
+ let(:params) { { confidential: true, noteable: issue } }
+
+ it 'note `confidential` flag is set to `true`' do
+ expect(new_note.confidential).to be_truthy
+ end
+ end
+
+ context 'when the user is allowed (admin)' do
+ before do
+ enable_admin_mode!(admin)
+ end
+
+ let(:admin) { create(:admin) }
+ let(:params) { { confidential: true, noteable: merge_request } }
+
+ it 'note `confidential` flag is set to `true`' do
+ expect(new_note.confidential).to be_truthy
+ end
+ end
+
+ context 'when the user is not allowed' do
+ let(:user) { create(:user) }
+ let(:params) { { confidential: true, noteable: merge_request } }
+
+ it 'note `confidential` flag is set to `false`' do
+ expect(new_note.confidential).to be_falsey
+ end
+ end
+ end
+
+ context 'when the `confidential` note flag is set to `false`' do
+ let(:params) { { confidential: false, noteable: merge_request } }
+
+ it 'note `confidential` flag is set to `false`' do
+ expect(new_note.confidential).to be_falsey
+ end
end
end
end
- it 'builds a note without saving it' do
- new_note = described_class.new(project,
- author,
- noteable_type: note.noteable_type,
- noteable_id: note.noteable_id,
- note: 'Test').execute
- expect(new_note).to be_valid
- expect(new_note).not_to be_persisted
+ context 'when noteable is not set' do
+ let(:params) { { noteable_type: note.noteable_type, noteable_id: note.noteable_id } }
+
+ it 'builds a note without saving it' do
+ expect(new_note).to be_valid
+ expect(new_note).not_to be_persisted
+ end
end
end
end
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 902fd9958f8..000f3d26efa 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -64,6 +64,40 @@ RSpec.describe Notes::UpdateService do
end.to change { counter.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
end
+ context 'when note text was changed' do
+ let!(:note) { create(:note, project: project, noteable: issue, author: user2, note: "Old note #{user3.to_reference}") }
+ let(:edit_note_text) { update_note({ note: 'new text' }) }
+
+    it 'updates last_edited_at' do
+ travel_to(1.day.from_now) do
+ expect { edit_note_text }.to change { note.reload.last_edited_at }
+ end
+ end
+
+    it 'updates updated_by' do
+ travel_to(1.day.from_now) do
+ expect { edit_note_text }.to change { note.reload.updated_by }
+ end
+ end
+ end
+
+ context 'when note text was not changed' do
+ let!(:note) { create(:note, project: project, noteable: issue, author: user2, note: "Old note #{user3.to_reference}") }
+ let(:does_not_edit_note_text) { update_note({}) }
+
+ it 'does not update last_edited_at' do
+ travel_to(1.day.from_now) do
+ expect { does_not_edit_note_text }.not_to change { note.reload.last_edited_at }
+ end
+ end
+
+ it 'does not update updated_by' do
+ travel_to(1.day.from_now) do
+ expect { does_not_edit_note_text }.not_to change { note.reload.updated_by }
+ end
+ end
+ end
+
context 'when the notable is a merge request' do
let(:merge_request) { create(:merge_request, source_project: project) }
let(:note) { create(:note, project: project, noteable: merge_request, author: user, note: "Old note #{user2.to_reference}") }
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index b67c37ba02d..03dc3be194c 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -99,6 +99,23 @@ RSpec.describe NotificationService, :mailer do
end
end
+ shared_examples 'is not able to send notifications' do
+ it 'does not send any notification' do
+ user_1 = create(:user)
+ recipient_1 = NotificationRecipient.new(user_1, :custom, custom_action: :new_release)
+ allow(NotificationRecipients::BuildService).to receive(:build_new_release_recipients).and_return([recipient_1])
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: current_user.id, klass: object.class, object_id: object.id)
+
+ action
+
+ should_not_email(@u_mentioned)
+ should_not_email(@u_guest_watcher)
+ should_not_email(user_1)
+ should_not_email(current_user)
+ end
+ end
+
# Next shared examples are intended to test notifications of "participants"
#
# they take the following parameters:
@@ -243,11 +260,12 @@ RSpec.describe NotificationService, :mailer do
describe 'AccessToken' do
describe '#access_token_about_to_expire' do
let_it_be(:user) { create(:user) }
+ let_it_be(:pat) { create(:personal_access_token, user: user, expires_at: 5.days.from_now) }
- subject { notification.access_token_about_to_expire(user) }
+ subject { notification.access_token_about_to_expire(user, [pat.name]) }
it 'sends email to the token owner' do
- expect { subject }.to have_enqueued_email(user, mail: "access_token_about_to_expire_email")
+ expect { subject }.to have_enqueued_email(user, [pat.name], mail: "access_token_about_to_expire_email")
end
end
@@ -881,8 +899,24 @@ RSpec.describe NotificationService, :mailer do
end
describe '#send_new_release_notifications', :deliver_mails_inline do
+ let(:release) { create(:release, author: current_user) }
+ let(:object) { release }
+ let(:action) { notification.send_new_release_notifications(release) }
+
+ context 'when release author is blocked' do
+ let(:current_user) { create(:user, :blocked) }
+
+ include_examples 'is not able to send notifications'
+ end
+
+ context 'when release author is a ghost' do
+ let(:current_user) { create(:user, :ghost) }
+
+ include_examples 'is not able to send notifications'
+ end
+
context 'when recipients for a new release exist' do
- let(:release) { create(:release) }
+ let(:current_user) { create(:user) }
it 'calls new_release_email for each relevant recipient' do
user_1 = create(:user)
@@ -1127,11 +1161,31 @@ RSpec.describe NotificationService, :mailer do
should_email(admin)
end
end
+
+ context 'when the author is not allowed to trigger notifications' do
+ let(:current_user) { nil }
+ let(:object) { issue }
+ let(:action) { notification.new_issue(issue, current_user) }
+
+ context 'because they are blocked' do
+ let(:current_user) { create(:user, :blocked) }
+
+ include_examples 'is not able to send notifications'
+ end
+
+ context 'because they are a ghost' do
+ let(:current_user) { create(:user, :ghost) }
+
+ include_examples 'is not able to send notifications'
+ end
+ end
end
describe '#new_mentions_in_issue' do
let(:notification_method) { :new_mentions_in_issue }
let(:mentionable) { issue }
+ let(:object) { mentionable }
+ let(:action) { send_notifications(@u_mentioned, current_user: current_user) }
include_examples 'notifications for new mentions'
@@ -1139,6 +1193,18 @@ RSpec.describe NotificationService, :mailer do
let(:notification_target) { issue }
let(:notification_trigger) { send_notifications(@u_watcher, @u_participant_mentioned, @u_custom_global, @u_mentioned) }
end
+
+ context 'where current_user is blocked' do
+ let(:current_user) { create(:user, :blocked) }
+
+ include_examples 'is not able to send notifications'
+ end
+
+ context 'where current_user is a ghost' do
+ let(:current_user) { create(:user, :ghost) }
+
+ include_examples 'is not able to send notifications'
+ end
end
describe '#reassigned_issue' do
@@ -1751,11 +1817,31 @@ RSpec.describe NotificationService, :mailer do
it { should_not_email(participant) }
end
end
+
+ context 'when the author is not allowed to trigger notifications' do
+ let(:current_user) { nil }
+ let(:object) { merge_request }
+ let(:action) { notification.new_merge_request(merge_request, current_user) }
+
+ context 'because they are blocked' do
+ let(:current_user) { create(:user, :blocked) }
+
+ it_behaves_like 'is not able to send notifications'
+ end
+
+ context 'because they are a ghost' do
+ let(:current_user) { create(:user, :ghost) }
+
+ it_behaves_like 'is not able to send notifications'
+ end
+ end
end
describe '#new_mentions_in_merge_request' do
let(:notification_method) { :new_mentions_in_merge_request }
let(:mentionable) { merge_request }
+ let(:object) { mentionable }
+ let(:action) { send_notifications(@u_mentioned, current_user: current_user) }
include_examples 'notifications for new mentions'
@@ -1763,6 +1849,18 @@ RSpec.describe NotificationService, :mailer do
let(:notification_target) { merge_request }
let(:notification_trigger) { send_notifications(@u_watcher, @u_participant_mentioned, @u_custom_global, @u_mentioned) }
end
+
+ context 'when current_user is blocked' do
+ let(:current_user) { create(:user, :blocked) }
+
+ include_examples 'is not able to send notifications'
+ end
+
+ context 'when current_user is a ghost' do
+ let(:current_user) { create(:user, :ghost) }
+
+ include_examples 'is not able to send notifications'
+ end
end
describe '#reassigned_merge_request' do
@@ -1867,6 +1965,42 @@ RSpec.describe NotificationService, :mailer do
end
end
+ describe '#change_in_merge_request_draft_status' do
+ let(:merge_request) { create(:merge_request, author: author, source_project: project) }
+
+ let_it_be(:current_user) { create(:user) }
+
+ it 'sends emails to relevant users only', :aggregate_failures do
+ notification.change_in_merge_request_draft_status(merge_request, current_user)
+
+ merge_request.reviewers.each { |reviewer| should_email(reviewer) }
+ merge_request.assignees.each { |assignee| should_email(assignee) }
+ should_email(merge_request.author)
+ should_email(@u_watcher)
+ should_email(@subscriber)
+ should_email(@watcher_and_subscriber)
+ should_email(@u_guest_watcher)
+ should_not_email(@u_participant_mentioned)
+ should_not_email(@u_guest_custom)
+ should_not_email(@u_custom_global)
+ should_not_email(@unsubscriber)
+ should_not_email(@u_participating)
+ should_not_email(@u_disabled)
+ should_not_email(@u_lazy_participant)
+ end
+
+ it_behaves_like 'participating notifications' do
+ let(:participant) { create(:user, username: 'user-participant') }
+ let(:issuable) { merge_request }
+ let(:notification_trigger) { notification.change_in_merge_request_draft_status(merge_request, @u_disabled) }
+ end
+
+ it_behaves_like 'project emails are disabled' do
+ let(:notification_target) { merge_request }
+ let(:notification_trigger) { notification.change_in_merge_request_draft_status(merge_request, @u_disabled) }
+ end
+ end
+
describe '#push_to_merge_request' do
before do
update_custom_notification(:push_to_merge_request, @u_guest_custom, resource: project)
@@ -2159,8 +2293,38 @@ RSpec.describe NotificationService, :mailer do
end
describe '#merge_when_pipeline_succeeds' do
+ before do
+ update_custom_notification(:merge_when_pipeline_succeeds, @u_guest_custom, resource: project)
+ update_custom_notification(:merge_when_pipeline_succeeds, @u_custom_global)
+ end
+
it 'send notification that merge will happen when pipeline succeeds' do
notification.merge_when_pipeline_succeeds(merge_request, assignee)
+
+ should_email(merge_request.author)
+ should_email(@u_watcher)
+ should_email(@subscriber)
+ should_email(@u_guest_custom)
+ should_email(@u_custom_global)
+ should_not_email(@unsubscriber)
+ should_not_email(@u_disabled)
+ end
+
+ it 'does not send notification if the custom event is disabled' do
+ update_custom_notification(:merge_when_pipeline_succeeds, @u_guest_custom, resource: project, value: false)
+ update_custom_notification(:merge_when_pipeline_succeeds, @u_custom_global, resource: nil, value: false)
+ notification.merge_when_pipeline_succeeds(merge_request, assignee)
+
+ should_not_email(@u_guest_custom)
+ should_not_email(@u_custom_global)
+ end
+
+ it 'sends notification to participants even if the custom event is disabled' do
+ update_custom_notification(:merge_when_pipeline_succeeds, merge_request.author, resource: project, value: false)
+ update_custom_notification(:merge_when_pipeline_succeeds, @u_watcher, resource: project, value: false)
+ update_custom_notification(:merge_when_pipeline_succeeds, @subscriber, resource: project, value: false)
+ notification.merge_when_pipeline_succeeds(merge_request, assignee)
+
should_email(merge_request.author)
should_email(@u_watcher)
should_email(@subscriber)
@@ -2694,7 +2858,7 @@ RSpec.describe NotificationService, :mailer do
end
it 'filters out guests when new merge request is created' do
- notification.new_merge_request(merge_request1, @u_disabled)
+ notification.new_merge_request(merge_request1, developer)
should_not_email(guest)
should_email(assignee)
diff --git a/spec/services/onboarding_progress_service_spec.rb b/spec/services/onboarding_progress_service_spec.rb
index 340face4ae8..ef4f4f0d822 100644
--- a/spec/services/onboarding_progress_service_spec.rb
+++ b/spec/services/onboarding_progress_service_spec.rb
@@ -3,9 +3,49 @@
require 'spec_helper'
RSpec.describe OnboardingProgressService do
+ describe '.async' do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:action) { :git_pull }
+
+ subject(:execute_service) { described_class.async(namespace.id).execute(action: action) }
+
+ context 'when not onboarded' do
+ it 'does not schedule a worker' do
+ expect(Namespaces::OnboardingProgressWorker).not_to receive(:perform_async)
+
+ execute_service
+ end
+ end
+
+ context 'when onboarded' do
+ before do
+ OnboardingProgress.onboard(namespace)
+ end
+
+ context 'when action is already completed' do
+ before do
+ OnboardingProgress.register(namespace, action)
+ end
+
+ it 'does not schedule a worker' do
+ expect(Namespaces::OnboardingProgressWorker).not_to receive(:perform_async)
+
+ execute_service
+ end
+ end
+
+ context 'when action is not yet completed' do
+ it 'schedules a worker' do
+ expect(Namespaces::OnboardingProgressWorker).to receive(:perform_async)
+
+ execute_service
+ end
+ end
+ end
+ end
+
describe '#execute' do
- let(:namespace) { create(:namespace, parent: root_namespace) }
- let(:root_namespace) { nil }
+ let(:namespace) { create(:namespace) }
let(:action) { :namespace_action }
subject(:execute_service) { described_class.new(namespace).execute(action: :subscription_created) }
@@ -23,16 +63,16 @@ RSpec.describe OnboardingProgressService do
end
context 'when the namespace is not the root' do
- let(:root_namespace) { build(:namespace) }
+ let(:group) { create(:group, :nested) }
before do
- OnboardingProgress.onboard(root_namespace)
+ OnboardingProgress.onboard(group)
end
- it 'registers a namespace onboarding progress action for the root namespace' do
+ it 'does not register a namespace onboarding progress action' do
execute_service
- expect(OnboardingProgress.completed?(root_namespace, :subscription_created)).to eq(true)
+ expect(OnboardingProgress.completed?(group, :subscription_created)).to be(nil)
end
end
@@ -42,7 +82,7 @@ RSpec.describe OnboardingProgressService do
it 'does not register a namespace onboarding progress action' do
execute_service
- expect(OnboardingProgress.completed?(root_namespace, :subscription_created)).to be(nil)
+ expect(OnboardingProgress.completed?(namespace, :subscription_created)).to be(nil)
end
end
end
diff --git a/spec/services/packages/composer/create_package_service_spec.rb b/spec/services/packages/composer/create_package_service_spec.rb
index 4f1a46e7e45..526c7b4929b 100644
--- a/spec/services/packages/composer/create_package_service_spec.rb
+++ b/spec/services/packages/composer/create_package_service_spec.rb
@@ -28,6 +28,8 @@ RSpec.describe Packages::Composer::CreatePackageService do
let(:branch) { project.repository.find_branch('master') }
it 'creates the package' do
+ expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, nil)
+
expect { subject }
.to change { Packages::Package.composer.count }.by(1)
.and change { Packages::Composer::Metadatum.count }.by(1)
@@ -54,6 +56,8 @@ RSpec.describe Packages::Composer::CreatePackageService do
end
it 'creates the package' do
+ expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, nil)
+
expect { subject }
.to change { Packages::Package.composer.count }.by(1)
.and change { Packages::Composer::Metadatum.count }.by(1)
@@ -80,6 +84,8 @@ RSpec.describe Packages::Composer::CreatePackageService do
end
it 'does not create a new package' do
+ expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, nil)
+
expect { subject }
.to change { Packages::Package.composer.count }.by(0)
.and change { Packages::Composer::Metadatum.count }.by(0)
@@ -101,6 +107,8 @@ RSpec.describe Packages::Composer::CreatePackageService do
let!(:other_package) { create(:package, name: package_name, version: 'dev-master', project: other_project) }
it 'creates the package' do
+ expect(::Packages::Composer::CacheUpdateWorker).to receive(:perform_async).with(project.id, package_name, nil)
+
expect { subject }
.to change { Packages::Package.composer.count }.by(1)
.and change { Packages::Composer::Metadatum.count }.by(1)
diff --git a/spec/services/packages/create_event_service_spec.rb b/spec/services/packages/create_event_service_spec.rb
index f7bab0e5a9f..122f1e88ad0 100644
--- a/spec/services/packages/create_event_service_spec.rb
+++ b/spec/services/packages/create_event_service_spec.rb
@@ -57,18 +57,6 @@ RSpec.describe Packages::CreateEventService do
end
shared_examples 'redis package unique event creation' do |originator_type, expected_scope|
- context 'with feature flag disable' do
- before do
- stub_feature_flags(collect_package_events_redis: false)
- end
-
- it 'does not track the event' do
- expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
-
- subject
- end
- end
-
it 'tracks the event' do
expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(/package/, values: user.id)
@@ -77,18 +65,6 @@ RSpec.describe Packages::CreateEventService do
end
shared_examples 'redis package count event creation' do |originator_type, expected_scope|
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(collect_package_events_redis: false)
- end
-
- it 'does not track the event' do
- expect(::Gitlab::UsageDataCounters::PackageEventCounter).not_to receive(:count)
-
- subject
- end
- end
-
it 'tracks the event' do
expect(::Gitlab::UsageDataCounters::PackageEventCounter).to receive(:count).at_least(:once)
diff --git a/spec/services/packages/create_temporary_package_service_spec.rb b/spec/services/packages/create_temporary_package_service_spec.rb
new file mode 100644
index 00000000000..4b8d37401d8
--- /dev/null
+++ b/spec/services/packages/create_temporary_package_service_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::CreateTemporaryPackageService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:params) { {} }
+ let_it_be(:package_name) { 'my-package' }
+ let_it_be(:package_type) { 'rubygems' }
+
+ describe '#execute' do
+ subject { described_class.new(project, user, params).execute(package_type, name: package_name) }
+
+ let(:package) { Packages::Package.last }
+
+ it 'creates the package', :aggregate_failures do
+ expect { subject }.to change { Packages::Package.count }.by(1)
+
+ expect(package).to be_valid
+ expect(package).to be_processing
+ expect(package.name).to eq(package_name)
+ expect(package.version).to start_with(described_class::PACKAGE_VERSION)
+ expect(package.package_type).to eq(package_type)
+ end
+
+ it 'can create two packages in a row', :aggregate_failures do
+ expect { subject }.to change { Packages::Package.count }.by(1)
+
+ expect do
+ described_class.new(project, user, params).execute(package_type, name: package_name)
+ end.to change { Packages::Package.count }.by(1)
+
+ expect(package).to be_valid
+ expect(package).to be_processing
+ expect(package.name).to eq(package_name)
+ expect(package.version).to start_with(described_class::PACKAGE_VERSION)
+ expect(package.package_type).to eq(package_type)
+ end
+
+ it_behaves_like 'assigns the package creator'
+ it_behaves_like 'assigns build to package'
+ end
+end
diff --git a/spec/services/packages/debian/get_or_create_incoming_service_spec.rb b/spec/services/packages/debian/find_or_create_incoming_service_spec.rb
index ab99b091246..e1393c774b1 100644
--- a/spec/services/packages/debian/get_or_create_incoming_service_spec.rb
+++ b/spec/services/packages/debian/find_or_create_incoming_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Packages::Debian::GetOrCreateIncomingService do
+RSpec.describe Packages::Debian::FindOrCreateIncomingService do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
diff --git a/spec/services/packages/debian/find_or_create_package_service_spec.rb b/spec/services/packages/debian/find_or_create_package_service_spec.rb
new file mode 100644
index 00000000000..3582b1f1dc3
--- /dev/null
+++ b/spec/services/packages/debian/find_or_create_package_service_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::FindOrCreatePackageService do
+ let_it_be(:distribution) { create(:debian_project_distribution) }
+ let_it_be(:project) { distribution.project }
+ let_it_be(:user) { create(:user) }
+ let(:params) { { name: 'foo', version: '1.0+debian', distribution_name: distribution.codename } }
+
+ subject(:service) { described_class.new(project, user, params) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ let(:package) { subject.payload[:package] }
+
+ context 'run once' do
+ it 'creates a new package', :aggregate_failures do
+ expect { subject }.to change { ::Packages::Package.count }.by(1)
+ expect(subject).to be_success
+
+ expect(package).to be_valid
+ expect(package.project_id).to eq(project.id)
+ expect(package.creator_id).to eq(user.id)
+ expect(package.name).to eq('foo')
+ expect(package.version).to eq('1.0+debian')
+ expect(package).to be_debian
+ expect(package.debian_publication.distribution).to eq(distribution)
+ end
+ end
+
+ context 'run twice' do
+ let(:subject2) { service.execute }
+
+ let(:package2) { service.execute.payload[:package] }
+
+ it 'returns the same object' do
+ expect { subject }.to change { ::Packages::Package.count }.by(1)
+ expect { package2 }.not_to change { ::Packages::Package.count }
+
+ expect(package2.id).to eq(package.id)
+ end
+ end
+
+ context 'with non-existing distribution' do
+ let(:params) { { name: 'foo', version: '1.0+debian', distribution_name: 'not-existing' } }
+
+ it 'raises ActiveRecord::RecordNotFound' do
+ expect { package }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/maven/metadata/append_package_file_service_spec.rb b/spec/services/packages/maven/metadata/append_package_file_service_spec.rb
new file mode 100644
index 00000000000..c406ab93630
--- /dev/null
+++ b/spec/services/packages/maven/metadata/append_package_file_service_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::Maven::Metadata::AppendPackageFileService do
+ let_it_be(:package) { create(:maven_package, version: nil) }
+
+ let(:service) { described_class.new(package: package, metadata_content: content) }
+ let(:content) { 'test' }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'with some content' do
+ it 'creates all the related package files', :aggregate_failures do
+ expect { subject }.to change { package.package_files.count }.by(5)
+ expect(subject).to be_success
+
+ expect_file(metadata_file_name, with_content: content, with_content_type: 'application/xml')
+ expect_file("#{metadata_file_name}.md5")
+ expect_file("#{metadata_file_name}.sha1")
+ expect_file("#{metadata_file_name}.sha256")
+ expect_file("#{metadata_file_name}.sha512")
+ end
+ end
+
+ context 'with nil content' do
+ let(:content) { nil }
+
+ it_behaves_like 'returning an error service response', message: 'metadata content is not set'
+ end
+
+ context 'with nil package' do
+ let(:package) { nil }
+
+ it_behaves_like 'returning an error service response', message: 'package is not set'
+ end
+
+ def expect_file(file_name, with_content: nil, with_content_type: '')
+ package_file = package.package_files.recent.with_file_name(file_name).first
+
+ expect(package_file.file).to be_present
+ expect(package_file.file_name).to eq(file_name)
+ expect(package_file.size).to be > 0
+ expect(package_file.file_md5).to be_present
+ expect(package_file.file_sha1).to be_present
+ expect(package_file.file_sha256).to be_present
+ expect(package_file.file.content_type).to eq(with_content_type)
+
+ if with_content
+ expect(package_file.file.read).to eq(with_content)
+ end
+ end
+
+ def metadata_file_name
+ ::Packages::Maven::Metadata.filename
+ end
+ end
+end
diff --git a/spec/services/packages/maven/metadata/create_versions_xml_service_spec.rb b/spec/services/packages/maven/metadata/create_versions_xml_service_spec.rb
new file mode 100644
index 00000000000..109f7adab4e
--- /dev/null
+++ b/spec/services/packages/maven/metadata/create_versions_xml_service_spec.rb
@@ -0,0 +1,277 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::Maven::Metadata::CreateVersionsXmlService do
+ let_it_be(:package) { create(:maven_package, version: nil) }
+
+ let(:versions_in_database) { %w[1.3 2.0-SNAPSHOT 1.6 1.4 1.5-SNAPSHOT] }
+ let(:versions_in_xml) { %w[1.3 2.0-SNAPSHOT 1.6 1.4 1.5-SNAPSHOT] }
+ let(:version_latest) { nil }
+ let(:version_release) { '1.4' }
+ let(:service) { described_class.new(metadata_content: metadata_xml, package: package) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ before do
+ next unless package
+
+ versions_in_database.each do |version|
+ create(:maven_package, name: package.name, version: version, project: package.project)
+ end
+ end
+
+ shared_examples 'returning an xml with versions in the database' do
+ it 'returns a metadata versions xml with versions in the database', :aggregate_failures do
+ result = subject
+
+ expect(result).to be_success
+ expect(versions_from(result.payload[:metadata_content])).to match_array(versions_in_database)
+ end
+ end
+
+ shared_examples 'returning an xml with' do |release:, latest:|
+ it 'returns an xml with the updated release and latest versions', :aggregate_failures do
+ result = subject
+
+ expect(result).to be_success
+ expect(result.payload[:changes_exist]).to be_truthy
+ xml = result.payload[:metadata_content]
+ expect(release_from(xml)).to eq(release)
+ expect(latest_from(xml)).to eq(latest)
+ end
+ end
+
+ context 'with the same versions on both sides' do
+ it 'returns no changes', :aggregate_failures do
+ result = subject
+
+ expect(result).to be_success
+ expect(result.payload).to eq(changes_exist: false, empty_versions: false)
+ end
+ end
+
+ context 'with more versions' do
+ let(:additional_versions) { %w[5.5 5.6 5.7-SNAPSHOT] }
+
+ context 'on the xml side' do
+ let(:versions_in_xml) { versions_in_database + additional_versions }
+
+ it_behaves_like 'returning an xml with versions in the database'
+ end
+
+ context 'on the database side' do
+ let(:versions_in_database) { versions_in_xml + additional_versions }
+
+ it_behaves_like 'returning an xml with versions in the database'
+ end
+ end
+
+ context 'with completely different versions' do
+ let(:versions_in_database) { %w[1.0 1.1 1.2] }
+ let(:versions_in_xml) { %w[2.0 2.1 2.2] }
+
+ it_behaves_like 'returning an xml with versions in the database'
+ end
+
+ context 'with no versions in the database' do
+ let(:versions_in_database) { [] }
+
+ it 'returns a success', :aggregate_failures do
+ result = subject
+
+ expect(result).to be_success
+ expect(result.payload).to eq(changes_exist: true, empty_versions: true)
+ end
+
+ context 'with an xml without a release version' do
+ let(:version_release) { nil }
+
+ it 'returns a success', :aggregate_failures do
+ result = subject
+
+ expect(result).to be_success
+ expect(result.payload).to eq(changes_exist: true, empty_versions: true)
+ end
+ end
+ end
+
+ context 'with differences on both sides' do
+ let(:shared_versions) { %w[1.3 2.0-SNAPSHOT 1.6 1.4 1.5-SNAPSHOT] }
+ let(:additional_versions_in_xml) { %w[5.5 5.6 5.7-SNAPSHOT] }
+ let(:versions_in_xml) { shared_versions + additional_versions_in_xml }
+ let(:additional_versions_in_database) { %w[6.5 6.6 6.7-SNAPSHOT] }
+ let(:versions_in_database) { shared_versions + additional_versions_in_database }
+
+ it_behaves_like 'returning an xml with versions in the database'
+ end
+
+ context 'with a new release and latest from the database' do
+ let(:versions_in_database) { versions_in_xml + %w[4.1 4.2-SNAPSHOT] }
+
+ it_behaves_like 'returning an xml with', release: '4.1', latest: nil
+
+ context 'with a latest in the xml' do
+ let(:version_latest) { '1.6' }
+
+ it_behaves_like 'returning an xml with', release: '4.1', latest: '4.2-SNAPSHOT'
+ end
+ end
+
+ context 'with release and latest not existing in the database' do
+ let(:version_release) { '7.0' }
+ let(:version_latest) { '8.0-SNAPSHOT' }
+
+ it_behaves_like 'returning an xml with', release: '1.4', latest: '1.5-SNAPSHOT'
+ end
+
+ context 'with versions added on the database side that are no more recent than the release' do
+ let(:versions_in_database) { versions_in_xml + %w[4.1 4.2-SNAPSHOT] }
+
+ before do
+ ::Packages::Package.find_by(name: package.name, version: '4.1').update!(created_at: 2.weeks.ago)
+ ::Packages::Package.find_by(name: package.name, version: '4.2-SNAPSHOT').update!(created_at: 2.weeks.ago)
+ end
+
+ it_behaves_like 'returning an xml with', release: '1.4', latest: nil
+
+ context 'with a latest in the xml' do
+ let(:version_latest) { '1.6' }
+
+ it_behaves_like 'returning an xml with', release: '1.4', latest: '1.5-SNAPSHOT'
+ end
+ end
+
+ context 'when only snapshot versions are in the database' do
+ let(:versions_in_database) { %w[4.2-SNAPSHOT] }
+
+ it_behaves_like 'returning an xml with', release: nil, latest: nil
+
+ it 'returns an xml without any release element' do
+ result = subject
+
+ xml_doc = Nokogiri::XML(result.payload[:metadata_content])
+ expect(xml_doc.xpath('//metadata/versioning/release')).to be_empty
+ end
+ end
+
+ context 'last updated timestamp' do
+ let(:versions_in_database) { versions_in_xml + %w[4.1 4.2-SNAPSHOT] }
+
+ it 'updates the last updated timestamp' do
+ original = last_updated_from(metadata_xml)
+
+ result = subject
+
+ expect(result).to be_success
+ expect(original).not_to eq(last_updated_from(result.payload[:metadata_content]))
+ end
+ end
+
+ context 'with an incomplete metadata content' do
+ let(:metadata_xml) { '<metadata></metadata>' }
+
+ it_behaves_like 'returning an error service response', message: 'metadata_content is invalid'
+ end
+
+ context 'with an invalid metadata content' do
+ let(:metadata_xml) { '<meta></metadata>' }
+
+ it_behaves_like 'returning an error service response', message: 'metadata_content is invalid'
+ end
+
+ context 'with metadata content pointing to a file' do
+ let(:service) { described_class.new(metadata_content: file, package: package) }
+ let(:file) do
+ Tempfile.new('metadata').tap do |file|
+ if file_contents
+ file.write(file_contents)
+ file.flush
+ file.rewind
+ end
+ end
+ end
+
+ after do
+ file.close
+ file.unlink
+ end
+
+ context 'with valid content' do
+ let(:file_contents) { metadata_xml }
+
+ it 'returns no changes' do
+ result = subject
+
+ expect(result).to be_success
+ expect(result.payload).to eq(changes_exist: false, empty_versions: false)
+ end
+ end
+
+ context 'with invalid content' do
+ let(:file_contents) { '<meta></metadata>' }
+
+ it_behaves_like 'returning an error service response', message: 'metadata_content is invalid'
+ end
+
+ context 'with no content' do
+ let(:file_contents) { nil }
+
+ it_behaves_like 'returning an error service response', message: 'metadata_content is invalid'
+ end
+ end
+
+ context 'with no package' do
+ let(:metadata_xml) { '' }
+ let(:package) { nil }
+
+ it_behaves_like 'returning an error service response', message: 'package not set'
+ end
+
+ context 'with no metadata content' do
+ let(:metadata_xml) { nil }
+
+ it_behaves_like 'returning an error service response', message: 'metadata_content not set'
+ end
+ end
+
+ def metadata_xml
+ Nokogiri::XML::Builder.new do |xml|
+ xml.metadata do
+ xml.groupId(package.maven_metadatum.app_group)
+ xml.artifactId(package.maven_metadatum.app_name)
+ xml.versioning do
+ xml.release(version_release) if version_release
+ xml.latest(version_latest) if version_latest
+ xml.lastUpdated('20210113130531')
+ xml.versions do
+ versions_in_xml.each do |version|
+ xml.version(version)
+ end
+ end
+ end
+ end
+ end.to_xml
+ end
+
+ def versions_from(xml_content)
+ doc = Nokogiri::XML(xml_content)
+ doc.xpath('//metadata/versioning/versions/version').map(&:content)
+ end
+
+ def release_from(xml_content)
+ doc = Nokogiri::XML(xml_content)
+ doc.xpath('//metadata/versioning/release').first&.content
+ end
+
+ def latest_from(xml_content)
+ doc = Nokogiri::XML(xml_content)
+ doc.xpath('//metadata/versioning/latest').first&.content
+ end
+
+ def last_updated_from(xml_content)
+ doc = Nokogiri::XML(xml_content)
+ doc.xpath('//metadata/versioning/lastUpdated').first.content
+ end
+end
diff --git a/spec/services/packages/maven/metadata/sync_service_spec.rb b/spec/services/packages/maven/metadata/sync_service_spec.rb
new file mode 100644
index 00000000000..be298e95236
--- /dev/null
+++ b/spec/services/packages/maven/metadata/sync_service_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::Maven::Metadata::SyncService do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:versionless_package_for_versions) { create(:maven_package, name: 'test', version: nil, project: project) }
+ let_it_be_with_reload(:metadata_file_for_versions) { create(:package_file, :xml, package: versionless_package_for_versions) }
+
+ let(:service) { described_class.new(container: project, current_user: user, params: { package_name: versionless_package_for_versions.name }) }
+
+ describe '#execute' do
+ let(:create_versions_xml_service_double) { double(::Packages::Maven::Metadata::CreateVersionsXmlService, execute: create_versions_xml_service_response) }
+ let(:append_package_file_service_double) { double(::Packages::Maven::Metadata::AppendPackageFileService, execute: append_package_file_service_response) }
+
+ let(:create_versions_xml_service_response) { ServiceResponse.success(payload: { changes_exist: true, empty_versions: false, metadata_content: 'test' }) }
+ let(:append_package_file_service_response) { ServiceResponse.success(message: 'New metadata package files created') }
+
+ subject { service.execute }
+
+ before do
+ allow(::Packages::Maven::Metadata::CreateVersionsXmlService)
+ .to receive(:new).with(metadata_content: an_instance_of(ObjectStorage::Concern::OpenFile), package: versionless_package_for_versions).and_return(create_versions_xml_service_double)
+ allow(::Packages::Maven::Metadata::AppendPackageFileService)
+ .to receive(:new).with(metadata_content: an_instance_of(String), package: versionless_package_for_versions).and_return(append_package_file_service_double)
+ end
+
+ context 'permissions' do
+ where(:role, :expected_result) do
+ :anonymous | :rejected
+ :developer | :rejected
+ :maintainer | :accepted
+ end
+
+ with_them do
+ if params[:role] == :anonymous
+ let_it_be(:user) { nil }
+ end
+
+ before do
+ project.send("add_#{role}", user) unless role == :anonymous
+ end
+
+ if params[:expected_result] == :rejected
+ it_behaves_like 'returning an error service response', message: 'Not allowed'
+ else
+ it_behaves_like 'returning a success service response', message: 'New metadata package files created'
+ end
+ end
+ end
+
+ context 'with a maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'with no changes' do
+ let(:create_versions_xml_service_response) { ServiceResponse.success(payload: { changes_exist: false }) }
+
+ before do
+ expect(::Packages::Maven::Metadata::AppendPackageFileService).not_to receive(:new)
+ end
+
+ it_behaves_like 'returning a success service response', message: 'No changes for versions xml'
+ end
+
+ context 'with changes' do
+ let(:create_versions_xml_service_response) { ServiceResponse.success(payload: { changes_exist: true, empty_versions: false, metadata_content: 'new metadata' }) }
+
+ it_behaves_like 'returning a success service response', message: 'New metadata package files created'
+
+ context 'with empty versions' do
+ let(:create_versions_xml_service_response) { ServiceResponse.success(payload: { changes_exist: true, empty_versions: true }) }
+
+ before do
+ expect(service.send(:versionless_package_for_versions)).to receive(:destroy!)
+ expect(::Packages::Maven::Metadata::AppendPackageFileService).not_to receive(:new)
+ end
+
+ it_behaves_like 'returning a success service response', message: 'Versionless package for versions destroyed'
+ end
+ end
+
+ context 'with a maven metadata file for versions that is too big' do
+ before do
+ metadata_file_for_versions.update!(size: 100.megabytes)
+ end
+
+ it_behaves_like 'returning an error service response', message: 'Metadata file for versions is too big'
+ end
+
+ context 'with an error from the create versions xml service' do
+ let(:create_versions_xml_service_response) { ServiceResponse.error(message: 'metadata_content is invalid') }
+
+ before do
+ expect(::Packages::Maven::Metadata::AppendPackageFileService).not_to receive(:new)
+ end
+
+ it_behaves_like 'returning an error service response', message: 'metadata_content is invalid'
+ end
+
+ context 'with an error from the append package file service' do
+ let(:append_package_file_service_response) { ServiceResponse.error(message: 'metadata content is not set') }
+
+ it_behaves_like 'returning an error service response', message: 'metadata content is not set'
+ end
+
+ context 'without a package name' do
+ let(:service) { described_class.new(container: project, current_user: user, params: { package_name: nil }) }
+
+ before do
+ expect(::Packages::Maven::Metadata::AppendPackageFileService).not_to receive(:new)
+ expect(::Packages::Maven::Metadata::CreateVersionsXmlService).not_to receive(:new)
+ end
+
+ it_behaves_like 'returning an error service response', message: 'Blank package name'
+ end
+
+ context 'without a versionless package for versions' do
+ before do
+ versionless_package_for_versions.update!(version: '2.2.2')
+ expect(::Packages::Maven::Metadata::AppendPackageFileService).not_to receive(:new)
+ expect(::Packages::Maven::Metadata::CreateVersionsXmlService).not_to receive(:new)
+ end
+
+ it_behaves_like 'returning an error service response', message: 'Non existing versionless package'
+ end
+
+ context 'without a metadata package file for versions' do
+ before do
+ versionless_package_for_versions.package_files.update_all(file_name: 'test.txt')
+ expect(::Packages::Maven::Metadata::AppendPackageFileService).not_to receive(:new)
+ expect(::Packages::Maven::Metadata::CreateVersionsXmlService).not_to receive(:new)
+ end
+
+ it_behaves_like 'returning an error service response', message: 'Non existing metadata file for versions'
+ end
+
+ context 'without a project' do
+ let(:service) { described_class.new(container: nil, current_user: user, params: { package_name: versionless_package_for_versions.name }) }
+
+ before do
+ expect(::Packages::Maven::Metadata::AppendPackageFileService).not_to receive(:new)
+ expect(::Packages::Maven::Metadata::CreateVersionsXmlService).not_to receive(:new)
+ end
+
+ it_behaves_like 'returning an error service response', message: 'Not allowed'
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index 10fce6c1651..ba5729eaf59 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
end
let(:override) { {} }
- let(:package_name) { "@#{namespace.path}/my-app".freeze }
+ let(:package_name) { "@#{namespace.path}/my-app" }
subject { described_class.new(project, user, params).execute }
@@ -42,29 +42,35 @@ RSpec.describe Packages::Npm::CreatePackageService do
it { expect(subject.name).to eq(package_name) }
it { expect(subject.version).to eq(version) }
+
+ context 'with build info' do
+ let(:job) { create(:ci_build, user: user) }
+ let(:params) { super().merge(build: job) }
+
+ it_behaves_like 'assigns build to package'
+ it_behaves_like 'assigns status to package'
+
+ it 'creates a package file build info' do
+ expect { subject }.to change { Packages::PackageFileBuildInfo.count }.by(1)
+ end
+ end
end
describe '#execute' do
context 'scoped package' do
it_behaves_like 'valid package'
+ end
- context 'with build info' do
- let(:job) { create(:ci_build, user: user) }
- let(:params) { super().merge(build: job) }
-
- it_behaves_like 'assigns build to package'
- it_behaves_like 'assigns status to package'
+ context 'scoped package not following the naming convention' do
+ let(:package_name) { '@any-scope/package' }
- it 'creates a package file build info' do
- expect { subject }.to change { Packages::PackageFileBuildInfo.count }.by(1)
- end
- end
+ it_behaves_like 'valid package'
end
- context 'invalid package name' do
- let(:package_name) { "@#{namespace.path}/my-group/my-app".freeze }
+ context 'unscoped package' do
+ let(:package_name) { 'unscoped-package' }
- it { expect { subject }.to raise_error(ActiveRecord::RecordInvalid) }
+ it_behaves_like 'valid package'
end
context 'package already exists' do
@@ -84,11 +90,18 @@ RSpec.describe Packages::Npm::CreatePackageService do
it { expect(subject[:message]).to be 'File is too large.' }
end
- context 'with incorrect namespace' do
- let(:package_name) { '@my_other_namespace/my-app' }
-
- it 'raises a RecordInvalid error' do
- expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ [
+ '@inv@lid_scope/package',
+ '@scope/sub/group',
+ '@scope/../../package',
+ '@scope%2e%2e%2fpackage'
+ ].each do |invalid_package_name|
+ context "with invalid name #{invalid_package_name}" do
+ let(:package_name) { invalid_package_name }
+
+ it 'raises a RecordInvalid error' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
end
end
diff --git a/spec/services/packages/nuget/create_package_service_spec.rb b/spec/services/packages/nuget/create_package_service_spec.rb
deleted file mode 100644
index e338ac36fc3..00000000000
--- a/spec/services/packages/nuget/create_package_service_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Packages::Nuget::CreatePackageService do
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
- let_it_be(:params) { {} }
-
- describe '#execute' do
- subject { described_class.new(project, user, params).execute }
-
- let(:package) { Packages::Package.last }
-
- it 'creates the package' do
- expect { subject }.to change { Packages::Package.count }.by(1)
-
- expect(package).to be_valid
- expect(package.name).to eq(Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME)
- expect(package.version).to start_with(Packages::Nuget::CreatePackageService::PACKAGE_VERSION)
- expect(package.package_type).to eq('nuget')
- end
-
- it 'can create two packages in a row' do
- expect { subject }.to change { Packages::Package.count }.by(1)
- expect { described_class.new(project, user, params).execute }.to change { Packages::Package.count }.by(1)
-
- expect(package).to be_valid
- expect(package.name).to eq(Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME)
- expect(package.version).to start_with(Packages::Nuget::CreatePackageService::PACKAGE_VERSION)
- expect(package.package_type).to eq('nuget')
- end
-
- it_behaves_like 'assigns the package creator'
- it_behaves_like 'assigns build to package'
- it_behaves_like 'assigns status to package'
- end
-end
diff --git a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
index 92b493ed376..c1cce46a54c 100644
--- a/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
+++ b/spec/services/packages/nuget/update_package_from_metadata_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
- let(:package) { create(:nuget_package) }
+ let(:package) { create(:nuget_package, :processing) }
let(:package_file) { package.package_files.first }
let(:service) { described_class.new(package_file) }
let(:package_name) { 'DummyProject.DummyPackage' }
@@ -60,6 +60,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
.to change { ::Packages::Package.count }.by(0)
.and change { Packages::DependencyLink.count }.by(0)
expect(package_file.reload.file_name).not_to eq(package_file_name)
+ expect(package_file.package).to be_processing
expect(package_file.package.reload.name).not_to eq(package_name)
expect(package_file.package.version).not_to eq(package_version)
end
@@ -78,6 +79,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
expect(package.reload.name).to eq(package_name)
expect(package.version).to eq(package_version)
+ expect(package).to be_default
expect(package_file.reload.file_name).to eq(package_file_name)
# hard reset needed to properly reload package_file.file
expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
@@ -184,6 +186,7 @@ RSpec.describe Packages::Nuget::UpdatePackageFromMetadataService, :clean_gitlab_
expect(package.reload.name).to eq(package_name)
expect(package.version).to eq(package_version)
+ expect(package).to be_default
expect(package_file.reload.file_name).to eq(package_file_name)
# hard reset needed to properly reload package_file.file
expect(Packages::PackageFile.find(package_file.id).file.size).not_to eq 0
diff --git a/spec/services/packages/rubygems/dependency_resolver_service_spec.rb b/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
new file mode 100644
index 00000000000..206bffe53f8
--- /dev/null
+++ b/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rubygems::DependencyResolverService do
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:package) { create(:package, project: project) }
+ let_it_be(:user) { create(:user) }
+ let(:gem_name) { package.name }
+ let(:service) { described_class.new(project, user, gem_name: gem_name) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'user without access' do
+ it 'returns a service error' do
+ expect(subject.error?).to be(true)
+ expect(subject.message).to eq('forbidden')
+ end
+ end
+
+ context 'user with access' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when no package is found' do
+ let(:gem_name) { nil }
+
+ it 'returns a service error', :aggregate_failures do
+ expect(subject.error?).to be(true)
+ expect(subject.message).to eq("#{gem_name} not found")
+ end
+ end
+
+ context 'package without dependencies' do
+ it 'returns an empty dependencies array' do
+ expected_result = [{
+ name: package.name,
+ number: package.version,
+ platform: described_class::DEFAULT_PLATFORM,
+ dependencies: []
+ }]
+
+ expect(subject.payload).to eq(expected_result)
+ end
+ end
+
+ context 'package with dependencies' do
+ let(:dependency_link) { create(:packages_dependency_link, :rubygems, package: package)}
+ let(:dependency_link2) { create(:packages_dependency_link, :rubygems, package: package)}
+ let(:dependency_link3) { create(:packages_dependency_link, :rubygems, package: package)}
+
+ it 'returns a set of dependencies' do
+ expected_result = [{
+ name: package.name,
+ number: package.version,
+ platform: described_class::DEFAULT_PLATFORM,
+ dependencies: [
+ [dependency_link.dependency.name, dependency_link.dependency.version_pattern],
+ [dependency_link2.dependency.name, dependency_link2.dependency.version_pattern],
+ [dependency_link3.dependency.name, dependency_link3.dependency.version_pattern]
+ ]
+ }]
+
+ expect(subject.payload).to eq(expected_result)
+ end
+ end
+
+ context 'package with multiple versions' do
+ let(:dependency_link) { create(:packages_dependency_link, :rubygems, package: package)}
+ let(:dependency_link2) { create(:packages_dependency_link, :rubygems, package: package)}
+ let(:dependency_link3) { create(:packages_dependency_link, :rubygems, package: package)}
+ let(:package2) { create(:package, project: project, name: package.name, version: '9.9.9') }
+ let(:dependency_link4) { create(:packages_dependency_link, :rubygems, package: package2)}
+
+ it 'returns a set of dependencies' do
+ expected_result = [{
+ name: package.name,
+ number: package.version,
+ platform: described_class::DEFAULT_PLATFORM,
+ dependencies: [
+ [dependency_link.dependency.name, dependency_link.dependency.version_pattern],
+ [dependency_link2.dependency.name, dependency_link2.dependency.version_pattern],
+ [dependency_link3.dependency.name, dependency_link3.dependency.version_pattern]
+ ]
+ }, {
+ name: package2.name,
+ number: package2.version,
+ platform: described_class::DEFAULT_PLATFORM,
+ dependencies: [
+ [dependency_link4.dependency.name, dependency_link4.dependency.version_pattern]
+ ]
+ }]
+
+ expect(subject.payload).to eq(expected_result)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/pages/legacy_storage_lease_spec.rb b/spec/services/pages/legacy_storage_lease_spec.rb
index c022da6f47f..092dce093ff 100644
--- a/spec/services/pages/legacy_storage_lease_spec.rb
+++ b/spec/services/pages/legacy_storage_lease_spec.rb
@@ -47,14 +47,6 @@ RSpec.describe ::Pages::LegacyStorageLease do
expect(service.execute).to eq(nil)
end
-
- it 'runs guarded method if feature flag is disabled' do
- stub_feature_flags(pages_use_legacy_storage_lease: false)
-
- expect(service).to receive(:execute_unsafe).and_call_original
-
- expect(service.execute).to eq(true)
- end
end
context 'when another service holds the lease for the different project' do
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index 4e366fce0d9..c272ce13132 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -119,6 +119,7 @@ RSpec.describe Projects::Alerting::NotifyService do
end
it_behaves_like 'does not an create alert management alert'
+ it_behaves_like 'creates single system note based on the source of the alert'
context 'auto_close_enabled setting enabled' do
let(:auto_close_enabled) { true }
@@ -131,6 +132,8 @@ RSpec.describe Projects::Alerting::NotifyService do
expect(alert.ended_at).to eql(ended_at)
end
+ it_behaves_like 'creates status-change system note for an auto-resolved alert'
+
context 'related issue exists' do
let(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint_sha) }
let(:issue) { alert.issue }
@@ -209,10 +212,7 @@ RSpec.describe Projects::Alerting::NotifyService do
)
end
- it 'creates a system note corresponding to alert creation' do
- expect { subject }.to change(Note, :count).by(1)
- expect(Note.last.note).to include(source)
- end
+ it_behaves_like 'creates single system note based on the source of the alert'
end
end
diff --git a/spec/services/projects/branches_by_mode_service_spec.rb b/spec/services/projects/branches_by_mode_service_spec.rb
index 9199c3e0b3a..e8bcda8a9c4 100644
--- a/spec/services/projects/branches_by_mode_service_spec.rb
+++ b/spec/services/projects/branches_by_mode_service_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Projects::BranchesByModeService do
branches, prev_page, next_page = subject
- expect(branches.size).to eq(10)
+ expect(branches.size).to eq(11)
expect(next_page).to be_nil
expect(prev_page).to eq("/#{project.full_path}/-/branches/all?offset=2&page=3")
end
@@ -99,7 +99,7 @@ RSpec.describe Projects::BranchesByModeService do
it 'returns branches after the specified branch' do
branches, prev_page, next_page = subject
- expect(branches.size).to eq(14)
+ expect(branches.size).to eq(15)
expect(next_page).to be_nil
expect(prev_page).to eq("/#{project.full_path}/-/branches/all?offset=3&page=4&sort=name_asc")
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index f7da6f75141..306d87eefb8 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -349,27 +349,38 @@ RSpec.describe Projects::CreateService, '#execute' do
context 'default visibility level' do
let(:group) { create(:group, :private) }
- before do
- stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
- group.add_developer(user)
+ using RSpec::Parameterized::TableSyntax
- opts.merge!(
- visibility: 'private',
- name: 'test',
- namespace: group,
- path: 'foo'
- )
+ where(:case_name, :group_level, :project_level) do
+ [
+ ['in public group', Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::INTERNAL],
+ ['in internal group', Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::INTERNAL],
+ ['in private group', Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::PRIVATE]
+ ]
end
- it 'creates a private project' do
- project = create_project(user, opts)
+ with_them do
+ before do
+ stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
+ group.add_developer(user)
+ group.update!(visibility_level: group_level)
- expect(project).to respond_to(:errors)
+ opts.merge!(
+ name: 'test',
+ namespace: group,
+ path: 'foo'
+ )
+ end
- expect(project.errors.any?).to be(false)
- expect(project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- expect(project.saved?).to be(true)
- expect(project.valid?).to be(true)
+ it 'creates project with correct visibility level', :aggregate_failures do
+ project = create_project(user, opts)
+
+ expect(project).to respond_to(:errors)
+ expect(project.errors).to be_blank
+ expect(project.visibility_level).to eq(project_level)
+ expect(project).to be_saved
+ expect(project).to be_valid
+ end
end
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 75d1c98923a..5410e784cc0 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -31,9 +31,34 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
shared_examples 'deleting the project with pipeline and build' do
- context 'with pipeline and build', :sidekiq_inline do # which has optimistic locking
+ context 'with pipeline and build related records', :sidekiq_inline do # which has optimistic locking
let!(:pipeline) { create(:ci_pipeline, project: project) }
- let!(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ let!(:build) { create(:ci_build, :artifacts, :with_runner_session, pipeline: pipeline) }
+ let!(:trace_chunks) { create(:ci_build_trace_chunk, build: build) }
+ let!(:job_variables) { create(:ci_job_variable, job: build) }
+ let!(:report_result) { create(:ci_build_report_result, build: build) }
+ let!(:pending_state) { create(:ci_build_pending_state, build: build) }
+
+ it 'deletes build related records' do
+ expect { destroy_project(project, user, {}) }.to change { Ci::Build.count }.by(-1)
+ .and change { Ci::BuildTraceChunk.count }.by(-1)
+ .and change { Ci::JobArtifact.count }.by(-2)
+ .and change { Ci::JobVariable.count }.by(-1)
+ .and change { Ci::BuildPendingState.count }.by(-1)
+ .and change { Ci::BuildReportResult.count }.by(-1)
+ .and change { Ci::BuildRunnerSession.count }.by(-1)
+ end
+
+ it 'avoids N+1 queries', skip: 'skipped until fixed in https://gitlab.com/gitlab-org/gitlab/-/issues/24644' do
+ recorder = ActiveRecord::QueryRecorder.new { destroy_project(project, user, {}) }
+
+ project = create(:project, :repository, namespace: user.namespace)
+ pipeline = create(:ci_pipeline, project: project)
+ builds = create_list(:ci_build, 3, :artifacts, pipeline: pipeline)
+ create_list(:ci_build_trace_chunk, 3, build: builds[0])
+
+ expect { destroy_project(project, project.owner, {}) }.not_to exceed_query_limit(recorder)
+ end
it_behaves_like 'deleting the project'
end
@@ -60,357 +85,343 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
end
- shared_examples 'project destroy' do
- it_behaves_like 'deleting the project'
+ it_behaves_like 'deleting the project'
- it 'invalidates personal_project_count cache' do
- expect(user).to receive(:invalidate_personal_projects_count)
+ it 'invalidates personal_project_count cache' do
+ expect(user).to receive(:invalidate_personal_projects_count)
- destroy_project(project, user, {})
+ destroy_project(project, user, {})
+ end
+
+ it 'performs cancel for project ci pipelines' do
+ expect(::Ci::AbortProjectPipelinesService).to receive_message_chain(:new, :execute).with(project)
+
+ destroy_project(project, user, {})
+ end
+
+ context 'when project has remote mirrors' do
+ let!(:project) do
+ create(:project, :repository, namespace: user.namespace).tap do |project|
+ project.remote_mirrors.create!(url: 'http://test.com')
+ end
end
- it 'performs cancel for project ci pipelines' do
- expect(::Ci::AbortProjectPipelinesService).to receive_message_chain(:new, :execute).with(project)
+ it 'destroys them' do
+ expect(RemoteMirror.count).to eq(1)
destroy_project(project, user, {})
+
+ expect(RemoteMirror.count).to eq(0)
end
+ end
- context 'when project has remote mirrors' do
- let!(:project) do
- create(:project, :repository, namespace: user.namespace).tap do |project|
- project.remote_mirrors.create!(url: 'http://test.com')
- end
+ context 'when project has exports' do
+ let!(:project_with_export) do
+ create(:project, :repository, namespace: user.namespace).tap do |project|
+ create(:import_export_upload,
+ project: project,
+ export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
end
+ end
- it 'destroys them' do
- expect(RemoteMirror.count).to eq(1)
-
- destroy_project(project, user, {})
+ it 'destroys project and export' do
+ expect do
+ destroy_project(project_with_export, user, {})
+ end.to change(ImportExportUpload, :count).by(-1)
- expect(RemoteMirror.count).to eq(0)
- end
+ expect(Project.all).not_to include(project_with_export)
end
+ end
- context 'when project has exports' do
- let!(:project_with_export) do
- create(:project, :repository, namespace: user.namespace).tap do |project|
- create(:import_export_upload,
- project: project,
- export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz'))
- end
- end
+ context 'Sidekiq fake' do
+ before do
+ # Don't run Sidekiq to check if the renamed repository exists
+ Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
+ end
- it 'destroys project and export' do
- expect do
- destroy_project(project_with_export, user, {})
- end.to change(ImportExportUpload, :count).by(-1)
+ it { expect(Project.all).not_to include(project) }
- expect(Project.all).not_to include(project_with_export)
- end
+ it do
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
end
- context 'Sidekiq fake' do
- before do
- # Dont run sidekiq to check if renamed repository exists
- Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
- end
+ it do
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
+ end
+ end
- it { expect(Project.all).not_to include(project) }
+ context 'when flushing caches fails due to Git errors' do
+ before do
+ allow(project.repository).to receive(:before_delete).and_raise(::Gitlab::Git::CommandError)
+ allow(Gitlab::GitLogger).to receive(:warn).with(
+ class: Repositories::DestroyService.name,
+ container_id: project.id,
+ disk_path: project.disk_path,
+ message: 'Gitlab::Git::CommandError').and_call_original
+ end
- it do
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
- end
+ it_behaves_like 'deleting the project'
+ end
- it do
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
- end
+ context 'when flushing caches fails due to Redis' do
+ before do
+ new_user = create(:user)
+ project.team.add_user(new_user, Gitlab::Access::DEVELOPER)
+ allow_any_instance_of(described_class).to receive(:flush_caches).and_raise(::Redis::CannotConnectError)
end
- context 'when flushing caches fail due to Git errors' do
- before do
- allow(project.repository).to receive(:before_delete).and_raise(::Gitlab::Git::CommandError)
- allow(Gitlab::GitLogger).to receive(:warn).with(
- class: Repositories::DestroyService.name,
- container_id: project.id,
- disk_path: project.disk_path,
- message: 'Gitlab::Git::CommandError').and_call_original
+ it 'keeps project team intact upon an error' do
+ perform_enqueued_jobs do
+ destroy_project(project, user, {})
+ rescue ::Redis::CannotConnectError
end
- it_behaves_like 'deleting the project'
+ expect(project.team.members.count).to eq 2
end
+ end
+
+ context 'with async_execute', :sidekiq_inline do
+ let(:async) { true }
- context 'when flushing caches fail due to Redis' do
+ context 'async delete of project with private issue visibility' do
before do
- new_user = create(:user)
- project.team.add_user(new_user, Gitlab::Access::DEVELOPER)
- allow_any_instance_of(described_class).to receive(:flush_caches).and_raise(::Redis::CannotConnectError)
+ project.project_feature.update_attribute("issues_access_level", ProjectFeature::PRIVATE)
end
- it 'keeps project team intact upon an error' do
- perform_enqueued_jobs do
- destroy_project(project, user, {})
- rescue ::Redis::CannotConnectError
- end
-
- expect(project.team.members.count).to eq 2
- end
+ it_behaves_like 'deleting the project'
end
- context 'with async_execute', :sidekiq_inline do
- let(:async) { true }
+ it_behaves_like 'deleting the project with pipeline and build'
- context 'async delete of project with private issue visibility' do
+ context 'errors' do
+ context 'when `remove_legacy_registry_tags` fails' do
before do
- project.project_feature.update_attribute("issues_access_level", ProjectFeature::PRIVATE)
+ expect_any_instance_of(described_class)
+ .to receive(:remove_legacy_registry_tags).and_return(false)
end
- it_behaves_like 'deleting the project'
+ it_behaves_like 'handles errors thrown during async destroy', "Failed to remove some tags"
end
- it_behaves_like 'deleting the project with pipeline and build'
-
- context 'errors' do
- context 'when `remove_legacy_registry_tags` fails' do
- before do
- expect_any_instance_of(described_class)
- .to receive(:remove_legacy_registry_tags).and_return(false)
- end
-
- it_behaves_like 'handles errors thrown during async destroy', "Failed to remove some tags"
+ context 'when `remove_repository` fails' do
+ before do
+ expect_any_instance_of(described_class)
+ .to receive(:remove_repository).and_return(false)
end
- context 'when `remove_repository` fails' do
- before do
- expect_any_instance_of(described_class)
- .to receive(:remove_repository).and_return(false)
- end
+ it_behaves_like 'handles errors thrown during async destroy', "Failed to remove project repository"
+ end
- it_behaves_like 'handles errors thrown during async destroy', "Failed to remove project repository"
+ context 'when `execute` raises expected error' do
+ before do
+ expect_any_instance_of(Project)
+ .to receive(:destroy!).and_raise(StandardError.new("Other error message"))
end
- context 'when `execute` raises expected error' do
- before do
- expect_any_instance_of(Project)
- .to receive(:destroy!).and_raise(StandardError.new("Other error message"))
- end
+ it_behaves_like 'handles errors thrown during async destroy', "Other error message"
+ end
- it_behaves_like 'handles errors thrown during async destroy', "Other error message"
+ context 'when `execute` raises unexpected error' do
+ before do
+ expect_any_instance_of(Project)
+ .to receive(:destroy!).and_raise(Exception.new('Other error message'))
end
- context 'when `execute` raises unexpected error' do
- before do
- expect_any_instance_of(Project)
- .to receive(:destroy!).and_raise(Exception.new('Other error message'))
- end
+ it 'allows error to bubble up and rolls back project deletion' do
+ expect do
+ destroy_project(project, user, {})
+ end.to raise_error(Exception, 'Other error message')
- it 'allows error to bubble up and rolls back project deletion' do
- expect do
- destroy_project(project, user, {})
- end.to raise_error(Exception, 'Other error message')
-
- expect(project.reload.pending_delete).to be(false)
- expect(project.delete_error).to include("Other error message")
- end
+ expect(project.reload.pending_delete).to be(false)
+ expect(project.delete_error).to include("Other error message")
end
end
end
+ end
- describe 'container registry' do
- context 'when there are regular container repositories' do
- let(:container_repository) { create(:container_repository) }
+ describe 'container registry' do
+ context 'when there are regular container repositories' do
+ let(:container_repository) { create(:container_repository) }
- before do
- stub_container_registry_tags(repository: project.full_path + '/image',
- tags: ['tag'])
- project.container_repositories << container_repository
- end
+ before do
+ stub_container_registry_tags(repository: project.full_path + '/image',
+ tags: ['tag'])
+ project.container_repositories << container_repository
+ end
- context 'when image repository deletion succeeds' do
- it 'removes tags' do
- expect_any_instance_of(ContainerRepository)
- .to receive(:delete_tags!).and_return(true)
+ context 'when image repository deletion succeeds' do
+ it 'removes tags' do
+ expect_any_instance_of(ContainerRepository)
+ .to receive(:delete_tags!).and_return(true)
- destroy_project(project, user)
- end
+ destroy_project(project, user)
end
+ end
- context 'when image repository deletion fails' do
- it 'raises an exception' do
- expect_any_instance_of(ContainerRepository)
- .to receive(:delete_tags!).and_raise(RuntimeError)
+ context 'when image repository deletion fails' do
+ it 'raises an exception' do
+ expect_any_instance_of(ContainerRepository)
+ .to receive(:delete_tags!).and_raise(RuntimeError)
- expect(destroy_project(project, user)).to be false
- end
+ expect(destroy_project(project, user)).to be false
end
+ end
- context 'when registry is disabled' do
- before do
- stub_container_registry_config(enabled: false)
- end
+ context 'when registry is disabled' do
+ before do
+ stub_container_registry_config(enabled: false)
+ end
- it 'does not attempting to remove any tags' do
- expect(Projects::ContainerRepository::DestroyService).not_to receive(:new)
+      it 'does not attempt to remove any tags' do
+ expect(Projects::ContainerRepository::DestroyService).not_to receive(:new)
- destroy_project(project, user)
- end
+ destroy_project(project, user)
end
end
+ end
- context 'when there are tags for legacy root repository' do
- before do
- stub_container_registry_tags(repository: project.full_path,
- tags: ['tag'])
- end
+ context 'when there are tags for legacy root repository' do
+ before do
+ stub_container_registry_tags(repository: project.full_path,
+ tags: ['tag'])
+ end
- context 'when image repository tags deletion succeeds' do
- it 'removes tags' do
- expect_any_instance_of(ContainerRepository)
- .to receive(:delete_tags!).and_return(true)
+ context 'when image repository tags deletion succeeds' do
+ it 'removes tags' do
+ expect_any_instance_of(ContainerRepository)
+ .to receive(:delete_tags!).and_return(true)
- destroy_project(project, user)
- end
+ destroy_project(project, user)
end
+ end
- context 'when image repository tags deletion fails' do
- it 'raises an exception' do
- expect_any_instance_of(ContainerRepository)
- .to receive(:delete_tags!).and_return(false)
+ context 'when image repository tags deletion fails' do
+ it 'raises an exception' do
+ expect_any_instance_of(ContainerRepository)
+ .to receive(:delete_tags!).and_return(false)
- expect(destroy_project(project, user)).to be false
- end
+ expect(destroy_project(project, user)).to be false
end
end
end
+ end
- context 'for a forked project with LFS objects' do
- let(:forked_project) { fork_project(project, user) }
+ context 'for a forked project with LFS objects' do
+ let(:forked_project) { fork_project(project, user) }
- before do
- project.lfs_objects << create(:lfs_object)
- forked_project.reload
- end
+ before do
+ project.lfs_objects << create(:lfs_object)
+ forked_project.reload
+ end
- it 'destroys the fork' do
- expect { destroy_project(forked_project, user) }
- .not_to raise_error
- end
+ it 'destroys the fork' do
+ expect { destroy_project(forked_project, user) }
+ .not_to raise_error
end
+ end
- context 'as the root of a fork network' do
- let!(:fork_1) { fork_project(project, user) }
- let!(:fork_2) { fork_project(project, user) }
+ context 'as the root of a fork network' do
+ let!(:fork_1) { fork_project(project, user) }
+ let!(:fork_2) { fork_project(project, user) }
- it 'updates the fork network with the project name' do
- fork_network = project.fork_network
+ it 'updates the fork network with the project name' do
+ fork_network = project.fork_network
- destroy_project(project, user)
+ destroy_project(project, user)
- fork_network.reload
+ fork_network.reload
- expect(fork_network.deleted_root_project_name).to eq(project.full_name)
- expect(fork_network.root_project).to be_nil
- end
+ expect(fork_network.deleted_root_project_name).to eq(project.full_name)
+ expect(fork_network.root_project).to be_nil
end
+ end
- context 'repository +deleted path removal' do
- context 'regular phase' do
- it 'schedules +deleted removal of existing repos' do
- service = described_class.new(project, user, {})
- allow(service).to receive(:schedule_stale_repos_removal)
+ context 'repository +deleted path removal' do
+ context 'regular phase' do
+ it 'schedules +deleted removal of existing repos' do
+ service = described_class.new(project, user, {})
+ allow(service).to receive(:schedule_stale_repos_removal)
- expect(Repositories::ShellDestroyService).to receive(:new).and_call_original
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(5.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
+ expect(Repositories::ShellDestroyService).to receive(:new).and_call_original
+ expect(GitlabShellWorker).to receive(:perform_in)
+ .with(5.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
- service.execute
- end
+ service.execute
end
+ end
- context 'stale cleanup' do
- let(:async) { true }
+ context 'stale cleanup' do
+ let(:async) { true }
- it 'schedules +deleted wiki and repo removal' do
- allow(ProjectDestroyWorker).to receive(:perform_async)
+ it 'schedules +deleted wiki and repo removal' do
+ allow(ProjectDestroyWorker).to receive(:perform_async)
- expect(Repositories::ShellDestroyService).to receive(:new).with(project.repository).and_call_original
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
+ expect(Repositories::ShellDestroyService).to receive(:new).with(project.repository).and_call_original
+ expect(GitlabShellWorker).to receive(:perform_in)
+ .with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
- expect(Repositories::ShellDestroyService).to receive(:new).with(project.wiki.repository).and_call_original
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.wiki.disk_path))
+ expect(Repositories::ShellDestroyService).to receive(:new).with(project.wiki.repository).and_call_original
+ expect(GitlabShellWorker).to receive(:perform_in)
+ .with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.wiki.disk_path))
- destroy_project(project, user, {})
- end
+ destroy_project(project, user, {})
end
end
+ end
- context 'snippets' do
- let!(:snippet1) { create(:project_snippet, project: project, author: user) }
- let!(:snippet2) { create(:project_snippet, project: project, author: user) }
-
- it 'does not include snippets when deleting in batches' do
- expect(project).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:container_repositories, :snippets] })
+ context 'snippets' do
+ let!(:snippet1) { create(:project_snippet, project: project, author: user) }
+ let!(:snippet2) { create(:project_snippet, project: project, author: user) }
- destroy_project(project, user)
- end
+ it 'does not include snippets when deleting in batches' do
+ expect(project).to receive(:destroy_dependent_associations_in_batches).with({ exclude: [:container_repositories, :snippets] })
- it 'calls the bulk snippet destroy service' do
- expect(project.snippets.count).to eq 2
+ destroy_project(project, user)
+ end
- expect(Snippets::BulkDestroyService).to receive(:new)
- .with(user, project.snippets).and_call_original
+ it 'calls the bulk snippet destroy service' do
+ expect(project.snippets.count).to eq 2
- expect do
- destroy_project(project, user)
- end.to change(Snippet, :count).by(-2)
- end
+ expect(Snippets::BulkDestroyService).to receive(:new)
+ .with(user, project.snippets).and_call_original
- context 'when an error is raised deleting snippets' do
- it 'does not delete project' do
- allow_next_instance_of(Snippets::BulkDestroyService) do |instance|
- allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
- end
-
- expect(destroy_project(project, user)).to be_falsey
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
- end
- end
+ expect do
+ destroy_project(project, user)
+ end.to change(Snippet, :count).by(-2)
end
- context 'error while destroying', :sidekiq_inline do
- let!(:pipeline) { create(:ci_pipeline, project: project) }
- let!(:builds) { create_list(:ci_build, 2, :artifacts, pipeline: pipeline) }
- let!(:build_trace) { create(:ci_build_trace_chunk, build: builds[0]) }
-
- it 'deletes on retry' do
- # We can expect this to timeout for very large projects
- # TODO: remove allow_next_instance_of: https://gitlab.com/gitlab-org/gitlab/-/issues/220440
- allow_any_instance_of(Ci::Build).to receive(:destroy).and_raise('boom')
- destroy_project(project, user, {})
-
- allow_any_instance_of(Ci::Build).to receive(:destroy).and_call_original
- destroy_project(project, user, {})
+ context 'when an error is raised deleting snippets' do
+ it 'does not delete project' do
+ allow_next_instance_of(Snippets::BulkDestroyService) do |instance|
+ allow(instance).to receive(:execute).and_return(ServiceResponse.error(message: 'foo'))
+ end
- expect(Project.unscoped.all).not_to include(project)
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
- expect(project.all_pipelines).to be_empty
- expect(project.builds).to be_empty
+ expect(destroy_project(project, user)).to be_falsey
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
end
end
end
- context 'when project_transactionless_destroy enabled' do
- it_behaves_like 'project destroy'
- end
+ context 'error while destroying', :sidekiq_inline do
+ let!(:pipeline) { create(:ci_pipeline, project: project) }
+ let!(:builds) { create_list(:ci_build, 2, :artifacts, pipeline: pipeline) }
+ let!(:build_trace) { create(:ci_build_trace_chunk, build: builds[0]) }
- context 'when project_transactionless_destroy disabled', :sidekiq_inline do
- before do
- stub_feature_flags(project_transactionless_destroy: false)
- end
+ it 'deletes on retry' do
+ # We can expect this to timeout for very large projects
+ # TODO: remove allow_next_instance_of: https://gitlab.com/gitlab-org/gitlab/-/issues/220440
+ allow_any_instance_of(Ci::Build).to receive(:destroy).and_raise('boom')
+ destroy_project(project, user, {})
+
+ allow_any_instance_of(Ci::Build).to receive(:destroy).and_call_original
+ destroy_project(project, user, {})
- it_behaves_like 'project destroy'
+ expect(Project.unscoped.all).not_to include(project)
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
+ expect(project.all_pipelines).to be_empty
+ expect(project.builds).to be_empty
+ end
end
def destroy_project(project, user, params = {})
diff --git a/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb b/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb
index 15c9d1e5925..2dc4a56368b 100644
--- a/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb
+++ b/spec/services/projects/schedule_bulk_repository_shard_moves_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Projects::ScheduleBulkRepositoryShardMovesService do
it_behaves_like 'moves repository shard in bulk' do
let_it_be_with_reload(:container) { create(:project, :repository).tap { |project| project.track_project_repository } }
- let(:move_service_klass) { ProjectRepositoryStorageMove }
- let(:bulk_worker_klass) { ::ProjectScheduleBulkRepositoryShardMovesWorker }
+ let(:move_service_klass) { Projects::RepositoryStorageMove }
+ let(:bulk_worker_klass) { ::Projects::ScheduleBulkRepositoryShardMovesWorker }
end
end
diff --git a/spec/services/projects/update_pages_configuration_service_spec.rb b/spec/services/projects/update_pages_configuration_service_spec.rb
index 294de813e02..9ef66a10f0d 100644
--- a/spec/services/projects/update_pages_configuration_service_spec.rb
+++ b/spec/services/projects/update_pages_configuration_service_spec.rb
@@ -26,11 +26,18 @@ RSpec.describe Projects::UpdatePagesConfigurationService do
context 'when configuration changes' do
it 'updates the config and reloads the daemon' do
- allow(service).to receive(:update_file).and_call_original
-
expect(service).to receive(:update_file).with(file.path, an_instance_of(String))
.and_call_original
- expect(service).to receive(:reload_daemon).and_call_original
+ allow(service).to receive(:update_file).with(File.join(::Settings.pages.path, '.update'),
+ an_instance_of(String)).and_call_original
+
+ expect(subject).to include(status: :success)
+ end
+
+ it "doesn't update configuration files if updates on legacy storage are disabled" do
+ stub_feature_flags(pages_update_legacy_storage: false)
+
+ expect(service).not_to receive(:update_file)
expect(subject).to include(status: :success)
end
@@ -42,8 +49,8 @@ RSpec.describe Projects::UpdatePagesConfigurationService do
service.execute
end
- it 'does not update the .update file' do
- expect(service).not_to receive(:reload_daemon)
+ it 'does not update anything' do
+ expect(service).not_to receive(:update_file)
expect(subject).to include(status: :success)
end
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 6bf2876f640..b735f4b6bc2 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -335,6 +335,41 @@ RSpec.describe Projects::UpdatePagesService do
end
end
+ context 'when retrying the job' do
+ let!(:older_deploy_job) do
+ create(:generic_commit_status, :failed, pipeline: pipeline,
+ ref: build.ref,
+ stage: 'deploy',
+ name: 'pages:deploy')
+ end
+
+ before do
+ create(:ci_job_artifact, :correct_checksum, file: file, job: build)
+ create(:ci_job_artifact, file_type: :metadata, file_format: :gzip, file: metadata, job: build)
+ build.reload
+ end
+
+    it 'marks older pages:deploy jobs as retried' do
+ expect(execute).to eq(:success)
+
+ expect(older_deploy_job.reload).to be_retried
+ end
+
+ context 'when FF ci_fix_commit_status_retried is disabled' do
+ before do
+ stub_feature_flags(ci_fix_commit_status_retried: false)
+ end
+
+      it 'does not mark older pages:deploy jobs as retried' do
+ expect(execute).to eq(:success)
+
+ expect(older_deploy_job.reload).not_to be_retried
+ end
+ end
+ end
+
+ private
+
def deploy_status
GenericCommitStatus.find_by(name: 'pages:deploy')
end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index a59b6adf346..b9e909e8615 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -551,7 +551,7 @@ RSpec.describe Projects::UpdateService do
expect(project).to be_repository_read_only
expect(project.repository_storage_moves.last).to have_attributes(
- state: ::ProjectRepositoryStorageMove.state_machines[:state].states[:scheduled].value,
+ state: ::Projects::RepositoryStorageMove.state_machines[:state].states[:scheduled].value,
source_storage_name: 'default',
destination_storage_name: 'test_second_storage'
)
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 1a102b125f6..bf35e72a037 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -1949,6 +1949,100 @@ RSpec.describe QuickActions::InterpretService do
end
end
end
+
+ context 'invite_email command' do
+ let_it_be(:issuable) { issue }
+
+ it_behaves_like 'empty command', "No email participants were added. Either none were provided, or they already exist." do
+ let(:content) { '/invite_email' }
+ end
+
+ context 'with existing email participant' do
+ let(:content) { '/invite_email a@gitlab.com' }
+
+ before do
+ issuable.issue_email_participants.create!(email: "a@gitlab.com")
+ end
+
+ it_behaves_like 'empty command', "No email participants were added. Either none were provided, or they already exist."
+ end
+
+ context 'with new email participants' do
+ let(:content) { '/invite_email a@gitlab.com b@gitlab.com' }
+
+ subject(:add_emails) { service.execute(content, issuable) }
+
+      it 'returns a message' do
+ _, _, message = add_emails
+
+ expect(message).to eq('Added a@gitlab.com and b@gitlab.com.')
+ end
+
+ it 'adds 2 participants' do
+ expect { add_emails }.to change { issue.issue_email_participants.count }.by(2)
+ end
+
+ context 'with mixed case email' do
+ let(:content) { '/invite_email FirstLast@GitLab.com' }
+
+        it 'returns a correctly cased message' do
+ _, _, message = add_emails
+
+ expect(message).to eq('Added FirstLast@GitLab.com.')
+ end
+ end
+
+ context 'with invalid email' do
+ let(:content) { '/invite_email a@gitlab.com bad_email' }
+
+ it 'only adds valid emails' do
+ expect { add_emails }.to change { issue.issue_email_participants.count }.by(1)
+ end
+ end
+
+ context 'with existing email' do
+ let(:content) { '/invite_email a@gitlab.com existing@gitlab.com' }
+
+ it 'only adds new emails' do
+ issue.issue_email_participants.create!(email: 'existing@gitlab.com')
+
+ expect { add_emails }.to change { issue.issue_email_participants.count }.by(1)
+ end
+
+ it 'only adds new (case insensitive) emails' do
+ issue.issue_email_participants.create!(email: 'EXISTING@gitlab.com')
+
+ expect { add_emails }.to change { issue.issue_email_participants.count }.by(1)
+ end
+ end
+
+ context 'with duplicate email' do
+ let(:content) { '/invite_email a@gitlab.com a@gitlab.com' }
+
+ it 'only adds unique new emails' do
+ expect { add_emails }.to change { issue.issue_email_participants.count }.by(1)
+ end
+ end
+
+ context 'with more than 6 emails' do
+ let(:content) { '/invite_email a@gitlab.com b@gitlab.com c@gitlab.com d@gitlab.com e@gitlab.com f@gitlab.com g@gitlab.com' }
+
+ it 'only adds 6 new emails' do
+ expect { add_emails }.to change { issue.issue_email_participants.count }.by(6)
+ end
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(issue_email_participants: false)
+ end
+
+ it 'does not add any participants' do
+ expect { add_emails }.not_to change { issue.issue_email_participants.count }
+ end
+ end
+ end
+ end
end
describe '#explain' do
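The `/invite_email` cases above pin down the command's filtering rules: invalid addresses are dropped, duplicates within one command collapse to a single entry, comparison against existing participants is case-insensitive, and at most six new participants are added per invocation. A self-contained sketch of that filtering, using made-up helper names rather than the real service's internals, could look like this:

    require 'uri'

    MAX_NEW_EMAIL_PARTICIPANTS = 6

    # Illustrative only: mirrors the behaviour the specs describe, not GitLab's code.
    def emails_to_add(requested, existing)
      existing_downcased = existing.map(&:downcase)

      requested
        .select { |email| email.match?(URI::MailTo::EMAIL_REGEXP) }     # drop invalid addresses
        .uniq(&:downcase)                                               # collapse duplicates in the command
        .reject { |email| existing_downcased.include?(email.downcase) } # skip existing participants (any case)
        .first(MAX_NEW_EMAIL_PARTICIPANTS)                              # cap at six new participants
    end

    emails_to_add(%w[a@gitlab.com A@gitlab.com bad_email b@gitlab.com], %w[EXISTING@gitlab.com])
    # => ["a@gitlab.com", "b@gitlab.com"]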
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index a545b0f070a..dab38445ccf 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -4,48 +4,64 @@ require 'spec_helper'
RSpec.describe Repositories::ChangelogService do
describe '#execute' do
- it 'generates and commits a changelog section' do
- project = create(:project, :empty_repo)
- creator = project.creator
- author1 = create(:user)
- author2 = create(:user)
-
- project.add_maintainer(author1)
- project.add_maintainer(author2)
-
- mr1 = create(:merge_request, :merged, target_project: project)
- mr2 = create(:merge_request, :merged, target_project: project)
-
- # The range of commits ignores the first commit, but includes the last
- # commit. To ensure both the commits below are included, we must create an
- # extra commit.
- #
- # In the real world, the start commit of the range will be the last commit
- # of the previous release, so ignoring that is expected and desired.
- sha1 = create_commit(
+ let!(:project) { create(:project, :empty_repo) }
+ let!(:creator) { project.creator }
+ let!(:author1) { create(:user) }
+ let!(:author2) { create(:user) }
+ let!(:mr1) { create(:merge_request, :merged, target_project: project) }
+ let!(:mr2) { create(:merge_request, :merged, target_project: project) }
+
+ # The range of commits ignores the first commit, but includes the last
+ # commit. To ensure both the commits below are included, we must create an
+ # extra commit.
+ #
+ # In the real world, the start commit of the range will be the last commit
+ # of the previous release, so ignoring that is expected and desired.
+ let!(:sha1) do
+ create_commit(
project,
creator,
commit_message: 'Initial commit',
actions: [{ action: 'create', content: 'test', file_path: 'README.md' }]
)
+ end
+
+ let!(:sha2) do
+ project.add_maintainer(author1)
- sha2 = create_commit(
+ create_commit(
project,
author1,
commit_message: "Title 1\n\nChangelog: feature",
actions: [{ action: 'create', content: 'foo', file_path: 'a.txt' }]
)
+ end
+
+ let!(:sha3) do
+ project.add_maintainer(author2)
- sha3 = create_commit(
+ create_commit(
project,
author2,
commit_message: "Title 2\n\nChangelog: feature",
actions: [{ action: 'create', content: 'bar', file_path: 'b.txt' }]
)
+ end
- commit1 = project.commit(sha2)
- commit2 = project.commit(sha3)
+ let!(:sha4) do
+ create_commit(
+ project,
+ author2,
+ commit_message: "Title 3\n\nChangelog: feature",
+ actions: [{ action: 'create', content: 'bar', file_path: 'c.txt' }]
+ )
+ end
+ let!(:commit1) { project.commit(sha2) }
+ let!(:commit2) { project.commit(sha3) }
+ let!(:commit3) { project.commit(sha4) }
+
+ it 'generates and commits a changelog section' do
allow(MergeRequestDiffCommit)
.to receive(:oldest_merge_request_id_per_commit)
.with(project.id, [commit2.id, commit1.id])
@@ -54,16 +70,60 @@ RSpec.describe Repositories::ChangelogService do
{ sha: sha3, merge_request_id: mr2.id }
])
- recorder = ActiveRecord::QueryRecorder.new do
- described_class
- .new(project, creator, version: '1.0.0', from: sha1, to: sha3)
- .execute
- end
+ service = described_class
+ .new(project, creator, version: '1.0.0', from: sha1, to: sha3)
+
+ recorder = ActiveRecord::QueryRecorder.new { service.execute }
+ changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
+
+ expect(recorder.count).to eq(11)
+ expect(changelog).to include('Title 1', 'Title 2')
+ end
+
+ it "ignores a commit when it's both added and reverted in the same range" do
+ create_commit(
+ project,
+ author2,
+ commit_message: "Title 4\n\nThis reverts commit #{sha4}",
+ actions: [{ action: 'create', content: 'bar', file_path: 'd.txt' }]
+ )
+
+ described_class
+ .new(project, creator, version: '1.0.0', from: sha1)
+ .execute
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
- expect(recorder.count).to eq(10)
expect(changelog).to include('Title 1', 'Title 2')
+ expect(changelog).not_to include('Title 3', 'Title 4')
+ end
+
+ it 'includes a revert commit when it has a trailer' do
+ create_commit(
+ project,
+ author2,
+ commit_message: "Title 4\n\nThis reverts commit #{sha4}\n\nChangelog: added",
+ actions: [{ action: 'create', content: 'bar', file_path: 'd.txt' }]
+ )
+
+ described_class
+ .new(project, creator, version: '1.0.0', from: sha1)
+ .execute
+
+ changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
+
+ expect(changelog).to include('Title 1', 'Title 2', 'Title 4')
+ expect(changelog).not_to include('Title 3')
+ end
+
+ it 'uses the target branch when "to" is unspecified' do
+ described_class
+ .new(project, creator, version: '1.0.0', from: sha1)
+ .execute
+
+ changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
+
+ expect(changelog).to include('Title 1', 'Title 2', 'Title 3')
end
end
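Two behaviours drive the restructured changelog specs: the from..to range excludes `from` and includes `to` (hence the extra seed commit), and a commit is left out of the changelog when another commit in the same range reverts it, unless the reverting commit carries its own `Changelog:` trailer. A rough stand-alone approximation of that revert rule, using placeholder commit attributes rather than the actual service code, might be:

    require 'set'

    # Illustrative only: commits are assumed to expose #sha and #message.
    REVERT_PATTERN  = /This reverts commit (\h{40})/
    TRAILER_PATTERN = /^Changelog: \w+/

    def changelog_commits(commits)
      reverted_shas = commits.filter_map { |commit| commit.message[REVERT_PATTERN, 1] }.to_set

      commits.select do |commit|
        # A commit reverted inside the same range never makes the changelog.
        next false if reverted_shas.include?(commit.sha)

        # Everything else needs a Changelog trailer, including reverts that
        # should be kept on purpose.
        commit.message.match?(TRAILER_PATTERN)
      end
    end

    Commit   = Struct.new(:sha, :message, keyword_init: true)
    added    = Commit.new(sha: 'a' * 40, message: "Title 3\n\nChangelog: feature")
    reverter = Commit.new(sha: 'b' * 40, message: "Title 4\n\nThis reverts commit #{'a' * 40}")

    changelog_commits([added, reverter]).map(&:sha)
    # => [] (Title 3 was reverted, Title 4 carries no trailer)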
diff --git a/spec/services/snippets/schedule_bulk_repository_shard_moves_service_spec.rb b/spec/services/snippets/schedule_bulk_repository_shard_moves_service_spec.rb
index 764c7f94a46..9286d73ed4a 100644
--- a/spec/services/snippets/schedule_bulk_repository_shard_moves_service_spec.rb
+++ b/spec/services/snippets/schedule_bulk_repository_shard_moves_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Snippets::ScheduleBulkRepositoryShardMovesService do
it_behaves_like 'moves repository shard in bulk' do
let_it_be_with_reload(:container) { create(:snippet, :repository) }
- let(:move_service_klass) { SnippetRepositoryStorageMove }
- let(:bulk_worker_klass) { ::SnippetScheduleBulkRepositoryShardMovesWorker }
+ let(:move_service_klass) { Snippets::RepositoryStorageMove }
+ let(:bulk_worker_klass) { ::Snippets::ScheduleBulkRepositoryShardMovesWorker }
end
end
diff --git a/spec/services/system_hooks_service_spec.rb b/spec/services/system_hooks_service_spec.rb
index 1ec5237370f..446325e5f71 100644
--- a/spec/services/system_hooks_service_spec.rb
+++ b/spec/services/system_hooks_service_spec.rb
@@ -149,9 +149,6 @@ RSpec.describe SystemHooksService do
it { expect(event_name(project, :rename)).to eq "project_rename" }
it { expect(event_name(project, :transfer)).to eq "project_transfer" }
it { expect(event_name(project, :update)).to eq "project_update" }
- it { expect(event_name(project_member, :create)).to eq "user_add_to_team" }
- it { expect(event_name(project_member, :destroy)).to eq "user_remove_from_team" }
- it { expect(event_name(project_member, :update)).to eq "user_update_for_team" }
it { expect(event_name(key, :create)).to eq 'key_create' }
it { expect(event_name(key, :destroy)).to eq 'key_destroy' }
end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index df4880dfa13..54cef164f1c 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -779,4 +779,17 @@ RSpec.describe SystemNoteService do
described_class.change_incident_severity(incident, author)
end
end
+
+ describe '.log_resolving_alert' do
+ let(:alert) { build(:alert_management_alert) }
+ let(:monitoring_tool) { 'Prometheus' }
+
+ it 'calls AlertManagementService' do
+ expect_next_instance_of(SystemNotes::AlertManagementService) do |service|
+ expect(service).to receive(:log_resolving_alert).with(monitoring_tool)
+ end
+
+ described_class.log_resolving_alert(alert, monitoring_tool)
+ end
+ end
end
diff --git a/spec/services/system_notes/alert_management_service_spec.rb b/spec/services/system_notes/alert_management_service_spec.rb
index 4ebaa54534c..fc71799d8c5 100644
--- a/spec/services/system_notes/alert_management_service_spec.rb
+++ b/spec/services/system_notes/alert_management_service_spec.rb
@@ -59,4 +59,17 @@ RSpec.describe ::SystemNotes::AlertManagementService do
expect(subject.note).to eq("changed the status to **Resolved** by closing issue #{issue.to_reference(project)}")
end
end
+
+ describe '#log_resolving_alert' do
+ subject { described_class.new(noteable: noteable, project: project).log_resolving_alert('Some Service') }
+
+ it_behaves_like 'a system note' do
+ let(:author) { User.alert_bot }
+ let(:action) { 'new_alert_added' }
+ end
+
+ it 'has the appropriate message' do
+ expect(subject.note).to eq('logged a resolving alert from **Some Service**')
+ end
+ end
end
diff --git a/spec/services/system_notes/merge_requests_service_spec.rb b/spec/services/system_notes/merge_requests_service_spec.rb
index 2131f3d3bdf..58d2489f878 100644
--- a/spec/services/system_notes/merge_requests_service_spec.rb
+++ b/spec/services/system_notes/merge_requests_service_spec.rb
@@ -189,7 +189,7 @@ RSpec.describe ::SystemNotes::MergeRequestsService do
subject { service.change_branch('target', 'delete', old_branch, new_branch) }
it 'sets the note text' do
- expect(subject.note).to eq "changed automatically target branch to `#{new_branch}` because `#{old_branch}` was deleted"
+ expect(subject.note).to eq "deleted the `#{old_branch}` branch. This merge request now targets the `#{new_branch}` branch"
end
end
diff --git a/spec/services/users/dismiss_user_callout_service_spec.rb b/spec/services/users/dismiss_user_callout_service_spec.rb
new file mode 100644
index 00000000000..22f84a939f7
--- /dev/null
+++ b/spec/services/users/dismiss_user_callout_service_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::DismissUserCalloutService do
+ let(:user) { create(:user) }
+
+ let(:service) do
+ described_class.new(
+ container: nil, current_user: user, params: { feature_name: UserCallout.feature_names.each_key.first }
+ )
+ end
+
+ describe '#execute' do
+ subject(:execute) { service.execute }
+
+ it 'returns a user callout' do
+ expect(execute).to be_an_instance_of(UserCallout)
+ end
+
+    it 'sets the dismissed_at attribute to the current time' do
+ freeze_time do
+ expect(execute).to have_attributes(dismissed_at: Time.current)
+ end
+ end
+ end
+end
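The new `Users::DismissUserCalloutService` spec above only constrains observable behaviour: `execute` returns a `UserCallout` for the current user with `dismissed_at` set to the current time. One minimal shape that would satisfy it, assuming a `callouts` association on `User`, is sketched below; the real service is likely richer (parameter validation, race-safe upserts, and so on).

    # Sketch only, not the actual GitLab implementation.
    module Users
      class DismissUserCalloutService
        def initialize(container:, current_user:, params:)
          @current_user = current_user
          @feature_name = params[:feature_name]
        end

        def execute
          callout = @current_user.callouts.find_or_initialize_by(feature_name: @feature_name)
          callout.update(dismissed_at: Time.current)
          callout
        end
      end
    end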
diff --git a/spec/services/users/refresh_authorized_projects_service_spec.rb b/spec/services/users/refresh_authorized_projects_service_spec.rb
index cc01b22f9d2..1e74ff3d9eb 100644
--- a/spec/services/users/refresh_authorized_projects_service_spec.rb
+++ b/spec/services/users/refresh_authorized_projects_service_spec.rb
@@ -152,9 +152,13 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
expect(Gitlab::AppJsonLogger).to(
receive(:info)
.with(event: 'authorized_projects_refresh',
+ user_id: user.id,
'authorized_projects_refresh.source': source,
- 'authorized_projects_refresh.rows_deleted': 0,
- 'authorized_projects_refresh.rows_added': 1))
+ 'authorized_projects_refresh.rows_deleted_count': 0,
+ 'authorized_projects_refresh.rows_added_count': 1,
+ 'authorized_projects_refresh.rows_deleted_slice': [],
+ 'authorized_projects_refresh.rows_added_slice': [[user.id, project.id, Gitlab::Access::MAINTAINER]])
+ )
service.update_authorizations([], [[user.id, project.id, Gitlab::Access::MAINTAINER]])
end