Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/abuse_report_spec.rb | 36
-rw-r--r--  spec/models/achievements/user_achievement_spec.rb | 29
-rw-r--r--  spec/models/airflow/dags_spec.rb | 17
-rw-r--r--  spec/models/alert_management/alert_assignee_spec.rb | 6
-rw-r--r--  spec/models/alert_management/alert_spec.rb | 10
-rw-r--r--  spec/models/alert_management/alert_user_mention_spec.rb | 6
-rw-r--r--  spec/models/application_setting_spec.rb | 113
-rw-r--r--  spec/models/audit_event_spec.rb | 4
-rw-r--r--  spec/models/board_spec.rb | 8
-rw-r--r--  spec/models/bulk_import_spec.rb | 29
-rw-r--r--  spec/models/bulk_imports/batch_tracker_spec.rb | 16
-rw-r--r--  spec/models/bulk_imports/entity_spec.rb | 66
-rw-r--r--  spec/models/bulk_imports/export_batch_spec.rb | 17
-rw-r--r--  spec/models/bulk_imports/export_spec.rb | 3
-rw-r--r--  spec/models/bulk_imports/file_transfer/project_config_spec.rb | 12
-rw-r--r--  spec/models/bulk_imports/tracker_spec.rb | 5
-rw-r--r--  spec/models/chat_name_spec.rb | 7
-rw-r--r--  spec/models/ci/build_metadata_spec.rb | 1
-rw-r--r--  spec/models/ci/build_pending_state_spec.rb | 7
-rw-r--r--  spec/models/ci/build_spec.rb | 160
-rw-r--r--  spec/models/ci/build_trace_chunk_spec.rb | 6
-rw-r--r--  spec/models/ci/catalog/listing_spec.rb | 65
-rw-r--r--  spec/models/ci/daily_build_group_report_result_spec.rb | 6
-rw-r--r--  spec/models/ci/group_variable_spec.rb | 2
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 25
-rw-r--r--  spec/models/ci/job_token/scope_spec.rb | 8
-rw-r--r--  spec/models/ci/job_variable_spec.rb | 2
-rw-r--r--  spec/models/ci/pipeline_schedule_spec.rb | 17
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 217
-rw-r--r--  spec/models/ci/processable_spec.rb | 2
-rw-r--r--  spec/models/ci/runner_machine_build_spec.rb | 100
-rw-r--r--  spec/models/ci/runner_machine_spec.rb | 200
-rw-r--r--  spec/models/ci/runner_spec.rb | 235
-rw-r--r--  spec/models/ci/runner_version_spec.rb | 11
-rw-r--r--  spec/models/ci/sources/pipeline_spec.rb | 5
-rw-r--r--  spec/models/ci/variable_spec.rb | 2
-rw-r--r--  spec/models/clusters/applications/crossplane_spec.rb | 62
-rw-r--r--  spec/models/clusters/applications/knative_spec.rb | 25
-rw-r--r--  spec/models/clusters/applications/prometheus_spec.rb | 349
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 20
-rw-r--r--  spec/models/clusters/platforms/kubernetes_spec.rb | 9
-rw-r--r--  spec/models/commit_collection_spec.rb | 15
-rw-r--r--  spec/models/commit_status_spec.rb | 39
-rw-r--r--  spec/models/concerns/atomic_internal_id_spec.rb | 99
-rw-r--r--  spec/models/concerns/ci/maskable_spec.rb | 2
-rw-r--r--  spec/models/concerns/ci/partitionable/partitioned_filter_spec.rb | 80
-rw-r--r--  spec/models/concerns/ci/partitionable_spec.rb | 10
-rw-r--r--  spec/models/concerns/each_batch_spec.rb | 32
-rw-r--r--  spec/models/concerns/has_user_type_spec.rb | 4
-rw-r--r--  spec/models/concerns/redis_cacheable_spec.rb | 52
-rw-r--r--  spec/models/concerns/routable_spec.rb | 11
-rw-r--r--  spec/models/concerns/subscribable_spec.rb | 26
-rw-r--r--  spec/models/concerns/token_authenticatable_strategies/base_spec.rb | 37
-rw-r--r--  spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb | 149
-rw-r--r--  spec/models/concerns/token_authenticatable_strategies/encryption_helper_spec.rb | 92
-rw-r--r--  spec/models/concerns/uniquify_spec.rb | 44
-rw-r--r--  spec/models/concerns/web_hooks/has_web_hooks_spec.rb | 41
-rw-r--r--  spec/models/container_registry/data_repair_detail_spec.rb | 11
-rw-r--r--  spec/models/container_registry/event_spec.rb | 22
-rw-r--r--  spec/models/container_repository_spec.rb | 29
-rw-r--r--  spec/models/design_management/design_spec.rb | 5
-rw-r--r--  spec/models/error_tracking/project_error_tracking_setting_spec.rb | 38
-rw-r--r--  spec/models/group_spec.rb | 14
-rw-r--r--  spec/models/hooks/web_hook_spec.rb | 22
-rw-r--r--  spec/models/import_export_upload_spec.rb | 2
-rw-r--r--  spec/models/import_failure_spec.rb | 16
-rw-r--r--  spec/models/integrations/apple_app_store_spec.rb | 5
-rw-r--r--  spec/models/integrations/campfire_spec.rb | 14
-rw-r--r--  spec/models/integrations/google_play_spec.rb | 81
-rw-r--r--  spec/models/integrations/jira_spec.rb | 6
-rw-r--r--  spec/models/integrations/mattermost_slash_commands_spec.rb | 9
-rw-r--r--  spec/models/integrations/slack_slash_commands_spec.rb | 9
-rw-r--r--  spec/models/integrations/squash_tm_spec.rb | 117
-rw-r--r--  spec/models/internal_id_spec.rb | 12
-rw-r--r--  spec/models/issue_spec.rb | 136
-rw-r--r--  spec/models/member_spec.rb | 109
-rw-r--r--  spec/models/members/member_role_spec.rb | 107
-rw-r--r--  spec/models/members/project_member_spec.rb | 2
-rw-r--r--  spec/models/merge_request_spec.rb | 41
-rw-r--r--  spec/models/namespace_spec.rb | 412
-rw-r--r--  spec/models/namespaces/randomized_suffix_path_spec.rb | 2
-rw-r--r--  spec/models/note_spec.rb | 10
-rw-r--r--  spec/models/oauth_access_token_spec.rb | 4
-rw-r--r--  spec/models/onboarding/completion_spec.rb | 73
-rw-r--r--  spec/models/packages/debian/file_metadatum_spec.rb | 1
-rw-r--r--  spec/models/packages/package_file_spec.rb | 1
-rw-r--r--  spec/models/pages/lookup_path_spec.rb | 69
-rw-r--r--  spec/models/pages_domain_spec.rb | 39
-rw-r--r--  spec/models/personal_access_token_spec.rb | 24
-rw-r--r--  spec/models/preloaders/runner_machine_policy_preloader_spec.rb | 38
-rw-r--r--  spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb | 16
-rw-r--r--  spec/models/project_ci_cd_setting_spec.rb | 18
-rw-r--r--  spec/models/project_feature_spec.rb | 34
-rw-r--r--  spec/models/project_setting_spec.rb | 38
-rw-r--r--  spec/models/project_spec.rb | 353
-rw-r--r--  spec/models/projects/data_transfer_spec.rb | 6
-rw-r--r--  spec/models/projects/forks/details_spec.rb (renamed from spec/models/projects/forks/divergence_counts_spec.rb) | 78
-rw-r--r--  spec/models/projects/import_export/relation_export_spec.rb | 22
-rw-r--r--  spec/models/protected_branch_spec.rb | 85
-rw-r--r--  spec/models/repository_spec.rb | 119
-rw-r--r--  spec/models/serverless/domain_cluster_spec.rb | 75
-rw-r--r--  spec/models/serverless/domain_spec.rb | 97
-rw-r--r--  spec/models/serverless/function_spec.rb | 21
-rw-r--r--  spec/models/service_desk/custom_email_verification_spec.rb | 109
-rw-r--r--  spec/models/service_desk_setting_spec.rb | 57
-rw-r--r--  spec/models/user_spec.rb | 229
-rw-r--r--  spec/models/wiki_directory_spec.rb | 9
-rw-r--r--  spec/models/wiki_page_spec.rb | 14
-rw-r--r--  spec/models/work_item_spec.rb | 24
-rw-r--r--  spec/models/work_items/widget_definition_spec.rb | 3
-rw-r--r--  spec/models/work_items/widgets/notifications_spec.rb | 19
111 files changed, 3427 insertions, 2132 deletions
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index 7995cc36383..9026a870138 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -70,6 +70,42 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do
}
end
+ describe 'scopes' do
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:report1) { create(:abuse_report, reporter: reporter) }
+ let_it_be(:report2) { create(:abuse_report, :closed, category: 'phishing') }
+
+ describe '.by_reporter_id' do
+ subject(:results) { described_class.by_reporter_id(reporter.id) }
+
+ it 'returns reports with reporter_id equal to the given user id' do
+ expect(subject).to match_array([report1])
+ end
+ end
+
+ describe '.open' do
+ subject(:results) { described_class.open }
+
+ it 'returns reports without resolved_at value' do
+ expect(subject).to match_array([report, report1])
+ end
+ end
+
+ describe '.closed' do
+ subject(:results) { described_class.closed }
+
+ it 'returns reports with resolved_at value' do
+ expect(subject).to match_array([report2])
+ end
+ end
+
+ describe '.by_category' do
+ it 'returns abuse reports with the specified category' do
+ expect(described_class.by_category('phishing')).to match_array([report2])
+ end
+ end
+ end
+
describe 'before_validation' do
context 'when links to spam contains empty strings' do
let(:report) { create(:abuse_report, links_to_spam: ['', 'https://gitlab.com']) }
diff --git a/spec/models/achievements/user_achievement_spec.rb b/spec/models/achievements/user_achievement_spec.rb
index 9d88bfdd477..41eaadafa67 100644
--- a/spec/models/achievements/user_achievement_spec.rb
+++ b/spec/models/achievements/user_achievement_spec.rb
@@ -7,7 +7,34 @@ RSpec.describe Achievements::UserAchievement, type: :model, feature_category: :u
it { is_expected.to belong_to(:achievement).inverse_of(:user_achievements).required }
it { is_expected.to belong_to(:user).inverse_of(:user_achievements).required }
- it { is_expected.to belong_to(:awarded_by_user).class_name('User').inverse_of(:awarded_user_achievements).optional }
+ it { is_expected.to belong_to(:awarded_by_user).class_name('User').inverse_of(:awarded_user_achievements).required }
it { is_expected.to belong_to(:revoked_by_user).class_name('User').inverse_of(:revoked_user_achievements).optional }
+
+ describe '#revoked?' do
+ subject { achievement.revoked? }
+
+ context 'when revoked' do
+ let_it_be(:achievement) { create(:user_achievement, :revoked) }
+
+ it { is_expected.to be true }
+ end
+
+ context 'when not revoked' do
+ let_it_be(:achievement) { create(:user_achievement) }
+
+ it { is_expected.to be false }
+ end
+ end
+ end
+
+ describe 'scopes' do
+ let_it_be(:user_achievement) { create(:user_achievement) }
+ let_it_be(:revoked_user_achievement) { create(:user_achievement, :revoked) }
+
+ describe '.not_revoked' do
+ it 'only returns user achievements which have not been revoked' do
+ expect(described_class.not_revoked).to contain_exactly(user_achievement)
+ end
+ end
end
end
diff --git a/spec/models/airflow/dags_spec.rb b/spec/models/airflow/dags_spec.rb
deleted file mode 100644
index ff3c4522779..00000000000
--- a/spec/models/airflow/dags_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Airflow::Dags, feature_category: :dataops do
- describe 'associations' do
- it { is_expected.to belong_to(:project) }
- end
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:project) }
- it { is_expected.to validate_presence_of(:dag_name) }
- it { is_expected.to validate_length_of(:dag_name).is_at_most(255) }
- it { is_expected.to validate_length_of(:schedule).is_at_most(255) }
- it { is_expected.to validate_length_of(:fileloc).is_at_most(255) }
- end
-end
diff --git a/spec/models/alert_management/alert_assignee_spec.rb b/spec/models/alert_management/alert_assignee_spec.rb
index c50a3ec0d01..647195380b3 100644
--- a/spec/models/alert_management/alert_assignee_spec.rb
+++ b/spec/models/alert_management/alert_assignee_spec.rb
@@ -5,7 +5,11 @@ require 'spec_helper'
RSpec.describe AlertManagement::AlertAssignee do
describe 'associations' do
it { is_expected.to belong_to(:alert) }
- it { is_expected.to belong_to(:assignee) }
+
+ it do
+ is_expected.to belong_to(:assignee).class_name('User')
+ .with_foreign_key(:user_id).inverse_of(:alert_assignees)
+ end
end
describe 'validations' do
diff --git a/spec/models/alert_management/alert_spec.rb b/spec/models/alert_management/alert_spec.rb
index 685ed81ec84..ff77ca2ab64 100644
--- a/spec/models/alert_management/alert_spec.rb
+++ b/spec/models/alert_management/alert_spec.rb
@@ -16,9 +16,13 @@ RSpec.describe AlertManagement::Alert do
it { is_expected.to belong_to(:prometheus_alert).optional }
it { is_expected.to belong_to(:environment).optional }
it { is_expected.to have_many(:assignees).through(:alert_assignees) }
- it { is_expected.to have_many(:notes) }
- it { is_expected.to have_many(:ordered_notes) }
- it { is_expected.to have_many(:user_mentions) }
+ it { is_expected.to have_many(:notes).inverse_of(:noteable) }
+ it { is_expected.to have_many(:ordered_notes).class_name('Note').inverse_of(:noteable) }
+
+ it do
+ is_expected.to have_many(:user_mentions).class_name('AlertManagement::AlertUserMention')
+ .with_foreign_key(:alert_management_alert_id).inverse_of(:alert)
+ end
end
describe 'validations' do
diff --git a/spec/models/alert_management/alert_user_mention_spec.rb b/spec/models/alert_management/alert_user_mention_spec.rb
index 27c3d290dde..083bf667bea 100644
--- a/spec/models/alert_management/alert_user_mention_spec.rb
+++ b/spec/models/alert_management/alert_user_mention_spec.rb
@@ -4,7 +4,11 @@ require 'spec_helper'
RSpec.describe AlertManagement::AlertUserMention do
describe 'associations' do
- it { is_expected.to belong_to(:alert_management_alert) }
+ it do
+ is_expected.to belong_to(:alert).class_name('::AlertManagement::Alert')
+ .with_foreign_key(:alert_management_alert_id).inverse_of(:user_mentions)
+ end
+
it { is_expected.to belong_to(:note) }
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 5b99c68ec80..8387f4021b6 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
+RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
using RSpec::Parameterized::TableSyntax
subject(:setting) { described_class.create_from_defaults }
@@ -25,6 +25,20 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
it { expect(setting.kroki_formats).to eq({}) }
end
+ describe 'associations' do
+ it do
+ is_expected.to belong_to(:self_monitoring_project).class_name('Project')
+ .with_foreign_key(:instance_administration_project_id)
+ .inverse_of(:application_setting)
+ end
+
+ it do
+ is_expected.to belong_to(:instance_group).class_name('Group')
+ .with_foreign_key(:instance_administrators_group_id)
+ .inverse_of(:application_setting)
+ end
+ end
+
describe 'validations' do
let(:http) { 'http://example.com' }
let(:https) { 'https://example.com' }
@@ -132,6 +146,9 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
it { is_expected.to allow_value(false).for(:user_defaults_to_private_profile) }
it { is_expected.not_to allow_value(nil).for(:user_defaults_to_private_profile) }
+ it { is_expected.to allow_values([true, false]).for(:deny_all_requests_except_allowed) }
+ it { is_expected.not_to allow_value(nil).for(:deny_all_requests_except_allowed) }
+
it 'ensures max_pages_size is an integer greater than 0 (or equal to 0 to indicate unlimited/maximum)' do
is_expected.to validate_numericality_of(:max_pages_size).only_integer.is_greater_than_or_equal_to(0)
.is_less_than(::Gitlab::Pages::MAX_SIZE / 1.megabyte)
@@ -182,7 +199,8 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
it { is_expected.not_to allow_value('default' => 101).for(:repository_storages_weighted).with_message("value for 'default' must be between 0 and 100") }
it { is_expected.not_to allow_value('default' => 100, shouldntexist: 50).for(:repository_storages_weighted).with_message("can't include: shouldntexist") }
- %i[notes_create_limit search_rate_limit search_rate_limit_unauthenticated users_get_by_id_limit].each do |setting|
+ %i[notes_create_limit search_rate_limit search_rate_limit_unauthenticated users_get_by_id_limit
+ projects_api_rate_limit_unauthenticated].each do |setting|
it { is_expected.to allow_value(400).for(setting) }
it { is_expected.not_to allow_value('two').for(setting) }
it { is_expected.not_to allow_value(nil).for(setting) }
@@ -209,6 +227,12 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
it { is_expected.to allow_value('disabled').for(:whats_new_variant) }
it { is_expected.not_to allow_value(nil).for(:whats_new_variant) }
+ it { is_expected.to allow_value('http://example.com/').for(:public_runner_releases_url) }
+ it { is_expected.not_to allow_value(nil).for(:public_runner_releases_url) }
+
+ it { is_expected.to allow_value([true, false]).for(:update_runner_versions_enabled) }
+ it { is_expected.not_to allow_value(nil).for(:update_runner_versions_enabled) }
+
it { is_expected.not_to allow_value(['']).for(:valid_runner_registrars) }
it { is_expected.not_to allow_value(['OBVIOUSLY_WRONG']).for(:valid_runner_registrars) }
it { is_expected.not_to allow_value(%w(project project)).for(:valid_runner_registrars) }
@@ -228,6 +252,10 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
it { is_expected.to allow_value(false).for(:allow_runner_registration_token) }
it { is_expected.not_to allow_value(nil).for(:allow_runner_registration_token) }
+ it { is_expected.to allow_value(true).for(:gitlab_dedicated_instance) }
+ it { is_expected.to allow_value(false).for(:gitlab_dedicated_instance) }
+ it { is_expected.not_to allow_value(nil).for(:gitlab_dedicated_instance) }
+
context 'when deactivate_dormant_users is enabled' do
before do
stub_application_setting(deactivate_dormant_users: true)
@@ -318,7 +346,7 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
end
end
- describe 'default_branch_name validaitions' do
+ describe 'default_branch_name validations' do
context "when javascript tags get sanitized properly" do
it "gets sanitized properly" do
setting.update!(default_branch_name: "hello<script>alert(1)</script>")
@@ -585,6 +613,23 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
end
end
+ describe 'setting validated as `addressable_url` configured with external URI' do
+ before do
+ # Use any property that has the `addressable_url` validation.
+ setting.help_page_documentation_base_url = 'http://example.com'
+ end
+
+ it 'is valid by default' do
+ expect(setting).to be_valid
+ end
+
+ it 'is invalid when unpersisted `deny_all_requests_except_allowed` property is true' do
+ setting.deny_all_requests_except_allowed = true
+
+ expect(setting).not_to be_valid
+ end
+ end
+
context 'key restrictions' do
it 'does not allow all key types to be disabled' do
Gitlab::SSHPublicKey.supported_types.each do |type|
@@ -1124,6 +1169,11 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
it { is_expected.to allow_value(*Gitlab::I18n.available_locales).for(:default_preferred_language) }
it { is_expected.not_to allow_value(nil, '', 'invalid_locale').for(:default_preferred_language) }
end
+
+ context 'for default_syntax_highlighting_theme' do
+ it { is_expected.to allow_value(*Gitlab::ColorSchemes.valid_ids).for(:default_syntax_highlighting_theme) }
+ it { is_expected.not_to allow_value(nil, 0, Gitlab::ColorSchemes.available_schemes.size + 1).for(:default_syntax_highlighting_theme) }
+ end
end
context 'restrict creating duplicates' do
@@ -1144,6 +1194,17 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
end
end
+ describe 'ADDRESSABLE_URL_VALIDATION_OPTIONS' do
+ it 'is applied to all addressable_url validated properties' do
+ url_validators = described_class.validators.select { |validator| validator.is_a?(AddressableUrlValidator) }
+
+ url_validators.each do |validator|
+ expect(validator.options).to match(hash_including(described_class::ADDRESSABLE_URL_VALIDATION_OPTIONS)),
+ "#{validator.attributes} should use ADDRESSABLE_URL_VALIDATION_OPTIONS"
+ end
+ end
+ end
+
describe '#disabled_oauth_sign_in_sources=' do
before do
allow(Devise).to receive(:omniauth_providers).and_return([:github])
@@ -1474,4 +1535,50 @@ RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
expect(setting.personal_access_tokens_disabled?).to eq(false)
end
end
+
+ describe 'email_confirmation_setting prefixes' do
+ before do
+ described_class.create_from_defaults
+ end
+
+ context 'when feature flag `soft_email_confirmation` is not enabled' do
+ before do
+ stub_feature_flags(soft_email_confirmation: false)
+ end
+
+ where(:email_confirmation_setting, :off, :soft, :hard) do
+ 'off' | true | false | false
+ 'soft' | false | true | false
+ 'hard' | false | false | true
+ end
+
+ with_them do
+ it 'returns the correct value when prefixed' do
+ stub_application_setting_enum('email_confirmation_setting', email_confirmation_setting)
+
+ expect(described_class.last.email_confirmation_setting_off?).to be off
+ expect(described_class.last.email_confirmation_setting_soft?).to be soft
+ expect(described_class.last.email_confirmation_setting_hard?).to be hard
+ end
+ end
+
+ it 'calls super' do
+ expect(described_class.last.email_confirmation_setting_off?).to be true
+ expect(described_class.last.email_confirmation_setting_soft?).to be false
+ expect(described_class.last.email_confirmation_setting_hard?).to be false
+ end
+ end
+
+ context 'when feature flag `soft_email_confirmation` is enabled' do
+ before do
+ stub_feature_flags(soft_email_confirmation: true)
+ end
+
+ it 'returns correct value when enum is prefixed' do
+ expect(described_class.last.email_confirmation_setting_off?).to be false
+ expect(described_class.last.email_confirmation_setting_soft?).to be true
+ expect(described_class.last.email_confirmation_setting_hard?).to be false
+ end
+ end
+ end
end
diff --git a/spec/models/audit_event_spec.rb b/spec/models/audit_event_spec.rb
index 9f2724cebee..9e667836b45 100644
--- a/spec/models/audit_event_spec.rb
+++ b/spec/models/audit_event_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe AuditEvent do
+ describe 'associations' do
+ it { is_expected.to belong_to(:user).with_foreign_key(:author_id).inverse_of(:audit_events) }
+ end
+
describe 'validations' do
include_examples 'validates IP address' do
let(:attribute) { :ip_address }
diff --git a/spec/models/board_spec.rb b/spec/models/board_spec.rb
index 6017298e85b..f469dee5ba1 100644
--- a/spec/models/board_spec.rb
+++ b/spec/models/board_spec.rb
@@ -8,7 +8,13 @@ RSpec.describe Board do
describe 'relationships' do
it { is_expected.to belong_to(:project) }
- it { is_expected.to have_many(:lists).order(list_type: :asc, position: :asc).dependent(:delete_all) }
+
+ it do
+ is_expected.to have_many(:lists).order(list_type: :asc, position: :asc).dependent(:delete_all)
+ .inverse_of(:board)
+ end
+
+ it { is_expected.to have_many(:destroyable_lists).order(list_type: :asc, position: :asc).inverse_of(:board) }
end
describe 'validations' do
diff --git a/spec/models/bulk_import_spec.rb b/spec/models/bulk_import_spec.rb
index 3430da43f62..acb1f4a2ef7 100644
--- a/spec/models/bulk_import_spec.rb
+++ b/spec/models/bulk_import_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImport, type: :model do
+RSpec.describe BulkImport, type: :model, feature_category: :importers do
let_it_be(:created_bulk_import) { create(:bulk_import, :created) }
let_it_be(:started_bulk_import) { create(:bulk_import, :started) }
let_it_be(:finished_bulk_import) { create(:bulk_import, :finished) }
@@ -48,4 +48,31 @@ RSpec.describe BulkImport, type: :model do
expect(bulk_import.source_version_info.to_s).to eq(bulk_import.source_version)
end
end
+
+ describe '#update_has_failures' do
+ let(:import) { create(:bulk_import, :started) }
+ let(:entity) { create(:bulk_import_entity, bulk_import: import) }
+
+ context 'when entity has failures' do
+ it 'sets has_failures flag to true' do
+ expect(import.has_failures).to eq(false)
+
+ entity.update!(has_failures: true)
+ import.fail_op!
+
+ expect(import.has_failures).to eq(true)
+ end
+ end
+
+ context 'when entity does not have failures' do
+ it 'sets has_failures flag to false' do
+ expect(import.has_failures).to eq(false)
+
+ entity.update!(has_failures: false)
+ import.fail_op!
+
+ expect(import.has_failures).to eq(false)
+ end
+ end
+ end
end
diff --git a/spec/models/bulk_imports/batch_tracker_spec.rb b/spec/models/bulk_imports/batch_tracker_spec.rb
new file mode 100644
index 00000000000..336943228c7
--- /dev/null
+++ b/spec/models/bulk_imports/batch_tracker_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::BatchTracker, type: :model, feature_category: :importers do
+ describe 'associations' do
+ it { is_expected.to belong_to(:tracker) }
+ end
+
+ describe 'validations' do
+ subject { build(:bulk_import_batch_tracker) }
+
+ it { is_expected.to validate_presence_of(:batch_number) }
+ it { is_expected.to validate_uniqueness_of(:batch_number).scoped_to(:tracker_id) }
+ end
+end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 56796aa1fe4..45f120e6773 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -6,8 +6,13 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
describe 'associations' do
it { is_expected.to belong_to(:bulk_import).required }
it { is_expected.to belong_to(:parent) }
- it { is_expected.to belong_to(:group) }
+ it { is_expected.to belong_to(:group).optional.with_foreign_key(:namespace_id).inverse_of(:bulk_import_entities) }
it { is_expected.to belong_to(:project) }
+
+ it do
+ is_expected.to have_many(:trackers).class_name('BulkImports::Tracker')
+ .with_foreign_key(:bulk_import_entity_id).inverse_of(:entity)
+ end
end
describe 'validations' do
@@ -93,8 +98,6 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
end
it 'is invalid as a project_entity' do
- stub_feature_flags(bulk_import_projects: true)
-
entity = build(:bulk_import_entity, :project_entity, group: build(:group), project: nil)
expect(entity).not_to be_valid
@@ -104,8 +107,6 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
context 'when associated with a project and no group' do
it 'is valid' do
- stub_feature_flags(bulk_import_projects: true)
-
entity = build(:bulk_import_entity, :project_entity, group: nil, project: build(:project))
expect(entity).to be_valid
@@ -135,8 +136,6 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
context 'when the parent is a project import' do
it 'is invalid' do
- stub_feature_flags(bulk_import_projects: true)
-
entity = build(:bulk_import_entity, parent: build(:bulk_import_entity, :project_entity))
expect(entity).not_to be_valid
@@ -178,38 +177,13 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
end
end
- context 'when bulk_import_projects feature flag is disabled and source_type is a project_entity' do
- it 'is invalid' do
- stub_feature_flags(bulk_import_projects: false)
-
- entity = build(:bulk_import_entity, :project_entity)
-
- expect(entity).not_to be_valid
- expect(entity.errors[:base]).to include('invalid entity source type')
- end
- end
-
- context 'when bulk_import_projects feature flag is enabled and source_type is a project_entity' do
+ context 'when source_type is a project_entity' do
it 'is valid' do
- stub_feature_flags(bulk_import_projects: true)
-
entity = build(:bulk_import_entity, :project_entity)
expect(entity).to be_valid
end
end
-
- context 'when bulk_import_projects feature flag is enabled on root ancestor level and source_type is a project_entity' do
- it 'is valid' do
- top_level_namespace = create(:group)
-
- stub_feature_flags(bulk_import_projects: top_level_namespace)
-
- entity = build(:bulk_import_entity, :project_entity, destination_namespace: top_level_namespace.full_path)
-
- expect(entity).to be_valid
- end
- end
end
describe '#encoded_source_full_path' do
@@ -412,4 +386,30 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
end
end
end
+
+ describe '#update_has_failures' do
+ let(:entity) { create(:bulk_import_entity) }
+
+ context 'when entity has failures' do
+ it 'sets has_failures flag to true' do
+ expect(entity.has_failures).to eq(false)
+
+ create(:bulk_import_failure, entity: entity)
+
+ entity.fail_op!
+
+ expect(entity.has_failures).to eq(true)
+ end
+ end
+
+ context 'when entity does not have failures' do
+ it 'sets has_failures flag to false' do
+ expect(entity.has_failures).to eq(false)
+
+ entity.fail_op!
+
+ expect(entity.has_failures).to eq(false)
+ end
+ end
+ end
end
diff --git a/spec/models/bulk_imports/export_batch_spec.rb b/spec/models/bulk_imports/export_batch_spec.rb
new file mode 100644
index 00000000000..43209921b9c
--- /dev/null
+++ b/spec/models/bulk_imports/export_batch_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::ExportBatch, type: :model, feature_category: :importers do
+ describe 'associations' do
+ it { is_expected.to belong_to(:export) }
+ it { is_expected.to have_one(:upload) }
+ end
+
+ describe 'validations' do
+ subject { build(:bulk_import_export_batch) }
+
+ it { is_expected.to validate_presence_of(:batch_number) }
+ it { is_expected.to validate_uniqueness_of(:batch_number).scoped_to(:export_id) }
+ end
+end
diff --git a/spec/models/bulk_imports/export_spec.rb b/spec/models/bulk_imports/export_spec.rb
index d85b77d599b..7173d032bc2 100644
--- a/spec/models/bulk_imports/export_spec.rb
+++ b/spec/models/bulk_imports/export_spec.rb
@@ -2,11 +2,12 @@
require 'spec_helper'
-RSpec.describe BulkImports::Export, type: :model do
+RSpec.describe BulkImports::Export, type: :model, feature_category: :importers do
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:project) }
it { is_expected.to have_one(:upload) }
+ it { is_expected.to have_many(:batches) }
end
describe 'validations' do
diff --git a/spec/models/bulk_imports/file_transfer/project_config_spec.rb b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
index 0f02c5c546f..21fe6cfb3fa 100644
--- a/spec/models/bulk_imports/file_transfer/project_config_spec.rb
+++ b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
@@ -42,6 +42,18 @@ RSpec.describe BulkImports::FileTransfer::ProjectConfig do
it 'returns relation tree of a top level relation' do
expect(subject.top_relation_tree('labels')).to eq('priorities' => {})
end
+
+ it 'returns relation tree merged with deprecated tree' do
+ expect(subject.top_relation_tree('ci_pipelines')).to match(
+ a_hash_including(
+ {
+ 'external_pull_request' => {},
+ 'merge_request' => {},
+ 'stages' => { 'bridges' => {}, 'builds' => {}, 'generic_commit_statuses' => {}, 'statuses' => {} }
+ }
+ )
+ )
+ end
end
describe '#relation_excluded_keys' do
diff --git a/spec/models/bulk_imports/tracker_spec.rb b/spec/models/bulk_imports/tracker_spec.rb
index 1516ab106cb..a618a12df6b 100644
--- a/spec/models/bulk_imports/tracker_spec.rb
+++ b/spec/models/bulk_imports/tracker_spec.rb
@@ -4,7 +4,10 @@ require 'spec_helper'
RSpec.describe BulkImports::Tracker, type: :model do
describe 'associations' do
- it { is_expected.to belong_to(:entity).required }
+ it do
+ is_expected.to belong_to(:entity).required.class_name('BulkImports::Entity')
+ .with_foreign_key(:bulk_import_entity_id).inverse_of(:trackers)
+ end
end
describe 'validations' do
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 0838c232872..9d6b1a56458 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe ChatName, feature_category: :integrations do
subject { chat_name }
- it { is_expected.to belong_to(:integration) }
it { is_expected.to belong_to(:user) }
it { is_expected.to validate_presence_of(:user) }
@@ -16,12 +15,6 @@ RSpec.describe ChatName, feature_category: :integrations do
it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:team_id) }
- it 'is not removed when the project is deleted' do
- expect { subject.reload.integration.project.delete }.not_to change { ChatName.count }
-
- expect(ChatName.where(id: subject.id)).to exist
- end
-
describe '#update_last_used_at', :clean_gitlab_redis_shared_state do
it 'updates the last_used_at timestamp' do
expect(subject.last_used_at).to be_nil
diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb
index fb50ba89cd3..c3b445cbbe5 100644
--- a/spec/models/ci/build_metadata_spec.rb
+++ b/spec/models/ci/build_metadata_spec.rb
@@ -22,7 +22,6 @@ RSpec.describe Ci::BuildMetadata do
it { is_expected.to belong_to(:build) }
it { is_expected.to belong_to(:project) }
- it { is_expected.to belong_to(:runner_machine) }
describe '#update_timeout_state' do
subject { metadata }
diff --git a/spec/models/ci/build_pending_state_spec.rb b/spec/models/ci/build_pending_state_spec.rb
index bff0b35f878..c978e3dba36 100644
--- a/spec/models/ci/build_pending_state_spec.rb
+++ b/spec/models/ci/build_pending_state_spec.rb
@@ -3,10 +3,15 @@
require 'spec_helper'
RSpec.describe Ci::BuildPendingState, feature_category: :continuous_integration do
+ describe 'associations' do
+ it do
+ is_expected.to belong_to(:build).class_name('Ci::Build').with_foreign_key(:build_id).inverse_of(:pending_state)
+ end
+ end
+
describe 'validations' do
subject(:pending_state) { build(:ci_build_pending_state) }
- it { is_expected.to belong_to(:build) }
it { is_expected.to validate_presence_of(:build) }
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 80d6693e08e..ca7f4794a0c 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -22,20 +22,35 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it { is_expected.to belong_to(:runner) }
it { is_expected.to belong_to(:trigger_request) }
it { is_expected.to belong_to(:erased_by) }
+ it { is_expected.to belong_to(:pipeline).inverse_of(:builds) }
it { is_expected.to have_many(:needs).with_foreign_key(:build_id) }
- it { is_expected.to have_many(:sourced_pipelines).with_foreign_key(:source_job_id) }
- it { is_expected.to have_one(:sourced_pipeline).with_foreign_key(:source_job_id) }
+
+ it do
+ is_expected.to have_many(:sourced_pipelines).class_name('Ci::Sources::Pipeline').with_foreign_key(:source_job_id)
+ .inverse_of(:build)
+ end
+
it { is_expected.to have_many(:job_variables).with_foreign_key(:job_id) }
it { is_expected.to have_many(:report_results).with_foreign_key(:build_id) }
it { is_expected.to have_many(:pages_deployments).with_foreign_key(:ci_build_id) }
it { is_expected.to have_one(:deployment) }
- it { is_expected.to have_one(:runner_machine).through(:metadata) }
+ it { is_expected.to have_one(:runner_machine).through(:runner_machine_build) }
it { is_expected.to have_one(:runner_session).with_foreign_key(:build_id) }
it { is_expected.to have_one(:trace_metadata).with_foreign_key(:build_id) }
it { is_expected.to have_one(:runtime_metadata).with_foreign_key(:build_id) }
- it { is_expected.to have_one(:pending_state).with_foreign_key(:build_id) }
+ it { is_expected.to have_one(:pending_state).with_foreign_key(:build_id).inverse_of(:build) }
+
+ it do
+ is_expected.to have_one(:queuing_entry).class_name('Ci::PendingBuild').with_foreign_key(:build_id).inverse_of(:build)
+ end
+
+ it do
+ is_expected.to have_one(:runtime_metadata).class_name('Ci::RunningBuild').with_foreign_key(:build_id)
+ .inverse_of(:build)
+ end
+
it { is_expected.to have_many(:terraform_state_versions).inverse_of(:build).with_foreign_key(:ci_build_id) }
it { is_expected.to validate_presence_of(:ref) }
@@ -1040,7 +1055,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
context 'non public artifacts' do
- let(:build) { create(:ci_build, :artifacts, :non_public_artifacts, pipeline: pipeline) }
+ let(:build) { create(:ci_build, :artifacts, :with_private_artifacts_config, pipeline: pipeline) }
it { is_expected.to be_falsey }
end
@@ -1422,11 +1437,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
let(:build) { create(:ci_build, trait, pipeline: pipeline) }
let(:event) { state }
- context "when transitioning to #{params[:state]}" do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
+ context "when transitioning to #{params[:state]}", :saas do
it 'increments build_completed_report_type metric' do
expect(
::Gitlab::Ci::Artifacts::Metrics
@@ -1926,62 +1937,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
- describe '#failed_but_allowed?' do
- subject { build.failed_but_allowed? }
-
- context 'when build is not allowed to fail' do
- before do
- build.allow_failure = false
- end
-
- context 'and build.status is success' do
- before do
- build.status = 'success'
- end
-
- it { is_expected.to be_falsey }
- end
-
- context 'and build.status is failed' do
- before do
- build.status = 'failed'
- end
-
- it { is_expected.to be_falsey }
- end
- end
-
- context 'when build is allowed to fail' do
- before do
- build.allow_failure = true
- end
-
- context 'and build.status is success' do
- before do
- build.status = 'success'
- end
-
- it { is_expected.to be_falsey }
- end
-
- context 'and build status is failed' do
- before do
- build.status = 'failed'
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'when build is a manual action' do
- before do
- build.status = 'manual'
- end
-
- it { is_expected.to be_falsey }
- end
- end
- end
-
describe 'flags' do
describe '#cancelable?' do
subject { build }
@@ -3639,6 +3594,46 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
+ context 'for the google_play integration' do
+ let_it_be(:google_play_integration) { create(:google_play_integration) }
+
+ let(:google_play_variables) do
+ [
+ { key: 'SUPPLY_JSON_KEY_DATA', value: google_play_integration.service_account_key, masked: true, public: false }
+ ]
+ end
+
+ context 'when the google_play integration exists' do
+ context 'when a build is protected' do
+ before do
+ allow(build.pipeline).to receive(:protected_ref?).and_return(true)
+ build.project.update!(google_play_integration: google_play_integration)
+ end
+
+ it 'includes google_play variables' do
+ is_expected.to include(*google_play_variables)
+ end
+ end
+
+ context 'when a build is not protected' do
+ before do
+ allow(build.pipeline).to receive(:protected_ref?).and_return(false)
+ build.project.update!(google_play_integration: google_play_integration)
+ end
+
+ it 'does not include the google_play variable' do
+ expect(subject[:key] == 'SUPPLY_JSON_KEY_DATA').to eq(false)
+ end
+ end
+ end
+
+ context 'when the google_play integration does not exist' do
+ it 'does not include google_play variable' do
+ expect(subject[:key] == 'SUPPLY_JSON_KEY_DATA').to eq(false)
+ end
+ end
+ end
+
context 'when build has dependency which has dotenv variable' do
let!(:prepare) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
let!(:build) { create(:ci_build, pipeline: pipeline, stage_idx: 1, options: { dependencies: [prepare.name] }) }
@@ -5784,12 +5779,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
expect(build.token).to be_nil
expect(build.changes).to be_empty
end
-
- it 'does not remove the token when FF is disabled' do
- stub_feature_flags(remove_job_token_on_completion: false)
-
- expect { build.remove_token! }.not_to change(build, :token)
- end
end
describe 'metadata partitioning', :ci_partitioning do
@@ -5905,4 +5894,31 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
end
+
+ describe 'token format for builds transiting into pending' do
+ let(:partition_id) { 100 }
+ let(:ci_build) { described_class.new(partition_id: partition_id) }
+
+ context 'when build is initialized without a token and transits to pending' do
+ let(:partition_id_prefix_in_16_bit_encode) { partition_id.to_s(16) + '_' }
+
+ it 'generates a token' do
+ expect { ci_build.enqueue }
+ .to change { ci_build.token }.from(nil).to(a_string_starting_with(partition_id_prefix_in_16_bit_encode))
+ end
+ end
+
+ context 'when build is initialized with a token and transits to pending' do
+ let(:token) { 'an_existing_secret_token' }
+
+ before do
+ ci_build.set_token(token)
+ end
+
+ it 'does not change the existing token' do
+ expect { ci_build.enqueue }
+ .not_to change { ci_build.token }.from(token)
+ end
+ end
+ end
end
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index ac0a18a176d..f338c2727ad 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -20,6 +20,12 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state, :clean_git
stub_artifacts_object_storage
end
+ describe 'associations' do
+ it do
+ is_expected.to belong_to(:build).class_name('Ci::Build').with_foreign_key(:build_id).inverse_of(:trace_chunks)
+ end
+ end
+
it_behaves_like 'having unique enum values'
def redis_instance
diff --git a/spec/models/ci/catalog/listing_spec.rb b/spec/models/ci/catalog/listing_spec.rb
new file mode 100644
index 00000000000..c9ccecbc9fe
--- /dev/null
+++ b/spec/models/ci/catalog/listing_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do
+ let_it_be(:namespace) { create(:group) }
+ let_it_be(:project_1) { create(:project, namespace: namespace) }
+ let_it_be(:project_2) { create(:project, namespace: namespace) }
+ let_it_be(:project_3) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:list) { described_class.new(namespace, user) }
+
+ describe '#new' do
+ context 'when namespace is not a root namespace' do
+ let(:namespace) { create(:group, :nested) }
+
+ it 'raises an exception' do
+ expect { list }.to raise_error(ArgumentError, 'Namespace is not a root namespace')
+ end
+ end
+ end
+
+ describe '#resources' do
+ subject(:resources) { list.resources }
+
+ context 'when the user has access to all projects in the namespace' do
+ before do
+ namespace.add_developer(user)
+ end
+
+ context 'when the namespace has no catalog resources' do
+ it { is_expected.to be_empty }
+ end
+
+ context 'when the namespace has catalog resources' do
+ let!(:resource) { create(:catalog_resource, project: project_1) }
+ let!(:other_namespace_resource) { create(:catalog_resource, project: project_3) }
+
+ it 'contains only catalog resources for projects in that namespace' do
+ is_expected.to contain_exactly(resource)
+ end
+ end
+ end
+
+ context 'when the user only has access to some projects in the namespace' do
+ let!(:resource_1) { create(:catalog_resource, project: project_1) }
+ let!(:resource_2) { create(:catalog_resource, project: project_2) }
+
+ before do
+ project_1.add_developer(user)
+ end
+
+ it 'only returns catalog resources for projects the user has access to' do
+ is_expected.to contain_exactly(resource_1)
+ end
+ end
+
+ context 'when the user does not have access to the namespace' do
+ let!(:resource) { create(:catalog_resource, project: project_1) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
diff --git a/spec/models/ci/daily_build_group_report_result_spec.rb b/spec/models/ci/daily_build_group_report_result_spec.rb
index cd55817243f..6f73d89d760 100644
--- a/spec/models/ci/daily_build_group_report_result_spec.rb
+++ b/spec/models/ci/daily_build_group_report_result_spec.rb
@@ -6,7 +6,11 @@ RSpec.describe Ci::DailyBuildGroupReportResult do
let(:daily_build_group_report_result) { build(:ci_daily_build_group_report_result) }
describe 'associations' do
- it { is_expected.to belong_to(:last_pipeline) }
+ it do
+ is_expected.to belong_to(:last_pipeline).class_name('Ci::Pipeline')
+ .with_foreign_key(:last_pipeline_id).inverse_of(:daily_build_group_report_results)
+ end
+
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:group) }
end
diff --git a/spec/models/ci/group_variable_spec.rb b/spec/models/ci/group_variable_spec.rb
index e73319cfcd7..f8f184c63a1 100644
--- a/spec/models/ci/group_variable_spec.rb
+++ b/spec/models/ci/group_variable_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::GroupVariable, feature_category: :pipeline_authoring do
+RSpec.describe Ci::GroupVariable, feature_category: :pipeline_composition do
let_it_be_with_refind(:group) { create(:group) }
subject { build(:ci_group_variable, group: group) }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index e94445f17cd..f7fafd93f4b 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Ci::JobArtifact, feature_category: :build_artifacts do
describe "Associations" do
it { is_expected.to belong_to(:project) }
- it { is_expected.to belong_to(:job) }
+ it { is_expected.to belong_to(:job).class_name('Ci::Build').with_foreign_key(:job_id).inverse_of(:job_artifacts) }
it { is_expected.to validate_presence_of(:job) }
it { is_expected.to validate_presence_of(:partition_id) }
end
@@ -243,6 +243,29 @@ RSpec.describe Ci::JobArtifact, feature_category: :build_artifacts do
end
end
+ describe '.non_trace' do
+ subject { described_class.non_trace }
+
+ context 'when there is only a trace job artifact' do
+ let!(:trace) { create(:ci_job_artifact, :trace) }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when there is only a non-trace job artifact' do
+ let!(:junit) { create(:ci_job_artifact, :junit) }
+
+ it { is_expected.to eq([junit]) }
+ end
+
+ context 'when there are both trace and non-trace job artifacts' do
+ let!(:trace) { create(:ci_job_artifact, :trace) }
+ let!(:junit) { create(:ci_job_artifact, :junit) }
+
+ it { is_expected.to eq([junit]) }
+ end
+ end
+
describe '.downloadable' do
subject { described_class.downloadable }
diff --git a/spec/models/ci/job_token/scope_spec.rb b/spec/models/ci/job_token/scope_spec.rb
index 9ae061a3702..51f0f4878e7 100644
--- a/spec/models/ci/job_token/scope_spec.rb
+++ b/spec/models/ci/job_token/scope_spec.rb
@@ -160,13 +160,5 @@ RSpec.describe Ci::JobToken::Scope, feature_category: :continuous_integration, f
include_examples 'enforces outbound scope only'
end
-
- context 'when inbound scope flag disabled' do
- before do
- stub_feature_flags(ci_inbound_job_token_scope: false)
- end
-
- include_examples 'enforces outbound scope only'
- end
end
end
diff --git a/spec/models/ci/job_variable_spec.rb b/spec/models/ci/job_variable_spec.rb
index 0a65708160a..a56e6b6be43 100644
--- a/spec/models/ci/job_variable_spec.rb
+++ b/spec/models/ci/job_variable_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Ci::JobVariable, feature_category: :continuous_integration do
describe 'associations' do
let!(:job_variable) { create(:ci_job_variable) }
- it { is_expected.to belong_to(:job) }
+ it { is_expected.to belong_to(:job).class_name('Ci::Build').with_foreign_key(:job_id).inverse_of(:job_variables) }
it { is_expected.to validate_uniqueness_of(:key).scoped_to(:job_id) }
end
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 9b70f7c2839..c441be58edf 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration d
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:owner) }
- it { is_expected.to have_many(:pipelines) }
+ it { is_expected.to have_many(:pipelines).dependent(:nullify) }
it { is_expected.to have_many(:variables) }
it { is_expected.to respond_to(:ref) }
@@ -281,4 +281,19 @@ RSpec.describe Ci::PipelineSchedule, feature_category: :continuous_integration d
let!(:model) { create(:ci_pipeline_schedule, project: parent) }
end
end
+
+ describe 'before_destroy' do
+ let_it_be_with_reload(:pipeline_schedule) { create(:ci_pipeline_schedule, cron: ' 0 0 * * * ') }
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, pipeline_schedule: pipeline_schedule) }
+
+ it 'nullifies associated pipelines' do
+ expect(pipeline_schedule).to receive(:nullify_dependent_associations_in_batches).and_call_original
+
+ result = pipeline_schedule.destroy
+
+ expect(result).to be_truthy
+ expect(pipeline.reload.pipeline_schedule).to be_nil
+ expect(described_class.find_by(id: pipeline_schedule.id)).to be_nil
+ end
+ end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 61422978df7..d50672da8e5 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -19,32 +19,67 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:user) }
- it { is_expected.to belong_to(:auto_canceled_by) }
+ it { is_expected.to belong_to(:auto_canceled_by).class_name('Ci::Pipeline').inverse_of(:auto_canceled_pipelines) }
it { is_expected.to belong_to(:pipeline_schedule) }
it { is_expected.to belong_to(:merge_request) }
it { is_expected.to belong_to(:external_pull_request) }
it { is_expected.to have_many(:statuses) }
- it { is_expected.to have_many(:trigger_requests) }
+ it { is_expected.to have_many(:trigger_requests).with_foreign_key(:commit_id).inverse_of(:pipeline) }
it { is_expected.to have_many(:variables) }
it { is_expected.to have_many(:builds) }
- it { is_expected.to have_many(:statuses_order_id_desc) }
+
+ it do
+ is_expected.to have_many(:statuses_order_id_desc)
+ .class_name('CommitStatus').with_foreign_key(:commit_id).inverse_of(:pipeline)
+ end
+
it { is_expected.to have_many(:bridges) }
it { is_expected.to have_many(:job_artifacts).through(:builds) }
it { is_expected.to have_many(:build_trace_chunks).through(:builds) }
- it { is_expected.to have_many(:auto_canceled_pipelines) }
- it { is_expected.to have_many(:auto_canceled_jobs) }
- it { is_expected.to have_many(:sourced_pipelines) }
+
it { is_expected.to have_many(:triggered_pipelines) }
it { is_expected.to have_many(:pipeline_artifacts) }
- it { is_expected.to have_one(:chat_data) }
+ it do
+ is_expected.to have_many(:failed_builds).class_name('Ci::Build')
+ .with_foreign_key(:commit_id).inverse_of(:pipeline)
+ end
+
+ it do
+ is_expected.to have_many(:cancelable_statuses).class_name('CommitStatus')
+ .with_foreign_key(:commit_id).inverse_of(:pipeline)
+ end
+
+ it do
+ is_expected.to have_many(:auto_canceled_pipelines).class_name('Ci::Pipeline')
+ .with_foreign_key(:auto_canceled_by_id).inverse_of(:auto_canceled_by)
+ end
+
+ it do
+ is_expected.to have_many(:auto_canceled_jobs).class_name('CommitStatus')
+ .with_foreign_key(:auto_canceled_by_id).inverse_of(:auto_canceled_by)
+ end
+
+ it do
+ is_expected.to have_many(:sourced_pipelines).class_name('Ci::Sources::Pipeline')
+ .with_foreign_key(:source_pipeline_id).inverse_of(:source_pipeline)
+ end
+
it { is_expected.to have_one(:source_pipeline) }
+ it { is_expected.to have_one(:chat_data) }
it { is_expected.to have_one(:triggered_by_pipeline) }
it { is_expected.to have_one(:source_job) }
it { is_expected.to have_one(:pipeline_config) }
it { is_expected.to have_one(:pipeline_metadata) }
+ it do
+ is_expected.to have_many(:daily_build_group_report_results).class_name('Ci::DailyBuildGroupReportResult')
+ .with_foreign_key(:last_pipeline_id).inverse_of(:last_pipeline)
+ end
+
+ it { is_expected.to have_many(:latest_builds_report_results).through(:latest_builds).source(:report_results) }
+
it { is_expected.to respond_to :git_author_name }
it { is_expected.to respond_to :git_author_email }
it { is_expected.to respond_to :git_author_full_text }
@@ -872,6 +907,26 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
expect(pipeline).to be_valid
end
end
+
+ context 'when source is unknown' do
+ subject(:pipeline) { create(:ci_empty_pipeline, :created) }
+
+ let(:attr) { :source }
+ let(:attr_value) { :unknown }
+
+ it_behaves_like 'having enum with nil value'
+ end
+ end
+
+ describe '#config_source' do
+ context 'when source is unknown' do
+ subject(:pipeline) { create(:ci_empty_pipeline, :created) }
+
+ let(:attr) { :config_source }
+ let(:attr_value) { :unknown_source }
+
+ it_behaves_like 'having enum with nil value'
+ end
end
describe '#block' do
@@ -1661,6 +1716,154 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
+ describe 'merge status subscription trigger' do
+ shared_examples 'state transition not triggering GraphQL subscription mergeRequestMergeStatusUpdated' do
+ context 'when state transitions to running' do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.run }
+ end
+ end
+
+ context 'when state transitions to success' do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.succeed }
+ end
+ end
+
+ context 'when state transitions to failed' do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.drop }
+ end
+ end
+
+ context 'when state transitions to canceled' do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.cancel }
+ end
+ end
+
+ context 'when state transitions to skipped' do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.skip }
+ end
+ end
+ end
+
+ shared_examples 'state transition triggering GraphQL subscription mergeRequestMergeStatusUpdated' do
+ context 'when state transitions to running' do
+ it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.run }
+ end
+ end
+
+ context 'when state transitions to success' do
+ it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.succeed }
+ end
+ end
+
+ context 'when state transitions to failed' do
+ it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.drop }
+ end
+ end
+
+ context 'when state transitions to canceled' do
+ it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.cancel }
+ end
+ end
+
+ context 'when state transitions to skipped' do
+ it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { pipeline.skip }
+ end
+ end
+
+ context 'when only_allow_merge_if_pipeline_succeeds? returns false' do
+ let(:only_allow_merge_if_pipeline_succeeds?) { false }
+
+ it_behaves_like 'state transition not triggering GraphQL subscription mergeRequestMergeStatusUpdated'
+ end
+
+ context 'when pipeline_trigger_merge_status feature flag is disabled' do
+ before do
+ stub_feature_flags(pipeline_trigger_merge_status: false)
+ end
+
+ it_behaves_like 'state transition not triggering GraphQL subscription mergeRequestMergeStatusUpdated'
+ end
+ end
+
+ context 'when pipeline has merge requests' do
+ let(:merge_request) do
+ create(
+ :merge_request,
+ :simple,
+ source_project: project,
+ target_project: project
+ )
+ end
+
+ let(:only_allow_merge_if_pipeline_succeeds?) { true }
+
+ before do
+ allow(project)
+ .to receive(:only_allow_merge_if_pipeline_succeeds?)
+ .and_return(only_allow_merge_if_pipeline_succeeds?)
+ end
+
+ context 'when for a specific merge request' do
+ let(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ merge_request: merge_request
+ )
+ end
+
+ it_behaves_like 'state transition triggering GraphQL subscription mergeRequestMergeStatusUpdated'
+
+ context 'when pipeline is a child' do
+ let(:parent_pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ merge_request: merge_request
+ )
+ end
+
+ let(:pipeline) do
+ create(
+ :ci_pipeline,
+ child_of: parent_pipeline,
+ merge_request: merge_request
+ )
+ end
+
+ it_behaves_like 'state transition not triggering GraphQL subscription mergeRequestMergeStatusUpdated'
+ end
+ end
+
+ context 'when for merge requests matching the source branch and SHA' do
+ let(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha
+ )
+ end
+
+ it_behaves_like 'state transition triggering GraphQL subscription mergeRequestMergeStatusUpdated'
+ end
+ end
+
+ context 'when pipeline has no merge requests' do
+ it_behaves_like 'state transition not triggering GraphQL subscription mergeRequestMergeStatusUpdated'
+ end
+ end
+
def create_build(name, *traits, queued_at: current, started_from: 0, **opts)
create(:ci_build, *traits,
name: name,
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index db22d8f3a6c..cf2c176816d 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason
sourced_pipelines sourced_pipeline artifacts_file_store artifacts_metadata_store
- metadata runner_machine_id runner_machine runner_session trace_chunks upstream_pipeline_id
+ metadata runner_machine_build runner_machine runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size commands
resource resource_group_id processed security_scans author
pipeline_id report_results pending_state pages_deployments
diff --git a/spec/models/ci/runner_machine_build_spec.rb b/spec/models/ci/runner_machine_build_spec.rb
new file mode 100644
index 00000000000..b43ff535477
--- /dev/null
+++ b/spec/models/ci/runner_machine_build_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::RunnerMachineBuild, model: true, feature_category: :runner_fleet do
+ let_it_be(:runner) { create(:ci_runner) }
+ let_it_be(:runner_machine) { create(:ci_runner_machine, runner: runner) }
+ let_it_be(:build) { create(:ci_build, runner_machine: runner_machine) }
+
+ it { is_expected.to belong_to(:build) }
+ it { is_expected.to belong_to(:runner_machine) }
+
+ describe 'partitioning' do
+ context 'with build' do
+ let(:build) { FactoryBot.build(:ci_build, partition_id: ci_testing_partition_id) }
+ let(:runner_machine_build) { FactoryBot.build(:ci_runner_machine_build, build: build) }
+
+ it 'sets partition_id to the current partition value' do
+ expect { runner_machine_build.valid? }.to change { runner_machine_build.partition_id }
+ .to(ci_testing_partition_id)
+ end
+
+ context 'when it is already set' do
+ let(:runner_machine_build) { FactoryBot.build(:ci_runner_machine_build, partition_id: 125) }
+
+ it 'does not change the partition_id value' do
+ expect { runner_machine_build.valid? }.not_to change { runner_machine_build.partition_id }
+ end
+ end
+ end
+
+ context 'without build' do
+ let(:runner_machine_build) { FactoryBot.build(:ci_runner_machine_build, build: nil) }
+
+ it { is_expected.to validate_presence_of(:partition_id) }
+
+ it 'does not change the partition_id value' do
+ expect { runner_machine_build.valid? }.not_to change { runner_machine_build.partition_id }
+ end
+ end
+ end
+
+ describe 'ci_sliding_list partitioning' do
+ let(:connection) { described_class.connection }
+ let(:partition_manager) { Gitlab::Database::Partitioning::PartitionManager.new(described_class) }
+
+ let(:partitioning_strategy) { described_class.partitioning_strategy }
+
+ it { expect(partitioning_strategy.missing_partitions).to be_empty }
+ it { expect(partitioning_strategy.extra_partitions).to be_empty }
+ it { expect(partitioning_strategy.current_partitions).to include partitioning_strategy.initial_partition }
+ it { expect(partitioning_strategy.active_partition).to be_present }
+ end
+
+ context 'loose foreign key on p_ci_runner_machine_builds.runner_machine_id' do # rubocop:disable RSpec/ContextWording
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:ci_runner_machine) }
+ let!(:model) { create(:ci_runner_machine_build, runner_machine: parent) }
+ end
+ end
+
+ describe '.for_build' do
+ subject(:for_build) { described_class.for_build(build_id) }
+
+ context 'with valid build_id' do
+ let(:build_id) { build.id }
+
+ it { is_expected.to contain_exactly(described_class.find_by_build_id(build_id)) }
+ end
+
+ context 'with valid build_ids' do
+ let(:build2) { create(:ci_build, runner_machine: runner_machine) }
+ let(:build_id) { [build, build2] }
+
+ it { is_expected.to eq(described_class.where(build_id: build_id)) }
+ end
+
+ context 'with non-existing build_id' do
+ let(:build_id) { non_existing_record_id }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ describe '.pluck_build_id_and_runner_machine_id' do
+ subject { scope.pluck_build_id_and_runner_machine_id }
+
+ context 'with default scope' do
+ let(:scope) { described_class }
+
+ it { is_expected.to eq({ build.id => runner_machine.id }) }
+ end
+
+ context 'with scope excluding build' do
+ let(:scope) { described_class.where(build_id: non_existing_record_id) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
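
For orientation, the new spec above pins down the shape of the Ci::RunnerMachineBuild join model: two belongs_to associations, a partition_id copied from the associated build, a .for_build scope, and a .pluck_build_id_and_runner_machine_id class method returning a build-id-to-runner-machine-id hash. A minimal sketch consistent with those assertions follows; the implementation details are assumptions inferred from the expectations, not the production model.

# Hedged sketch -- inferred from the expectations in runner_machine_build_spec.rb.
module Ci
  class RunnerMachineBuild < Ci::ApplicationRecord
    self.table_name = :p_ci_runner_machine_builds

    belongs_to :build, class_name: 'Ci::Build'
    belongs_to :runner_machine, class_name: 'Ci::RunnerMachine'

    validates :partition_id, presence: true

    # The 'partitioning' examples expect the partition to be copied from the
    # associated build when it is not already set.
    before_validation :set_partition_id, unless: :partition_id?

    scope :for_build, ->(build_id) { where(build_id: build_id) }

    def self.pluck_build_id_and_runner_machine_id
      pluck(:build_id, :runner_machine_id).to_h
    end

    private

    def set_partition_id
      self.partition_id = build&.partition_id
    end
  end
end
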
diff --git a/spec/models/ci/runner_machine_spec.rb b/spec/models/ci/runner_machine_spec.rb
index d0979d8a485..0989477cd21 100644
--- a/spec/models/ci/runner_machine_spec.rb
+++ b/spec/models/ci/runner_machine_spec.rb
@@ -5,10 +5,14 @@ require 'spec_helper'
RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model do
it_behaves_like 'having unique enum values'
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :ci_runner_machine }
+ end
+
it { is_expected.to belong_to(:runner) }
it { is_expected.to belong_to(:runner_version).with_foreign_key(:version) }
- it { is_expected.to have_many(:build_metadata) }
- it { is_expected.to have_many(:builds).through(:build_metadata) }
+ it { is_expected.to have_many(:runner_machine_builds) }
+ it { is_expected.to have_many(:builds).through(:runner_machine_builds) }
describe 'validation' do
it { is_expected.to validate_presence_of(:runner) }
@@ -53,10 +57,67 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
end
end
+ describe '.online_contact_time_deadline', :freeze_time do
+ subject { described_class.online_contact_time_deadline }
+
+ it { is_expected.to eq(2.hours.ago) }
+ end
+
+ describe '.stale_deadline', :freeze_time do
+ subject { described_class.stale_deadline }
+
+ it { is_expected.to eq(7.days.ago) }
+ end
+
+ describe '#status', :freeze_time do
+ let(:runner_machine) { build(:ci_runner_machine, created_at: 8.days.ago) }
+
+ subject { runner_machine.status }
+
+ context 'if never connected' do
+ before do
+ runner_machine.contacted_at = nil
+ end
+
+ it { is_expected.to eq(:stale) }
+
+ context 'if created recently' do
+ before do
+ runner_machine.created_at = 1.day.ago
+ end
+
+ it { is_expected.to eq(:never_contacted) }
+ end
+ end
+
+ context 'if contacted 1s ago' do
+ before do
+ runner_machine.contacted_at = 1.second.ago
+ end
+
+ it { is_expected.to eq(:online) }
+ end
+
+ context 'if contacted recently' do
+ before do
+ runner_machine.contacted_at = 2.hours.ago
+ end
+
+ it { is_expected.to eq(:offline) }
+ end
+
+ context 'if contacted long time ago' do
+ before do
+ runner_machine.contacted_at = 7.days.ago
+ end
+
+ it { is_expected.to eq(:stale) }
+ end
+ end
+
describe '#heartbeat', :freeze_time do
- let(:runner_machine) { create(:ci_runner_machine) }
+ let(:runner_machine) { create(:ci_runner_machine, version: '15.0.0') }
let(:executor) { 'shell' }
- let(:version) { '15.0.1' }
let(:values) do
{
ip_address: '8.8.8.8',
@@ -76,18 +137,38 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
runner_machine.contacted_at = Time.current
end
- it 'schedules version update' do
- expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version).once
+ context 'when version is changed' do
+ let(:version) { '15.0.1' }
- heartbeat
+ before do
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version)
+ end
- expect(runner_machine.runner_version).to be_nil
- end
+ it 'schedules version information update' do
+ heartbeat
- it 'updates cache' do
- expect_redis_update
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once
+ end
- heartbeat
+ it 'updates cache' do
+ expect_redis_update
+
+ heartbeat
+
+ expect(runner_machine.runner_version).to be_nil
+ end
+
+ context 'when fetching runner releases is disabled' do
+ before do
+ stub_application_setting(update_runner_versions_enabled: false)
+ end
+
+ it 'does not schedule version information update' do
+ heartbeat
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async)
+ end
+ end
end
context 'with only ip_address specified' do
@@ -96,14 +177,21 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
end
it 'updates only ip_address' do
- attrs = Gitlab::Json.dump(ip_address: '1.1.1.1', contacted_at: Time.current)
+ expect_redis_update(values.merge(contacted_at: Time.current))
+
+ heartbeat
+ end
+
+ context 'with new version having been cached' do
+ let(:version) { '15.0.1' }
- Gitlab::Redis::Cache.with do |redis|
- redis_key = runner_machine.send(:cache_attribute_key)
- expect(redis).to receive(:set).with(redis_key, attrs, any_args)
+ before do
+ runner_machine.cache_attributes(version: version)
end
- heartbeat
+ it 'does not lose cached version value' do
+ expect { heartbeat }.not_to change { runner_machine.version }.from(version)
+ end
end
end
end
@@ -112,17 +200,29 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
before do
runner_machine.contacted_at = 2.hours.ago
- allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version).once
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version)
end
- context 'with invalid runner_machine' do
- before do
- runner_machine.runner = nil
- end
+ context 'when version is changed' do
+ let(:version) { '15.0.1' }
- it 'still updates redis cache and database' do
- expect(runner_machine).to be_invalid
+ context 'with invalid runner_machine' do
+ before do
+ runner_machine.runner = nil
+ end
+ it 'still updates redis cache and database' do
+ expect(runner_machine).to be_invalid
+
+ expect_redis_update
+ does_db_update
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async)
+ .with(version).once
+ end
+ end
+
+ it 'updates redis cache and database' do
expect_redis_update
does_db_update
@@ -132,58 +232,52 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
end
context 'with unchanged runner_machine version' do
- let(:runner_machine) { create(:ci_runner_machine, version: version) }
+ let(:version) { runner_machine.version }
it 'does not schedule ci_runner_versions update' do
heartbeat
expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async)
end
- end
-
- it 'updates redis cache and database' do
- expect_redis_update
- does_db_update
-
- expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async)
- .with(version).once
- end
- Ci::Runner::EXECUTOR_NAME_TO_TYPES.each_key do |executor|
- context "with #{executor} executor" do
- let(:executor) { executor }
+ Ci::Runner::EXECUTOR_NAME_TO_TYPES.each_key do |executor|
+ context "with #{executor} executor" do
+ let(:executor) { executor }
- it 'updates with expected executor type' do
- expect_redis_update
+ it 'updates with expected executor type' do
+ expect_redis_update
- heartbeat
+ heartbeat
- expect(runner_machine.reload.read_attribute(:executor_type)).to eq(expected_executor_type)
- end
+ expect(runner_machine.reload.read_attribute(:executor_type)).to eq(expected_executor_type)
+ end
- def expected_executor_type
- executor.gsub(/[+-]/, '_')
+ def expected_executor_type
+ executor.gsub(/[+-]/, '_')
+ end
end
end
- end
- context "with an unknown executor type" do
- let(:executor) { 'some-unknown-type' }
+ context 'with an unknown executor type' do
+ let(:executor) { 'some-unknown-type' }
- it 'updates with unknown executor type' do
- expect_redis_update
+ it 'updates with unknown executor type' do
+ expect_redis_update
- heartbeat
+ heartbeat
- expect(runner_machine.reload.read_attribute(:executor_type)).to eq('unknown')
+ expect(runner_machine.reload.read_attribute(:executor_type)).to eq('unknown')
+ end
end
end
end
- def expect_redis_update
+ def expect_redis_update(values = anything)
+ values_json = values == anything ? anything : Gitlab::Json.dump(values)
+
Gitlab::Redis::Cache.with do |redis|
redis_key = runner_machine.send(:cache_attribute_key)
- expect(redis).to receive(:set).with(redis_key, anything, any_args).and_call_original
+ expect(redis).to receive(:set).with(redis_key, values_json, any_args).and_call_original
end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 01d5fe7f90b..fe49b2c2c7f 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -541,9 +541,9 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
describe '.stale', :freeze_time do
subject { described_class.stale }
- let!(:runner1) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago + 10.seconds) }
- let!(:runner2) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago - 1.second) }
- let!(:runner3) { create(:ci_runner, :instance, created_at: 3.months.ago - 1.second, contacted_at: nil) }
+ let!(:runner1) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago + 1.second) }
+ let!(:runner2) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago) }
+ let!(:runner3) { create(:ci_runner, :instance, created_at: 3.months.ago, contacted_at: nil) }
let!(:runner4) { create(:ci_runner, :instance, created_at: 2.months.ago, contacted_at: nil) }
it 'returns stale runners' do
@@ -551,7 +551,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
- describe '#stale?', :clean_gitlab_redis_cache do
+ describe '#stale?', :clean_gitlab_redis_cache, :freeze_time do
let(:runner) { build(:ci_runner, :instance) }
subject { runner.stale? }
@@ -570,11 +570,11 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
using RSpec::Parameterized::TableSyntax
where(:created_at, :contacted_at, :expected_stale?) do
- nil | nil | false
- 3.months.ago - 1.second | 3.months.ago - 0.001.seconds | true
- 3.months.ago - 1.second | 3.months.ago + 1.hour | false
- 3.months.ago - 1.second | nil | true
- 3.months.ago + 1.hour | nil | false
+ nil | nil | false
+ 3.months.ago | 3.months.ago | true
+ 3.months.ago | (3.months - 1.hour).ago | false
+ 3.months.ago | nil | true
+ (3.months - 1.hour).ago | nil | false
end
with_them do
@@ -588,9 +588,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
runner.contacted_at = contacted_at
end
- specify do
- is_expected.to eq(expected_stale?)
- end
+ it { is_expected.to eq(expected_stale?) }
end
context 'with cache value' do
@@ -599,9 +597,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
stub_redis_runner_contacted_at(contacted_at.to_s)
end
- specify do
- is_expected.to eq(expected_stale?)
- end
+ it { is_expected.to eq(expected_stale?) }
end
def stub_redis_runner_contacted_at(value)
@@ -617,7 +613,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
- describe '.online' do
+ describe '.online', :freeze_time do
subject { described_class.online }
let!(:runner1) { create(:ci_runner, :instance, contacted_at: 2.hours.ago) }
@@ -626,7 +622,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
it { is_expected.to match_array([runner2]) }
end
- describe '#online?', :clean_gitlab_redis_cache do
+ describe '#online?', :clean_gitlab_redis_cache, :freeze_time do
let(:runner) { build(:ci_runner, :instance) }
subject { runner.online? }
@@ -891,8 +887,8 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
- describe '#status' do
- let(:runner) { build(:ci_runner, :instance, created_at: 4.months.ago) }
+ describe '#status', :freeze_time do
+ let(:runner) { build(:ci_runner, :instance, created_at: 3.months.ago) }
let(:legacy_mode) {}
subject { runner.status(legacy_mode) }
@@ -948,7 +944,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
context 'contacted recently' do
before do
- runner.contacted_at = (3.months - 1.hour).ago
+ runner.contacted_at = (3.months - 1.second).ago
end
it { is_expected.to eq(:offline) }
@@ -956,7 +952,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
context 'contacted long time ago' do
before do
- runner.contacted_at = (3.months + 1.second).ago
+ runner.contacted_at = 3.months.ago
end
context 'with legacy_mode enabled' do
@@ -971,7 +967,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
- describe '#deprecated_rest_status' do
+ describe '#deprecated_rest_status', :freeze_time do
let(:runner) { create(:ci_runner, :instance, contacted_at: 1.second.ago) }
subject { runner.deprecated_rest_status }
@@ -994,8 +990,8 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
context 'contacted long time ago' do
before do
- runner.created_at = 1.year.ago
- runner.contacted_at = 1.year.ago
+ runner.created_at = 3.months.ago
+ runner.contacted_at = 3.months.ago
end
it { is_expected.to eq(:stale) }
@@ -1076,13 +1072,13 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
- describe '#heartbeat' do
- let(:runner) { create(:ci_runner, :project) }
+ describe '#heartbeat', :freeze_time do
+ let(:runner) { create(:ci_runner, :project, version: '15.0.0') }
let(:executor) { 'shell' }
- let(:version) { '15.0.1' }
+ let(:values) { { architecture: '18-bit', config: { gpus: "all" }, executor: executor, version: version } }
subject(:heartbeat) do
- runner.heartbeat(architecture: '18-bit', config: { gpus: "all" }, executor: executor, version: version)
+ runner.heartbeat(values)
end
context 'when database was updated recently' do
@@ -1090,29 +1086,61 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
runner.contacted_at = Time.current
end
- it 'updates cache' do
- expect_redis_update
- expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to receive(:perform_async)
+ context 'when version is changed' do
+ let(:version) { '15.0.1' }
+
+ before do
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version)
+ end
+
+ it 'updates cache' do
+ expect_redis_update
+
+ heartbeat
+
+ expect(runner.runner_version).to be_nil
+ end
+
+ it 'schedules version information update' do
+ heartbeat
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once
+ end
- heartbeat
+ context 'when fetching runner releases is disabled' do
+ before do
+ stub_application_setting(update_runner_versions_enabled: false)
+ end
+
+ it 'does not schedule version information update' do
+ heartbeat
- expect(runner.runner_version).to be_nil
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async)
+ end
+ end
end
context 'with only ip_address specified', :freeze_time do
- subject(:heartbeat) do
- runner.heartbeat(ip_address: '1.1.1.1')
+ let(:values) do
+ { ip_address: '1.1.1.1' }
end
it 'updates only ip_address' do
- attrs = Gitlab::Json.dump(ip_address: '1.1.1.1', contacted_at: Time.current)
+ expect_redis_update(values.merge(contacted_at: Time.current))
- Gitlab::Redis::Cache.with do |redis|
- redis_key = runner.send(:cache_attribute_key)
- expect(redis).to receive(:set).with(redis_key, attrs, any_args)
+ heartbeat
+ end
+
+ context 'with new version having been cached' do
+ let(:version) { '15.0.1' }
+
+ before do
+ runner.cache_attributes(version: version)
end
- heartbeat
+ it 'does not lose cached version value' do
+ expect { heartbeat }.not_to change { runner.version }.from(version)
+ end
end
end
end
@@ -1121,65 +1149,81 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
before do
runner.contacted_at = 2.hours.ago
- allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async)
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version)
end
- context 'with invalid runner' do
- before do
- runner.runner_projects.delete_all
- end
+ context 'when version is changed' do
+ let(:version) { '15.0.1' }
- it 'still updates redis cache and database' do
- expect(runner).to be_invalid
+ context 'with invalid runner' do
+ before do
+ runner.runner_projects.delete_all
+ end
+ it 'still updates redis cache and database' do
+ expect(runner).to be_invalid
+
+ expect_redis_update
+ does_db_update
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once
+ end
+ end
+
+ it 'updates redis cache and database' do
expect_redis_update
does_db_update
-
- expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).once
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once
end
end
context 'with unchanged runner version' do
- let(:runner) { create(:ci_runner, version: version) }
+ let(:version) { runner.version }
it 'does not schedule ci_runner_versions update' do
heartbeat
expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async)
end
- end
- it 'updates redis cache and database' do
- expect_redis_update
- does_db_update
- expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).once
- end
+ Ci::Runner::EXECUTOR_NAME_TO_TYPES.each_key do |executor|
+ context "with #{executor} executor" do
+ let(:executor) { executor }
- %w(custom shell docker docker-windows docker-ssh ssh parallels virtualbox docker+machine docker-ssh+machine kubernetes some-unknown-type).each do |executor|
- context "with #{executor} executor" do
- let(:executor) { executor }
+ it 'updates with expected executor type' do
+ expect_redis_update
- it 'updates with expected executor type' do
- expect_redis_update
+ heartbeat
- heartbeat
+ expect(runner.reload.read_attribute(:executor_type)).to eq(expected_executor_type)
+ end
- expect(runner.reload.read_attribute(:executor_type)).to eq(expected_executor_type)
+ def expected_executor_type
+ executor.gsub(/[+-]/, '_')
+ end
end
+ end
+
+ context 'with an unknown executor type' do
+ let(:executor) { 'some-unknown-type' }
+
+ it 'updates with unknown executor type' do
+ expect_redis_update
- def expected_executor_type
- return 'unknown' if executor == 'some-unknown-type'
+ heartbeat
- executor.gsub(/[+-]/, '_')
+ expect(runner.reload.read_attribute(:executor_type)).to eq('unknown')
end
end
end
end
- def expect_redis_update
+ def expect_redis_update(values = anything)
+ values_json = values == anything ? anything : Gitlab::Json.dump(values)
+
Gitlab::Redis::Cache.with do |redis|
redis_key = runner.send(:cache_attribute_key)
- expect(redis).to receive(:set).with(redis_key, anything, any_args)
+ expect(redis).to receive(:set).with(redis_key, values_json, any_args).and_call_original
end
end
@@ -1994,4 +2038,59 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
end
+
+ describe '.with_creator' do
+ subject { described_class.with_creator }
+
+ let!(:user) { create(:admin) }
+ let!(:runner) { create(:ci_runner, creator: user) }
+
+ it { is_expected.to contain_exactly(runner) }
+ end
+
+ describe '#ensure_token' do
+ let(:runner) { described_class.new(registration_type: registration_type) }
+ let(:token) { 'an_existing_secret_token' }
+ let(:static_prefix) { described_class::CREATED_RUNNER_TOKEN_PREFIX }
+
+ context 'when runner is initialized without a token' do
+ context 'with registration_token' do
+ let(:registration_type) { :registration_token }
+
+ it 'generates a token' do
+ expect { runner.ensure_token }.to change { runner.token }.from(nil)
+ end
+ end
+
+ context 'with authenticated_user' do
+ let(:registration_type) { :authenticated_user }
+
+ it 'generates a token with prefix' do
+ expect { runner.ensure_token }.to change { runner.token }.from(nil).to(a_string_starting_with(static_prefix))
+ end
+ end
+ end
+
+ context 'when runner is initialized with a token' do
+ before do
+ runner.set_token(token)
+ end
+
+ context 'with registration_token' do
+ let(:registration_type) { :registration_token }
+
+ it 'does not change the existing token' do
+ expect { runner.ensure_token }.not_to change { runner.token }.from(token)
+ end
+ end
+
+ context 'with authenticated_user' do
+ let(:registration_type) { :authenticated_user }
+
+ it 'does not change the existing token' do
+ expect { runner.ensure_token }.not_to change { runner.token }.from(token)
+ end
+ end
+ end
+ end
end
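
The #ensure_token examples added above encode one behavioural detail worth calling out: a token is generated lazily, and CREATED_RUNNER_TOKEN_PREFIX is applied only when the runner was registered by an authenticated user, never for registration-token runners and never when a token already exists. A self-contained sketch of that rule; the constant value and the token-generation details are assumptions, not GitLab's implementation.

# Hypothetical sketch of the prefixing rule exercised by the #ensure_token examples.
require 'securerandom'

class RunnerTokenSketch
  CREATED_RUNNER_TOKEN_PREFIX = 'glrt-' # assumed value

  attr_reader :token

  def initialize(registration_type)
    @registration_type = registration_type
  end

  def set_token(value)
    @token = value
  end

  def ensure_token
    return @token if @token

    prefix = @registration_type == :authenticated_user ? CREATED_RUNNER_TOKEN_PREFIX : ''
    @token = prefix + SecureRandom.hex(20)
  end
end

RunnerTokenSketch.new(:authenticated_user).ensure_token # => "glrt-..." (prefixed)
RunnerTokenSketch.new(:registration_token).ensure_token # => plain hex token, no prefix
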
diff --git a/spec/models/ci/runner_version_spec.rb b/spec/models/ci/runner_version_spec.rb
index 51a2f14c57c..511d120ab7f 100644
--- a/spec/models/ci/runner_version_spec.rb
+++ b/spec/models/ci/runner_version_spec.rb
@@ -39,4 +39,15 @@ RSpec.describe Ci::RunnerVersion, feature_category: :runner_fleet do
describe 'validation' do
it { is_expected.to validate_length_of(:version).is_at_most(2048) }
end
+
+ describe '#status' do
+ context 'when it is not processed' do
+ subject(:ci_runner_version) { create(:ci_runner_version, version: 'abc124', status: :not_processed) }
+
+ let(:attr) { :status }
+ let(:attr_value) { :not_processed }
+
+ it_behaves_like 'having enum with nil value'
+ end
+ end
end
diff --git a/spec/models/ci/sources/pipeline_spec.rb b/spec/models/ci/sources/pipeline_spec.rb
index 707872d0a15..47f32353fef 100644
--- a/spec/models/ci/sources/pipeline_spec.rb
+++ b/spec/models/ci/sources/pipeline_spec.rb
@@ -6,6 +6,11 @@ RSpec.describe Ci::Sources::Pipeline, feature_category: :continuous_integration
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:pipeline) }
+ it do
+ is_expected.to belong_to(:build).class_name('Ci::Build')
+ .with_foreign_key(:source_job_id).inverse_of(:sourced_pipelines)
+ end
+
it { is_expected.to belong_to(:source_project).class_name('::Project') }
it { is_expected.to belong_to(:source_job) }
it { is_expected.to belong_to(:source_bridge) }
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb
index ce64b3ea158..7a313115965 100644
--- a/spec/models/ci/variable_spec.rb
+++ b/spec/models/ci/variable_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Variable, feature_category: :pipeline_authoring do
+RSpec.describe Ci::Variable, feature_category: :pipeline_composition do
let_it_be_with_reload(:project) { create(:project) }
subject { build(:ci_variable, project: project) }
diff --git a/spec/models/clusters/applications/crossplane_spec.rb b/spec/models/clusters/applications/crossplane_spec.rb
deleted file mode 100644
index d1abaa52c7f..00000000000
--- a/spec/models/clusters/applications/crossplane_spec.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::Crossplane do
- let(:crossplane) { create(:clusters_applications_crossplane) }
-
- include_examples 'cluster application core specs', :clusters_applications_crossplane
- include_examples 'cluster application status specs', :clusters_applications_crossplane
- include_examples 'cluster application version specs', :clusters_applications_crossplane
- include_examples 'cluster application initial status specs'
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:stack) }
- end
-
- describe 'default values' do
- it { expect(subject.version).to eq(described_class::VERSION) }
- it { expect(subject.stack).to be_empty }
- end
-
- describe '#can_uninstall?' do
- subject { crossplane.can_uninstall? }
-
- it { is_expected.to be_truthy }
- end
-
- describe '#install_command' do
- let(:stack) { 'gcp' }
-
- subject { crossplane.install_command }
-
- it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::InstallCommand) }
-
- it 'is initialized with crossplane arguments' do
- expect(subject.name).to eq('crossplane')
- expect(subject.chart).to eq('crossplane/crossplane')
- expect(subject.repository).to eq('https://charts.crossplane.io/alpha')
- expect(subject.version).to eq('0.4.1')
- expect(subject).to be_rbac
- end
-
- context 'application failed to install previously' do
- let(:crossplane) { create(:clusters_applications_crossplane, :errored, version: '0.0.1') }
-
- it 'is initialized with the locked version' do
- expect(subject.version).to eq('0.4.1')
- end
- end
- end
-
- describe '#files' do
- let(:application) { crossplane }
- let(:values) { subject[:'values.yaml'] }
-
- subject { application.files }
-
- it 'includes crossplane specific keys in the values.yaml file' do
- expect(values).to include('clusterStacks')
- end
- end
-end
diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb
index f6b13f4a93f..91e90de02c0 100644
--- a/spec/models/clusters/applications/knative_spec.rb
+++ b/spec/models/clusters/applications/knative_spec.rb
@@ -17,10 +17,6 @@ RSpec.describe Clusters::Applications::Knative do
include_examples 'cluster application version specs', :clusters_applications_knative
include_examples 'cluster application initial status specs'
- describe 'associations' do
- it { is_expected.to have_one(:serverless_domain_cluster).class_name('::Serverless::DomainCluster').with_foreign_key('clusters_applications_knative_id').inverse_of(:knative) }
- end
-
describe 'default values' do
it { expect(subject.version).to eq(described_class::VERSION) }
end
@@ -135,19 +131,6 @@ RSpec.describe Clusters::Applications::Knative do
it 'does not install metrics for prometheus' do
expect(subject.postinstall).to be_empty
end
-
- context 'with prometheus installed' do
- let(:prometheus) { create(:clusters_applications_prometheus, :installed) }
- let(:knative) { create(:clusters_applications_knative, cluster: prometheus.cluster) }
-
- subject { knative.install_command }
-
- it 'installs metrics' do
- expect(subject.postinstall).not_to be_empty
- expect(subject.postinstall.length).to be(1)
- expect(subject.postinstall[0]).to eql("kubectl apply -f #{Clusters::Applications::Knative::METRICS_CONFIG}")
- end
- end
end
describe '#install_command' do
@@ -249,12 +232,4 @@ RSpec.describe Clusters::Applications::Knative do
expect(subject.find_available_domain(domain.id)).to eq(domain)
end
end
-
- describe '#pages_domain' do
- let!(:sdc) { create(:serverless_domain_cluster, knative: knative) }
-
- it 'returns the the associated pages domain' do
- expect(knative.reload.pages_domain).to eq(sdc.pages_domain)
- end
- end
end
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
deleted file mode 100644
index 15c3162270e..00000000000
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ /dev/null
@@ -1,349 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::Prometheus do
- include KubernetesHelpers
- include StubRequests
-
- include_examples 'cluster application core specs', :clusters_applications_prometheus
- include_examples 'cluster application status specs', :clusters_applications_prometheus
- include_examples 'cluster application version specs', :clusters_applications_prometheus
- include_examples 'cluster application helm specs', :clusters_applications_prometheus
- include_examples 'cluster application initial status specs'
-
- describe 'default values' do
- subject(:prometheus) { build(:clusters_applications_prometheus) }
-
- it { expect(prometheus.alert_manager_token).to be_an_instance_of(String) }
- it { expect(prometheus.version).to eq(described_class::VERSION) }
- end
-
- describe 'after_destroy' do
- let(:cluster) { create(:cluster, :with_installed_helm) }
- let(:application) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
-
- it 'disables the corresponding integration' do
- application.destroy!
-
- expect(cluster.integration_prometheus).not_to be_enabled
- end
- end
-
- describe 'transition to installed' do
- let(:project) { create(:project) }
- let(:cluster) { create(:cluster, :with_installed_helm) }
- let(:application) { create(:clusters_applications_prometheus, :installing, cluster: cluster) }
-
- it 'enables the corresponding integration' do
- application.make_installed
-
- expect(cluster.integration_prometheus).to be_enabled
- end
- end
-
- describe 'transition to externally_installed' do
- let(:project) { create(:project) }
- let(:cluster) { create(:cluster, :with_installed_helm) }
- let(:application) { create(:clusters_applications_prometheus, :installing, cluster: cluster) }
-
- it 'enables the corresponding integration' do
- application.make_externally_installed!
-
- expect(cluster.integration_prometheus).to be_enabled
- end
- end
-
- describe 'transition to updating' do
- let(:project) { create(:project) }
- let(:cluster) { create(:cluster, projects: [project]) }
-
- subject { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
-
- it 'sets last_update_started_at to now' do
- freeze_time do
- expect { subject.make_updating }.to change { subject.reload.last_update_started_at }.to be_within(1.second).of(Time.current)
- end
- end
- end
-
- describe '#managed_prometheus?' do
- subject { prometheus.managed_prometheus? }
-
- let(:prometheus) { build(:clusters_applications_prometheus) }
-
- it { is_expected.to be_truthy }
-
- context 'externally installed' do
- let(:prometheus) { build(:clusters_applications_prometheus, :externally_installed) }
-
- it { is_expected.to be_falsey }
- end
-
- context 'uninstalled' do
- let(:prometheus) { build(:clusters_applications_prometheus, :uninstalled) }
-
- it { is_expected.to be_falsey }
- end
- end
-
- describe '#can_uninstall?' do
- let(:prometheus) { create(:clusters_applications_prometheus) }
-
- subject { prometheus.can_uninstall? }
-
- it { is_expected.to be_truthy }
- end
-
- describe '#prometheus_client' do
- include_examples '#prometheus_client shared' do
- let(:factory) { :clusters_applications_prometheus }
- end
- end
-
- describe '#install_command' do
- let(:prometheus) { create(:clusters_applications_prometheus) }
-
- subject { prometheus.install_command }
-
- it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::InstallCommand) }
-
- it 'is initialized with 3 arguments' do
- expect(subject.name).to eq('prometheus')
- expect(subject.chart).to eq('prometheus/prometheus')
- expect(subject.version).to eq('10.4.1')
- expect(subject).to be_rbac
- expect(subject.files).to eq(prometheus.files)
- end
-
- context 'on a non rbac enabled cluster' do
- before do
- prometheus.cluster.platform_kubernetes.abac!
- end
-
- it { is_expected.not_to be_rbac }
- end
-
- context 'application failed to install previously' do
- let(:prometheus) { create(:clusters_applications_prometheus, :errored, version: '2.0.0') }
-
- it 'is initialized with the locked version' do
- expect(subject.version).to eq('10.4.1')
- end
- end
-
- it 'does not install knative metrics' do
- expect(subject.postinstall).to be_empty
- end
-
- context 'with knative installed' do
- let(:knative) { create(:clusters_applications_knative, :updated) }
- let(:prometheus) { create(:clusters_applications_prometheus, cluster: knative.cluster) }
-
- subject { prometheus.install_command }
-
- it 'installs knative metrics' do
- expect(subject.postinstall).to include("kubectl apply -f #{Clusters::Applications::Knative::METRICS_CONFIG}")
- end
- end
- end
-
- describe '#uninstall_command' do
- let(:prometheus) { create(:clusters_applications_prometheus) }
-
- subject { prometheus.uninstall_command }
-
- it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::DeleteCommand) }
-
- it 'has the application name' do
- expect(subject.name).to eq('prometheus')
- end
-
- it 'has files' do
- expect(subject.files).to eq(prometheus.files)
- end
-
- it 'is rbac' do
- expect(subject).to be_rbac
- end
-
- describe '#predelete' do
- let(:knative) { create(:clusters_applications_knative, :updated) }
- let(:prometheus) { create(:clusters_applications_prometheus, cluster: knative.cluster) }
-
- subject { prometheus.uninstall_command.predelete }
-
- it 'deletes knative metrics' do
- metrics_config = Clusters::Applications::Knative::METRICS_CONFIG
- is_expected.to include("kubectl delete -f #{metrics_config} --ignore-not-found")
- end
- end
-
- context 'on a non rbac enabled cluster' do
- before do
- prometheus.cluster.platform_kubernetes.abac!
- end
-
- it { is_expected.not_to be_rbac }
- end
- end
-
- describe '#patch_command' do
- subject(:patch_command) { prometheus.patch_command(values) }
-
- let(:prometheus) { build(:clusters_applications_prometheus) }
- let(:values) { prometheus.values }
-
- it { is_expected.to be_an_instance_of(::Gitlab::Kubernetes::Helm::V3::PatchCommand) }
-
- it 'is initialized with 3 arguments' do
- expect(patch_command.name).to eq('prometheus')
- expect(patch_command.chart).to eq('prometheus/prometheus')
- expect(patch_command.version).to eq('10.4.1')
- expect(patch_command.files).to eq(prometheus.files)
- end
- end
-
- describe '#update_in_progress?' do
- context 'when app is updating' do
- it 'returns true' do
- cluster = create(:cluster)
- prometheus_app = build(:clusters_applications_prometheus, :updating, cluster: cluster)
-
- expect(prometheus_app.update_in_progress?).to be true
- end
- end
- end
-
- describe '#update_errored?' do
- context 'when app errored' do
- it 'returns true' do
- cluster = create(:cluster)
- prometheus_app = build(:clusters_applications_prometheus, :update_errored, cluster: cluster)
-
- expect(prometheus_app.update_errored?).to be true
- end
- end
- end
-
- describe '#files' do
- let(:application) { create(:clusters_applications_prometheus) }
- let(:values) { subject[:'values.yaml'] }
-
- subject { application.files }
-
- it 'includes prometheus valid values' do
- expect(values).to include('alertmanager')
- expect(values).to include('kubeStateMetrics')
- expect(values).to include('nodeExporter')
- expect(values).to include('pushgateway')
- expect(values).to include('serverFiles')
- end
- end
-
- describe '#files_with_replaced_values' do
- let(:application) { build(:clusters_applications_prometheus) }
- let(:files) { application.files }
-
- subject { application.files_with_replaced_values({ hello: :world }) }
-
- it 'does not modify #files' do
- expect(subject[:'values.yaml']).not_to eq(files[:'values.yaml'])
-
- expect(files[:'values.yaml']).to eq(application.values)
- end
-
- it 'returns values.yaml with replaced values' do
- expect(subject[:'values.yaml']).to eq({ hello: :world })
- end
-
- it 'uses values from #files, except for values.yaml' do
- allow(application).to receive(:files).and_return({
- 'values.yaml': 'some value specific to files',
- 'file_a.txt': 'file_a',
- 'file_b.txt': 'file_b'
- })
-
- expect(subject.except(:'values.yaml')).to eq({
- 'file_a.txt': 'file_a',
- 'file_b.txt': 'file_b'
- })
- end
- end
-
- describe '#configured?' do
- let(:prometheus) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
-
- subject { prometheus.configured? }
-
- context 'when a kubenetes client is present' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
-
- it { is_expected.to be_truthy }
-
- context 'when it is not availalble' do
- let(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) }
-
- it { is_expected.to be_falsey }
- end
-
- context 'when the kubernetes URL is blocked' do
- before do
- blocked_ip = '127.0.0.1' # localhost addresses are blocked by default
-
- stub_all_dns(cluster.platform.api_url, ip_address: blocked_ip)
- end
-
- it { is_expected.to be_falsey }
- end
- end
-
- context 'when a kubenetes client is not present' do
- let(:cluster) { create(:cluster) }
-
- it { is_expected.to be_falsy }
- end
- end
-
- describe '#updated_since?' do
- let(:cluster) { create(:cluster) }
- let(:prometheus_app) { build(:clusters_applications_prometheus, cluster: cluster) }
- let(:timestamp) { Time.current - 5.minutes }
-
- around do |example|
- freeze_time { example.run }
- end
-
- before do
- prometheus_app.last_update_started_at = Time.current
- end
-
- context 'when app does not have status failed' do
- it 'returns true when last update started after the timestamp' do
- expect(prometheus_app.updated_since?(timestamp)).to be true
- end
-
- it 'returns false when last update started before the timestamp' do
- expect(prometheus_app.updated_since?(Time.current + 5.minutes)).to be false
- end
- end
-
- context 'when app has status failed' do
- it 'returns false when last update started after the timestamp' do
- prometheus_app.status = 6
-
- expect(prometheus_app.updated_since?(timestamp)).to be false
- end
- end
- end
-
- describe 'alert manager token' do
- subject { create(:clusters_applications_prometheus) }
-
- it 'is autogenerated on creation' do
- expect(subject.alert_manager_token).to match(/\A\h{32}\z/)
- expect(subject.encrypted_alert_manager_token).not_to be_nil
- expect(subject.encrypted_alert_manager_token_iv).not_to be_nil
- end
- end
-end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 2a2e2899d24..2d46714eb22 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -25,7 +25,6 @@ feature_category: :kubernetes_management do
it { is_expected.to have_one(:integration_prometheus) }
it { is_expected.to have_one(:application_helm) }
it { is_expected.to have_one(:application_ingress) }
- it { is_expected.to have_one(:application_prometheus) }
it { is_expected.to have_one(:application_runner) }
it { is_expected.to have_many(:kubernetes_namespaces) }
it { is_expected.to have_one(:cluster_project) }
@@ -714,13 +713,12 @@ feature_category: :kubernetes_management do
context 'when all applications are created' do
let!(:helm) { create(:clusters_applications_helm, cluster: cluster) }
let!(:ingress) { create(:clusters_applications_ingress, cluster: cluster) }
- let!(:prometheus) { create(:clusters_applications_prometheus, cluster: cluster) }
let!(:runner) { create(:clusters_applications_runner, cluster: cluster) }
let!(:jupyter) { create(:clusters_applications_jupyter, cluster: cluster) }
let!(:knative) { create(:clusters_applications_knative, cluster: cluster) }
it 'returns a list of created applications' do
- is_expected.to contain_exactly(helm, ingress, prometheus, runner, jupyter, knative)
+ is_expected.to contain_exactly(helm, ingress, runner, jupyter, knative)
end
end
@@ -1342,22 +1340,6 @@ feature_category: :kubernetes_management do
expect(cluster.prometheus_adapter).to eq(integration)
end
end
-
- context 'has application_prometheus' do
- let_it_be(:application) { create(:clusters_applications_prometheus, :no_helm_installed, cluster: cluster) }
-
- it 'returns nil' do
- expect(cluster.prometheus_adapter).to be_nil
- end
-
- context 'also has a integration_prometheus' do
- let_it_be(:integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
-
- it 'returns the integration' do
- expect(cluster.prometheus_adapter).to eq(integration)
- end
- end
- end
end
describe '#delete_cached_resources!' do
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index b280275c2e5..efe511bf1c5 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -930,4 +930,13 @@ RSpec.describe Clusters::Platforms::Kubernetes do
end
end
end
+
+ describe '#authorization_type' do
+ subject(:kubernetes) { create(:cluster_platform_kubernetes) }
+
+ let(:attr) { :authorization_type }
+ let(:attr_value) { :unknown_authorization }
+
+ it_behaves_like 'having enum with nil value'
+ end
end
diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb
index 6dd34c3e21f..706f18a5337 100644
--- a/spec/models/commit_collection_spec.rb
+++ b/spec/models/commit_collection_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe CommitCollection do
+RSpec.describe CommitCollection, feature_category: :source_code_management do
let(:project) { create(:project, :repository) }
let(:commit) { project.commit("c1c67abbaf91f624347bb3ae96eabe3a1b742478") }
@@ -191,6 +191,19 @@ RSpec.describe CommitCollection do
end
end
+ describe '#load_tags' do
+ let(:gitaly_commit_with_tags) { project.commit('5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
+ let(:collection) { described_class.new(project, [gitaly_commit_with_tags]) }
+
+ subject { collection.load_tags }
+
+ it 'loads tags' do
+ subject
+
+ expect(collection.commits[0].referenced_by).to contain_exactly('refs/tags/v1.1.0')
+ end
+ end
+
describe '#respond_to_missing?' do
it 'returns true when the underlying Array responds to the message' do
collection = described_class.new(project, [])
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 4ff451af9de..54ca7f0c893 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -17,7 +17,11 @@ RSpec.describe CommitStatus do
it_behaves_like 'having unique enum values'
- it { is_expected.to belong_to(:pipeline) }
+ it do
+ is_expected.to belong_to(:pipeline).class_name('Ci::Pipeline')
+ .with_foreign_key(:commit_id).inverse_of(:statuses)
+ end
+
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:auto_canceled_by) }
@@ -33,6 +37,7 @@ RSpec.describe CommitStatus do
it { is_expected.to respond_to :running? }
it { is_expected.to respond_to :pending? }
it { is_expected.not_to be_retried }
+ it { expect(described_class.primary_key).to eq('id') }
describe '#author' do
subject { commit_status.author }
@@ -422,29 +427,6 @@ RSpec.describe CommitStatus do
end
end
- describe '.exclude_ignored' do
- subject { described_class.exclude_ignored.order(:id) }
-
- let(:statuses) do
- [create_status(when: 'manual', status: 'skipped'),
- create_status(when: 'manual', status: 'success'),
- create_status(when: 'manual', status: 'failed'),
- create_status(when: 'on_failure', status: 'skipped'),
- create_status(when: 'on_failure', status: 'success'),
- create_status(when: 'on_failure', status: 'failed'),
- create_status(allow_failure: true, status: 'success'),
- create_status(allow_failure: true, status: 'failed'),
- create_status(allow_failure: false, status: 'success'),
- create_status(allow_failure: false, status: 'failed'),
- create_status(allow_failure: true, status: 'manual'),
- create_status(allow_failure: false, status: 'manual')]
- end
-
- it 'returns statuses without what we want to ignore' do
- is_expected.to eq(statuses.values_at(0, 1, 2, 3, 4, 5, 6, 8, 9, 11))
- end
- end
-
describe '.failed_but_allowed' do
subject { described_class.failed_but_allowed.order(:id) }
@@ -1062,4 +1044,13 @@ RSpec.describe CommitStatus do
end
end
end
+
+ describe '#failure_reason' do
+ subject(:status) { commit_status }
+
+ let(:attr) { :failure_reason }
+ let(:attr_value) { :unknown_failure }
+
+ it_behaves_like 'having enum with nil value'
+ end
end
diff --git a/spec/models/concerns/atomic_internal_id_spec.rb b/spec/models/concerns/atomic_internal_id_spec.rb
index 5fe3141eb17..625d8fec0fb 100644
--- a/spec/models/concerns/atomic_internal_id_spec.rb
+++ b/spec/models/concerns/atomic_internal_id_spec.rb
@@ -250,11 +250,104 @@ RSpec.describe AtomicInternalId do
end
end
- describe '.track_project_iid!' do
+ describe '.track_namespace_iid!' do
it 'tracks the present value' do
expect do
- ::Issue.track_project_iid!(milestone.project, external_iid)
- end.to change { InternalId.find_by(project: milestone.project, usage: :issues)&.last_value.to_i }.to(external_iid)
+ ::Issue.track_namespace_iid!(milestone.project.project_namespace, external_iid)
+ end.to change {
+ InternalId.find_by(namespace: milestone.project.project_namespace, usage: :issues)&.last_value.to_i
+ }.to(external_iid)
+ end
+ end
+
+ context 'when transitioning a model from one scope to another' do
+ let!(:issue) { build(:issue, project: project) }
+ let(:old_issue_model) do
+ Class.new(ApplicationRecord) do
+ include AtomicInternalId
+
+ self.table_name = :issues
+
+ belongs_to :project
+ belongs_to :namespace
+
+ has_internal_id :iid, scope: :project
+
+ def self.name
+ 'TestClassA'
+ end
+ end
+ end
+
+ let(:old_issue_instance) { old_issue_model.new(issue.attributes) }
+ let(:new_issue_instance) { Issue.new(issue.attributes) }
+
+ it 'generates the iid on the new scope' do
+ # set a random iid, just so that it does not start at 1
+ old_issue_instance.iid = 123
+ old_issue_instance.save!
+
+ # creating a new old_issue_instance increments the iid.
+ expect { old_issue_model.new(issue.attributes).save! }.to change {
+ InternalId.find_by(project: project, usage: :issues)&.last_value.to_i
+ }.from(123).to(124).and(not_change { InternalId.count })
+
+ # Creating a new Issue creates a new record in internal_ids, scoped to the namespace.
+ # Given the init definition in Issue#has_internal_id, internal_ids#last_value is the
+ # maximum of the old iid value in internal_ids (scoped to the project) and the max(iid)
+ # value from the issues table, scoped to namespace_id.
+ # see Issue#has_internal_id
+ expect { new_issue_instance.save! }.to change {
+ InternalId.find_by(namespace: project.project_namespace, usage: :issues)&.last_value.to_i
+ }.to(125).and(change { InternalId.count }.by(1))
+
+ # transitioning back to project scope would generate overlapping IIDs and raise a duplicate key value error,
+ # unless we clean up the issues usage scoped to the project first
+ expect { old_issue_model.new(issue.attributes).save! }.to raise_error(ActiveRecord::RecordNotUnique)
+
+ # delete the issues usage scoped to the project
+ InternalId.where(project: project, usage: :issues).delete_all
+
+ expect { old_issue_model.new(issue.attributes).save! }.to change {
+ InternalId.find_by(project: project, usage: :issues)&.last_value.to_i
+ }.to(126).and(change { InternalId.count }.by(1))
+ end
+ end
+
+ context 'when the model is scoped to namespace and does not have an init proc' do
+ let!(:issue) { build(:issue, namespace: create(:group)) }
+
+ let(:issue_model) do
+ Class.new(ApplicationRecord) do
+ include AtomicInternalId
+
+ self.table_name = :issues
+
+ belongs_to :project
+ belongs_to :namespace
+
+ has_internal_id :iid, scope: :namespace
+
+ def self.name
+ 'TestClass'
+ end
+ end
+ end
+
+ let(:model_instance) { issue_model.new(issue.attributes) }
+
+ it 'generates the iid on the new scope' do
+ expect { model_instance.save! }.to change {
+ InternalId.find_by(namespace: model_instance.namespace, usage: :issues)&.last_value.to_i
+ }.to(1).and(change { InternalId.count }.by(1))
+ end
+
+ it 'supplies a stream of iid values' do
+ expect do
+ issue_model.with_namespace_iid_supply(model_instance.namespace) do |supply|
+ 4.times { supply.next_value }
+ end
+ end.to change { InternalId.find_by(namespace: model_instance.namespace, usage: :issues)&.last_value.to_i }.by(4)
end
end
end
diff --git a/spec/models/concerns/ci/maskable_spec.rb b/spec/models/concerns/ci/maskable_spec.rb
index b57b2b15608..12157867062 100644
--- a/spec/models/concerns/ci/maskable_spec.rb
+++ b/spec/models/concerns/ci/maskable_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Maskable, feature_category: :pipeline_authoring do
+RSpec.describe Ci::Maskable, feature_category: :pipeline_composition do
let(:variable) { build(:ci_variable) }
describe 'masked value validations' do
diff --git a/spec/models/concerns/ci/partitionable/partitioned_filter_spec.rb b/spec/models/concerns/ci/partitionable/partitioned_filter_spec.rb
deleted file mode 100644
index bb25d7d1665..00000000000
--- a/spec/models/concerns/ci/partitionable/partitioned_filter_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::Partitionable::PartitionedFilter, :aggregate_failures, feature_category: :continuous_integration do
- before do
- create_tables(<<~SQL)
- CREATE TABLE _test_ci_jobs_metadata (
- id serial NOT NULL,
- partition_id int NOT NULL DEFAULT 10,
- name text,
- PRIMARY KEY (id, partition_id)
- ) PARTITION BY LIST(partition_id);
-
- CREATE TABLE _test_ci_jobs_metadata_1
- PARTITION OF _test_ci_jobs_metadata
- FOR VALUES IN (10);
- SQL
- end
-
- let(:model) do
- Class.new(Ci::ApplicationRecord) do
- include Ci::Partitionable::PartitionedFilter
-
- self.primary_key = :id
- self.table_name = :_test_ci_jobs_metadata
-
- def self.name
- 'TestCiJobMetadata'
- end
- end
- end
-
- let!(:record) { model.create! }
-
- let(:where_filter) do
- /WHERE "_test_ci_jobs_metadata"."id" = #{record.id} AND "_test_ci_jobs_metadata"."partition_id" = 10/
- end
-
- describe '#save' do
- it 'uses id and partition_id' do
- record.name = 'test'
- recorder = ActiveRecord::QueryRecorder.new { record.save! }
-
- expect(recorder.log).to include(where_filter)
- expect(record.name).to eq('test')
- end
- end
-
- describe '#update' do
- it 'uses id and partition_id' do
- recorder = ActiveRecord::QueryRecorder.new { record.update!(name: 'test') }
-
- expect(recorder.log).to include(where_filter)
- expect(record.name).to eq('test')
- end
- end
-
- describe '#delete' do
- it 'uses id and partition_id' do
- recorder = ActiveRecord::QueryRecorder.new { record.delete }
-
- expect(recorder.log).to include(where_filter)
- expect(model.count).to be_zero
- end
- end
-
- describe '#destroy' do
- it 'uses id and partition_id' do
- recorder = ActiveRecord::QueryRecorder.new { record.destroy! }
-
- expect(recorder.log).to include(where_filter)
- expect(model.count).to be_zero
- end
- end
-
- def create_tables(table_sql)
- Ci::ApplicationRecord.connection.execute(table_sql)
- end
-end
diff --git a/spec/models/concerns/ci/partitionable_spec.rb b/spec/models/concerns/ci/partitionable_spec.rb
index 430ef57d493..5100f20ed25 100644
--- a/spec/models/concerns/ci/partitionable_spec.rb
+++ b/spec/models/concerns/ci/partitionable_spec.rb
@@ -52,16 +52,16 @@ RSpec.describe Ci::Partitionable do
context 'when partitioned is true' do
let(:partitioned) { true }
- it { expect(ci_model.ancestors).to include(described_class::PartitionedFilter) }
- it { expect(ci_model).to be_partitioned }
+ it { expect(ci_model.ancestors).to include(PartitionedTable) }
+ it { expect(ci_model.partitioning_strategy).to be_a(Gitlab::Database::Partitioning::CiSlidingListStrategy) }
+ it { expect(ci_model.partitioning_strategy.partitioning_key).to eq(:partition_id) }
end
context 'when partitioned is false' do
let(:partitioned) { false }
- it { expect(ci_model.ancestors).not_to include(described_class::PartitionedFilter) }
-
- it { expect(ci_model).not_to be_partitioned }
+ it { expect(ci_model.ancestors).not_to include(PartitionedTable) }
+ it { expect(ci_model).not_to respond_to(:partitioning_strategy) }
end
end
end
diff --git a/spec/models/concerns/each_batch_spec.rb b/spec/models/concerns/each_batch_spec.rb
index 2c75d4d5c41..75c5cac899b 100644
--- a/spec/models/concerns/each_batch_spec.rb
+++ b/spec/models/concerns/each_batch_spec.rb
@@ -171,4 +171,36 @@ RSpec.describe EachBatch do
end
end
end
+
+ describe '.each_batch_count' do
+ let_it_be(:users) { create_list(:user, 5, updated_at: 1.day.ago) }
+
+ it 'counts the records' do
+ count, last_value = User.each_batch_count
+
+ expect(count).to eq(5)
+ expect(last_value).to eq(nil)
+ end
+
+ context 'when using a different column' do
+ it 'returns correct count' do
+ count, _ = User.each_batch_count(column: :email, of: 2)
+
+ expect(count).to eq(5)
+ end
+ end
+
+ context 'when stopping and resuming the counting' do
+ it 'returns the correct count' do
+ count, last_value = User.each_batch_count(of: 1) do |current_count, _current_value|
+ current_count == 3 # stop when count reaches 3
+ end
+
+ expect(count).to eq(3)
+
+ final_count, _ = User.each_batch_count(of: 1, last_value: last_value, last_count: count)
+ expect(final_count).to eq(5)
+ end
+ end
+ end
end
diff --git a/spec/models/concerns/has_user_type_spec.rb b/spec/models/concerns/has_user_type_spec.rb
index bd128112113..03d2c267098 100644
--- a/spec/models/concerns/has_user_type_spec.rb
+++ b/spec/models/concerns/has_user_type_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe User do
+RSpec.describe User, feature_category: :system_access do
specify 'types consistency checks', :aggregate_failures do
expect(described_class::USER_TYPES.keys)
.to match_array(%w[human ghost alert_bot project_bot support_bot service_user security_bot visual_review_bot
- migration_bot automation_bot admin_bot suggested_reviewers_bot])
+ migration_bot automation_bot security_policy_bot admin_bot suggested_reviewers_bot service_account])
expect(described_class::USER_TYPES).to include(*described_class::BOT_USER_TYPES)
expect(described_class::USER_TYPES).to include(*described_class::NON_INTERNAL_USER_TYPES)
expect(described_class::USER_TYPES).to include(*described_class::INTERNAL_USER_TYPES)
diff --git a/spec/models/concerns/redis_cacheable_spec.rb b/spec/models/concerns/redis_cacheable_spec.rb
index c270f23defb..bf277b094ea 100644
--- a/spec/models/concerns/redis_cacheable_spec.rb
+++ b/spec/models/concerns/redis_cacheable_spec.rb
@@ -50,6 +50,58 @@ RSpec.describe RedisCacheable do
subject
end
+
+ context 'with existing cached attributes' do
+ before do
+ instance.cache_attributes({ existing_attr: 'value' })
+ end
+
+ it 'sets the cache attributes' do
+ Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:set).with(cache_key, payload.to_json, anything).and_call_original
+ end
+
+ expect { subject }.to change { instance.cached_attribute(:existing_attr) }.from('value').to(nil)
+ end
+ end
+ end
+
+ describe '#merge_cache_attributes' do
+ subject { instance.merge_cache_attributes(payload) }
+
+ let(:existing_attributes) { { existing_attr: 'value', name: 'value' } }
+
+ before do
+ instance.cache_attributes(existing_attributes)
+ end
+
+ context 'with different attribute values' do
+ let(:payload) { { name: 'new_value' } }
+
+ it 'merges the cache attributes with existing values' do
+ Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:set).with(cache_key, existing_attributes.merge(payload).to_json, anything)
+ .and_call_original
+ end
+
+ subject
+
+ expect(instance.cached_attribute(:existing_attr)).to eq 'value'
+ expect(instance.cached_attribute(:name)).to eq 'new_value'
+ end
+ end
+
+ context 'with no new or changed attribute values' do
+ let(:payload) { { name: 'value' } }
+
+ it 'does not try to set Redis key' do
+ Gitlab::Redis::Cache.with do |redis|
+ expect(redis).not_to receive(:set)
+ end
+
+ subject
+ end
+ end
end
describe '#cached_attr_reader', :clean_gitlab_redis_cache do
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index dc1002f3560..0bbe3dea812 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -74,6 +74,17 @@ RSpec.shared_examples 'routable resource with parent' do
describe '#full_name' do
it { expect(record.full_name).to eq "#{record.parent.human_name} / #{record.name}" }
+ context 'without route name' do
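+ # when the route has no stored name, #full_name is built from the parent's human name and the record name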
+ before do
+ stub_feature_flags(cached_route_lookups: true)
+ record.route.update_attribute(:name, nil)
+ end
+
+ it 'builds full name' do
+ expect(record.full_name).to eq("#{record.parent.human_name} / #{record.name}")
+ end
+ end
+
it 'hits the cache when not preloaded' do
forcibly_hit_cached_lookup(record, :full_name)
diff --git a/spec/models/concerns/subscribable_spec.rb b/spec/models/concerns/subscribable_spec.rb
index a60a0a5e26d..80060802de8 100644
--- a/spec/models/concerns/subscribable_spec.rb
+++ b/spec/models/concerns/subscribable_spec.rb
@@ -215,5 +215,31 @@ RSpec.describe Subscribable, 'Subscribable' do
expect(lazy_queries.count).to eq(0)
expect(preloaded_queries.count).to eq(1)
end
+
+ context 'with work items' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:work_item) { create(:work_item, :task, project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ before do
+ [issue, work_item].each do |item|
+ create(:subscription, user: user, subscribable: item, subscribed: true, project: project)
+ end
+ end
+
+ it 'loads correct subscribable type' do
+ expect(issue).to receive(:subscribable_type).and_return('Issue')
+ issue.lazy_subscription(user, project)
+
+ expect(work_item).to receive(:subscribable_type).and_return('Issue')
+ work_item.lazy_subscription(user, project)
+ end
+
+ it 'matches existing subscription type' do
+ expect(issue.subscribed?(user, project)).to eq(true)
+ expect(work_item.subscribed?(user, project)).to eq(true)
+ end
+ end
end
end
diff --git a/spec/models/concerns/token_authenticatable_strategies/base_spec.rb b/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
index 89ddc797a9d..2679bd7d93b 100644
--- a/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
+++ b/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe TokenAuthenticatableStrategies::Base do
+RSpec.describe TokenAuthenticatableStrategies::Base, feature_category: :system_access do
let(:instance) { double(:instance) }
let(:field) { double(:field) }
@@ -24,6 +24,41 @@ RSpec.describe TokenAuthenticatableStrategies::Base do
end
end
+ describe '#format_token' do
+ let(:strategy) { described_class.new(instance, field, options) }
+
+ let(:instance) { build(:ci_build, name: 'build_name_for_format_option', partition_id: partition_id) }
+ let(:partition_id) { 100 }
+ let(:field) { 'token' }
+ let(:options) { {} }
+
+ let(:token) { 'a_secret_token' }
+
+ it 'returns the original token' do
+ expect(strategy.format_token(instance, token)).to eq(token)
+ end
+
+ context 'when format_with_prefix option is provided' do
+ context 'with symbol' do
+ let(:options) { { format_with_prefix: :partition_id_prefix_in_16_bit_encode } }
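+ # partition_id 100 is "64" in base 16, so the expected token becomes "64_a_secret_token"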
+ let(:partition_id_in_16_bit_encode_with_underscore) { "#{partition_id.to_s(16)}_" }
+ let(:formatted_token) { "#{partition_id_in_16_bit_encode_with_underscore}#{token}" }
+
+ it 'returns a formatted token from the format_with_prefix option' do
+ expect(strategy.format_token(instance, token)).to eq(formatted_token)
+ end
+ end
+
+ context 'with a value other than a symbol' do
+ let(:options) { { format_with_prefix: false } }
+
+ it 'raises NotImplementedError' do
+ expect { strategy.format_token(instance, token) }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+ end
+
describe '.fabricate' do
context 'when digest strategy is specified' do
it 'fabricates digest strategy object' do
diff --git a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
index 2df86804f34..3bdb0647d62 100644
--- a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
+++ b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
@@ -2,16 +2,20 @@
require 'spec_helper'
-RSpec.describe TokenAuthenticatableStrategies::Encrypted do
+RSpec.describe TokenAuthenticatableStrategies::Encrypted, feature_category: :system_access do
let(:model) { double(:model) }
let(:instance) { double(:instance) }
+ let(:original_token) { 'my-value' }
+ let(:resource) { double(:resource) }
+ let(:options) { other_options.merge(encrypted: encrypted_option) }
+ let(:other_options) { {} }
let(:encrypted) do
- TokenAuthenticatableStrategies::EncryptionHelper.encrypt_token('my-value')
+ TokenAuthenticatableStrategies::EncryptionHelper.encrypt_token(original_token)
end
let(:encrypted_with_static_iv) do
- Gitlab::CryptoHelper.aes256_gcm_encrypt('my-value')
+ Gitlab::CryptoHelper.aes256_gcm_encrypt(original_token)
end
subject(:strategy) do
@@ -19,7 +23,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
describe '#token_fields' do
- let(:options) { { encrypted: :required } }
+ let(:encrypted_option) { :required }
it 'includes the encrypted field' do
expect(strategy.token_fields).to contain_exactly('some_field', 'some_field_encrypted')
@@ -27,50 +31,70 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
describe '#find_token_authenticatable' do
+ shared_examples 'finds the resource' do
+ it 'finds the resource by cleartext' do
+ expect(subject.find_token_authenticatable(original_token))
+ .to eq(resource)
+ end
+ end
+
+ shared_examples 'does not find any resource' do
+ it 'does not find any resource by cleartext' do
+ expect(subject.find_token_authenticatable(original_token))
+ .to be_nil
+ end
+ end
+
+ shared_examples 'finds the resource with/without setting require_prefix_for_validation' do
+ let(:standard_runner_token_prefix) { 'GR1348941' }
+
+ it_behaves_like 'finds the resource'
+
+ context 'when require_prefix_for_validation is provided' do
+ let(:other_options) { { format_with_prefix: :format_with_prefix_method, require_prefix_for_validation: true } }
+
+ before do
+ allow(resource).to receive(:format_with_prefix_method).and_return(standard_runner_token_prefix)
+ end
+
+ it_behaves_like 'does not find any resource'
+
+ context 'when token starts with prefix' do
+ let(:original_token) { "#{standard_runner_token_prefix}plain_token" }
+
+ it_behaves_like 'finds the resource'
+ end
+ end
+ end
+
context 'when encryption is required' do
- let(:options) { { encrypted: :required } }
+ let(:encrypted_option) { :required }
+ let(:resource) { double(:encrypted_resource) }
- it 'finds the encrypted resource by cleartext' do
+ before do
allow(model).to receive(:where)
.and_return(model)
allow(model).to receive(:find_by)
.with('some_field_encrypted' => [encrypted, encrypted_with_static_iv])
- .and_return('encrypted resource')
-
- expect(subject.find_token_authenticatable('my-value'))
- .to eq 'encrypted resource'
+ .and_return(resource)
end
- context 'when a prefix is required' do
- let(:options) { { encrypted: :required, prefix: 'GR1348941' } }
-
- it 'finds the encrypted resource by cleartext' do
- allow(model).to receive(:where)
- .and_return(model)
- allow(model).to receive(:find_by)
- .with('some_field_encrypted' => [encrypted, encrypted_with_static_iv])
- .and_return('encrypted resource')
-
- expect(subject.find_token_authenticatable('my-value'))
- .to be_nil
- end
- end
+ it_behaves_like 'finds the resource with/without setting require_prefix_for_validation'
end
context 'when encryption is optional' do
- let(:options) { { encrypted: :optional } }
+ let(:encrypted_option) { :optional }
+ let(:resource) { double(:encrypted_resource) }
- it 'finds the encrypted resource by cleartext' do
+ before do
allow(model).to receive(:where)
.and_return(model)
allow(model).to receive(:find_by)
.with('some_field_encrypted' => [encrypted, encrypted_with_static_iv])
- .and_return('encrypted resource')
-
- expect(subject.find_token_authenticatable('my-value'))
- .to eq 'encrypted resource'
+ .and_return(resource)
end
+ it_behaves_like 'finds the resource with/without setting require_prefix_for_validation'
+
it 'uses insecure strategy when encrypted token cannot be found' do
allow(subject.send(:insecure_strategy))
.to receive(:find_token_authenticatable)
@@ -85,68 +109,27 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
expect(subject.find_token_authenticatable('my-value'))
.to eq 'plaintext resource'
end
-
- context 'when a prefix is required' do
- let(:options) { { encrypted: :optional, prefix: 'GR1348941' } }
-
- it 'finds the encrypted resource by cleartext' do
- allow(model).to receive(:where)
- .and_return(model)
- allow(model).to receive(:find_by)
- .with('some_field_encrypted' => [encrypted, encrypted_with_static_iv])
- .and_return('encrypted resource')
-
- expect(subject.find_token_authenticatable('my-value'))
- .to be_nil
- end
- end
end
context 'when encryption is migrating' do
- let(:options) { { encrypted: :migrating } }
+ let(:encrypted_option) { :migrating }
+ let(:resource) { double(:cleartext_resource) }
- it 'finds the cleartext resource by cleartext' do
+ before do
allow(model).to receive(:where)
.and_return(model)
allow(model).to receive(:find_by)
- .with('some_field' => 'my-value')
- .and_return('cleartext resource')
-
- expect(subject.find_token_authenticatable('my-value'))
- .to eq 'cleartext resource'
+ .with('some_field' => original_token)
+ .and_return(resource)
end
- it 'returns nil if resource cannot be found' do
- allow(model).to receive(:where)
- .and_return(model)
- allow(model).to receive(:find_by)
- .with('some_field' => 'my-value')
- .and_return(nil)
-
- expect(subject.find_token_authenticatable('my-value'))
- .to be_nil
- end
-
- context 'when a prefix is required' do
- let(:options) { { encrypted: :migrating, prefix: 'GR1348941' } }
-
- it 'finds the encrypted resource by cleartext' do
- allow(model).to receive(:where)
- .and_return(model)
- allow(model).to receive(:find_by)
- .with('some_field' => 'my-value')
- .and_return('cleartext resource')
-
- expect(subject.find_token_authenticatable('my-value'))
- .to be_nil
- end
- end
+ it_behaves_like 'finds the resource with/without setting require_prefix_for_validation'
end
end
describe '#get_token' do
context 'when encryption is required' do
- let(:options) { { encrypted: :required } }
+ let(:encrypted_option) { :required }
it 'returns decrypted token when an encrypted with static iv token is present' do
allow(instance).to receive(:read_attribute)
@@ -166,7 +149,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
context 'when encryption is optional' do
- let(:options) { { encrypted: :optional } }
+ let(:encrypted_option) { :optional }
it 'returns decrypted token when an encrypted token is present' do
allow(instance).to receive(:read_attribute)
@@ -198,7 +181,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
context 'when encryption is migrating' do
- let(:options) { { encrypted: :migrating } }
+ let(:encrypted_option) { :migrating }
it 'returns cleartext token when an encrypted token is present' do
allow(instance).to receive(:read_attribute)
@@ -228,7 +211,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
describe '#set_token' do
context 'when encryption is required' do
- let(:options) { { encrypted: :required } }
+ let(:encrypted_option) { :required }
it 'writes encrypted token and returns it' do
expect(instance).to receive(:[]=)
@@ -239,7 +222,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
context 'when encryption is optional' do
- let(:options) { { encrypted: :optional } }
+ let(:encrypted_option) { :optional }
it 'writes encrypted token and removes plaintext token and returns it' do
expect(instance).to receive(:[]=)
@@ -252,7 +235,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
context 'when encryption is migrating' do
- let(:options) { { encrypted: :migrating } }
+ let(:encrypted_option) { :migrating }
it 'writes encrypted token and writes plaintext token' do
expect(instance).to receive(:[]=)
diff --git a/spec/models/concerns/token_authenticatable_strategies/encryption_helper_spec.rb b/spec/models/concerns/token_authenticatable_strategies/encryption_helper_spec.rb
index 671e51e3913..004298bbdd9 100644
--- a/spec/models/concerns/token_authenticatable_strategies/encryption_helper_spec.rb
+++ b/spec/models/concerns/token_authenticatable_strategies/encryption_helper_spec.rb
@@ -6,96 +6,26 @@ RSpec.describe TokenAuthenticatableStrategies::EncryptionHelper do
let(:encrypted_token) { described_class.encrypt_token('my-value-my-value-my-value') }
describe '.encrypt_token' do
- context 'when dynamic_nonce feature flag is switched on' do
- it 'adds nonce identifier on the beginning' do
- expect(encrypted_token.first).to eq(described_class::DYNAMIC_NONCE_IDENTIFIER)
- end
-
- it 'adds nonce at the end' do
- nonce = encrypted_token.last(described_class::NONCE_SIZE)
-
- expect(nonce).to eq(::Digest::SHA256.hexdigest('my-value-my-value-my-value').bytes.take(described_class::NONCE_SIZE).pack('c*'))
- end
-
- it 'encrypts token' do
- expect(encrypted_token[1...-described_class::NONCE_SIZE]).not_to eq('my-value-my-value-my-value')
- end
+ it 'adds nonce identifier at the beginning' do
+ expect(encrypted_token.first).to eq(described_class::DYNAMIC_NONCE_IDENTIFIER)
end
- context 'when dynamic_nonce feature flag is switched off' do
- before do
- stub_feature_flags(dynamic_nonce: false)
- end
-
- it 'does not add nonce identifier on the beginning' do
- expect(encrypted_token.first).not_to eq(described_class::DYNAMIC_NONCE_IDENTIFIER)
- end
-
- it 'does not add nonce in the end' do
- nonce = encrypted_token.last(described_class::NONCE_SIZE)
-
- expect(nonce).not_to eq(::Digest::SHA256.hexdigest('my-value-my-value-my-value').bytes.take(described_class::NONCE_SIZE).pack('c*'))
- end
+ it 'adds nonce at the end' do
+ nonce = encrypted_token.last(described_class::NONCE_SIZE)
- it 'encrypts token with static iv' do
- token = Gitlab::CryptoHelper.aes256_gcm_encrypt('my-value-my-value-my-value')
+ expect(nonce).to eq(::Digest::SHA256.hexdigest('my-value-my-value-my-value').bytes.take(described_class::NONCE_SIZE).pack('c*'))
+ end
- expect(encrypted_token).to eq(token)
- end
+ it 'encrypts token' do
+ expect(encrypted_token[1...-described_class::NONCE_SIZE]).not_to eq('my-value-my-value-my-value')
end
end
describe '.decrypt_token' do
- context 'with feature flag switched off' do
- before do
- stub_feature_flags(dynamic_nonce: false)
- end
-
- it 'decrypts token with static iv' do
- encrypted_token = described_class.encrypt_token('my-value')
-
- expect(described_class.decrypt_token(encrypted_token)).to eq('my-value')
- end
-
- it 'decrypts token if feature flag changed after encryption' do
- encrypted_token = described_class.encrypt_token('my-value')
-
- expect(encrypted_token).not_to eq('my-value')
-
- stub_feature_flags(dynamic_nonce: true)
-
- expect(described_class.decrypt_token(encrypted_token)).to eq('my-value')
- end
-
- it 'decrypts token with dynamic iv' do
- iv = ::Digest::SHA256.hexdigest('my-value').bytes.take(described_class::NONCE_SIZE).pack('c*')
- token = Gitlab::CryptoHelper.aes256_gcm_encrypt('my-value', nonce: iv)
- encrypted_token = "#{described_class::DYNAMIC_NONCE_IDENTIFIER}#{token}#{iv}"
-
- expect(described_class.decrypt_token(encrypted_token)).to eq('my-value')
- end
- end
-
- context 'with feature flag switched on' do
- before do
- stub_feature_flags(dynamic_nonce: true)
- end
-
- it 'decrypts token with dynamic iv' do
- encrypted_token = described_class.encrypt_token('my-value')
-
- expect(described_class.decrypt_token(encrypted_token)).to eq('my-value')
- end
-
- it 'decrypts token if feature flag changed after encryption' do
- encrypted_token = described_class.encrypt_token('my-value')
-
- expect(encrypted_token).not_to eq('my-value')
-
- stub_feature_flags(dynamic_nonce: false)
+ it 'decrypts token with dynamic iv' do
+ encrypted_token = described_class.encrypt_token('my-value')
- expect(described_class.decrypt_token(encrypted_token)).to eq('my-value')
- end
+ expect(described_class.decrypt_token(encrypted_token)).to eq('my-value')
end
end
end
diff --git a/spec/models/concerns/uniquify_spec.rb b/spec/models/concerns/uniquify_spec.rb
deleted file mode 100644
index 9b79e4d4154..00000000000
--- a/spec/models/concerns/uniquify_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Uniquify do
- let(:uniquify) { described_class.new }
-
- describe "#string" do
- it 'returns the given string if it does not exist' do
- result = uniquify.string('test_string') { |s| false }
-
- expect(result).to eq('test_string')
- end
-
- it 'returns the given string with a counter attached if the string exists' do
- result = uniquify.string('test_string') { |s| s == 'test_string' }
-
- expect(result).to eq('test_string1')
- end
-
- it 'increments the counter for each candidate string that also exists' do
- result = uniquify.string('test_string') { |s| s == 'test_string' || s == 'test_string1' }
-
- expect(result).to eq('test_string2')
- end
-
- it 'allows to pass an initial value for the counter' do
- start_counting_from = 2
- uniquify = described_class.new(start_counting_from)
-
- result = uniquify.string('test_string') { |s| s == 'test_string' }
-
- expect(result).to eq('test_string2')
- end
-
- it 'allows passing in a base function that defines the location of the counter' do
- result = uniquify.string(-> (counter) { "test_#{counter}_string" }) do |s|
- s == 'test__string'
- end
-
- expect(result).to eq('test_1_string')
- end
- end
-end
diff --git a/spec/models/concerns/web_hooks/has_web_hooks_spec.rb b/spec/models/concerns/web_hooks/has_web_hooks_spec.rb
new file mode 100644
index 00000000000..afb2406a969
--- /dev/null
+++ b/spec/models/concerns/web_hooks/has_web_hooks_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHooks::HasWebHooks, feature_category: :integrations do
+ let(:minimal_test_class) do
+ Class.new do
+ include WebHooks::HasWebHooks
+
+ def id
+ 1
+ end
+ end
+ end
+
+ before do
+ stub_const('MinimalTestClass', minimal_test_class)
+ end
+
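+ # the key combines the 'web_hooks:last_failure:' prefix, the underscored class name and the record id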
+ describe '#last_failure_redis_key' do
+ subject { MinimalTestClass.new.last_failure_redis_key }
+
+ it { is_expected.to eq('web_hooks:last_failure:minimal_test_class-1') }
+ end
+
+ describe '#last_webhook_failure', :clean_gitlab_redis_shared_state do
+ subject { MinimalTestClass.new.last_webhook_failure }
+
+ it { is_expected.to eq(nil) }
+
+ context 'when there was an older failure', :clean_gitlab_redis_shared_state do
+ let(:last_failure_date) { 1.month.ago.iso8601 }
+
+ before do
+ Gitlab::Redis::SharedState.with { |r| r.set('web_hooks:last_failure:minimal_test_class-1', last_failure_date) }
+ end
+
+ it { is_expected.to eq(last_failure_date) }
+ end
+ end
+end
diff --git a/spec/models/container_registry/data_repair_detail_spec.rb b/spec/models/container_registry/data_repair_detail_spec.rb
new file mode 100644
index 00000000000..92833553a1e
--- /dev/null
+++ b/spec/models/container_registry/data_repair_detail_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ContainerRegistry::DataRepairDetail, type: :model, feature_category: :container_registry do
+ let_it_be(:project) { create(:project) }
+
+ subject { described_class.new(project: project) }
+
+ it { is_expected.to belong_to(:project).required }
+end
diff --git a/spec/models/container_registry/event_spec.rb b/spec/models/container_registry/event_spec.rb
index 07ac35f7b6a..edec5afaddb 100644
--- a/spec/models/container_registry/event_spec.rb
+++ b/spec/models/container_registry/event_spec.rb
@@ -225,6 +225,28 @@ RSpec.describe ContainerRegistry::Event do
end
end
+ context 'when it is a manifest delete event' do
+ let(:raw_event) { { 'action' => 'delete', 'target' => { 'digest' => 'x' }, 'actor' => {} } }
+
+ it 'calls the ContainerRegistryEventCounter' do
+ expect(::Gitlab::UsageDataCounters::ContainerRegistryEventCounter)
+ .to receive(:count).with('i_container_registry_delete_manifest')
+
+ subject
+ end
+ end
+
+ context 'when it is not a manifest delete event' do
+ let(:raw_event) { { 'action' => 'push', 'target' => { 'digest' => 'x' }, 'actor' => {} } }
+
+ it 'does not call the ContainerRegistryEventCounter' do
+ expect(::Gitlab::UsageDataCounters::ContainerRegistryEventCounter)
+ .not_to receive(:count).with('i_container_registry_delete_manifest')
+
+ subject
+ end
+ end
+
context 'without an actor name' do
let(:raw_event) { { 'action' => 'push', 'target' => {}, 'actor' => { 'user_type' => 'personal_access_token' } } }
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index da7b54644bd..e028a82ff76 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -528,6 +528,10 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
describe '#each_tags_page' do
let(:page_size) { 100 }
+ before do
+ allow(repository).to receive(:migrated?).and_return(true)
+ end
+
shared_examples 'iterating through a page' do |expected_tags: true|
it 'iterates through one page' do
expect(repository.gitlab_api_client).to receive(:tags)
@@ -660,7 +664,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
context 'calling on a non migrated repository' do
before do
- repository.update!(created_at: described_class::MIGRATION_PHASE_1_ENDED_AT - 3.days)
+ allow(repository).to receive(:migrated?).and_return(false)
end
it 'raises an Argument error' do
@@ -795,6 +799,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
freeze_time do
expect { subject }
.to change { repository.expiration_policy_started_at }.from(nil).to(Time.zone.now)
+ .and change { repository.expiration_policy_cleanup_status }.from('cleanup_unscheduled').to('cleanup_ongoing')
.and change { repository.last_cleanup_deleted_tags_count }.from(10).to(nil)
end
end
@@ -1236,6 +1241,8 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
end
before do
+ allow(group).to receive(:first_project_with_container_registry_tags).and_return(nil)
+
group.parent = test_group
group.save!
end
@@ -1561,22 +1568,20 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
describe '#migrated?' do
subject { repository.migrated? }
- it { is_expected.to eq(true) }
-
- context 'with a created_at older than phase 1 ends' do
+ context 'on gitlab.com' do
before do
- repository.update!(created_at: described_class::MIGRATION_PHASE_1_ENDED_AT - 3.days)
+ allow(::Gitlab).to receive(:com?).and_return(true)
end
- it { is_expected.to eq(false) }
-
- context 'with migration state set to import_done' do
- before do
- repository.update!(migration_state: 'import_done')
- end
+ it { is_expected.to eq(true) }
+ end
- it { is_expected.to eq(true) }
+ context 'not on gitlab.com' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(false)
end
+
+ it { is_expected.to eq(false) }
end
end
diff --git a/spec/models/design_management/design_spec.rb b/spec/models/design_management/design_spec.rb
index 57e0d1cad8b..cb122fa09fc 100644
--- a/spec/models/design_management/design_spec.rb
+++ b/spec/models/design_management/design_spec.rb
@@ -55,6 +55,7 @@ RSpec.describe DesignManagement::Design, feature_category: :design_management do
it { is_expected.to validate_presence_of(:issue) }
it { is_expected.to validate_presence_of(:filename) }
it { is_expected.to validate_length_of(:filename).is_at_most(255) }
+ it { is_expected.to validate_length_of(:description).is_at_most(Gitlab::Database::MAX_TEXT_SIZE_LIMIT) }
it { is_expected.to validate_uniqueness_of(:filename).scoped_to(:issue_id) }
it "validates that the extension is an image" do
@@ -512,11 +513,11 @@ RSpec.describe DesignManagement::Design, feature_category: :design_management do
end
describe '#to_reference' do
+ let(:filename) { 'homescreen.jpg' }
let(:namespace) { build(:namespace, id: non_existing_record_id, path: 'sample-namespace') }
let(:project) { build(:project, name: 'sample-project', namespace: namespace) }
- let(:group) { create(:group, name: 'Group', path: 'sample-group') }
+ let(:group) { build(:group, name: 'Group', path: 'sample-group') }
let(:issue) { build(:issue, iid: 1, project: project) }
- let(:filename) { 'homescreen.jpg' }
let(:design) { build(:design, filename: filename, issue: issue, project: project) }
context 'when nil argument' do
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index 26795c0ea7f..34be6ec7fa9 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
+RSpec.describe ErrorTracking::ProjectErrorTrackingSetting, feature_category: :error_tracking do
include ReactiveCachingHelpers
include Gitlab::Routing
@@ -93,9 +93,7 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
end
context 'with sentry backend' do
- before do
- subject.integrated = false
- end
+ subject { build(:project_error_tracking_setting, project: project) }
it 'does not create a new client key' do
expect { subject.save! }.not_to change { ErrorTracking::ClientKey.count }
@@ -302,7 +300,7 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
it { expect(result[:issue].gitlab_commit_path).to eq(nil) }
end
- context 'when repo commit matches first release version' do
+ context 'when repo commit matches first release version' do
let(:commit) { instance_double(Commit, id: commit_id) }
let(:repository) { instance_double(Repository, commit: commit) }
@@ -341,18 +339,19 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
describe '#update_issue' do
let(:result) { subject.update_issue(**opts) }
- let(:opts) { { issue_id: 1, params: {} } }
+ let(:issue_id) { 1 }
+ let(:opts) { { issue_id: issue_id, params: {} } }
before do
allow(subject).to receive(:sentry_client).and_return(sentry_client)
allow(sentry_client).to receive(:issue_details)
- .with({ issue_id: 1 })
+ .with({ issue_id: issue_id })
.and_return(Gitlab::ErrorTracking::DetailedError.new(project_id: sentry_project_id))
end
context 'when sentry response is successful' do
before do
- allow(sentry_client).to receive(:update_issue).with(opts).and_return(true)
+ allow(sentry_client).to receive(:update_issue).with(**opts).and_return(true)
end
it 'returns the successful response' do
@@ -362,7 +361,7 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
context 'when sentry raises an error' do
before do
- allow(sentry_client).to receive(:update_issue).with(opts).and_raise(StandardError)
+ allow(sentry_client).to receive(:update_issue).with(**opts).and_raise(StandardError)
end
it 'returns the successful response' do
@@ -391,7 +390,7 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
setting.update!(sentry_project_id: nil)
allow(sentry_client).to receive(:projects).and_return(sentry_projects)
- allow(sentry_client).to receive(:update_issue).with(opts).and_return(true)
+ allow(sentry_client).to receive(:update_issue).with(**opts).and_return(true)
end
it 'tries to backfill it from sentry API' do
@@ -419,6 +418,25 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
end
end
end
+
+ describe 'passing parameters to sentry client' do
+ include SentryClientHelpers
+
+ let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0' }
+ let(:sentry_request_url) { "#{sentry_url}/issues/#{issue_id}/" }
+ let(:token) { 'test-token' }
+ let(:sentry_client) { ErrorTracking::SentryClient.new(sentry_url, token) }
+
+ before do
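+ # stub the Sentry HTTP endpoint so update_issue exercises the real client code path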
+ stub_sentry_request(sentry_request_url, :put, body: true)
+
+ allow(sentry_client).to receive(:update_issue).and_call_original
+ end
+
+ it 'returns the successful response' do
+ expect(result).to eq(updated: true)
+ end
+ end
end
describe 'slugs' do
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 0a05c558d45..3134f2ba248 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -40,7 +40,19 @@ RSpec.describe Group, feature_category: :subgroups do
it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::GroupDistribution').dependent(:destroy) }
it { is_expected.to have_many(:daily_build_group_report_results).class_name('Ci::DailyBuildGroupReportResult') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout').with_foreign_key(:group_id) }
+
+ it do
+ is_expected.to have_many(:application_setting)
+ .with_foreign_key(:instance_administrators_group_id).inverse_of(:instance_group)
+ end
+
it { is_expected.to have_many(:bulk_import_exports).class_name('BulkImports::Export') }
+
+ it do
+ is_expected.to have_many(:bulk_import_entities).class_name('BulkImports::Entity')
+ .with_foreign_key(:namespace_id).inverse_of(:group)
+ end
+
it { is_expected.to have_many(:contacts).class_name('CustomerRelations::Contact') }
it { is_expected.to have_many(:organizations).class_name('CustomerRelations::Organization') }
it { is_expected.to have_many(:protected_branches).inverse_of(:group).with_foreign_key(:namespace_id) }
@@ -448,6 +460,8 @@ RSpec.describe Group, feature_category: :subgroups do
it_behaves_like 'a BulkUsersByEmailLoad model'
+ it_behaves_like 'ensures runners_token is prefixed', :group
+
context 'after initialized' do
it 'has a group_feature' do
expect(described_class.new.group_feature).to be_present
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 72958a54e10..8fd76d2a835 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -318,7 +318,7 @@ RSpec.describe WebHook, feature_category: :integrations do
end
it 'is 10 minutes' do
- expect(hook.next_backoff).to eq(described_class::INITIAL_BACKOFF)
+ expect(hook.next_backoff).to eq(WebHooks::AutoDisabling::INITIAL_BACKOFF)
end
end
@@ -328,7 +328,7 @@ RSpec.describe WebHook, feature_category: :integrations do
end
it 'is twice the initial value' do
- expect(hook.next_backoff).to eq(2 * described_class::INITIAL_BACKOFF)
+ expect(hook.next_backoff).to eq(2 * WebHooks::AutoDisabling::INITIAL_BACKOFF)
end
end
@@ -338,7 +338,7 @@ RSpec.describe WebHook, feature_category: :integrations do
end
it 'grows exponentially' do
- expect(hook.next_backoff).to eq(2 * 2 * 2 * described_class::INITIAL_BACKOFF)
+ expect(hook.next_backoff).to eq(2 * 2 * 2 * WebHooks::AutoDisabling::INITIAL_BACKOFF)
end
end
@@ -348,7 +348,7 @@ RSpec.describe WebHook, feature_category: :integrations do
end
it 'does not exceed the max backoff value' do
- expect(hook.next_backoff).to eq(described_class::MAX_BACKOFF)
+ expect(hook.next_backoff).to eq(WebHooks::AutoDisabling::MAX_BACKOFF)
end
end
end
@@ -470,13 +470,13 @@ RSpec.describe WebHook, feature_category: :integrations do
end
it 'reduces to MAX_FAILURES' do
- expect { hook.backoff! }.to change(hook, :recent_failures).to(described_class::MAX_FAILURES)
+ expect { hook.backoff! }.to change(hook, :recent_failures).to(WebHooks::AutoDisabling::MAX_FAILURES)
end
end
context 'when the recent failure value is MAX_FAILURES' do
before do
- hook.update!(recent_failures: described_class::MAX_FAILURES, disabled_until: 1.hour.ago)
+ hook.update!(recent_failures: WebHooks::AutoDisabling::MAX_FAILURES, disabled_until: 1.hour.ago)
end
it 'does not change recent_failures' do
@@ -486,7 +486,7 @@ RSpec.describe WebHook, feature_category: :integrations do
context 'when we have exhausted the grace period' do
before do
- hook.update!(recent_failures: described_class::FAILURE_THRESHOLD)
+ hook.update!(recent_failures: WebHooks::AutoDisabling::FAILURE_THRESHOLD)
end
it 'sets disabled_until to the next backoff' do
@@ -499,8 +499,8 @@ RSpec.describe WebHook, feature_category: :integrations do
context 'when we have backed off MAX_FAILURES times' do
before do
- stub_const("#{described_class}::MAX_FAILURES", 5)
- (described_class::FAILURE_THRESHOLD + 5).times { hook.backoff! }
+ stub_const("WebHooks::AutoDisabling::MAX_FAILURES", 5)
+ (WebHooks::AutoDisabling::FAILURE_THRESHOLD + 5).times { hook.backoff! }
end
it 'does not let the backoff count exceed the maximum failure count' do
@@ -516,7 +516,7 @@ RSpec.describe WebHook, feature_category: :integrations do
it 'changes disabled_until when it has elapsed', :skip_freeze_time do
travel_to(hook.disabled_until + 1.minute) do
expect { hook.backoff! }.to change { hook.disabled_until }
- expect(hook.backoff_count).to eq(described_class::MAX_FAILURES)
+ expect(hook.backoff_count).to eq(WebHooks::AutoDisabling::MAX_FAILURES)
end
end
end
@@ -539,7 +539,7 @@ RSpec.describe WebHook, feature_category: :integrations do
end
it 'does not update the hook if the failure count exceeds the maximum value' do
- hook.recent_failures = described_class::MAX_FAILURES
+ hook.recent_failures = WebHooks::AutoDisabling::MAX_FAILURES
sql_count = ActiveRecord::QueryRecorder.new { hook.failed! }.count
diff --git a/spec/models/import_export_upload_spec.rb b/spec/models/import_export_upload_spec.rb
index e13f504b82a..9811dbf60e3 100644
--- a/spec/models/import_export_upload_spec.rb
+++ b/spec/models/import_export_upload_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe ImportExportUpload do
let(:after_commit_callbacks) { described_class._commit_callbacks.select { |cb| cb.kind == :after } }
def find_callback(callbacks, key)
- callbacks.find { |cb| cb.instance_variable_get(:@key) == key }
+ callbacks.find { |cb| cb.filter == key }
end
it 'export file is stored in after_commit callback' do
diff --git a/spec/models/import_failure_spec.rb b/spec/models/import_failure_spec.rb
index 9fee1b0ae7b..0bdcb6dde31 100644
--- a/spec/models/import_failure_spec.rb
+++ b/spec/models/import_failure_spec.rb
@@ -10,7 +10,11 @@ RSpec.describe ImportFailure do
let_it_be(:soft_failure) { create(:import_failure, :soft_failure, project: project, correlation_id_value: correlation_id) }
let_it_be(:unrelated_failure) { create(:import_failure, project: project) }
- it 'returns hard failures given a correlation ID' do
+ it 'returns failures for the given correlation ID' do
+ expect(ImportFailure.failures_by_correlation_id(correlation_id)).to match_array([hard_failure, soft_failure])
+ end
+
+ it 'returns hard failures for the given correlation ID' do
expect(ImportFailure.hard_failures_by_correlation_id(correlation_id)).to eq([hard_failure])
end
@@ -45,5 +49,15 @@ RSpec.describe ImportFailure do
it { is_expected.to validate_presence_of(:group) }
end
+
+ describe '#external_identifiers' do
+ it { is_expected.to allow_value({ note_id: 234, noteable_id: 345, noteable_type: 'MergeRequest' }).for(:external_identifiers) }
+ it { is_expected.not_to allow_value(nil).for(:external_identifiers) }
+ it { is_expected.not_to allow_value({ ids: [123] }).for(:external_identifiers) }
+
+ it 'does not allow more than 3 fields' do
+ is_expected.not_to allow_value({ note_id: 234, noteable_id: 345, noteable_type: 'MergeRequest', extra_attribute: 'abc' }).for(:external_identifiers)
+ end
+ end
end
end
diff --git a/spec/models/integrations/apple_app_store_spec.rb b/spec/models/integrations/apple_app_store_spec.rb
index 1a57f556895..b2a52c8aaf0 100644
--- a/spec/models/integrations/apple_app_store_spec.rb
+++ b/spec/models/integrations/apple_app_store_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Integrations::AppleAppStore, feature_category: :mobile_devops do
it { is_expected.to validate_presence_of :app_store_issuer_id }
it { is_expected.to validate_presence_of :app_store_key_id }
it { is_expected.to validate_presence_of :app_store_private_key }
+ it { is_expected.to validate_presence_of :app_store_private_key_file_name }
it { is_expected.to allow_value('aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee').for(:app_store_issuer_id) }
it { is_expected.not_to allow_value('abcde').for(:app_store_issuer_id) }
it { is_expected.to allow_value(File.read('spec/fixtures/ssl_key.pem')).for(:app_store_private_key) }
@@ -28,8 +29,8 @@ RSpec.describe Integrations::AppleAppStore, feature_category: :mobile_devops do
describe '#fields' do
it 'returns custom fields' do
- expect(apple_app_store_integration.fields.pluck(:name)).to eq(%w[app_store_issuer_id app_store_key_id
- app_store_private_key])
+ expect(apple_app_store_integration.fields.pluck(:name)).to match_array(%w[app_store_issuer_id app_store_key_id
+ app_store_private_key app_store_private_key_file_name])
end
end
diff --git a/spec/models/integrations/campfire_spec.rb b/spec/models/integrations/campfire_spec.rb
index ae923cd38fc..38d3d89cdbf 100644
--- a/spec/models/integrations/campfire_spec.rb
+++ b/spec/models/integrations/campfire_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::Campfire do
+RSpec.describe Integrations::Campfire, feature_category: :integrations do
include StubRequests
it_behaves_like Integrations::ResetSecretFields do
@@ -88,4 +88,16 @@ RSpec.describe Integrations::Campfire do
expect(WebMock).not_to have_requested(:post, '*/room/.*/speak.json')
end
end
+
+ describe '#log_error' do
+ subject { described_class.new.log_error('error') }
+
+ it 'logs an error' do
+ expect(Gitlab::IntegrationsLogger).to receive(:error).with(
+ hash_including(integration_class: 'Integrations::Campfire', message: 'error')
+ ).and_call_original
+
+ is_expected.to be_truthy
+ end
+ end
end
diff --git a/spec/models/integrations/google_play_spec.rb b/spec/models/integrations/google_play_spec.rb
new file mode 100644
index 00000000000..ab1aaad24e7
--- /dev/null
+++ b/spec/models/integrations/google_play_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do
+ describe 'Validations' do
+ context 'when active' do
+ before do
+ subject.active = true
+ end
+
+ it { is_expected.to validate_presence_of :service_account_key_file_name }
+ it { is_expected.to validate_presence_of :service_account_key }
+ it { is_expected.to allow_value(File.read('spec/fixtures/service_account.json')).for(:service_account_key) }
+ it { is_expected.not_to allow_value(File.read('spec/fixtures/group.json')).for(:service_account_key) }
+ end
+ end
+
+ context 'when integration is enabled' do
+ let(:google_play_integration) { build(:google_play_integration) }
+
+ describe '#fields' do
+ it 'returns custom fields' do
+ expect(google_play_integration.fields.pluck(:name)).to match_array(%w[service_account_key
+ service_account_key_file_name])
+ end
+ end
+
+ describe '#test' do
+ it 'returns true for a successful request' do
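+ # stub the Google credentials chain so no real token is fetched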
+ allow(Google::Auth::ServiceAccountCredentials).to receive_message_chain(:make_creds, :fetch_access_token!)
+ expect(google_play_integration.test[:success]).to be true
+ end
+
+ it 'returns false for an invalid request' do
+ allow(Google::Auth::ServiceAccountCredentials).to receive_message_chain(:make_creds,
+ :fetch_access_token!).and_raise(Signet::AuthorizationError.new('error'))
+ expect(google_play_integration.test[:success]).to be false
+ end
+ end
+
+ describe '#help' do
+ it 'renders prompt information' do
+ expect(google_play_integration.help).not_to be_empty
+ end
+ end
+
+ describe '.to_param' do
+ it 'returns the name of the integration' do
+ expect(described_class.to_param).to eq('google_play')
+ end
+ end
+
+ describe '#ci_variables' do
+ let(:google_play_integration) { build_stubbed(:google_play_integration) }
+
+ it 'returns vars when the integration is activated' do
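+ # the service account key is exposed to CI as a masked SUPPLY_JSON_KEY_DATA variable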
+ ci_vars = [
+ {
+ key: 'SUPPLY_JSON_KEY_DATA',
+ value: google_play_integration.service_account_key,
+ masked: true,
+ public: false
+ }
+ ]
+
+ expect(google_play_integration.ci_variables).to match_array(ci_vars)
+ end
+ end
+ end
+
+ context 'when integration is disabled' do
+ let(:google_play_integration) { build_stubbed(:google_play_integration, active: false) }
+
+ describe '#ci_variables' do
+ it 'returns an empty array' do
+ expect(google_play_integration.ci_variables).to match_array([])
+ end
+ end
+ end
+end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index a4ccae459cf..fad8768cba0 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -590,7 +590,6 @@ RSpec.describe Integrations::Jira do
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject { close_issue }
- let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:category) { 'Integrations::Jira' }
let(:action) { 'perform_integrations_action' }
let(:namespace) { project.namespace }
@@ -944,7 +943,6 @@ RSpec.describe Integrations::Jira do
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
- let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:category) { 'Integrations::Jira' }
let(:action) { 'perform_integrations_action' }
let(:namespace) { project.namespace }
@@ -1095,9 +1093,7 @@ RSpec.describe Integrations::Jira do
expect(integration.web_url).to eq('')
end
- it 'includes Atlassian referrer for gitlab.com' do
- allow(Gitlab).to receive(:com?).and_return(true)
-
+ it 'includes Atlassian referrer for SaaS', :saas do
expect(integration.web_url).to eq("http://jira.test.com/path?#{described_class::ATLASSIAN_REFERRER_GITLAB_COM.to_query}")
allow(Gitlab).to receive(:staging?).and_return(true)
diff --git a/spec/models/integrations/mattermost_slash_commands_spec.rb b/spec/models/integrations/mattermost_slash_commands_spec.rb
index 070adb9ba93..e393a905f45 100644
--- a/spec/models/integrations/mattermost_slash_commands_spec.rb
+++ b/spec/models/integrations/mattermost_slash_commands_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::MattermostSlashCommands do
+RSpec.describe Integrations::MattermostSlashCommands, feature_category: :integrations do
it_behaves_like Integrations::BaseSlashCommands
describe 'Mattermost API' do
@@ -123,12 +123,5 @@ RSpec.describe Integrations::MattermostSlashCommands do
end
end
end
-
- describe '#chat_responder' do
- it 'returns the responder to use for Mattermost' do
- expect(described_class.new.chat_responder)
- .to eq(Gitlab::Chat::Responder::Mattermost)
- end
- end
end
end
diff --git a/spec/models/integrations/slack_slash_commands_spec.rb b/spec/models/integrations/slack_slash_commands_spec.rb
index 22cbaa777cd..f373fc2a2de 100644
--- a/spec/models/integrations/slack_slash_commands_spec.rb
+++ b/spec/models/integrations/slack_slash_commands_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::SlackSlashCommands do
+RSpec.describe Integrations::SlackSlashCommands, feature_category: :integrations do
it_behaves_like Integrations::BaseSlashCommands
describe '#trigger' do
@@ -40,11 +40,4 @@ RSpec.describe Integrations::SlackSlashCommands do
end
end
end
-
- describe '#chat_responder' do
- it 'returns the responder to use for Slack' do
- expect(described_class.new.chat_responder)
- .to eq(Gitlab::Chat::Responder::Slack)
- end
- end
end
diff --git a/spec/models/integrations/squash_tm_spec.rb b/spec/models/integrations/squash_tm_spec.rb
new file mode 100644
index 00000000000..f12e37dae6d
--- /dev/null
+++ b/spec/models/integrations/squash_tm_spec.rb
@@ -0,0 +1,117 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::SquashTm, feature_category: :integrations do
+ it_behaves_like Integrations::HasWebHook do
+ let_it_be(:project) { create(:project) }
+
+ let(:integration) { build(:squash_tm_integration, project: project) }
+ let(:hook_url) { "#{integration.url}?token={token}" }
+ end
+
+ it_behaves_like Integrations::ResetSecretFields do
+ let(:integration) { subject }
+ end
+
+ describe 'Validations' do
+ context 'when integration is active' do
+ before do
+ subject.active = true
+ end
+
+ it { is_expected.to validate_presence_of(:url) }
+ it { is_expected.to allow_value('https://example.com').for(:url) }
+ it { is_expected.not_to allow_value(nil).for(:url) }
+ it { is_expected.not_to allow_value('').for(:url) }
+ it { is_expected.not_to allow_value('foo').for(:url) }
+ it { is_expected.not_to allow_value('example.com').for(:url) }
+
+ it { is_expected.not_to validate_presence_of(:token) }
+ it { is_expected.to validate_length_of(:token).is_at_most(255) }
+ it { is_expected.to allow_value(nil).for(:token) }
+ it { is_expected.to allow_value('foo').for(:token) }
+ end
+
+ context 'when integration is inactive' do
+ before do
+ subject.active = false
+ end
+
+ it { is_expected.not_to validate_presence_of(:url) }
+ it { is_expected.not_to validate_presence_of(:token) }
+ end
+ end
+
+ describe '#execute' do
+ let(:integration) { build(:squash_tm_integration, project: build(:project)) }
+
+ let(:squash_tm_hook_url) do
+ "#{integration.url}?token=#{integration.token}"
+ end
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+ let(:data) { issue.to_hook_data(user) }
+
+ before do
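+ # stub the outgoing POST so no real Squash TM server is needed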
+ stub_request(:post, squash_tm_hook_url)
+ end
+
+ it 'calls Squash TM API' do
+ integration.execute(data)
+
+ expect(a_request(:post, squash_tm_hook_url)).to have_been_made.once
+ end
+ end
+
+ describe '#test' do
+ let(:integration) { build(:squash_tm_integration) }
+
+ let(:squash_tm_hook_url) do
+ "#{integration.url}?token=#{integration.token}"
+ end
+
+ subject(:result) { integration.test({}) }
+
+ context 'when server is responding' do
+ let(:body) { 'OK' }
+ let(:status) { 200 }
+
+ before do
+ stub_request(:post, squash_tm_hook_url)
+ .to_return(status: status, body: body)
+ end
+
+ it { is_expected.to eq(success: true, result: 'OK') }
+ end
+
+ context 'when server rejects the request' do
+ let(:body) { 'Unauthorized' }
+ let(:status) { 401 }
+
+ before do
+ stub_request(:post, squash_tm_hook_url)
+ .to_return(status: status, body: body)
+ end
+
+ it { is_expected.to eq(success: false, result: body) }
+ end
+
+ context 'when test request executes with errors' do
+ before do
+ allow(integration).to receive(:execute_web_hook!)
+ .with({}, "Test Configuration Hook")
+ .and_raise(StandardError, 'error message')
+ end
+
+ it { is_expected.to eq(success: false, result: 'error message') }
+ end
+ end
+
+ describe '.default_test_event' do
+ subject { described_class.default_test_event }
+
+ it { is_expected.to eq('issue') }
+ end
+end
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
index f0007e1203c..59ade8783e5 100644
--- a/spec/models/internal_id_spec.rb
+++ b/spec/models/internal_id_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe InternalId do
let(:usage) { :issues }
let(:issue) { build(:issue, project: project) }
let(:id_subject) { issue }
- let(:scope) { { project: project } }
+ let(:scope) { { namespace: project.project_namespace } }
let(:init) { ->(issue, scope) { issue&.project&.issues&.size || Issue.where(**scope).count } }
it_behaves_like 'having unique enum values'
@@ -17,7 +17,7 @@ RSpec.describe InternalId do
end
describe '.flush_records!' do
- subject { described_class.flush_records!(project: project) }
+ subject { described_class.flush_records!(namespace: project.project_namespace) }
let(:another_project) { create(:project) }
@@ -27,11 +27,11 @@ RSpec.describe InternalId do
end
it 'deletes all records for the given project' do
- expect { subject }.to change { described_class.where(project: project).count }.from(1).to(0)
+ expect { subject }.to change { described_class.where(namespace: project.project_namespace).count }.from(1).to(0)
end
it 'retains records for other projects' do
- expect { subject }.not_to change { described_class.where(project: another_project).count }
+ expect { subject }.not_to change { described_class.where(namespace: another_project.project_namespace).count }
end
it 'does not allow an empty filter' do
@@ -51,7 +51,7 @@ RSpec.describe InternalId do
subject
described_class.first.tap do |record|
- expect(record.project).to eq(project)
+ expect(record.namespace).to eq(project.project_namespace)
expect(record.usage).to eq(usage.to_s)
end
end
@@ -182,7 +182,7 @@ RSpec.describe InternalId do
subject
described_class.first.tap do |record|
- expect(record.project).to eq(project)
+ expect(record.namespace).to eq(project.project_namespace)
expect(record.usage).to eq(usage.to_s)
expect(record.last_value).to eq(value)
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index e29318a7e83..8072a60326c 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -63,13 +63,18 @@ RSpec.describe Issue, feature_category: :team_planning do
it_behaves_like 'AtomicInternalId' do
let(:internal_id_attribute) { :iid }
let(:instance) { build(:issue) }
- let(:scope) { :project }
- let(:scope_attrs) { { project: instance.project } }
+ let(:scope) { :namespace }
+ let(:scope_attrs) { { namespace: instance.project.project_namespace } }
let(:usage) { :issues }
end
end
describe 'validations' do
+ it { is_expected.not_to allow_value(nil).for(:confidential) }
+ it { is_expected.to allow_value(true, false).for(:confidential) }
+ end
+
+ describe 'custom validations' do
subject(:valid?) { issue.valid? }
describe 'due_date_after_start_date' do
@@ -215,7 +220,7 @@ RSpec.describe Issue, feature_category: :team_planning do
subject { create(:issue, project: reusable_project) }
describe 'callbacks' do
- describe '#ensure_metrics' do
+ describe '#ensure_metrics!' do
it 'creates metrics after saving' do
expect(subject.metrics).to be_persisted
expect(Issue::Metrics.count).to eq(1)
@@ -575,38 +580,38 @@ RSpec.describe Issue, feature_category: :team_planning do
end
describe '#to_reference' do
- let(:namespace) { build(:namespace, path: 'sample-namespace') }
- let(:project) { build(:project, name: 'sample-project', namespace: namespace) }
- let(:issue) { build(:issue, iid: 1, project: project) }
+ let_it_be(:namespace) { create(:namespace, path: 'sample-namespace') }
+ let_it_be(:project) { create(:project, name: 'sample-project', namespace: namespace) }
+ let_it_be(:issue) { create(:issue, project: project) }
context 'when nil argument' do
it 'returns issue id' do
- expect(issue.to_reference).to eq "#1"
+ expect(issue.to_reference).to eq "##{issue.iid}"
end
it 'returns complete path to the issue with full: true' do
- expect(issue.to_reference(full: true)).to eq 'sample-namespace/sample-project#1'
+ expect(issue.to_reference(full: true)).to eq "#{project.full_path}##{issue.iid}"
end
end
context 'when argument is a project' do
context 'when same project' do
it 'returns issue id' do
- expect(issue.to_reference(project)).to eq("#1")
+ expect(issue.to_reference(project)).to eq("##{issue.iid}")
end
it 'returns full reference with full: true' do
- expect(issue.to_reference(project, full: true)).to eq 'sample-namespace/sample-project#1'
+ expect(issue.to_reference(project, full: true)).to eq "#{project.full_path}##{issue.iid}"
end
end
context 'when cross-project in same namespace' do
let(:another_project) do
- build(:project, name: 'another-project', namespace: project.namespace)
+ create(:project, name: 'another-project', namespace: project.namespace)
end
it 'returns a cross-project reference' do
- expect(issue.to_reference(another_project)).to eq "sample-project#1"
+ expect(issue.to_reference(another_project)).to eq "sample-project##{issue.iid}"
end
end
@@ -615,7 +620,7 @@ RSpec.describe Issue, feature_category: :team_planning do
let(:another_namespace_project) { build(:project, path: 'another-project', namespace: another_namespace) }
it 'returns complete path to the issue' do
- expect(issue.to_reference(another_namespace_project)).to eq 'sample-namespace/sample-project#1'
+ expect(issue.to_reference(another_namespace_project)).to eq "#{project.full_path}##{issue.iid}"
end
end
end
@@ -623,11 +628,11 @@ RSpec.describe Issue, feature_category: :team_planning do
context 'when argument is a namespace' do
context 'when same as issue' do
it 'returns path to the issue with the project name' do
- expect(issue.to_reference(namespace)).to eq 'sample-project#1'
+ expect(issue.to_reference(namespace)).to eq "sample-project##{issue.iid}"
end
it 'returns full reference with full: true' do
- expect(issue.to_reference(namespace, full: true)).to eq 'sample-namespace/sample-project#1'
+ expect(issue.to_reference(namespace, full: true)).to eq "#{project.full_path}##{issue.iid}"
end
end
@@ -635,12 +640,111 @@ RSpec.describe Issue, feature_category: :team_planning do
let(:group) { build(:group, name: 'Group', path: 'sample-group') }
it 'returns full path to the issue with full: true' do
- expect(issue.to_reference(group)).to eq 'sample-namespace/sample-project#1'
+ expect(issue.to_reference(group)).to eq "#{project.full_path}##{issue.iid}"
end
end
end
end
+ describe '#to_reference with table syntax' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user_namespace) { user.namespace }
+
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent) }
+ let_it_be(:another_group) { create(:group) }
+
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:project_namespace) { project.project_namespace }
+ let_it_be(:same_namespace_project) { create(:project, namespace: group) }
+ let_it_be(:same_namespace_project_namespace) { same_namespace_project.project_namespace }
+
+ let_it_be(:another_namespace_project) { create(:project) }
+ let_it_be(:another_namespace_project_namespace) { another_namespace_project.project_namespace }
+
+ let_it_be(:project_issue) { build(:issue, project: project, iid: 123) }
+ let_it_be(:project_issue_full_reference) { "#{project.full_path}##{project_issue.iid}" }
+
+ let_it_be(:group_issue) { build(:issue, namespace: group, iid: 123) }
+ let_it_be(:group_issue_full_reference) { "#{group.full_path}##{group_issue.iid}" }
+
+ # this case is only theoretically possible and is not something we intend to support
+ let_it_be(:user_issue) { build(:issue, namespace: user_namespace, iid: 123) }
+ let_it_be(:user_issue_full_reference) { "#{user_namespace.full_path}##{user_issue.iid}" }
+
+ # namespace would be group, project namespace or user namespace
+ where(:issue, :full, :from, :result) do
+ ref(:project_issue) | false | nil | lazy { "##{issue.iid}" }
+ ref(:project_issue) | true | nil | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:group) | lazy { "#{project.path}##{issue.iid}" }
+ ref(:project_issue) | true | ref(:group) | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:parent) | ref(:project_issue_full_reference)
+ ref(:project_issue) | true | ref(:parent) | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:project) | lazy { "##{issue.iid}" }
+ ref(:project_issue) | true | ref(:project) | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:project_namespace) | lazy { "##{issue.iid}" }
+ ref(:project_issue) | true | ref(:project_namespace) | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:same_namespace_project) | lazy { "#{project.path}##{issue.iid}" }
+ ref(:project_issue) | true | ref(:same_namespace_project) | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:same_namespace_project_namespace) | lazy { "#{project.path}##{issue.iid}" }
+ ref(:project_issue) | true | ref(:same_namespace_project_namespace) | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:another_group) | ref(:project_issue_full_reference)
+ ref(:project_issue) | true | ref(:another_group) | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:another_namespace_project) | ref(:project_issue_full_reference)
+ ref(:project_issue) | true | ref(:another_namespace_project) | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:another_namespace_project_namespace) | ref(:project_issue_full_reference)
+ ref(:project_issue) | true | ref(:another_namespace_project_namespace) | ref(:project_issue_full_reference)
+ ref(:project_issue) | false | ref(:user_namespace) | ref(:project_issue_full_reference)
+ ref(:project_issue) | true | ref(:user_namespace) | ref(:project_issue_full_reference)
+
+ ref(:group_issue) | false | nil | lazy { "##{issue.iid}" }
+ ref(:group_issue) | true | nil | ref(:group_issue_full_reference)
+ ref(:group_issue) | false | ref(:user_namespace) | ref(:group_issue_full_reference)
+ ref(:group_issue) | true | ref(:user_namespace) | ref(:group_issue_full_reference)
+ ref(:group_issue) | false | ref(:group) | lazy { "##{issue.iid}" }
+ ref(:group_issue) | true | ref(:group) | ref(:group_issue_full_reference)
+ ref(:group_issue) | false | ref(:parent) | lazy { "#{group.path}##{issue.iid}" }
+ ref(:group_issue) | true | ref(:parent) | ref(:group_issue_full_reference)
+ ref(:group_issue) | false | ref(:project) | lazy { "#{group.path}##{issue.iid}" }
+ ref(:group_issue) | true | ref(:project) | ref(:group_issue_full_reference)
+ ref(:group_issue) | false | ref(:project_namespace) | lazy { "#{group.path}##{issue.iid}" }
+ ref(:group_issue) | true | ref(:project_namespace) | ref(:group_issue_full_reference)
+ ref(:group_issue) | false | ref(:another_group) | ref(:group_issue_full_reference)
+ ref(:group_issue) | true | ref(:another_group) | ref(:group_issue_full_reference)
+ ref(:group_issue) | false | ref(:another_namespace_project) | ref(:group_issue_full_reference)
+ ref(:group_issue) | true | ref(:another_namespace_project) | ref(:group_issue_full_reference)
+ ref(:group_issue) | false | ref(:another_namespace_project_namespace) | ref(:group_issue_full_reference)
+ ref(:group_issue) | true | ref(:another_namespace_project_namespace) | ref(:group_issue_full_reference)
+
+ ref(:user_issue) | false | nil | lazy { "##{issue.iid}" }
+ ref(:user_issue) | true | nil | ref(:user_issue_full_reference)
+ ref(:user_issue) | false | ref(:user_namespace) | lazy { "##{issue.iid}" }
+ ref(:user_issue) | true | ref(:user_namespace) | ref(:user_issue_full_reference)
+ ref(:user_issue) | false | ref(:group) | ref(:user_issue_full_reference)
+ ref(:user_issue) | true | ref(:group) | ref(:user_issue_full_reference)
+ ref(:user_issue) | false | ref(:parent) | ref(:user_issue_full_reference)
+ ref(:user_issue) | true | ref(:parent) | ref(:user_issue_full_reference)
+ ref(:user_issue) | false | ref(:project) | ref(:user_issue_full_reference)
+ ref(:user_issue) | true | ref(:project) | ref(:user_issue_full_reference)
+ ref(:user_issue) | false | ref(:project_namespace) | ref(:user_issue_full_reference)
+ ref(:user_issue) | true | ref(:project_namespace) | ref(:user_issue_full_reference)
+ ref(:user_issue) | false | ref(:another_group) | ref(:user_issue_full_reference)
+ ref(:user_issue) | true | ref(:another_group) | ref(:user_issue_full_reference)
+ ref(:user_issue) | false | ref(:another_namespace_project) | ref(:user_issue_full_reference)
+ ref(:user_issue) | true | ref(:another_namespace_project) | ref(:user_issue_full_reference)
+ ref(:user_issue) | false | ref(:another_namespace_project_namespace) | ref(:user_issue_full_reference)
+ ref(:user_issue) | true | ref(:another_namespace_project_namespace) | ref(:user_issue_full_reference)
+ end
+
+ with_them do
+ it 'returns correct reference' do
+ expect(issue.to_reference(from, full: full)).to eq(result)
+ end
+ end
+ end
+
describe '#assignee_or_author?' do
let(:issue) { create(:issue, project: reusable_project) }
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 6a52f12553f..30607116c61 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe Member, feature_category: :subgroups do
describe 'Associations' do
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:member_namespace) }
- it { is_expected.to belong_to(:member_role) }
it { is_expected.to have_one(:member_task) }
end
@@ -173,96 +172,6 @@ RSpec.describe Member, feature_category: :subgroups do
end
end
end
-
- context 'member role access level' do
- let_it_be_with_reload(:member) { create(:group_member, access_level: Gitlab::Access::DEVELOPER) }
-
- context 'when no member role is associated' do
- it 'is valid' do
- expect(member).to be_valid
- end
- end
-
- context 'when member role is associated' do
- let!(:member_role) do
- create(
- :member_role,
- members: [member],
- base_access_level: Gitlab::Access::DEVELOPER,
- namespace: member.member_namespace
- )
- end
-
- context 'when member role matches access level' do
- it 'is valid' do
- expect(member).to be_valid
- end
- end
-
- context 'when member role does not match access level' do
- it 'is invalid' do
- member_role.base_access_level = Gitlab::Access::MAINTAINER
-
- expect(member).not_to be_valid
- end
- end
-
- context 'when access_level is changed' do
- it 'is invalid' do
- member.access_level = Gitlab::Access::MAINTAINER
-
- expect(member).not_to be_valid
- expect(member.errors[:access_level]).to include(
- _("cannot be changed since member is associated with a custom role")
- )
- end
- end
- end
- end
-
- context 'member role namespace' do
- let_it_be_with_reload(:member) { create(:group_member) }
-
- context 'when no member role is associated' do
- it 'is valid' do
- expect(member).to be_valid
- end
- end
-
- context 'when member role is associated' do
- let_it_be(:member_role) do
- create(:member_role, members: [member], namespace: member.group, base_access_level: member.access_level)
- end
-
- context 'when member#member_namespace is a group within hierarchy of member_role#namespace' do
- it 'is valid' do
- member.member_namespace = create(:group, parent: member_role.namespace)
-
- expect(member).to be_valid
- end
- end
-
- context 'when member#member_namespace is a project within hierarchy of member_role#namespace' do
- it 'is valid' do
- project = create(:project, group: member_role.namespace)
- member.member_namespace = Namespace.find(project.parent_id)
-
- expect(member).to be_valid
- end
- end
-
- context 'when member#member_namespace is outside hierarchy of member_role#namespace' do
- it 'is invalid' do
- member.member_namespace = create(:group)
-
- expect(member).not_to be_valid
- expect(member.errors[:member_namespace]).to include(
- _("must be in same hierarchy as custom role's namespace")
- )
- end
- end
- end
- end
end
describe 'Scopes & finders' do
@@ -774,6 +683,24 @@ RSpec.describe Member, feature_category: :subgroups do
end
end
+ describe '.filter_by_user_type' do
+ let_it_be(:service_account) { create(:user, :service_account) }
+ let_it_be(:service_account_member) { create(:group_member, user: service_account) }
+ let_it_be(:other_member) { create(:group_member) }
+
+ context 'when the user type is valid' do
+ it 'returns service accounts' do
+ expect(described_class.filter_by_user_type('service_account')).to match_array([service_account_member])
+ end
+ end
+
+ context 'when the user type is invalid' do
+ it 'returns nil' do
+ expect(described_class.filter_by_user_type('invalid_type')).to eq(nil)
+ end
+ end
+ end
+
describe '#accept_request' do
let(:member) { create(:project_member, requested_at: Time.current.utc) }
diff --git a/spec/models/members/member_role_spec.rb b/spec/models/members/member_role_spec.rb
deleted file mode 100644
index 4bf33eb1fce..00000000000
--- a/spec/models/members/member_role_spec.rb
+++ /dev/null
@@ -1,107 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe MemberRole, feature_category: :authentication_and_authorization do
- describe 'associations' do
- it { is_expected.to belong_to(:namespace) }
- it { is_expected.to have_many(:members) }
- end
-
- describe 'validation' do
- subject(:member_role) { build(:member_role) }
-
- it { is_expected.to validate_presence_of(:namespace) }
- it { is_expected.to validate_presence_of(:base_access_level) }
-
- context 'for attributes_locked_after_member_associated' do
- context 'when assigned to member' do
- it 'cannot be changed' do
- member_role.save!
- member_role.members << create(:project_member)
-
- expect(member_role).not_to be_valid
- expect(member_role.errors.messages[:base]).to include(
- s_("MemberRole|cannot be changed because it is already assigned to a user. "\
- "Please create a new Member Role instead")
- )
- end
- end
-
- context 'when not assigned to member' do
- it 'can be changed' do
- expect(member_role).to be_valid
- end
- end
- end
-
- context 'when for namespace' do
- let_it_be(:root_group) { create(:group) }
-
- context 'when namespace is a subgroup' do
- it 'is invalid' do
- subgroup = create(:group, parent: root_group)
- member_role.namespace = subgroup
-
- expect(member_role).to be_invalid
- expect(member_role.errors[:namespace]).to include(
- s_("MemberRole|must be top-level namespace")
- )
- end
- end
-
- context 'when namespace is a root group' do
- it 'is valid' do
- member_role.namespace = root_group
-
- expect(member_role).to be_valid
- end
- end
-
- context 'when namespace is not present' do
- it 'is invalid with a different error message' do
- member_role.namespace = nil
-
- expect(member_role).to be_invalid
- expect(member_role.errors[:namespace]).to include(_("can't be blank"))
- end
- end
-
- context 'when namespace is outside hierarchy of member' do
- it 'creates a validation error' do
- member_role.save!
- member_role.namespace = create(:group)
-
- expect(member_role).not_to be_valid
- expect(member_role.errors[:namespace]).to include(s_("MemberRole|can't be changed"))
- end
- end
- end
- end
-
- describe 'callbacks' do
- context 'for preventing deletion after member is associated' do
- let_it_be(:member_role) { create(:member_role) }
-
- subject(:destroy_member_role) { member_role.destroy } # rubocop: disable Rails/SaveBang
-
- it 'allows deletion without any member associated' do
- expect(destroy_member_role).to be_truthy
- end
-
- it 'prevent deletion when member is associated' do
- create(:group_member, { group: member_role.namespace,
- access_level: Gitlab::Access::DEVELOPER,
- member_role: member_role })
- member_role.members.reload
-
- expect(destroy_member_role).to be_falsey
- expect(member_role.errors.messages[:base])
- .to(
- include(s_("MemberRole|cannot be deleted because it is already assigned to a user. "\
- "Please disassociate the member role from all users before deletion."))
- )
- end
- end
- end
-end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index f0069b89494..f2bc9b42b77 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe ProjectMember do
describe 'validations' do
it { is_expected.to allow_value('Project').for(:source_type) }
- it { is_expected.not_to allow_value('project').for(:source_type) }
+ it { is_expected.not_to allow_value('Group').for(:source_type) }
it { is_expected.to validate_inclusion_of(:access_level).in_array(Gitlab::Access.values) }
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 2e2355ba710..b82b16fdec3 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -20,6 +20,11 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it { is_expected.to belong_to(:target_project).class_name('Project') }
it { is_expected.to belong_to(:source_project).class_name('Project') }
it { is_expected.to belong_to(:merge_user).class_name("User") }
+
+ it do
+ is_expected.to belong_to(:head_pipeline).class_name('Ci::Pipeline').inverse_of(:merge_requests_as_head_pipeline)
+ end
+
it { is_expected.to have_many(:assignees).through(:merge_request_assignees) }
it { is_expected.to have_many(:reviewers).through(:merge_request_reviewers) }
it { is_expected.to have_many(:merge_request_diffs) }
@@ -393,8 +398,8 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
instance1 = MergeRequest.find(merge_request.id)
instance2 = MergeRequest.find(merge_request.id)
- instance1.ensure_metrics
- instance2.ensure_metrics
+ instance1.ensure_metrics!
+ instance2.ensure_metrics!
metrics_records = MergeRequest::Metrics.where(merge_request_id: merge_request.id)
expect(metrics_records.size).to eq(1)
@@ -760,6 +765,23 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
+ context 'when scoped with :merged_before and :merged_after' do
+ before do
+ mr2.metrics.update!(merged_at: mr1.metrics.merged_at - 1.week)
+ mr3.metrics.update!(merged_at: mr1.metrics.merged_at + 1.week)
+ end
+
+ it 'excludes merge requests outside of the date range' do
+ expect(
+ project.merge_requests.merge(
+ MergeRequest::Metrics
+ .merged_before(mr1.metrics.merged_at + 1.day)
+ .merged_after(mr1.metrics.merged_at - 1.day)
+ ).total_time_to_merge
+ ).to be_within(1).of(expected_total_time([mr1]))
+ end
+ end
+
def expected_total_time(mrs)
mrs = mrs.reject { |mr| mr.merged_at.nil? }
mrs.reduce(0.0) do |sum, mr|
@@ -4274,8 +4296,11 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it 'refreshes the number of open merge requests of the target project' do
project = subject.target_project
- expect { subject.destroy! }
- .to change { project.open_merge_requests_count }.from(1).to(0)
+ expect do
+ subject.destroy!
+
+ BatchLoader::Executor.clear_current
+ end.to change { project.open_merge_requests_count }.from(1).to(0)
end
end
@@ -4301,6 +4326,14 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
transition!
end
+ context 'when skip_merge_status_trigger is set to true' do
+ before do
+ subject.skip_merge_status_trigger = true
+ end
+
+ it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
context 'when transaction is not committed' do
it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
def transition!
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index a0698ac30f5..89df24d75c9 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -31,7 +31,6 @@ RSpec.describe Namespace, feature_category: :subgroups do
it { is_expected.to have_many :pending_builds }
it { is_expected.to have_one :namespace_route }
it { is_expected.to have_many :namespace_members }
- it { is_expected.to have_many :member_roles }
it { is_expected.to have_one :cluster_enabled_grant }
it { is_expected.to have_many(:work_items) }
it { is_expected.to have_many :achievements }
@@ -173,15 +172,24 @@ RSpec.describe Namespace, feature_category: :subgroups do
# rubocop:disable Lint/BinaryOperatorWithIdenticalOperands
where(:namespace_type, :path, :valid) do
- ref(:project_sti_name) | 'j' | true
- ref(:project_sti_name) | 'path.' | true
- ref(:project_sti_name) | 'blob' | false
- ref(:group_sti_name) | 'j' | false
- ref(:group_sti_name) | 'path.' | false
- ref(:group_sti_name) | 'blob' | true
- ref(:user_sti_name) | 'j' | false
- ref(:user_sti_name) | 'path.' | false
- ref(:user_sti_name) | 'blob' | true
+ ref(:project_sti_name) | 'j' | true
+ ref(:project_sti_name) | 'path.' | false
+ ref(:project_sti_name) | '.path' | false
+ ref(:project_sti_name) | 'path.git' | false
+ ref(:project_sti_name) | 'namespace__path' | false
+ ref(:project_sti_name) | 'blob' | false
+ ref(:group_sti_name) | 'j' | false
+ ref(:group_sti_name) | 'path.' | false
+ ref(:group_sti_name) | '.path' | false
+ ref(:group_sti_name) | 'path.git' | false
+ ref(:group_sti_name) | 'namespace__path' | false
+ ref(:group_sti_name) | 'blob' | true
+ ref(:user_sti_name) | 'j' | false
+ ref(:user_sti_name) | 'path.' | false
+ ref(:user_sti_name) | '.path' | false
+ ref(:user_sti_name) | 'path.git' | false
+ ref(:user_sti_name) | 'namespace__path' | false
+ ref(:user_sti_name) | 'blob' | true
end
# rubocop:enable Lint/BinaryOperatorWithIdenticalOperands
@@ -193,6 +201,26 @@ RSpec.describe Namespace, feature_category: :subgroups do
expect(namespace.valid?).to be(valid)
end
end
+
+ context 'when path starts or ends with a special character' do
+ it 'does not raise a validation error for existing namespaces with such paths' do
+ parent.update_attribute(:path, '_path_')
+
+ expect { parent.update!(name: 'Foo') }.not_to raise_error
+ end
+ end
+
+ context 'when restrict_special_characters_in_namespace_path feature flag is disabled' do
+ before do
+ stub_feature_flags(restrict_special_characters_in_namespace_path: false)
+ end
+
+ it 'allows a special character at the start or end of a project namespace path' do
+ namespace = build(:namespace, type: project_sti_name, parent: parent, path: '_path_')
+
+ expect(namespace).to be_valid
+ end
+ end
end
describe '1 char path length' do
@@ -237,6 +265,117 @@ RSpec.describe Namespace, feature_category: :subgroups do
end
end
+ describe "ReferencePatternValidation" do
+ subject { described_class.reference_pattern }
+
+ it { is_expected.to match("@group1") }
+ it { is_expected.to match("@group1/group2/group3") }
+ it { is_expected.to match("@1234/1234/1234") }
+ it { is_expected.to match("@.q-w_e") }
+ end
+
+ describe '#to_reference_base' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user_namespace) { user.namespace }
+
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent) }
+ let_it_be(:another_group) { create(:group) }
+
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:project_namespace) { project.project_namespace }
+
+ let_it_be(:another_namespace_project) { create(:project) }
+ let_it_be(:another_namespace_project_namespace) { another_namespace_project.project_namespace }
+
+ # test references where the namespace is a group, a project namespace, or a user namespace
+ where(:namespace, :full, :from, :result) do
+ ref(:parent) | false | nil | nil
+ ref(:parent) | true | nil | lazy { parent.full_path }
+ ref(:parent) | false | ref(:group) | lazy { parent.path }
+ ref(:parent) | true | ref(:group) | lazy { parent.full_path }
+ ref(:parent) | false | ref(:parent) | nil
+ ref(:parent) | true | ref(:parent) | lazy { parent.full_path }
+ ref(:parent) | false | ref(:project) | lazy { parent.path }
+ ref(:parent) | true | ref(:project) | lazy { parent.full_path }
+ ref(:parent) | false | ref(:project_namespace) | lazy { parent.path }
+ ref(:parent) | true | ref(:project_namespace) | lazy { parent.full_path }
+ ref(:parent) | false | ref(:another_group) | lazy { parent.full_path }
+ ref(:parent) | true | ref(:another_group) | lazy { parent.full_path }
+ ref(:parent) | false | ref(:another_namespace_project) | lazy { parent.full_path }
+ ref(:parent) | true | ref(:another_namespace_project) | lazy { parent.full_path }
+ ref(:parent) | false | ref(:another_namespace_project_namespace) | lazy { parent.full_path }
+ ref(:parent) | true | ref(:another_namespace_project_namespace) | lazy { parent.full_path }
+ ref(:parent) | false | ref(:user_namespace) | lazy { parent.full_path }
+ ref(:parent) | true | ref(:user_namespace) | lazy { parent.full_path }
+
+ ref(:group) | false | nil | nil
+ ref(:group) | true | nil | lazy { group.full_path }
+ ref(:group) | false | ref(:group) | nil
+ ref(:group) | true | ref(:group) | lazy { group.full_path }
+ ref(:group) | false | ref(:parent) | lazy { group.path }
+ ref(:group) | true | ref(:parent) | lazy { group.full_path }
+ ref(:group) | false | ref(:project) | lazy { group.path }
+ ref(:group) | true | ref(:project) | lazy { group.full_path }
+ ref(:group) | false | ref(:project_namespace) | lazy { group.path }
+ ref(:group) | true | ref(:project_namespace) | lazy { group.full_path }
+ ref(:group) | false | ref(:another_group) | lazy { group.full_path }
+ ref(:group) | true | ref(:another_group) | lazy { group.full_path }
+ ref(:group) | false | ref(:another_namespace_project) | lazy { group.full_path }
+ ref(:group) | true | ref(:another_namespace_project) | lazy { group.full_path }
+ ref(:group) | false | ref(:another_namespace_project_namespace) | lazy { group.full_path }
+ ref(:group) | true | ref(:another_namespace_project_namespace) | lazy { group.full_path }
+ ref(:group) | false | ref(:user_namespace) | lazy { group.full_path }
+ ref(:group) | true | ref(:user_namespace) | lazy { group.full_path }
+
+ ref(:project_namespace) | false | nil | nil
+ ref(:project_namespace) | true | nil | lazy { project_namespace.full_path }
+ ref(:project_namespace) | false | ref(:group) | lazy { project_namespace.path }
+ ref(:project_namespace) | true | ref(:group) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | false | ref(:parent) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | true | ref(:parent) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | false | ref(:project) | nil
+ ref(:project_namespace) | true | ref(:project) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | false | ref(:project_namespace) | nil
+ ref(:project_namespace) | true | ref(:project_namespace) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | false | ref(:another_group) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | true | ref(:another_group) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | false | ref(:another_namespace_project) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | true | ref(:another_namespace_project) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | false | ref(:another_namespace_project_namespace) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | true | ref(:another_namespace_project_namespace) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | false | ref(:user_namespace) | lazy { project_namespace.full_path }
+ ref(:project_namespace) | true | ref(:user_namespace) | lazy { project_namespace.full_path }
+
+ ref(:user_namespace) | false | nil | nil
+ ref(:user_namespace) | true | nil | lazy { user_namespace.full_path }
+ ref(:user_namespace) | false | ref(:user_namespace) | nil
+ ref(:user_namespace) | true | ref(:user_namespace) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | false | ref(:group) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | true | ref(:group) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | false | ref(:parent) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | true | ref(:parent) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | false | ref(:project) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | true | ref(:project) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | false | ref(:project_namespace) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | true | ref(:project_namespace) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | false | ref(:another_group) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | true | ref(:another_group) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | false | ref(:another_namespace_project) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | true | ref(:another_namespace_project) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | false | ref(:another_namespace_project_namespace) | lazy { user_namespace.full_path }
+ ref(:user_namespace) | true | ref(:another_namespace_project_namespace) | lazy { user_namespace.full_path }
+ end
+
+ with_them do
+ it 'returns correct path' do
+ expect(namespace.to_reference_base(from, full: full)).to eq(result)
+ end
+ end
+ end
+
describe 'handling STI', :aggregate_failures do
let(:namespace_type) { nil }
let(:parent) { nil }
@@ -436,6 +575,14 @@ RSpec.describe Namespace, feature_category: :subgroups do
end
end
+ context 'when parent is nil' do
+ let(:namespace) { build(:group, parent: nil) }
+
+ it 'returns []' do
+ expect(namespace.traversal_ids).to eq []
+ end
+ end
+
context 'when made a child group' do
let!(:namespace) { create(:group) }
let!(:parent_namespace) { create(:group, children: [namespace]) }
@@ -458,6 +605,17 @@ RSpec.describe Namespace, feature_category: :subgroups do
expect(namespace.root_ancestor).to eq new_root
end
end
+
+ context 'within a transaction' do
+ # We would like traversal_ids to be defined within a transaction, but it's not possible yet.
+ # This spec exists to assert that the behavior is known.
+ it 'is not defined yet' do
+ Namespace.transaction do
+ group = create(:group)
+ expect(group.traversal_ids).to be_empty
+ end
+ end
+ end
end
context 'traversal scopes' do
@@ -542,17 +700,6 @@ RSpec.describe Namespace, feature_category: :subgroups do
it { expect(child.traversal_ids).to eq [parent.id, child.id] }
it { expect(parent.sync_events.count).to eq 1 }
it { expect(child.sync_events.count).to eq 1 }
-
- context 'when set_traversal_ids_on_save feature flag is disabled' do
- before do
- stub_feature_flags(set_traversal_ids_on_save: false)
- end
-
- it 'only sets traversal_ids on reload' do
- expect { parent.reload }.to change(parent, :traversal_ids).from([]).to([parent.id])
- expect { child.reload }.to change(child, :traversal_ids).from([]).to([parent.id, child.id])
- end
- end
end
context 'traversal_ids on update' do
@@ -565,18 +712,6 @@ RSpec.describe Namespace, feature_category: :subgroups do
it 'sets the traversal_ids attribute' do
expect { subject }.to change { namespace1.traversal_ids }.from([namespace1.id]).to([namespace2.id, namespace1.id])
end
-
- context 'when set_traversal_ids_on_save feature flag is disabled' do
- before do
- stub_feature_flags(set_traversal_ids_on_save: false)
- end
-
- it 'sets traversal_ids after reload' do
- subject
-
- expect { namespace1.reload }.to change(namespace1, :traversal_ids).from([]).to([namespace2.id, namespace1.id])
- end
- end
end
it 'creates a Namespaces::SyncEvent using triggers' do
@@ -677,24 +812,41 @@ RSpec.describe Namespace, feature_category: :subgroups do
describe '#any_project_has_container_registry_tags?' do
subject { namespace.any_project_has_container_registry_tags? }
- let!(:project_without_registry) { create(:project, namespace: namespace) }
+ let(:project) { create(:project, namespace: namespace) }
- context 'without tags' do
- it { is_expected.to be_falsey }
+ it 'returns true if there is a project with container registry tags' do
+ expect(namespace).to receive(:first_project_with_container_registry_tags).and_return(project)
+
+ expect(subject).to be_truthy
end
- context 'with tags' do
- before do
- repositories = create_list(:container_repository, 3)
- create(:project, namespace: namespace, container_repositories: repositories)
+ it 'returns false if there is no project with container registry tags' do
+ expect(namespace).to receive(:first_project_with_container_registry_tags).and_return(nil)
+ expect(subject).to be_falsey
+ end
+ end
+
+ describe '#first_project_with_container_registry_tags' do
+ let(:container_repository) { create(:container_repository) }
+ let!(:project) { create(:project, namespace: namespace, container_repositories: [container_repository]) }
+
+ context 'when Gitlab API is not supported' do
+ before do
stub_container_registry_config(enabled: true)
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(false)
end
- it 'finds tags' do
+ it 'returns the project' do
stub_container_registry_tags(repository: :any, tags: ['tag'])
- is_expected.to be_truthy
+ expect(namespace.first_project_with_container_registry_tags).to eq(project)
+ end
+
+ it 'returns no project' do
+ stub_container_registry_tags(repository: :any, tags: nil)
+
+ expect(namespace.first_project_with_container_registry_tags).to be_nil
end
it 'does not cause N+1 query in fetching registries' do
@@ -704,29 +856,52 @@ RSpec.describe Namespace, feature_category: :subgroups do
other_repositories = create_list(:container_repository, 2)
create(:project, namespace: namespace, container_repositories: other_repositories)
- expect { namespace.any_project_has_container_registry_tags? }.not_to exceed_query_limit(control_count + 1)
+ expect { namespace.first_project_with_container_registry_tags }.not_to exceed_query_limit(control_count + 1)
end
end
- end
- describe '#first_project_with_container_registry_tags' do
- let(:container_repository) { create(:container_repository) }
- let!(:project) { create(:project, namespace: namespace, container_repositories: [container_repository]) }
+ context 'when Gitlab API is supported' do
+ before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(true)
+ stub_container_registry_config(enabled: true, api_url: 'http://container-registry', key: 'spec/fixtures/x509_certificate_pk.key')
+ end
- before do
- stub_container_registry_config(enabled: true)
- end
+ it 'calls and returns GitlabApiClient.one_project_with_container_registry_tag' do
+ expect(ContainerRegistry::GitlabApiClient)
+ .to receive(:one_project_with_container_registry_tag)
+ .with(namespace.full_path)
+ .and_return(project)
- it 'returns the project' do
- stub_container_registry_tags(repository: :any, tags: ['tag'])
+ expect(namespace.first_project_with_container_registry_tags).to eq(project)
+ end
- expect(namespace.first_project_with_container_registry_tags).to eq(project)
- end
+ context 'when the feature flag use_sub_repositories_api is disabled' do
+ before do
+ stub_feature_flags(use_sub_repositories_api: false)
+ end
+
+ it 'returns the project' do
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
+
+ expect(namespace.first_project_with_container_registry_tags).to eq(project)
+ end
+
+ it 'returns no project' do
+ stub_container_registry_tags(repository: :any, tags: nil)
+
+ expect(namespace.first_project_with_container_registry_tags).to be_nil
+ end
- it 'returns no project' do
- stub_container_registry_tags(repository: :any, tags: nil)
+ it 'does not cause N+1 query in fetching registries' do
+ stub_container_registry_tags(repository: :any, tags: [])
+ control_count = ActiveRecord::QueryRecorder.new { namespace.any_project_has_container_registry_tags? }.count
- expect(namespace.first_project_with_container_registry_tags).to be_nil
+ other_repositories = create_list(:container_repository, 2)
+ create(:project, namespace: namespace, container_repositories: other_repositories)
+
+ expect { namespace.first_project_with_container_registry_tags }.not_to exceed_query_limit(control_count + 1)
+ end
+ end
end
end
@@ -755,6 +930,7 @@ RSpec.describe Namespace, feature_category: :subgroups do
with_them do
before do
+ allow(ContainerRegistry::GitlabApiClient).to receive(:one_project_with_container_registry_tag).and_return(nil)
stub_container_registry_config(enabled: true, api_url: 'http://container-registry', key: 'spec/fixtures/x509_certificate_pk.key')
allow(Gitlab).to receive(:com?).and_return(true)
allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(gitlab_api_supported)
@@ -975,43 +1151,6 @@ RSpec.describe Namespace, feature_category: :subgroups do
end
end
- describe '.find_by_pages_host' do
- it 'finds namespace by GitLab Pages host and is case-insensitive' do
- namespace = create(:namespace, name: 'topNAMEspace', path: 'topNAMEspace')
- create(:namespace, name: 'annother_namespace')
- host = "TopNamespace.#{Settings.pages.host.upcase}"
-
- expect(described_class.find_by_pages_host(host)).to eq(namespace)
- end
-
- context 'when there is non-top-level group with searched name' do
- before do
- create(:group, :nested, path: 'pages')
- end
-
- it 'ignores this group' do
- host = "pages.#{Settings.pages.host.upcase}"
-
- expect(described_class.find_by_pages_host(host)).to be_nil
- end
-
- it 'finds right top level group' do
- group = create(:group, path: 'pages')
-
- host = "pages.#{Settings.pages.host.upcase}"
-
- expect(described_class.find_by_pages_host(host)).to eq(group)
- end
- end
-
- it "returns no result if the provided host is not subdomain of the Pages host" do
- create(:namespace, name: 'namespace.io')
- host = "namespace.io"
-
- expect(described_class.find_by_pages_host(host)).to eq(nil)
- end
- end
-
describe '.top_most' do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:group) { create(:group) }
@@ -1037,6 +1176,7 @@ RSpec.describe Namespace, feature_category: :subgroups do
allow(namespace).to receive(:path_was).and_return(namespace.path)
allow(namespace).to receive(:path).and_return('new_path')
+ allow(namespace).to receive(:first_project_with_container_registry_tags).and_return(project)
end
it 'raises an error about not movable project' do
@@ -1917,6 +2057,62 @@ RSpec.describe Namespace, feature_category: :subgroups do
expect(very_deep_nested_group.root_ancestor).to eq(root_group)
end
end
+
+ context 'when parent is changed' do
+ let(:group) { create(:group) }
+ let(:new_parent) { create(:group) }
+
+ shared_examples 'updates root_ancestor' do
+ it do
+ expect { subject }.to change { group.root_ancestor }.from(group).to(new_parent)
+ end
+ end
+
+ context 'by object' do
+ subject { group.parent = new_parent }
+
+ include_examples 'updates root_ancestor'
+ end
+
+ context 'by id' do
+ subject { group.parent_id = new_parent.id }
+
+ include_examples 'updates root_ancestor'
+ end
+ end
+
+ context 'within a transaction' do
+ context 'with a persisted parent' do
+ let(:parent) { create(:group) }
+
+ it do
+ Namespace.transaction do
+ group = create(:group, parent: parent)
+ expect(group.root_ancestor).to eq parent
+ end
+ end
+ end
+
+ context 'with a non-persisted parent' do
+ let(:parent) { build(:group) }
+
+ it do
+ Namespace.transaction do
+ group = create(:group, parent: parent)
+ expect(group.root_ancestor).to eq parent
+ end
+ end
+ end
+
+ context 'without a parent' do
+ it do
+ Namespace.transaction do
+ group = create(:group)
+ expect(group.root_ancestor).to eq group
+ end
+ end
+ end
+ end
end
describe '#full_path_before_last_save' do
@@ -2105,34 +2301,6 @@ RSpec.describe Namespace, feature_category: :subgroups do
end
end
- describe '#pages_virtual_domain' do
- let(:project) { create(:project, namespace: namespace) }
- let(:virtual_domain) { namespace.pages_virtual_domain }
-
- before do
- project.mark_pages_as_deployed
- project.update_pages_deployment!(create(:pages_deployment, project: project))
- end
-
- it 'returns the virual domain' do
- expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.lookup_paths).not_to be_empty
- expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{namespace.root_ancestor.id}_/)
- end
-
- context 'when :cache_pages_domain_api is disabled' do
- before do
- stub_feature_flags(cache_pages_domain_api: false)
- end
-
- it 'returns the virual domain' do
- expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.lookup_paths).not_to be_empty
- expect(virtual_domain.cache_key).to be_nil
- end
- end
- end
-
describe '#any_project_with_pages_deployed?' do
it 'returns true if any project nested under the group has pages deployed' do
parent_1 = create(:group) # Three projects, one with pages
diff --git a/spec/models/namespaces/randomized_suffix_path_spec.rb b/spec/models/namespaces/randomized_suffix_path_spec.rb
index a2484030f3c..fc5ccd95ce6 100644
--- a/spec/models/namespaces/randomized_suffix_path_spec.rb
+++ b/spec/models/namespaces/randomized_suffix_path_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespaces::RandomizedSuffixPath, feature_category: :not_owned do
+RSpec.describe Namespaces::RandomizedSuffixPath, feature_category: :shared do
let(:path) { 'backintime' }
subject(:suffixed_path) { described_class.new(path) }
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 013070f7be5..c1de8125c0d 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -1100,6 +1100,16 @@ RSpec.describe Note do
end
end
+ describe '#for_work_item?' do
+ it 'returns true for a work item' do
+ expect(build(:note_on_work_item).for_work_item?).to be true
+ end
+
+ it 'returns false for an issue' do
+ expect(build(:note_on_issue).for_work_item?).to be false
+ end
+ end
+
describe '#for_project_snippet?' do
it 'returns true for a project snippet note' do
expect(build(:note_on_project_snippet).for_project_snippet?).to be true
diff --git a/spec/models/oauth_access_token_spec.rb b/spec/models/oauth_access_token_spec.rb
index fc53d926dd6..5fa590eab58 100644
--- a/spec/models/oauth_access_token_spec.rb
+++ b/spec/models/oauth_access_token_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe OauthAccessToken do
it 'uses the expires_in value' do
token = OauthAccessToken.new(expires_in: 1.minute)
- expect(token.expires_in).to eq 1.minute
+ expect(token).to be_valid
end
end
@@ -67,7 +67,7 @@ RSpec.describe OauthAccessToken do
it 'uses default value' do
token = OauthAccessToken.new(expires_in: nil)
- expect(token.expires_in).to eq 2.hours
+ expect(token).to be_invalid
end
end
end
diff --git a/spec/models/onboarding/completion_spec.rb b/spec/models/onboarding/completion_spec.rb
index e1fad4255bc..0639762b76c 100644
--- a/spec/models/onboarding/completion_spec.rb
+++ b/spec/models/onboarding/completion_spec.rb
@@ -2,24 +2,24 @@
require 'spec_helper'
-RSpec.describe Onboarding::Completion do
+RSpec.describe Onboarding::Completion, feature_category: :onboarding do
+ let(:completed_actions) { {} }
+ let(:project) { build(:project, namespace: namespace) }
+ let!(:onboarding_progress) { create(:onboarding_progress, namespace: namespace, **completed_actions) }
+
+ let_it_be(:namespace) { create(:namespace) }
+
describe '#percentage' do
- let(:completed_actions) { {} }
- let!(:onboarding_progress) { create(:onboarding_progress, namespace: namespace, **completed_actions) }
let(:tracked_action_columns) do
- [
- *described_class::ACTION_ISSUE_IDS.keys,
- *described_class::ACTION_PATHS,
- :security_scan_enabled
- ].map { |key| ::Onboarding::Progress.column_name(key) }
+ [*described_class::ACTION_PATHS, :security_scan_enabled].map do |key|
+ ::Onboarding::Progress.column_name(key)
+ end
end
- let_it_be(:namespace) { create(:namespace) }
-
- subject { described_class.new(namespace).percentage }
+ subject(:percentage) { described_class.new(project).percentage }
context 'when no onboarding_progress exists' do
- subject { described_class.new(build(:namespace)).percentage }
+ subject(:percentage) { described_class.new(build(:project)).percentage }
it { is_expected.to eq(0) }
end
@@ -29,6 +29,8 @@ RSpec.describe Onboarding::Completion do
end
context 'when all tracked actions have been completed' do
+ let(:project) { build(:project, :stubbed_commit_count, namespace: namespace) }
+
let(:completed_actions) do
tracked_action_columns.index_with { Time.current }
end
@@ -44,7 +46,7 @@ RSpec.describe Onboarding::Completion do
stub_experiments(security_actions_continuous_onboarding: :control)
end
- it { is_expected.to eq(11) }
+ it { is_expected.to eq(10) }
end
context 'when candidate' do
@@ -52,7 +54,50 @@ RSpec.describe Onboarding::Completion do
stub_experiments(security_actions_continuous_onboarding: :candidate)
end
- it { is_expected.to eq(9) }
+ it { is_expected.to eq(8) }
+ end
+ end
+ end
+
+ describe '#completed?' do
+ subject(:completed?) { described_class.new(project).completed?(column) }
+
+ context 'when code_added' do
+ let(:column) { :code_added }
+
+ context 'when commit_count > 1' do
+ let(:project) { build(:project, :stubbed_commit_count, namespace: namespace) }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when branch_count > 1' do
+ let(:project) { build(:project, :stubbed_branch_count, namespace: namespace) }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when empty repository' do
+ let(:project) { build(:project, namespace: namespace) }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ context 'when security_scan_enabled' do
+ let(:column) { :security_scan_enabled_at }
+ let(:completed_actions) { { security_scan_enabled_at: security_scan_enabled_at } }
+
+ context 'when is completed' do
+ let(:security_scan_enabled_at) { Time.current }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when is not completed' do
+ let(:security_scan_enabled_at) { nil }
+
+ it { is_expected.to eq(false) }
end
end
end
diff --git a/spec/models/packages/debian/file_metadatum_spec.rb b/spec/models/packages/debian/file_metadatum_spec.rb
index 1215adfa6a1..8cbd83c3e2d 100644
--- a/spec/models/packages/debian/file_metadatum_spec.rb
+++ b/spec/models/packages/debian/file_metadatum_spec.rb
@@ -79,6 +79,7 @@ RSpec.describe Packages::Debian::FileMetadatum, type: :model do
:debian_package_file | :dsc | true | false | true
:debian_package_file | :deb | true | true | true
:debian_package_file | :udeb | true | true | true
+ :debian_package_file | :ddeb | true | true | true
:debian_package_file | :buildinfo | true | false | true
:debian_package_file | :changes | false | false | true
end
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index 9b341034aaa..d80f8247261 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -153,6 +153,7 @@ RSpec.describe Packages::PackageFile, type: :model do
let_it_be(:debian_changes) { debian_package.package_files.last }
let_it_be(:debian_deb) { create(:debian_package_file, package: debian_package) }
let_it_be(:debian_udeb) { create(:debian_package_file, :udeb, package: debian_package) }
+ let_it_be(:debian_ddeb) { create(:debian_package_file, :ddeb, package: debian_package) }
let_it_be(:debian_contrib) do
create(:debian_package_file, package: debian_package).tap do |pf|
diff --git a/spec/models/pages/lookup_path_spec.rb b/spec/models/pages/lookup_path_spec.rb
index ef79ba28d5d..38ff1bb090e 100644
--- a/spec/models/pages/lookup_path_spec.rb
+++ b/spec/models/pages/lookup_path_spec.rb
@@ -61,15 +61,13 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
it 'uses deployment from object storage' do
freeze_time do
- expect(source).to(
- eq({
- type: 'zip',
- path: deployment.file.url(expire_at: 1.day.from_now),
- global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
- sha256: deployment.file_sha256,
- file_size: deployment.size,
- file_count: deployment.file_count
- })
+ expect(source).to eq(
+ type: 'zip',
+ path: deployment.file.url(expire_at: 1.day.from_now),
+ global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
+ sha256: deployment.file_sha256,
+ file_size: deployment.size,
+ file_count: deployment.file_count
)
end
end
@@ -87,15 +85,13 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
it 'uses file protocol' do
freeze_time do
- expect(source).to(
- eq({
- type: 'zip',
- path: 'file://' + deployment.file.path,
- global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
- sha256: deployment.file_sha256,
- file_size: deployment.size,
- file_count: deployment.file_count
- })
+ expect(source).to eq(
+ type: 'zip',
+ path: "file://#{deployment.file.path}",
+ global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
+ sha256: deployment.file_sha256,
+ file_size: deployment.size,
+ file_count: deployment.file_count
)
end
end
@@ -108,15 +104,13 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
it 'uses deployment from object storage' do
freeze_time do
- expect(source).to(
- eq({
- type: 'zip',
- path: deployment.file.url(expire_at: 1.day.from_now),
- global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
- sha256: deployment.file_sha256,
- file_size: deployment.size,
- file_count: deployment.file_count
- })
+ expect(source).to eq(
+ type: 'zip',
+ path: deployment.file.url(expire_at: 1.day.from_now),
+ global_id: "gid://gitlab/PagesDeployment/#{deployment.id}",
+ sha256: deployment.file_sha256,
+ file_size: deployment.size,
+ file_count: deployment.file_count
)
end
end
@@ -143,4 +137,25 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do
expect(lookup_path.prefix).to eq('/myproject/')
end
end
+
+ describe '#unique_domain' do
+ let(:project) { build(:project) }
+
+ context 'when unique domain is disabled' do
+ it 'returns nil' do
+ project.project_setting.pages_unique_domain_enabled = false
+
+ expect(lookup_path.unique_domain).to be_nil
+ end
+ end
+
+ context 'when unique domain is enabled' do
+ it 'returns the project unique domain' do
+ project.project_setting.pages_unique_domain_enabled = true
+ project.project_setting.pages_unique_domain = 'unique-domain'
+
+ expect(lookup_path.unique_domain).to eq('unique-domain')
+ end
+ end
+ end
end
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index f054fde78e7..b218d4dce09 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe PagesDomain do
describe 'associations' do
it { is_expected.to belong_to(:project) }
- it { is_expected.to have_many(:serverless_domain_clusters) }
end
describe '.for_project' do
@@ -546,44 +545,6 @@ RSpec.describe PagesDomain do
end
end
- describe '#pages_virtual_domain' do
- let(:project) { create(:project) }
- let(:pages_domain) { create(:pages_domain, project: project) }
-
- context 'when there are no pages deployed for the project' do
- it 'returns nil' do
- expect(pages_domain.pages_virtual_domain).to be_nil
- end
- end
-
- context 'when there are pages deployed for the project' do
- let(:virtual_domain) { pages_domain.pages_virtual_domain }
-
- before do
- project.mark_pages_as_deployed
- project.update_pages_deployment!(create(:pages_deployment, project: project))
- end
-
- it 'returns the virual domain when there are pages deployed for the project' do
- expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.lookup_paths).not_to be_empty
- expect(virtual_domain.cache_key).to match(/pages_domain_for_domain_#{pages_domain.id}_/)
- end
-
- context 'when :cache_pages_domain_api is disabled' do
- before do
- stub_feature_flags(cache_pages_domain_api: false)
- end
-
- it 'returns the virual domain when there are pages deployed for the project' do
- expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
- expect(virtual_domain.lookup_paths).not_to be_empty
- expect(virtual_domain.cache_key).to be_nil
- end
- end
- end
- end
-
describe '#validate_custom_domain_count_per_project' do
let_it_be(:project) { create(:project) }
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 2320ff669d0..45b8de509fc 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe PersonalAccessToken, feature_category: :authentication_and_authorization do
+RSpec.describe PersonalAccessToken, feature_category: :system_access do
subject { described_class }
describe '.build' do
@@ -405,4 +405,26 @@ RSpec.describe PersonalAccessToken, feature_category: :authentication_and_author
end
end
end
+
+ describe 'token format' do
+ let(:personal_access_token) { described_class.new }
+
+ it 'generates a token' do
+ expect { personal_access_token.ensure_token }
+ .to change { personal_access_token.token }.from(nil).to(a_string_starting_with(described_class.token_prefix))
+ end
+
+ context 'when there is an existing token' do
+ let(:token) { 'an_existing_secret_token' }
+
+ before do
+ personal_access_token.set_token(token)
+ end
+
+ it 'does not change the existing token' do
+ expect { personal_access_token.ensure_token }
+ .not_to change { personal_access_token.token }.from(token)
+ end
+ end
+ end
end
diff --git a/spec/models/preloaders/runner_machine_policy_preloader_spec.rb b/spec/models/preloaders/runner_machine_policy_preloader_spec.rb
new file mode 100644
index 00000000000..26fc101d8dc
--- /dev/null
+++ b/spec/models/preloaders/runner_machine_policy_preloader_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Preloaders::RunnerMachinePolicyPreloader, feature_category: :runner_fleet do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:runner1) { create(:ci_runner) }
+ let_it_be(:runner2) { create(:ci_runner) }
+ let_it_be(:runner_machine1) { create(:ci_runner_machine, runner: runner1) }
+ let_it_be(:runner_machine2) { create(:ci_runner_machine, runner: runner2) }
+
+ let(:base_runner_machines) do
+ Ci::RunnerMachine.where(id: [runner_machine1, runner_machine2])
+ end
+
+ it 'avoids N+1 queries when authorizing a list of runner machines', :request_store do
+ preload_runner_machines_for_policy(user)
+ control = ActiveRecord::QueryRecorder.new { authorize_all_runner_machines(user) }
+
+ new_runner1 = create(:ci_runner)
+ new_runner2 = create(:ci_runner)
+ new_runner_machine1 = create(:ci_runner_machine, runner: new_runner1)
+ new_runner_machine2 = create(:ci_runner_machine, runner: new_runner2)
+
+ pristine_runner_machines = Ci::RunnerMachine.where(id: base_runner_machines + [new_runner_machine1, new_runner_machine2])
+
+ preload_runner_machines_for_policy(user, pristine_runner_machines)
+ expect { authorize_all_runner_machines(user, pristine_runner_machines) }.not_to exceed_query_limit(control)
+ end
+
+ def authorize_all_runner_machines(current_user, runner_machine_list = base_runner_machines)
+ runner_machine_list.each { |runner_machine| current_user.can?(:read_runner_machine, runner_machine) }
+ end
+
+ def preload_runner_machines_for_policy(current_user, runner_machine_list = base_runner_machines)
+ described_class.new(runner_machine_list, current_user).execute
+ end
+end
diff --git a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
index 7d04817b621..3fba2ac003b 100644
--- a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
@@ -66,22 +66,6 @@ RSpec.describe Preloaders::UserMaxAccessLevelInGroupsPreloader do
create(:group_group_link, :guest, shared_with_group: group1, shared_group: group4)
end
- context 'when `include_memberships_from_group_shares_in_preloader` feature flag is disabled' do
- before do
- stub_feature_flags(include_memberships_from_group_shares_in_preloader: false)
- end
-
- it 'sets access_level to `NO_ACCESS` in cache for groups arising from group shares' do
- described_class.new(groups, user).execute
-
- groups.each do |group|
- cached_access_level = group.max_member_access_for_user(user)
-
- expect(cached_access_level).to eq(Gitlab::Access::NO_ACCESS)
- end
- end
- end
-
it 'sets the right access level in cache for groups arising from group shares' do
described_class.new(groups, user).execute
diff --git a/spec/models/project_ci_cd_setting_spec.rb b/spec/models/project_ci_cd_setting_spec.rb
index 2c490c33747..0a818147bfc 100644
--- a/spec/models/project_ci_cd_setting_spec.rb
+++ b/spec/models/project_ci_cd_setting_spec.rb
@@ -27,22 +27,8 @@ RSpec.describe ProjectCiCdSetting do
end
end
- describe '#set_default_for_inbound_job_token_scope_enabled' do
- context 'when feature flag ci_inbound_job_token_scope is enabled' do
- before do
- stub_feature_flags(ci_inbound_job_token_scope: true)
- end
-
- it { is_expected.to be_inbound_job_token_scope_enabled }
- end
-
- context 'when feature flag ci_inbound_job_token_scope is disabled' do
- before do
- stub_feature_flags(ci_inbound_job_token_scope: false)
- end
-
- it { is_expected.not_to be_inbound_job_token_scope_enabled }
- end
+ describe '#default_for_inbound_job_token_scope_enabled' do
+ it { is_expected.to be_inbound_job_token_scope_enabled }
end
describe '#default_git_depth' do
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index fe0b46c3117..5da6a66b3ae 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -314,6 +314,40 @@ RSpec.describe ProjectFeature, feature_category: :projects do
end
end
+ describe '#public_packages?' do
+ let_it_be(:public_project) { create(:project, :public) }
+
+ context 'with packages config enabled' do
+ context 'when project is private' do
+ it 'returns false' do
+ expect(project.project_feature.public_packages?).to eq(false)
+ end
+
+ context 'with package_registry_access_level set to public' do
+ before do
+ project.project_feature.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it 'returns true' do
+ expect(project.project_feature.public_packages?).to eq(true)
+ end
+ end
+ end
+
+ context 'when project is public' do
+ it 'returns true' do
+ expect(public_project.project_feature.public_packages?).to eq(true)
+ end
+ end
+ end
+
+ it 'returns false if packages config is not enabled' do
+ stub_config(packages: { enabled: false })
+
+ expect(public_project.project_feature.public_packages?).to eq(false)
+ end
+ end
+
# rubocop:disable Gitlab/FeatureAvailableUsage
describe '#feature_available?' do
let(:features) { ProjectFeature::FEATURES }
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
index feb5985818b..42433a2a84a 100644
--- a/spec/models/project_setting_spec.rb
+++ b/spec/models/project_setting_spec.rb
@@ -39,6 +39,44 @@ RSpec.describe ProjectSetting, type: :model do
[nil, 'not_allowed', :invalid].each do |invalid_value|
it { is_expected.not_to allow_value([invalid_value]).for(:target_platforms) }
end
+
+ context "when pages_unique_domain is required", feature_category: :pages do
+ it "is not required if pages_unique_domain_enabled is false" do
+ project_setting = build(:project_setting, pages_unique_domain_enabled: false)
+
+ expect(project_setting).to be_valid
+ expect(project_setting.errors.full_messages).not_to include("Pages unique domain can't be blank")
+ end
+
+ it "is required when pages_unique_domain_enabled is true" do
+ project_setting = build(:project_setting, pages_unique_domain_enabled: true)
+
+ expect(project_setting).not_to be_valid
+ expect(project_setting.errors.full_messages).to include("Pages unique domain can't be blank")
+ end
+
+ it "is required if it is already saved in the database" do
+ project_setting = create(
+ :project_setting,
+ pages_unique_domain: "random-unique-domain-here",
+ pages_unique_domain_enabled: true
+ )
+
+ project_setting.pages_unique_domain = nil
+
+ expect(project_setting).not_to be_valid
+ expect(project_setting.errors.full_messages).to include("Pages unique domain can't be blank")
+ end
+ end
+
+ it "validates uniqueness of pages_unique_domain", feature_category: :pages do
+ create(:project_setting, pages_unique_domain: "random-unique-domain-here")
+
+ project_setting = build(:project_setting, pages_unique_domain: "random-unique-domain-here")
+
+ expect(project_setting).not_to be_valid
+ expect(project_setting.errors.full_messages).to include("Pages unique domain has already been taken")
+ end
end
describe 'target_platforms=' do
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index dfc8919e19d..15e5db5af60 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -14,6 +14,8 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
it_behaves_like 'having unique enum values'
+ it_behaves_like 'ensures runners_token is prefixed', :project
+
describe 'associations' do
it { is_expected.to belong_to(:group) }
it { is_expected.to belong_to(:namespace) }
@@ -50,6 +52,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
it { is_expected.to have_one(:packagist_integration) }
it { is_expected.to have_one(:pushover_integration) }
it { is_expected.to have_one(:apple_app_store_integration) }
+ it { is_expected.to have_one(:google_play_integration) }
it { is_expected.to have_one(:asana_integration) }
it { is_expected.to have_many(:boards) }
it { is_expected.to have_one(:campfire_integration) }
@@ -88,6 +91,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
it { is_expected.to have_one(:alerting_setting).class_name('Alerting::ProjectAlertingSetting') }
it { is_expected.to have_one(:mock_ci_integration) }
it { is_expected.to have_one(:mock_monitoring_integration) }
+ it { is_expected.to have_one(:service_desk_custom_email_verification).class_name('ServiceDesk::CustomEmailVerification') }
it { is_expected.to have_many(:commit_statuses) }
it { is_expected.to have_many(:ci_pipelines) }
it { is_expected.to have_many(:ci_refs) }
@@ -135,6 +139,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
it { is_expected.to have_many(:reviews).inverse_of(:project) }
it { is_expected.to have_many(:packages).class_name('Packages::Package') }
it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile') }
+ it { is_expected.to have_many(:rpm_repository_files).class_name('Packages::Rpm::RepositoryFile').inverse_of(:project).dependent(:destroy) }
it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::ProjectDistribution').dependent(:destroy) }
it { is_expected.to have_one(:packages_cleanup_policy).class_name('Packages::Cleanup::Policy').inverse_of(:project) }
it { is_expected.to have_many(:pipeline_artifacts).dependent(:restrict_with_error) }
@@ -939,6 +944,17 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
+ describe '#commit_notes' do
+ let_it_be(:project) { create(:project) }
+
+ it "returns project's commit notes" do
+ note_1 = create(:note_on_commit, project: project, commit_id: 'commit_id_1')
+ note_2 = create(:note_on_commit, project: project, commit_id: 'commit_id_2')
+
+ expect(project.commit_notes).to match_array([note_1, note_2])
+ end
+ end
+
describe '#personal_namespace_holder?' do
let_it_be(:group) { create(:group) }
let_it_be(:namespace_user) { create(:user) }
@@ -1151,7 +1167,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
describe '#ci_inbound_job_token_scope_enabled?' do
- it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_', default: true do
let(:delegated_method) { :inbound_job_token_scope_enabled? }
end
end
@@ -1380,6 +1396,60 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
+ describe '#to_reference_base' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user_namespace) { user.namespace }
+
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent) }
+ let_it_be(:another_group) { create(:group) }
+
+ let_it_be(:project1) { create(:project, namespace: group) }
+ let_it_be(:project_namespace) { project1.project_namespace }
+
+ # different project same group
+ let_it_be(:project2) { create(:project, namespace: group) }
+ let_it_be(:project_namespace2) { project2.project_namespace }
+
+ # different project from different group
+ let_it_be(:project3) { create(:project) }
+ let_it_be(:project_namespace3) { project3.project_namespace }
+
+ # test references where the "from" namespace is a group, a project namespace, or a user namespace
+ where(:project, :full, :from, :result) do
+ ref(:project1) | false | nil | nil
+ ref(:project1) | true | nil | lazy { project.full_path }
+ ref(:project1) | false | ref(:group) | lazy { project.path }
+ ref(:project1) | true | ref(:group) | lazy { project.full_path }
+ ref(:project1) | false | ref(:parent) | lazy { project.full_path }
+ ref(:project1) | true | ref(:parent) | lazy { project.full_path }
+ ref(:project1) | false | ref(:project1) | nil
+ ref(:project1) | true | ref(:project1) | lazy { project.full_path }
+ ref(:project1) | false | ref(:project_namespace) | nil
+ ref(:project1) | true | ref(:project_namespace) | lazy { project.full_path }
+ ref(:project1) | false | ref(:project2) | lazy { project.path }
+ ref(:project1) | true | ref(:project2) | lazy { project.full_path }
+ ref(:project1) | false | ref(:project_namespace2) | lazy { project.path }
+ ref(:project1) | true | ref(:project_namespace2) | lazy { project.full_path }
+ ref(:project1) | false | ref(:another_group) | lazy { project.full_path }
+ ref(:project1) | true | ref(:another_group) | lazy { project.full_path }
+ ref(:project1) | false | ref(:project3) | lazy { project.full_path }
+ ref(:project1) | true | ref(:project3) | lazy { project.full_path }
+ ref(:project1) | false | ref(:project_namespace3) | lazy { project.full_path }
+ ref(:project1) | true | ref(:project_namespace3) | lazy { project.full_path }
+ ref(:project1) | false | ref(:user_namespace) | lazy { project.full_path }
+ ref(:project1) | true | ref(:user_namespace) | lazy { project.full_path }
+ end
+
+ with_them do
+ it 'returns correct path' do
+ expect(project.to_reference_base(from, full: full)).to eq(result)
+ end
+ end
+ end
+
describe '#merge_method' do
where(:ff, :rebase, :method) do
true | true | :ff
@@ -2666,7 +2736,11 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
describe '#pages_url', feature_category: :pages do
+ let(:group_name) { 'group' }
+ let(:project_name) { 'project' }
+
let(:group) { create(:group, name: group_name) }
+ let(:nested_group) { create(:group, parent: group) }
let(:project_path) { project_name.downcase }
let(:project) do
@@ -2689,101 +2763,114 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
.and_return(['http://example.com', port].compact.join(':'))
end
- context 'group page' do
- let(:group_name) { 'Group' }
- let(:project_name) { 'group.example.com' }
+ context 'when pages_unique_domain feature flag is disabled' do
+ before do
+ stub_feature_flags(pages_unique_domain: false)
+ end
- it { is_expected.to eq("http://group.example.com") }
+ it { is_expected.to eq('http://group.example.com/project') }
+ end
- context 'mixed case path' do
- let(:project_path) { 'Group.example.com' }
+ context 'when pages_unique_domain feature flag is enabled' do
+ before do
+ stub_feature_flags(pages_unique_domain: true)
- it { is_expected.to eq("http://group.example.com") }
+ project.project_setting.update!(
+ pages_unique_domain_enabled: pages_unique_domain_enabled,
+ pages_unique_domain: 'unique-domain'
+ )
end
- end
- context 'project page' do
- let(:group_name) { 'Group' }
- let(:project_name) { 'Project' }
+ context 'when pages_unique_domain_enabled is false' do
+ let(:pages_unique_domain_enabled) { false }
- it { is_expected.to eq("http://group.example.com/project") }
+ it { is_expected.to eq('http://group.example.com/project') }
+ end
- context 'mixed case path' do
- let(:project_path) { 'Project' }
+ context 'when pages_unique_domain_enabled is true' do
+ let(:pages_unique_domain_enabled) { true }
- it { is_expected.to eq("http://group.example.com/Project") }
+ it { is_expected.to eq('http://unique-domain.example.com') }
end
end
- context 'when there is an explicit port' do
- let(:port) { 3000 }
-
- context 'when not in dev mode' do
- before do
- stub_rails_env('production')
- end
+ context 'with nested group' do
+ let(:project) { create(:project, namespace: nested_group, name: project_name) }
+ let(:expected_url) { "http://group.example.com/#{nested_group.path}/#{project.path}" }
- context 'group page' do
- let(:group_name) { 'Group' }
- let(:project_name) { 'group.example.com' }
+ context 'group page' do
+ let(:project_name) { 'group.example.com' }
- it { is_expected.to eq('http://group.example.com:3000/group.example.com') }
+ it { is_expected.to eq(expected_url) }
+ end
- context 'mixed case path' do
- let(:project_path) { 'Group.example.com' }
+ context 'project page' do
+ let(:project_name) { 'Project' }
- it { is_expected.to eq('http://group.example.com:3000/Group.example.com') }
- end
- end
+ it { is_expected.to eq(expected_url) }
+ end
+ end
- context 'project page' do
- let(:group_name) { 'Group' }
- let(:project_name) { 'Project' }
+ context 'when the project matches its namespace url' do
+ let(:project_name) { 'group.example.com' }
- it { is_expected.to eq("http://group.example.com:3000/project") }
+ it { is_expected.to eq('http://group.example.com') }
- context 'mixed case path' do
- let(:project_path) { 'Project' }
+ context 'with different group name capitalization' do
+ let(:group_name) { 'Group' }
- it { is_expected.to eq("http://group.example.com:3000/Project") }
- end
- end
+ it { is_expected.to eq("http://group.example.com") }
end
- context 'when in dev mode' do
- before do
- stub_rails_env('development')
- end
-
- context 'group page' do
- let(:group_name) { 'Group' }
- let(:project_name) { 'group.example.com' }
+ context 'with different project path capitalization' do
+ let(:project_path) { 'Group.example.com' }
- it { is_expected.to eq('http://group.example.com:3000') }
+ it { is_expected.to eq("http://group.example.com") }
+ end
- context 'mixed case path' do
- let(:project_path) { 'Group.example.com' }
+ context 'with different project name capitalization' do
+ let(:project_name) { 'Project' }
- it { is_expected.to eq('http://group.example.com:3000') }
- end
- end
+ it { is_expected.to eq("http://group.example.com/project") }
+ end
- context 'project page' do
- let(:group_name) { 'Group' }
- let(:project_name) { 'Project' }
+ context 'when there is an explicit port' do
+ let(:port) { 3000 }
- it { is_expected.to eq("http://group.example.com:3000/project") }
+ context 'when not in dev mode' do
+ before do
+ stub_rails_env('production')
+ end
- context 'mixed case path' do
- let(:project_path) { 'Project' }
+ it { is_expected.to eq('http://group.example.com:3000/group.example.com') }
+ end
- it { is_expected.to eq("http://group.example.com:3000/Project") }
+ context 'when in dev mode' do
+ before do
+ stub_rails_env('development')
end
+
+ it { is_expected.to eq('http://group.example.com:3000') }
end
end
end
end
+ describe '#pages_unique_url', feature_category: :pages do
+ let(:project_settings) { create(:project_setting, pages_unique_domain: 'unique-domain') }
+ let(:project) { build(:project, project_setting: project_settings) }
+ let(:domain) { 'example.com' }
+
+ before do
+ allow(Settings.pages).to receive(:host).and_return(domain)
+ allow(Gitlab.config.pages).to receive(:url).and_return("http://#{domain}")
+ end
+
+ it 'returns the pages unique url' do
+ expect(project.pages_unique_url).to eq('http://unique-domain.example.com')
+ end
+ end
+
describe '#pages_namespace_url', feature_category: :pages do
let(:group) { create(:group, name: group_name) }
let(:project) { create(:project, namespace: group, name: project_name) }
@@ -3565,6 +3652,44 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
+ describe '#beautified_import_status_name' do
+ context 'when import is not finished' do
+ it 'returns the right beautified import status' do
+ project = create(:project, :import_started)
+
+ expect(project.beautified_import_status_name).to eq('started')
+ end
+ end
+
+ context 'when import is finished' do
+ context 'when import is partially completed' do
+ it 'returns partially completed' do
+ project = create(:project)
+
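+ # the mismatch between fetched and imported label counts below is what yields 'partially completed'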
+ create(:import_state, project: project, status: 'finished', checksums: {
+ 'fetched' => { 'labels' => 10 },
+ 'imported' => { 'labels' => 9 }
+ })
+
+ expect(project.beautified_import_status_name).to eq('partially completed')
+ end
+ end
+
+ context 'when import is fully completed' do
+ it 'returns completed' do
+ project = create(:project)
+
+ create(:import_state, project: project, status: 'finished', checksums: {
+ 'fetched' => { 'labels' => 10 },
+ 'imported' => { 'labels' => 10 }
+ })
+
+ expect(project.beautified_import_status_name).to eq('completed')
+ end
+ end
+ end
+ end
+
describe '#add_import_job' do
let(:import_jid) { '123' }
@@ -3843,21 +3968,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
describe '#ancestors' do
- context 'with linear_project_ancestors feature flag enabled' do
- before do
- stub_feature_flags(linear_project_ancestors: true)
- end
-
- include_examples '#ancestors'
- end
-
- context 'with linear_project_ancestors feature flag disabled' do
- before do
- stub_feature_flags(linear_project_ancestors: false)
- end
-
- include_examples '#ancestors'
- end
+ include_examples '#ancestors'
end
describe '#ancestors_upto' do
@@ -4643,52 +4754,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
- describe '#pages_url' do
- let(:group) { create(:group, name: 'Group') }
- let(:nested_group) { create(:group, parent: group) }
- let(:domain) { 'Example.com' }
-
- subject { project.pages_url }
-
- before do
- allow(Settings.pages).to receive(:host).and_return(domain)
- allow(Gitlab.config.pages).to receive(:url).and_return('http://example.com')
- end
-
- context 'top-level group' do
- let(:project) { create(:project, namespace: group, name: project_name) }
-
- context 'group page' do
- let(:project_name) { 'group.example.com' }
-
- it { is_expected.to eq("http://group.example.com") }
- end
-
- context 'project page' do
- let(:project_name) { 'Project' }
-
- it { is_expected.to eq("http://group.example.com/project") }
- end
- end
-
- context 'nested group' do
- let(:project) { create(:project, namespace: nested_group, name: project_name) }
- let(:expected_url) { "http://group.example.com/#{nested_group.path}/#{project.path}" }
-
- context 'group page' do
- let(:project_name) { 'group.example.com' }
-
- it { is_expected.to eq(expected_url) }
- end
-
- context 'project page' do
- let(:project_name) { 'Project' }
-
- it { is_expected.to eq(expected_url) }
- end
- end
- end
-
describe '#lfs_http_url_to_repo' do
let(:project) { create(:project) }
@@ -6049,7 +6114,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
it 'executes hooks which were backed off and are no longer backed off' do
project = create(:project)
hook = create(:project_hook, project: project, push_events: true)
- WebHook::FAILURE_THRESHOLD.succ.times { hook.backoff! }
+ WebHooks::AutoDisabling::FAILURE_THRESHOLD.succ.times { hook.backoff! }
expect_any_instance_of(ProjectHook).to receive(:async_execute).once
@@ -7305,20 +7370,6 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
- describe 'with integrations and chat names' do
- subject { create(:project) }
-
- let(:integration) { create(:integration, project: subject) }
-
- before do
- create_list(:chat_name, 5, integration: integration)
- end
-
- it 'does not remove chat names on removal' do
- expect { subject.destroy! }.not_to change { ChatName.count }
- end
- end
-
describe 'with_issues_or_mrs_available_for_user' do
before do
Project.delete_all
@@ -8842,6 +8893,32 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do
end
end
+ describe 'deprecated project attributes' do
+ where(:project_attr, :project_method, :project_feature_attr) do
+ :wiki_enabled | :wiki_enabled? | :wiki_access_level
+ :builds_enabled | :builds_enabled? | :builds_access_level
+ :merge_requests_enabled | :merge_requests_enabled? | :merge_requests_access_level
+ :issues_enabled | :issues_enabled? | :issues_access_level
+ :snippets_enabled | :snippets_enabled? | :snippets_access_level
+ end
+
+ with_them do
+ it 'delegates the attributes to project feature' do
+ project = Project.new(project_attr => false)
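+ # the legacy boolean attribute maps onto the project feature access level (false => DISABLED)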
+
+ expect(project.public_send(project_method)).to eq(false)
+ expect(project.project_feature.public_send(project_feature_attr)).to eq(ProjectFeature::DISABLED)
+ end
+
+ it 'sets the default value' do
+ project = Project.new
+
+ expect(project.public_send(project_method)).to eq(true)
+ expect(project.project_feature.public_send(project_feature_attr)).to eq(ProjectFeature::ENABLED)
+ end
+ end
+ end
+
private
def finish_job(export_job)
diff --git a/spec/models/projects/data_transfer_spec.rb b/spec/models/projects/data_transfer_spec.rb
index 6d3ddbdd74e..ab798185bbb 100644
--- a/spec/models/projects/data_transfer_spec.rb
+++ b/spec/models/projects/data_transfer_spec.rb
@@ -7,6 +7,12 @@ RSpec.describe Projects::DataTransfer, feature_category: :source_code_management
it { expect(subject).to be_valid }
+ # tests DataTransferCounterAttribute with the appropriate attributes
+ it_behaves_like CounterAttribute,
+ %i[repository_egress artifacts_egress packages_egress registry_egress] do
+ let(:model) { create(:project_data_transfer, project: project) }
+ end
+
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:namespace) }
diff --git a/spec/models/projects/forks/divergence_counts_spec.rb b/spec/models/projects/forks/details_spec.rb
index fd69cc0f3e7..4c0a2e3453a 100644
--- a/spec/models/projects/forks/divergence_counts_spec.rb
+++ b/spec/models/projects/forks/details_spec.rb
@@ -2,24 +2,29 @@
require 'spec_helper'
-RSpec.describe Projects::Forks::DivergenceCounts, feature_category: :source_code_management do
+RSpec.describe Projects::Forks::Details, feature_category: :source_code_management do
+ include ExclusiveLeaseHelpers
include ProjectForksHelper
let_it_be(:user) { create(:user) }
+ let_it_be(:source_repo) { create(:project, :repository, :public).repository }
+ let_it_be(:fork_repo) { fork_project(source_repo.project, user, { repository: true }).repository }
- describe '#counts', :use_clean_rails_redis_caching do
- let(:source_repo) { create(:project, :repository, :public).repository }
- let(:fork_repo) { fork_project(source_repo.project, user, { repository: true }).repository }
- let(:fork_branch) { 'fork-branch' }
- let(:cache_key) { ['project_forks', fork_repo.project.id, fork_branch, 'divergence_counts'] }
+ let(:fork_branch) { 'fork-branch' }
+ let(:cache_key) { ['project_fork_details', fork_repo.project.id, fork_branch].join(':') }
+ describe '#counts', :use_clean_rails_redis_caching do
def expect_cached_counts(value)
counts = described_class.new(fork_repo.project, fork_branch).counts
ahead, behind = value
expect(counts).to eq({ ahead: ahead, behind: behind })
- cached_value = [source_repo.commit.sha, fork_repo.commit(fork_branch).sha, value]
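+ # the cache entry is now a hash of the source and fork SHAs plus the counts (previously a flat array)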
+ cached_value = {
+ source_sha: source_repo.commit.sha,
+ sha: fork_repo.commit(fork_branch).sha,
+ counts: value
+ }
expect(Rails.cache.read(cache_key)).to eq(cached_value)
end
@@ -72,6 +77,9 @@ RSpec.describe Projects::Forks::DivergenceCounts, feature_category: :source_code
end
context 'when counts calculated from a branch that exists upstream' do
+ let_it_be(:source_repo) { create(:project, :repository, :public).repository }
+ let_it_be(:fork_repo) { fork_project(source_repo.project, user, { repository: true }).repository }
+
let(:fork_branch) { 'feature' }
it 'compares the fork branch to upstream default branch' do
@@ -94,5 +102,61 @@ RSpec.describe Projects::Forks::DivergenceCounts, feature_category: :source_code
expect_cached_counts([2, 30])
end
end
+
+ context 'when specified branch does not exist' do
+ it 'returns nils as counts' do
+ counts = described_class.new(fork_repo.project, 'non-existent-branch').counts
+ expect(counts).to eq({ ahead: nil, behind: nil })
+ end
+ end
+ end
+
+ describe '#update!', :use_clean_rails_redis_caching do
+ it 'updates the cache with the specified value' do
+ value = { source_sha: source_repo.commit.sha, sha: fork_repo.commit.sha, counts: [0, 0], has_conflicts: true }
+
+ described_class.new(fork_repo.project, fork_branch).update!(value)
+
+ expect(Rails.cache.read(cache_key)).to eq(value)
+ end
+ end
+
+ describe '#has_conflicts?', :use_clean_rails_redis_caching do
+ it 'returns whether merge for the stored commits failed due to conflicts' do
+ details = described_class.new(fork_repo.project, fork_branch)
+
+ expect do
+ value = { source_sha: source_repo.commit.sha, sha: fork_repo.commit.sha, counts: [0, 0], has_conflicts: true }
+
+ details.update!(value)
+ end.to change { details.has_conflicts? }.from(false).to(true)
+ end
+ end
+
+ describe '#exclusive_lease' do
+ it 'returns exclusive lease to the details' do
+ key = ['project_details', fork_repo.project.id, fork_branch].join(':')
+ uuid = SecureRandom.uuid
+ details = described_class.new(fork_repo.project, fork_branch)
+
+ expect(Gitlab::ExclusiveLease).to receive(:get_uuid).with(key).and_return(uuid)
+ expect(Gitlab::ExclusiveLease).to receive(:new).with(
+ key, uuid: uuid, timeout: described_class::LEASE_TIMEOUT
+ ).and_call_original
+
+ expect(details.exclusive_lease).to be_a(Gitlab::ExclusiveLease)
+ end
+ end
+
+ describe '#syncing?', :use_clean_rails_redis_caching do
+ it 'returns whether there is a sync in progress' do
+ details = described_class.new(fork_repo.project, fork_branch)
+
+ expect(details.exclusive_lease.try_obtain).to be_present
+ expect(details.syncing?).to eq(true)
+
+ details.exclusive_lease.cancel
+ expect(details.syncing?).to eq(false)
+ end
end
end
diff --git a/spec/models/projects/import_export/relation_export_spec.rb b/spec/models/projects/import_export/relation_export_spec.rb
index 8643fbc7b46..c724f30250d 100644
--- a/spec/models/projects/import_export/relation_export_spec.rb
+++ b/spec/models/projects/import_export/relation_export_spec.rb
@@ -2,9 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::ImportExport::RelationExport, type: :model do
- subject { create(:project_relation_export) }
-
+RSpec.describe Projects::ImportExport::RelationExport, type: :model, feature_category: :importers do
describe 'associations' do
it { is_expected.to belong_to(:project_export_job) }
it { is_expected.to have_one(:upload) }
@@ -13,12 +11,16 @@ RSpec.describe Projects::ImportExport::RelationExport, type: :model do
describe 'validations' do
it { is_expected.to validate_presence_of(:project_export_job) }
it { is_expected.to validate_presence_of(:relation) }
- it { is_expected.to validate_uniqueness_of(:relation).scoped_to(:project_export_job_id) }
it { is_expected.to validate_presence_of(:status) }
it { is_expected.to validate_numericality_of(:status).only_integer }
it { is_expected.to validate_length_of(:relation).is_at_most(255) }
it { is_expected.to validate_length_of(:jid).is_at_most(255) }
it { is_expected.to validate_length_of(:export_error).is_at_most(300) }
+
+ it 'validates uniqueness of the relation attribute' do
+ create(:project_relation_export)
+ expect(subject).to validate_uniqueness_of(:relation).scoped_to(:project_export_job_id)
+ end
end
describe '.by_relation' do
@@ -52,4 +54,16 @@ RSpec.describe Projects::ImportExport::RelationExport, type: :model do
expect(described_class.relation_names_list).not_to include('events', 'notes')
end
end
+
+ describe '#mark_as_failed' do
+ it 'sets status to failed and sets the export error', :aggregate_failures do
+ relation_export = create(:project_relation_export)
+
+ relation_export.mark_as_failed("Error message")
+ relation_export.reload
+
+ expect(relation_export.failed?).to eq(true)
+ expect(relation_export.export_error).to eq("Error message")
+ end
+ end
end
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 71e22f848cc..c99c92e6c19 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProtectedBranch do
+RSpec.describe ProtectedBranch, feature_category: :source_code_management do
subject { build_stubbed(:protected_branch) }
describe 'Associations' do
@@ -214,85 +214,52 @@ RSpec.describe ProtectedBranch do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:protected_branch) { create(:protected_branch, project: project, name: "“jawn”") }
- let(:rely_on_new_cache) { true }
-
- shared_examples_for 'hash based cache implementation' do
- it 'calls only hash based cache implementation' do
- expect_next_instance_of(ProtectedBranches::CacheService) do |instance|
- expect(instance).to receive(:fetch).with('missing-branch', anything).and_call_original
- end
-
- expect(Rails.cache).not_to receive(:fetch)
-
- described_class.protected?(project, 'missing-branch')
- end
- end
-
before do
- stub_feature_flags(rely_on_protected_branches_cache: rely_on_new_cache)
allow(described_class).to receive(:matching).and_call_original
# the original call works and warms the cache
described_class.protected?(project, protected_branch.name)
end
- context 'Dry-run: true (rely_on_protected_branches_cache is off, new hash-based is used)' do
- let(:rely_on_new_cache) { false }
+ it 'correctly invalidates a cache' do
+ expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).exactly(3).times.and_call_original
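+ # each create/update/destroy service call below should invalidate the cache and force a fresh #matching lookup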
- it 'recalculates a fresh value every time in order to check the cache is not returning stale data' do
- expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).twice
+ create_params = { name: 'bar', merge_access_levels_attributes: [{ access_level: Gitlab::Access::DEVELOPER }] }
+ branch = ProtectedBranches::CreateService.new(project, project.owner, create_params).execute
+ expect(described_class.protected?(project, protected_branch.name)).to eq(true)
- 2.times { described_class.protected?(project, protected_branch.name) }
- end
+ ProtectedBranches::UpdateService.new(project, project.owner, name: 'ber').execute(branch)
+ expect(described_class.protected?(project, protected_branch.name)).to eq(true)
- it_behaves_like 'hash based cache implementation'
+ ProtectedBranches::DestroyService.new(project, project.owner).execute(branch)
+ expect(described_class.protected?(project, protected_branch.name)).to eq(true)
end
- context 'Dry-run: false (rely_on_protected_branches_cache is enabled, new hash-based cache is used)' do
- let(:rely_on_new_cache) { true }
-
- it 'correctly invalidates a cache' do
- expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).exactly(3).times.and_call_original
-
- create_params = { name: 'bar', merge_access_levels_attributes: [{ access_level: Gitlab::Access::DEVELOPER }] }
- branch = ProtectedBranches::CreateService.new(project, project.owner, create_params).execute
- expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ context 'when project is updated' do
+ it 'does not invalidate a cache' do
+ expect(described_class).not_to receive(:matching).with(protected_branch.name, protected_refs: anything)
- ProtectedBranches::UpdateService.new(project, project.owner, name: 'ber').execute(branch)
- expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ project.touch
- ProtectedBranches::DestroyService.new(project, project.owner).execute(branch)
- expect(described_class.protected?(project, protected_branch.name)).to eq(true)
- end
-
- it_behaves_like 'hash based cache implementation'
-
- context 'when project is updated' do
- it 'does not invalidate a cache' do
- expect(described_class).not_to receive(:matching).with(protected_branch.name, protected_refs: anything)
-
- project.touch
-
- described_class.protected?(project, protected_branch.name)
- end
+ described_class.protected?(project, protected_branch.name)
end
+ end
- context 'when other project protected branch is updated' do
- it 'does not invalidate the current project cache' do
- expect(described_class).not_to receive(:matching).with(protected_branch.name, protected_refs: anything)
+ context 'when other project protected branch is updated' do
+ it 'does not invalidate the current project cache' do
+ expect(described_class).not_to receive(:matching).with(protected_branch.name, protected_refs: anything)
- another_project = create(:project)
- ProtectedBranches::CreateService.new(another_project, another_project.owner, name: 'bar').execute
+ another_project = create(:project)
+ ProtectedBranches::CreateService.new(another_project, another_project.owner, name: 'bar').execute
- described_class.protected?(project, protected_branch.name)
- end
+ described_class.protected?(project, protected_branch.name)
end
+ end
- it 'correctly uses the cached version' do
- expect(described_class).not_to receive(:matching)
+ it 'correctly uses the cached version' do
+ expect(described_class).not_to receive(:matching)
- expect(described_class.protected?(project, protected_branch.name)).to eq(true)
- end
+ expect(described_class.protected?(project, protected_branch.name)).to eq(true)
end
end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index b8780b3faae..f970e818db9 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -413,6 +413,27 @@ RSpec.describe Repository, feature_category: :source_code_management do
repository.commits('master', limit: 1)
end
end
+
+ context 'when include_referenced_by is passed' do
+ context 'when commit has references' do
+ let(:ref) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' }
+ let(:include_referenced_by) { ['refs/tags'] }
+
+ subject { repository.commits(ref, limit: 1, include_referenced_by: include_referenced_by).first }
+
+ it 'returns commits whose referenced_by includes only the references matching the patterns' do
+ expect(subject.referenced_by).to match_array(['refs/tags/v1.1.0'])
+ end
+
+ context 'when matching multiple references' do
+ let(:include_referenced_by) { ['refs/tags', 'refs/heads'] }
+
+ it 'returns commits with referenced_by that match the patterns' do
+ expect(subject.referenced_by).to match_array(['refs/tags/v1.1.0', 'refs/heads/improve/awesome', 'refs/heads/merge-test'])
+ end
+ end
+ end
+ end
end
context "when 'author' is set" do
@@ -682,6 +703,14 @@ RSpec.describe Repository, feature_category: :source_code_management do
it { is_expected.to be_nil }
end
+
+ context 'when root reference is empty' do
+ subject { empty_repo.merged_to_root_ref?('master') }
+
+ let(:empty_repo) { build(:project, :empty_repo).repository }
+
+ it { is_expected.to be_nil }
+ end
end
describe "#root_ref_sha" do
@@ -690,7 +719,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
subject { repository.root_ref_sha }
before do
- allow(repository).to receive(:commit).with(repository.root_ref) { commit }
+ allow(repository).to receive(:head_commit) { commit }
end
it { is_expected.to eq(commit.sha) }
@@ -1020,8 +1049,34 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
end
+ describe "#move_dir_files" do
+ it 'moves directory files successfully' do
+ expect do
+ repository.move_dir_files(user, 'files/new_js', 'files/js',
+ branch_name: 'master',
+ message: 'move directory files/js to files/new_js',
+ author_email: author_email,
+ author_name: author_name)
+ end.to change { repository.count_commits(ref: 'master') }.by(1)
+ files = repository.ls_files('master')
+
+ expect(files).not_to include('files/js/application.js')
+ expect(files).to include('files/new_js/application.js')
+ end
+
+ it 'skips the commit when the source and target paths are the same' do
+ expect do
+ repository.move_dir_files(user, 'files/js', 'files/js',
+ branch_name: 'master',
+ message: 'no commit',
+ author_email: author_email,
+ author_name: author_name)
+ end.to change { repository.count_commits(ref: 'master') }.by(0)
+ end
+ end
+
describe "#delete_file" do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
it 'removes file successfully' do
expect do
@@ -1422,46 +1477,38 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
end
- [true, false].each do |ff|
- context "with feature flag license_from_gitaly=#{ff}" do
- before do
- stub_feature_flags(license_from_gitaly: ff)
- end
-
- describe '#license', :use_clean_rails_memory_store_caching, :clean_gitlab_redis_cache do
- let(:project) { create(:project, :repository) }
+ describe '#license', :use_clean_rails_memory_store_caching, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, :repository) }
- before do
- repository.delete_file(user, 'LICENSE',
- message: 'Remove LICENSE', branch_name: 'master')
- end
+ before do
+ repository.delete_file(user, 'LICENSE',
+ message: 'Remove LICENSE', branch_name: 'master')
+ end
- it 'returns nil when no license is detected' do
- expect(repository.license).to be_nil
- end
+ it 'returns nil when no license is detected' do
+ expect(repository.license).to be_nil
+ end
- it 'returns nil when the repository does not exist' do
- expect(repository).to receive(:exists?).and_return(false)
+ it 'returns nil when the repository does not exist' do
+ expect(repository).to receive(:exists?).and_return(false)
- expect(repository.license).to be_nil
- end
+ expect(repository.license).to be_nil
+ end
- it 'returns other when the content is not recognizable' do
- repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
- message: 'Add LICENSE', branch_name: 'master')
+ it 'returns other when the content is not recognizable' do
+ repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
+ message: 'Add LICENSE', branch_name: 'master')
- expect(repository.license_key).to eq('other')
- end
+ expect(repository.license_key).to eq('other')
+ end
- it 'returns the license' do
- license = Licensee::License.new('mit')
- repository.create_file(user, 'LICENSE',
- license.content,
- message: 'Add LICENSE', branch_name: 'master')
+ it 'returns the license' do
+ license = Licensee::License.new('mit')
+ repository.create_file(user, 'LICENSE',
+ license.content,
+ message: 'Add LICENSE', branch_name: 'master')
- expect(repository.license_key).to eq(license.key)
- end
- end
+ expect(repository.license_key).to eq(license.key)
end
end
@@ -2302,7 +2349,6 @@ RSpec.describe Repository, feature_category: :source_code_management do
:contribution_guide,
:changelog,
:license_blob,
- :license_licensee,
:license_gitaly,
:gitignore,
:gitlab_ci_yml,
@@ -2957,11 +3003,10 @@ RSpec.describe Repository, feature_category: :source_code_management do
describe '#refresh_method_caches' do
it 'refreshes the caches of the given types' do
expect(repository).to receive(:expire_method_caches)
- .with(%i(readme_path license_blob license_licensee license_gitaly))
+ .with(%i(readme_path license_blob license_gitaly))
expect(repository).to receive(:readme_path)
expect(repository).to receive(:license_blob)
- expect(repository).to receive(:license_licensee)
expect(repository).to receive(:license_gitaly)
repository.refresh_method_caches(%i(readme license))
diff --git a/spec/models/serverless/domain_cluster_spec.rb b/spec/models/serverless/domain_cluster_spec.rb
deleted file mode 100644
index 487385c62c1..00000000000
--- a/spec/models/serverless/domain_cluster_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Serverless::DomainCluster do
- subject { create(:serverless_domain_cluster) }
-
- describe 'default values' do
- subject(:domain_cluster) { build(:serverless_domain_cluster) }
-
- before do
- allow(::Serverless::Domain).to receive(:generate_uuid).and_return('randomtoken')
- end
-
- it { expect(domain_cluster.uuid).to eq('randomtoken') }
- end
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:pages_domain) }
- it { is_expected.to validate_presence_of(:knative) }
-
- it { is_expected.to validate_presence_of(:uuid) }
- it { is_expected.to validate_length_of(:uuid).is_equal_to(::Serverless::Domain::UUID_LENGTH) }
- it { is_expected.to validate_uniqueness_of(:uuid) }
-
- it 'validates that uuid has only hex characters' do
- subject = build(:serverless_domain_cluster, uuid: 'z1234567890123')
- subject.valid?
-
- expect(subject.errors[:uuid]).to include('only allows hex characters')
- end
- end
-
- describe 'associations' do
- it { is_expected.to belong_to(:pages_domain) }
- it { is_expected.to belong_to(:knative) }
- it { is_expected.to belong_to(:creator).optional }
- end
-
- describe 'uuid' do
- context 'when nil' do
- it 'generates a value by default' do
- attributes = build(:serverless_domain_cluster).attributes.merge(uuid: nil)
- expect(::Serverless::Domain).to receive(:generate_uuid).and_call_original
-
- subject = Serverless::DomainCluster.new(attributes)
-
- expect(subject.uuid).not_to be_blank
- end
- end
-
- context 'when not nil' do
- it 'does not override the existing value' do
- uuid = 'abcd1234567890'
- expect(build(:serverless_domain_cluster, uuid: uuid).uuid).to eq(uuid)
- end
- end
- end
-
- describe 'cluster' do
- it { is_expected.to respond_to(:cluster) }
- end
-
- describe 'domain' do
- it { is_expected.to respond_to(:domain) }
- end
-
- describe 'certificate' do
- it { is_expected.to respond_to(:certificate) }
- end
-
- describe 'key' do
- it { is_expected.to respond_to(:key) }
- end
-end
diff --git a/spec/models/serverless/domain_spec.rb b/spec/models/serverless/domain_spec.rb
deleted file mode 100644
index f997b28b149..00000000000
--- a/spec/models/serverless/domain_spec.rb
+++ /dev/null
@@ -1,97 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Serverless::Domain do
- let(:function_name) { 'test-function' }
- let(:pages_domain_name) { 'serverless.gitlab.io' }
- let(:pages_domain) { create(:pages_domain, :instance_serverless, domain: pages_domain_name) }
- let!(:serverless_domain_cluster) { create(:serverless_domain_cluster, uuid: 'abcdef12345678', pages_domain: pages_domain) }
- let(:valid_cluster_uuid) { 'aba1cdef123456f278' }
- let(:invalid_cluster_uuid) { 'aba1cdef123456f178' }
- let!(:environment) { create(:environment, name: 'test') }
-
- let(:valid_uri) { "https://#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
- let(:valid_fqdn) { "#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
- let(:invalid_uri) { "https://#{function_name}-#{invalid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
-
- shared_examples 'a valid Domain' do
- describe '#uri' do
- it 'matches valid URI' do
- expect(subject.uri.to_s).to eq valid_uri
- end
- end
-
- describe '#function_name' do
- it 'returns function_name' do
- expect(subject.function_name).to eq function_name
- end
- end
-
- describe '#serverless_domain_cluster' do
- it 'returns serverless_domain_cluster' do
- expect(subject.serverless_domain_cluster).to eq serverless_domain_cluster
- end
- end
-
- describe '#environment' do
- it 'returns environment' do
- expect(subject.environment).to eq environment
- end
- end
- end
-
- describe '.new' do
- context 'with valid arguments' do
- subject do
- described_class.new(
- function_name: function_name,
- serverless_domain_cluster: serverless_domain_cluster,
- environment: environment
- )
- end
-
- it_behaves_like 'a valid Domain'
- end
-
- context 'with invalid arguments' do
- subject do
- described_class.new(
- function_name: function_name,
- environment: environment
- )
- end
-
- it { is_expected.not_to be_valid }
- end
-
- context 'with nil cluster argument' do
- subject do
- described_class.new(
- function_name: function_name,
- serverless_domain_cluster: nil,
- environment: environment
- )
- end
-
- it { is_expected.not_to be_valid }
- end
- end
-
- describe '.generate_uuid' do
- it 'has 14 characters' do
- expect(described_class.generate_uuid.length).to eq(described_class::UUID_LENGTH)
- end
-
- it 'consists of only hexadecimal characters' do
- expect(described_class.generate_uuid).to match(/\A\h+\z/)
- end
-
- it 'uses random characters' do
- uuid = 'abcd1234567890'
-
- expect(SecureRandom).to receive(:hex).with(described_class::UUID_LENGTH / 2).and_return(uuid)
- expect(described_class.generate_uuid).to eq(uuid)
- end
- end
-end
diff --git a/spec/models/serverless/function_spec.rb b/spec/models/serverless/function_spec.rb
deleted file mode 100644
index 632f5eba5c3..00000000000
--- a/spec/models/serverless/function_spec.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ::Serverless::Function do
- let(:project) { create(:project) }
- let(:func) { described_class.new(project, 'test', 'test-ns') }
-
- it 'has a proper id' do
- expect(func.id).to eql("#{project.id}/test/test-ns")
- expect(func.name).to eql("test")
- expect(func.namespace).to eql("test-ns")
- end
-
- it 'can decode an identifier' do
- f = described_class.find_by_id("#{project.id}/testfunc/dummy-ns")
-
- expect(f.name).to eql("testfunc")
- expect(f.namespace).to eql("dummy-ns")
- end
-end
diff --git a/spec/models/service_desk/custom_email_verification_spec.rb b/spec/models/service_desk/custom_email_verification_spec.rb
new file mode 100644
index 00000000000..f0a6028f21d
--- /dev/null
+++ b/spec/models/service_desk/custom_email_verification_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ServiceDesk::CustomEmailVerification, feature_category: :service_desk do
+ let(:user) { build_stubbed(:user) }
+ let(:project) { build_stubbed(:project) }
+ let(:verification) { build_stubbed(:service_desk_custom_email_verification, project: project) }
+ let(:token) { 'XXXXXXXXXXXX' }
+
+ describe '.generate_token' do
+ it 'matches expected output' do
+ expect(described_class.generate_token).to match(/\A\p{Alnum}{12}\z/)
+ end
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:state) }
+ end
+
+ describe '#accepted_until' do
+ context 'when no custom email is set up' do
+ it 'returns nil' do
+ expect(subject.accepted_until).to be_nil
+ end
+ end
+
+ context 'when custom email is set up' do
+ subject { verification.accepted_until }
+
+ it { is_expected.to be_nil }
+
+ context 'when verification process started' do
+ let(:triggered_at) { 2.minutes.ago }
+
+ before do
+ verification.assign_attributes(
+ state: "running",
+ triggered_at: triggered_at,
+ triggerer: user,
+ token: token
+ )
+ end
+
+ it { is_expected.to eq(described_class::TIMEFRAME.since(triggered_at)) }
+ end
+ end
+ end
+
+ describe '#in_timeframe?' do
+ context 'when no custom email is set up' do
+ it 'returns false' do
+ expect(subject).not_to be_in_timeframe
+ end
+ end
+
+ context 'when custom email is set up' do
+ it { is_expected.not_to be_in_timeframe }
+
+ context 'when verification process started' do
+ let(:triggered_at) { 1.second.ago }
+
+ before do
+ subject.assign_attributes(
+ state: "running",
+ triggered_at: triggered_at,
+ triggerer: user,
+ token: token
+ )
+ end
+
+ it { is_expected.to be_in_timeframe }
+
+ context 'and timeframe was missed' do
+ let(:triggered_at) { (described_class::TIMEFRAME + 1).ago }
+
+ before do
+ subject.triggered_at = triggered_at
+ end
+
+ it { is_expected.not_to be_in_timeframe }
+ end
+ end
+ end
+ end
+
+ describe 'encrypted #token' do
+ subject { build_stubbed(:service_desk_custom_email_verification, token: token) }
+
+ it 'saves and retrieves the encrypted token and iv correctly' do
+ expect(subject.encrypted_token).not_to be_nil
+ expect(subject.encrypted_token_iv).not_to be_nil
+
+ expect(subject.token).to eq(token)
+ end
+ end
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:triggerer) }
+
+ it 'can access service desk setting from project' do
+ setting = build_stubbed(:service_desk_setting, project: project)
+
+ expect(verification.service_desk_setting).to eq(setting)
+ end
+ end
+end
diff --git a/spec/models/service_desk_setting_spec.rb b/spec/models/service_desk_setting_spec.rb
index 32c36375a3d..b99494e6736 100644
--- a/spec/models/service_desk_setting_spec.rb
+++ b/spec/models/service_desk_setting_spec.rb
@@ -3,6 +3,9 @@
require 'spec_helper'
RSpec.describe ServiceDeskSetting, feature_category: :service_desk do
+ let(:verification) { build(:service_desk_custom_email_verification) }
+ let(:project) { build(:project) }
+
describe 'validations' do
subject(:service_desk_setting) { create(:service_desk_setting) }
@@ -23,6 +26,8 @@ RSpec.describe ServiceDeskSetting, feature_category: :service_desk do
context 'when custom_email_enabled is true' do
before do
+ # Test without ServiceDesk::CustomEmailVerification for simplicity
+ # See dedicated simplified tests below
subject.custom_email_enabled = true
end
@@ -55,7 +60,18 @@ RSpec.describe ServiceDeskSetting, feature_category: :service_desk do
it { is_expected.not_to allow_value('/example').for(:custom_email_smtp_address) }
end
- describe '.valid_issue_template' do
+ context 'when custom email verification is present/was triggered' do
+ before do
+ subject.project.service_desk_custom_email_verification = verification
+ end
+
+ it { is_expected.to validate_presence_of(:custom_email) }
+ it { is_expected.to validate_presence_of(:custom_email_smtp_username) }
+ it { is_expected.to validate_presence_of(:custom_email_smtp_port) }
+ it { is_expected.to validate_presence_of(:custom_email_smtp_address) }
+ end
+
+ describe '#valid_issue_template' do
let_it_be(:project) { create(:project, :custom_repo, files: { '.gitlab/issue_templates/service_desk.md' => 'template' }) }
it 'is not valid if template does not exist' do
@@ -73,7 +89,20 @@ RSpec.describe ServiceDeskSetting, feature_category: :service_desk do
end
end
- describe '.valid_project_key' do
+ describe '#custom_email_address_for_verification' do
+ it 'returns nil' do
+ expect(subject.custom_email_address_for_verification).to be_nil
+ end
+
+ context 'when custom_email exists' do
+ it 'returns correct verification address' do
+ subject.custom_email = 'support@example.com'
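+ # the verification address is the custom email with a '+verify' sub-address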
+ expect(subject.custom_email_address_for_verification).to eq('support+verify@example.com')
+ end
+ end
+ end
+
+ describe '#valid_project_key' do
# Creates two projects with same full path slug
# group1/test/one and group1/test-one will both have 'group-test-one' slug
let_it_be(:group) { create(:group) }
@@ -109,15 +138,15 @@ RSpec.describe ServiceDeskSetting, feature_category: :service_desk do
end
end
- describe 'encrypted password' do
+ describe 'encrypted #custom_email_smtp_password' do
let_it_be(:settings) do
create(
:service_desk_setting,
custom_email_enabled: true,
- custom_email: 'supersupport@example.com',
+ custom_email: 'support@example.com',
custom_email_smtp_address: 'smtp.example.com',
custom_email_smtp_port: 587,
- custom_email_smtp_username: 'supersupport@example.com',
+ custom_email_smtp_username: 'support@example.com',
custom_email_smtp_password: 'supersecret'
)
end
@@ -131,6 +160,24 @@ RSpec.describe ServiceDeskSetting, feature_category: :service_desk do
end
describe 'associations' do
+ let(:custom_email_settings) do
+ build_stubbed(
+ :service_desk_setting,
+ custom_email: 'support@example.com',
+ custom_email_smtp_address: 'smtp.example.com',
+ custom_email_smtp_port: 587,
+ custom_email_smtp_username: 'support@example.com',
+ custom_email_smtp_password: 'supersecret'
+ )
+ end
+
it { is_expected.to belong_to(:project) }
+
+ it 'can access custom email verification from project' do
+ project.service_desk_custom_email_verification = verification
+ custom_email_settings.project = project
+
+ expect(custom_email_settings.custom_email_verification).to eq(verification)
+ end
end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index e87667d9604..04f1bffce0a 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -174,6 +174,11 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to have_many(:revoked_user_achievements).class_name('Achievements::UserAchievement').with_foreign_key('revoked_by_user_id').inverse_of(:revoked_by_user) }
it { is_expected.to have_many(:achievements).through(:user_achievements).class_name('Achievements::Achievement').inverse_of(:users) }
it { is_expected.to have_many(:namespace_commit_emails).class_name('Users::NamespaceCommitEmail') }
+ it { is_expected.to have_many(:audit_events).with_foreign_key(:author_id).inverse_of(:user) }
+
+ it do
+ is_expected.to have_many(:alert_assignees).class_name('::AlertManagement::AlertAssignee').inverse_of(:assignee)
+ end
describe 'default values' do
let(:user) { described_class.new }
@@ -188,6 +193,7 @@ RSpec.describe User, feature_category: :user_profile do
it { expect(user.notified_of_own_activity).to be_falsey }
it { expect(user.preferred_language).to eq(Gitlab::CurrentSettings.default_preferred_language) }
it { expect(user.theme_id).to eq(described_class.gitlab_config.default_theme) }
+ it { expect(user.color_scheme_id).to eq(Gitlab::CurrentSettings.default_syntax_highlighting_theme) }
end
describe '#user_detail' do
@@ -474,6 +480,37 @@ RSpec.describe User, feature_category: :user_profile do
expect(user).to be_valid
end
end
+
+ context 'namespace_move_dir_allowed' do
+ context 'when the user is not a new record' do
+ before do
+ expect(user.new_record?).to eq(false)
+ end
+
+ it 'checks when username changes' do
+ expect(user).to receive(:namespace_move_dir_allowed)
+
+ user.username = 'newuser'
+ user.validate
+ end
+
+ it 'does not check when the username has not changed' do
+ expect(user).not_to receive(:namespace_move_dir_allowed)
+ expect(user.username_changed?).to eq(false)
+
+ user.validate
+ end
+ end
+
+ it 'does not check if the user is a new record' do
+ user = User.new(username: 'newuser')
+
+ expect(user.new_record?).to eq(true)
+ expect(user).not_to receive(:namespace_move_dir_allowed)
+
+ user.validate
+ end
+ end
end
describe 'name' do
@@ -978,10 +1015,6 @@ RSpec.describe User, feature_category: :user_profile do
end
end
- describe 'and U2F' do
- it_behaves_like "returns the right users", :two_factor_via_u2f
- end
-
describe 'and WebAuthn' do
it_behaves_like "returns the right users", :two_factor_via_webauthn
end
@@ -997,26 +1030,6 @@ RSpec.describe User, feature_category: :user_profile do
expect(users_without_two_factor).not_to include(user_with_2fa.id)
end
- describe 'and u2f' do
- it 'excludes users with 2fa enabled via U2F' do
- user_with_2fa = create(:user, :two_factor_via_u2f)
- user_without_2fa = create(:user)
- users_without_two_factor = described_class.without_two_factor.pluck(:id)
-
- expect(users_without_two_factor).to include(user_without_2fa.id)
- expect(users_without_two_factor).not_to include(user_with_2fa.id)
- end
-
- it 'excludes users with 2fa enabled via OTP and U2F' do
- user_with_2fa = create(:user, :two_factor_via_otp, :two_factor_via_u2f)
- user_without_2fa = create(:user)
- users_without_two_factor = described_class.without_two_factor.pluck(:id)
-
- expect(users_without_two_factor).to include(user_without_2fa.id)
- expect(users_without_two_factor).not_to include(user_with_2fa.id)
- end
- end
-
describe 'and webauthn' do
it 'excludes users with 2fa enabled via WebAuthn' do
user_with_2fa = create(:user, :two_factor_via_webauthn)
@@ -2174,6 +2187,24 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ context 'Duo Auth' do
+ context 'when enabled via GitLab settings' do
+ before do
+ allow(::Gitlab.config.duo_auth).to receive(:enabled).and_return(true)
+ end
+
+ it { expect(user.two_factor_otp_enabled?).to eq(true) }
+ end
+
+ context 'when disabled via GitLab settings' do
+ before do
+ allow(::Gitlab.config.duo_auth).to receive(:enabled).and_return(false)
+ end
+
+ it { expect(user.two_factor_otp_enabled?).to eq(false) }
+ end
+ end
+
context 'FortiTokenCloud' do
context 'when enabled via GitLab settings' do
before do
@@ -2207,54 +2238,6 @@ RSpec.describe User, feature_category: :user_profile do
end
end
- context 'two_factor_u2f_enabled?' do
- let_it_be(:user) { create(:user, :two_factor) }
-
- context 'when webauthn feature flag is enabled' do
- context 'user has no U2F registration' do
- it { expect(user.two_factor_u2f_enabled?).to eq(false) }
- end
-
- context 'user has existing U2F registration' do
- it 'returns false' do
- device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
- create(:u2f_registration,
- name: 'my u2f device',
- user: user,
- certificate: Base64.strict_encode64(device.cert_raw),
- key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
- public_key: Base64.strict_encode64(device.origin_public_key_raw))
-
- expect(user.two_factor_u2f_enabled?).to eq(false)
- end
- end
- end
-
- context 'when webauthn feature flag is disabled' do
- before do
- stub_feature_flags(webauthn: false)
- end
-
- context 'user has no U2F registration' do
- it { expect(user.two_factor_u2f_enabled?).to eq(false) }
- end
-
- context 'user has existing U2F registration' do
- it 'returns true' do
- device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
- create(:u2f_registration,
- name: 'my u2f device',
- user: user,
- certificate: Base64.strict_encode64(device.cert_raw),
- key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
- public_key: Base64.strict_encode64(device.origin_public_key_raw))
-
- expect(user.two_factor_u2f_enabled?).to eq(true)
- end
- end
- end
- end
-
describe 'needs_new_otp_secret?', :freeze_time do
let(:user) { create(:user) }
@@ -2396,14 +2379,6 @@ RSpec.describe User, feature_category: :user_profile do
end
it_behaves_like 'manageable groups examples'
-
- context 'when feature flag :linear_user_manageable_groups is disabled' do
- before do
- stub_feature_flags(linear_user_manageable_groups: false)
- end
-
- it_behaves_like 'manageable groups examples'
- end
end
end
end
@@ -7104,43 +7079,105 @@ RSpec.describe User, feature_category: :user_profile do
context 'when user is confirmed' do
let(:user) { create(:user) }
- it 'is falsey' do
- expect(user.confirmed?).to be_truthy
- expect(subject).to be_falsey
+ it 'is false' do
+ expect(user.confirmed?).to be(true)
+ expect(subject).to be(false)
end
end
context 'when user is not confirmed' do
let_it_be(:user) { build_stubbed(:user, :unconfirmed, confirmation_sent_at: Time.current) }
- it 'is truthy when soft_email_confirmation feature is disabled' do
- stub_feature_flags(soft_email_confirmation: false)
- expect(subject).to be_truthy
+ context 'when email confirmation setting is set to `off`' do
+ before do
+ stub_application_setting_enum('email_confirmation_setting', 'off')
+ end
+
+ it { is_expected.to be(false) }
end
- context 'when soft_email_confirmation feature is enabled' do
+ context 'when email confirmation setting is set to `soft`' do
before do
- stub_feature_flags(soft_email_confirmation: true)
+ stub_application_setting_enum('email_confirmation_setting', 'soft')
end
- it 'is falsey when confirmation period is valid' do
- expect(subject).to be_falsey
+ context 'when confirmation period is valid' do
+ it { is_expected.to be(false) }
end
- it 'is truthy when confirmation period is expired' do
- travel_to(User.allow_unconfirmed_access_for.from_now + 1.day) do
- expect(subject).to be_truthy
+ context 'when confirmation period is expired' do
+ before do
+ travel_to(User.allow_unconfirmed_access_for.from_now + 1.day)
end
+
+ it { is_expected.to be(true) }
end
context 'when user has no confirmation email sent' do
let(:user) { build(:user, :unconfirmed, confirmation_sent_at: nil) }
- it 'is truthy' do
- expect(subject).to be_truthy
- end
+ it { is_expected.to be(true) }
end
end
+
+ context 'when email confirmation setting is set to `hard`' do
+ before do
+ stub_application_setting_enum('email_confirmation_setting', 'hard')
+ end
+
+ it { is_expected.to be(true) }
+ end
+ end
+ end
+
+ describe '#confirmation_period_valid?' do
+ subject { user.send(:confirmation_period_valid?) }
+
+ let_it_be(:user) { create(:user) }
+
+ context 'when email confirmation setting is set to `off`' do
+ before do
+ stub_feature_flags(soft_email_confirmation: false)
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when email confirmation setting is set to `soft`' do
+ before do
+ stub_application_setting_enum('email_confirmation_setting', 'soft')
+ end
+
+ context 'when within confirmation window' do
+ before do
+ user.update!(confirmation_sent_at: Date.today)
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when outside confirmation window' do
+ before do
+ user.update!(confirmation_sent_at: Date.today - described_class.confirm_within - 7.days)
+ end
+
+ it { is_expected.to be(false) }
+ end
+ end
+
+ context 'when email confirmation setting is set to `hard`' do
+ before do
+ stub_feature_flags(soft_email_confirmation: false)
+ stub_application_setting_enum('email_confirmation_setting', 'hard')
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ describe '#in_confirmation_period?' do
+ it 'is expected to be an alias' do
+ expect(user.method(:in_confirmation_period?).original_name).to eq(:confirmation_period_valid?)
+ end
end
end
diff --git a/spec/models/wiki_directory_spec.rb b/spec/models/wiki_directory_spec.rb
index 1b177934ace..c30e79f79ce 100644
--- a/spec/models/wiki_directory_spec.rb
+++ b/spec/models/wiki_directory_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe WikiDirectory do
let_it_be(:toplevel1) { build(:wiki_page, title: 'aaa-toplevel1') }
let_it_be(:toplevel2) { build(:wiki_page, title: 'zzz-toplevel2') }
let_it_be(:toplevel3) { build(:wiki_page, title: 'zzz-toplevel3') }
+ let_it_be(:home) { build(:wiki_page, title: 'home') }
+ let_it_be(:homechild) { build(:wiki_page, title: 'Home/homechild') }
let_it_be(:parent1) { build(:wiki_page, title: 'parent1') }
let_it_be(:parent2) { build(:wiki_page, title: 'parent2') }
let_it_be(:child1) { build(:wiki_page, title: 'parent1/child1') }
@@ -24,13 +26,18 @@ RSpec.describe WikiDirectory do
it 'returns a nested array of entries' do
entries = described_class.group_pages(
- [toplevel1, toplevel2, toplevel3,
+ [toplevel1, toplevel2, toplevel3, home, homechild,
parent1, parent2, child1, child2, child3,
subparent, grandchild1, grandchild2].sort_by(&:title)
)
expect(entries).to match(
[
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'Home', entries: [homechild]
+ )
+ ),
toplevel1,
a_kind_of(WikiDirectory).and(
having_attributes(
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 21da06a222f..efade74688a 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -588,6 +588,20 @@ RSpec.describe WikiPage do
expect(page.content).to eq new_content
end
+ context 'when a page is combined with a directory' do
+ it 'moves both the page file and the directory' do
+ wiki.create_page('testpage/testtitle', 'content')
+ wiki.create_page('testpage', 'content')
+
+ page = wiki.find_page('testpage')
+ page.update(title: 'testfolder/testpage')
+
+ page = wiki.find_page('testfolder/testpage/testtitle')
+
+ expect(page.slug).to eq 'testfolder/testpage/testtitle'
+ end
+ end
+
describe 'in subdir' do
it 'moves the page to the root folder if the title is preceded by /' do
page = create_wiki_page(container, title: 'foo/Existing Page')
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 6aacaa3c119..13f17e11276 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -163,6 +163,30 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
end
end
+ describe 'transform_quick_action_params' do
+ let(:work_item) { build(:work_item, :task) }
+
+ subject(:transformed_params) do
+ work_item.transform_quick_action_params({
+ title: 'bar',
+ assignee_ids: ['foo']
+ })
+ end
+
+ it 'correctly separates widget params from regular params' do
+ expect(transformed_params).to eq({
+ common: {
+ title: 'bar'
+ },
+ widgets: {
+ assignees_widget: {
+ assignee_ids: ['foo']
+ }
+ }
+ })
+ end
+ end
+
describe 'callbacks' do
describe 'record_create_action' do
it 'records the creation action after saving' do
diff --git a/spec/models/work_items/widget_definition_spec.rb b/spec/models/work_items/widget_definition_spec.rb
index 08f8f4d9663..3a4670c996f 100644
--- a/spec/models/work_items/widget_definition_spec.rb
+++ b/spec/models/work_items/widget_definition_spec.rb
@@ -11,7 +11,8 @@ RSpec.describe WorkItems::WidgetDefinition, feature_category: :team_planning do
::WorkItems::Widgets::Assignees,
::WorkItems::Widgets::StartAndDueDate,
::WorkItems::Widgets::Milestone,
- ::WorkItems::Widgets::Notes
+ ::WorkItems::Widgets::Notes,
+ ::WorkItems::Widgets::Notifications
]
if Gitlab.ee?
diff --git a/spec/models/work_items/widgets/notifications_spec.rb b/spec/models/work_items/widgets/notifications_spec.rb
new file mode 100644
index 00000000000..2942c149660
--- /dev/null
+++ b/spec/models/work_items/widgets/notifications_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Notifications, feature_category: :team_planning do
+ let_it_be(:work_item) { create(:work_item) }
+
+ describe '.type' do
+ it { expect(described_class.type).to eq(:notifications) }
+ end
+
+ describe '#type' do
+ it { expect(described_class.new(work_item).type).to eq(:notifications) }
+ end
+
+ describe '#subscribed?' do
+ it { expect(described_class.new(work_item).subscribed?(work_item.author, work_item.project)).to eq(true) }
+ end
+end