author     GitLab Bot <gitlab-bot@gitlab.com>  2022-06-20 11:10:13 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-06-20 11:10:13 +0000
commit     0ea3fcec397b69815975647f5e2aa5fe944a8486
tree       7979381b89d26011bcf9bdc989a40fcc2f1ed4ff
parent     72123183a20411a36d607d70b12d57c484394c8e
download   gitlab-ce-0ea3fcec397b69815975647f5e2aa5fe944a8486.tar.gz

Add latest changes from gitlab-org/gitlab@15-1-stable-ee (tag: v15.1.0-rc42)

Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/application_setting_spec.rb | 43
-rw-r--r--  spec/models/bulk_imports/entity_spec.rb | 26
-rw-r--r--  spec/models/bulk_imports/export_status_spec.rb | 99
-rw-r--r--  spec/models/bulk_imports/file_transfer/project_config_spec.rb | 2
-rw-r--r--  spec/models/bulk_imports/tracker_spec.rb | 2
-rw-r--r--  spec/models/ci/bridge_spec.rb | 33
-rw-r--r--  spec/models/ci/build_spec.rb | 266
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 40
-rw-r--r--  spec/models/ci/namespace_mirror_spec.rb | 5
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 178
-rw-r--r--  spec/models/ci/runner_spec.rb | 62
-rw-r--r--  spec/models/ci/secure_file_spec.rb | 15
-rw-r--r--  spec/models/ci/sources/pipeline_spec.rb | 2
-rw-r--r--  spec/models/clusters/agent_spec.rb | 53
-rw-r--r--  spec/models/clusters/cluster_enabled_grant_spec.rb | 7
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 4
-rw-r--r--  spec/models/clusters/integrations/prometheus_spec.rb | 38
-rw-r--r--  spec/models/commit_signatures/gpg_signature_spec.rb | 87
-rw-r--r--  spec/models/commit_signatures/ssh_signature_spec.rb | 38
-rw-r--r--  spec/models/commit_signatures/x509_commit_signature_spec.rb | 28
-rw-r--r--  spec/models/commit_spec.rb | 10
-rw-r--r--  spec/models/compare_spec.rb | 15
-rw-r--r--  spec/models/concerns/as_cte_spec.rb | 40
-rw-r--r--  spec/models/concerns/cache_markdown_field_spec.rb | 10
-rw-r--r--  spec/models/concerns/ci/artifactable_spec.rb | 4
-rw-r--r--  spec/models/concerns/integrations/has_data_fields_spec.rb | 88
-rw-r--r--  spec/models/concerns/issuable_spec.rb | 8
-rw-r--r--  spec/models/concerns/limitable_spec.rb | 4
-rw-r--r--  spec/models/concerns/pg_full_text_searchable_spec.rb | 11
-rw-r--r--  spec/models/concerns/project_features_compatibility_spec.rb | 2
-rw-r--r--  spec/models/concerns/sensitive_serializable_hash_spec.rb | 26
-rw-r--r--  spec/models/container_registry/event_spec.rb | 14
-rw-r--r--  spec/models/container_repository_spec.rb | 14
-rw-r--r--  spec/models/customer_relations/contact_spec.rb | 95
-rw-r--r--  spec/models/customer_relations/organization_spec.rb | 79
-rw-r--r--  spec/models/data_list_spec.rb | 2
-rw-r--r--  spec/models/deployment_cluster_spec.rb | 7
-rw-r--r--  spec/models/deployment_spec.rb | 283
-rw-r--r--  spec/models/environment_spec.rb | 52
-rw-r--r--  spec/models/error_tracking/error_event_spec.rb | 44
-rw-r--r--  spec/models/factories_spec.rb | 134
-rw-r--r--  spec/models/group_spec.rb | 106
-rw-r--r--  spec/models/hooks/project_hook_spec.rb | 9
-rw-r--r--  spec/models/hooks/service_hook_spec.rb | 8
-rw-r--r--  spec/models/hooks/system_hook_spec.rb | 8
-rw-r--r--  spec/models/hooks/web_hook_log_spec.rb | 56
-rw-r--r--  spec/models/hooks/web_hook_spec.rb | 96
-rw-r--r--  spec/models/integration_spec.rb | 99
-rw-r--r--  spec/models/integrations/campfire_spec.rb | 4
-rw-r--r--  spec/models/integrations/field_spec.rb | 21
-rw-r--r--  spec/models/integrations/harbor_spec.rb | 10
-rw-r--r--  spec/models/integrations/irker_spec.rb | 36
-rw-r--r--  spec/models/integrations/issue_tracker_data_spec.rb | 8
-rw-r--r--  spec/models/integrations/jira_spec.rb | 55
-rw-r--r--  spec/models/integrations/jira_tracker_data_spec.rb | 8
-rw-r--r--  spec/models/integrations/prometheus_spec.rb | 11
-rw-r--r--  spec/models/integrations/zentao_tracker_data_spec.rb | 6
-rw-r--r--  spec/models/issue_spec.rb | 68
-rw-r--r--  spec/models/key_spec.rb | 151
-rw-r--r--  spec/models/members/group_member_spec.rb | 141
-rw-r--r--  spec/models/members/last_group_owner_assigner_spec.rb | 13
-rw-r--r--  spec/models/members/project_member_spec.rb | 24
-rw-r--r--  spec/models/merge_request/cleanup_schedule_spec.rb | 35
-rw-r--r--  spec/models/merge_request_diff_file_spec.rb | 8
-rw-r--r--  spec/models/merge_request_spec.rb | 216
-rw-r--r--  spec/models/milestone_spec.rb | 6
-rw-r--r--  spec/models/namespace/root_storage_statistics_spec.rb | 13
-rw-r--r--  spec/models/namespace_setting_spec.rb | 1
-rw-r--r--  spec/models/namespace_spec.rb | 258
-rw-r--r--  spec/models/packages/cleanup/policy_spec.rb | 2
-rw-r--r--  spec/models/packages/package_spec.rb | 20
-rw-r--r--  spec/models/plan_limits_spec.rb | 3
-rw-r--r--  spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb | 13
-rw-r--r--  spec/models/project_feature_spec.rb | 50
-rw-r--r--  spec/models/project_group_link_spec.rb | 6
-rw-r--r--  spec/models/project_spec.rb | 231
-rw-r--r--  spec/models/project_statistics_spec.rb | 26
-rw-r--r--  spec/models/projects/build_artifacts_size_refresh_spec.rb | 59
-rw-r--r--  spec/models/protected_tag_spec.rb | 52
-rw-r--r--  spec/models/release_spec.rb | 26
-rw-r--r--  spec/models/remote_mirror_spec.rb | 2
-rw-r--r--  spec/models/repository_spec.rb | 47
-rw-r--r--  spec/models/route_spec.rb | 51
-rw-r--r--  spec/models/terraform/state_spec.rb | 18
-rw-r--r--  spec/models/terraform/state_version_spec.rb | 1
-rw-r--r--  spec/models/time_tracking/timelog_category_spec.rb | 59
-rw-r--r--  spec/models/user_spec.rb | 33
-rw-r--r--  spec/models/users/callout_spec.rb | 12
-rw-r--r--  spec/models/work_item_spec.rb | 31
-rw-r--r--  spec/models/work_items/parent_link_spec.rb | 66
-rw-r--r--  spec/models/work_items/type_spec.rb | 11
-rw-r--r--  spec/models/work_items/widgets/base_spec.rb | 19
-rw-r--r--  spec/models/work_items/widgets/description_spec.rb | 25
-rw-r--r--  spec/models/work_items/widgets/hierarchy_spec.rb | 52
94 files changed, 3306 insertions, 1054 deletions
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 20cd96e831c..61f008416ea 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -85,12 +85,15 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:container_registry_import_max_step_duration).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_pre_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to validate_numericality_of(:container_registry_pre_import_tags_rate).is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_tags_count) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_retries) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_start_max_retries) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_step_duration) }
it { is_expected.not_to allow_value(nil).for(:container_registry_pre_import_timeout) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_timeout) }
+ it { is_expected.not_to allow_value(nil).for(:container_registry_pre_import_tags_rate) }
+ it { is_expected.to allow_value(1.5).for(:container_registry_pre_import_tags_rate) }
it { is_expected.to validate_presence_of(:container_registry_import_target_plan) }
it { is_expected.to validate_presence_of(:container_registry_import_created_before) }
@@ -551,11 +554,45 @@ RSpec.describe ApplicationSetting do
it { is_expected.to allow_value(*KeyRestrictionValidator.supported_key_restrictions(type)).for(field) }
it { is_expected.not_to allow_value(128).for(field) }
end
+ end
+ end
- it_behaves_like 'key validations'
+ describe '#ensure_key_restrictions!' do
+ context 'with non-compliant FIPS settings' do
+ before do
+ setting.update_columns(
+ rsa_key_restriction: 1024,
+ dsa_key_restriction: 0,
+ ecdsa_key_restriction: 521,
+ ed25519_key_restriction: -1,
+ ecdsa_sk_key_restriction: 0,
+ ed25519_sk_key_restriction: 0
+ )
+ end
- context 'FIPS mode', :fips_mode do
- it_behaves_like 'key validations'
+ context 'in non-FIPS mode', fips_mode: false do
+ it 'keeps existing key restrictions' do
+ expect { setting.ensure_key_restrictions! }.not_to change { setting.valid? }
+ expect(setting).to be_valid
+ expect(setting.rsa_key_restriction).to eq(1024)
+ expect(setting.dsa_key_restriction).to eq(0)
+ expect(setting.ecdsa_key_restriction).to eq(521)
+ expect(setting.ed25519_key_restriction).to eq(-1)
+ expect(setting.ecdsa_sk_key_restriction).to eq(0)
+ expect(setting.ed25519_sk_key_restriction).to eq(0)
+ end
+ end
+
+ context 'in FIPS mode', :fips_mode do
+ it 'updates key restrictions to meet FIPS compliance' do
+ expect { setting.ensure_key_restrictions! }.to change { setting.valid? }.from(false).to(true)
+ expect(setting.rsa_key_restriction).to eq(3072)
+ expect(setting.dsa_key_restriction).to eq(-1)
+ expect(setting.ecdsa_key_restriction).to eq(521)
+ expect(setting.ed25519_key_restriction).to eq(-1)
+ expect(setting.ecdsa_sk_key_restriction).to eq(256)
+ expect(setting.ed25519_sk_key_restriction).to eq(256)
+ end
end
end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 6f6a7c9bcd8..874009d552a 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe BulkImports::Entity, type: :model do
it 'returns group pipelines' do
entity = build(:bulk_import_entity, :group_entity)
- expect(entity.pipelines.flatten).to include(BulkImports::Groups::Pipelines::GroupPipeline)
+ expect(entity.pipelines.collect { _1[:pipeline] }).to include(BulkImports::Groups::Pipelines::GroupPipeline)
end
end
@@ -168,29 +168,7 @@ RSpec.describe BulkImports::Entity, type: :model do
it 'returns project pipelines' do
entity = build(:bulk_import_entity, :project_entity)
- expect(entity.pipelines.flatten).to include(BulkImports::Projects::Pipelines::ProjectPipeline)
- end
- end
- end
-
- describe '#create_pipeline_trackers!' do
- context 'when entity is group' do
- it 'creates trackers for group entity' do
- entity = create(:bulk_import_entity, :group_entity)
- entity.create_pipeline_trackers!
-
- expect(entity.trackers.count).to eq(BulkImports::Groups::Stage.new(entity).pipelines.count)
- expect(entity.trackers.map(&:pipeline_name)).to include(BulkImports::Groups::Pipelines::GroupPipeline.to_s)
- end
- end
-
- context 'when entity is project' do
- it 'creates trackers for project entity' do
- entity = create(:bulk_import_entity, :project_entity)
- entity.create_pipeline_trackers!
-
- expect(entity.trackers.count).to eq(BulkImports::Projects::Stage.new(entity).pipelines.count)
- expect(entity.trackers.map(&:pipeline_name)).to include(BulkImports::Projects::Pipelines::ProjectPipeline.to_s)
+ expect(entity.pipelines.collect { _1[:pipeline] }).to include(BulkImports::Projects::Pipelines::ProjectPipeline)
end
end
end
diff --git a/spec/models/bulk_imports/export_status_spec.rb b/spec/models/bulk_imports/export_status_spec.rb
index 79ed6b39358..6ade82409dc 100644
--- a/spec/models/bulk_imports/export_status_spec.rb
+++ b/spec/models/bulk_imports/export_status_spec.rb
@@ -10,11 +10,9 @@ RSpec.describe BulkImports::ExportStatus do
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:response_double) do
- double(parsed_response: [{ 'relation' => 'labels', 'status' => status, 'error' => 'error!' }])
- end
-
- let(:invalid_response_double) do
- double(parsed_response: [{ 'relation' => 'not_a_real_relation', 'status' => status, 'error' => 'error!' }])
+ instance_double(HTTParty::Response,
+ parsed_response: [{ 'relation' => 'labels', 'status' => status, 'error' => 'error!' }]
+ )
end
subject { described_class.new(tracker, relation) }
@@ -40,22 +38,34 @@ RSpec.describe BulkImports::ExportStatus do
it 'returns false' do
expect(subject.started?).to eq(false)
end
+ end
- context 'when returned relation is invalid' do
- before do
- allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
- allow(client).to receive(:get).and_return(invalid_response_double)
- end
- end
+ context 'when export status is not present' do
+ let(:response_double) do
+ instance_double(HTTParty::Response, parsed_response: [])
+ end
- it 'returns false' do
- expect(subject.started?).to eq(false)
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
+ end
+ end
+
+ context 'when something goes wrong during export status fetch' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).and_raise(
+ BulkImports::NetworkError.new("Unsuccessful response", response: nil)
+ )
end
end
+
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
+ end
end
end
- describe '#failed' do
+ describe '#failed?' do
context 'when export status is failed' do
let(:status) { BulkImports::Export::FAILED }
@@ -74,12 +84,67 @@ RSpec.describe BulkImports::ExportStatus do
context 'when export status is not present' do
let(:response_double) do
- double(parsed_response: [])
+ instance_double(HTTParty::Response, parsed_response: [])
+ end
+
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
+ end
+ end
+
+ context 'when something goes wrong during export status fetch' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).and_raise(
+ BulkImports::NetworkError.new("Unsuccessful response", response: nil)
+ )
+ end
+ end
+
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
+ end
+ end
+ end
+
+ describe '#empty?' do
+ context 'when export status is present' do
+ let(:status) { 'any status' }
+
+ it { expect(subject.empty?).to eq(false) }
+ end
+
+ context 'when export status is not present' do
+ let(:response_double) do
+ instance_double(HTTParty::Response, parsed_response: [])
end
it 'returns true' do
- expect(subject.failed?).to eq(true)
- expect(subject.error).to eq('Empty relation export status')
+ expect(subject.empty?).to eq(true)
+ end
+ end
+
+ context 'when export status is empty' do
+ let(:response_double) do
+ instance_double(HTTParty::Response, parsed_response: nil)
+ end
+
+ it 'returns true' do
+ expect(subject.empty?).to eq(true)
+ end
+ end
+
+ context 'when something goes wrong during export status fetch' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).and_raise(
+ BulkImports::NetworkError.new("Unsuccessful response", response: nil)
+ )
+ end
+ end
+
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
end
end
end
diff --git a/spec/models/bulk_imports/file_transfer/project_config_spec.rb b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
index 61caff647d6..0f02c5c546f 100644
--- a/spec/models/bulk_imports/file_transfer/project_config_spec.rb
+++ b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe BulkImports::FileTransfer::ProjectConfig do
describe '#file_relations' do
it 'returns project file relations' do
- expect(subject.file_relations).to contain_exactly('uploads', 'lfs_objects')
+ expect(subject.file_relations).to contain_exactly('uploads', 'lfs_objects', 'repository', 'design')
end
end
end
diff --git a/spec/models/bulk_imports/tracker_spec.rb b/spec/models/bulk_imports/tracker_spec.rb
index 0b6f692a477..1aa76d4dadd 100644
--- a/spec/models/bulk_imports/tracker_spec.rb
+++ b/spec/models/bulk_imports/tracker_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe BulkImports::Tracker, type: :model do
describe '#pipeline_class' do
it 'returns the pipeline class' do
entity = create(:bulk_import_entity)
- pipeline_class = BulkImports::Groups::Stage.new(entity).pipelines.first[1]
+ pipeline_class = BulkImports::Groups::Stage.new(entity).pipelines.first[:pipeline]
tracker = create(:bulk_import_tracker, pipeline_name: pipeline_class)
expect(tracker.pipeline_class).to eq(pipeline_class)
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 6409ea9fc3d..cb29cce554f 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -259,25 +259,16 @@ RSpec.describe Ci::Bridge do
context 'forward variables' do
using RSpec::Parameterized::TableSyntax
- where(:yaml_variables, :pipeline_variables, :ff, :variables) do
- nil | nil | true | %w[BRIDGE]
- nil | false | true | %w[BRIDGE]
- nil | true | true | %w[BRIDGE PVAR1]
- false | nil | true | %w[]
- false | false | true | %w[]
- false | true | true | %w[PVAR1]
- true | nil | true | %w[BRIDGE]
- true | false | true | %w[BRIDGE]
- true | true | true | %w[BRIDGE PVAR1]
- nil | nil | false | %w[BRIDGE]
- nil | false | false | %w[BRIDGE]
- nil | true | false | %w[BRIDGE]
- false | nil | false | %w[BRIDGE]
- false | false | false | %w[BRIDGE]
- false | true | false | %w[BRIDGE]
- true | nil | false | %w[BRIDGE]
- true | false | false | %w[BRIDGE]
- true | true | false | %w[BRIDGE]
+ where(:yaml_variables, :pipeline_variables, :variables) do
+ nil | nil | %w[BRIDGE]
+ nil | false | %w[BRIDGE]
+ nil | true | %w[BRIDGE PVAR1]
+ false | nil | %w[]
+ false | false | %w[]
+ false | true | %w[PVAR1]
+ true | nil | %w[BRIDGE]
+ true | false | %w[BRIDGE]
+ true | true | %w[BRIDGE PVAR1]
end
with_them do
@@ -292,10 +283,6 @@ RSpec.describe Ci::Bridge do
}
end
- before do
- stub_feature_flags(ci_trigger_forward_variables: ff)
- end
-
it 'returns variables according to the forward value' do
expect(bridge.downstream_variables.map { |v| v[:key] }).to contain_exactly(*variables)
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index dcf6915a01e..6ad6bb16eb5 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -294,31 +294,28 @@ RSpec.describe Ci::Build do
end
end
- describe '.with_reports' do
- subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
+ describe '.with_artifacts' do
+ subject(:builds) { described_class.with_artifacts(artifact_scope) }
- context 'when build has a test report' do
- let!(:build) { create(:ci_build, :success, :test_reports) }
+ let(:artifact_scope) { Ci::JobArtifact.where(file_type: 'archive') }
- it 'selects the build' do
- is_expected.to eq([build])
- end
- end
+ let!(:build_1) { create(:ci_build, :artifacts) }
+ let!(:build_2) { create(:ci_build, :codequality_reports) }
+ let!(:build_3) { create(:ci_build, :test_reports) }
+ let!(:build_4) { create(:ci_build, :artifacts) }
- context 'when build does not have test reports' do
- let!(:build) { create(:ci_build, :success, :trace_artifact) }
-
- it 'does not select the build' do
- is_expected.to be_empty
- end
+ it 'returns artifacts matching the given scope' do
+ expect(builds).to contain_exactly(build_1, build_4)
end
- context 'when there are multiple builds with test reports' do
- let!(:builds) { create_list(:ci_build, 5, :success, :test_reports) }
+ context 'when there are multiple builds containing artifacts' do
+ before do
+ create_list(:ci_build, 5, :success, :test_reports)
+ end
it 'does not execute a query for selecting job artifact one by one' do
recorded = ActiveRecord::QueryRecorder.new do
- subject.each do |build|
+ builds.each do |build|
build.job_artifacts.map { |a| a.file.exists? }
end
end
@@ -1367,7 +1364,7 @@ RSpec.describe Ci::Build do
before do
allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- allow(Deployments::HooksWorker).to receive(:perform_async)
+ allow(deployment).to receive(:execute_hooks)
end
it 'has deployments record with created status' do
@@ -1423,7 +1420,7 @@ RSpec.describe Ci::Build do
before do
allow(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
- allow(Deployments::HooksWorker).to receive(:perform_async)
+ allow(deployment).to receive(:execute_hooks)
end
it_behaves_like 'avoid deadlock'
@@ -1509,14 +1506,28 @@ RSpec.describe Ci::Build do
it 'transitions to running and calls webhook' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
subject
end
expect(deployment).to be_running
end
+
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ subject
+ end
+ end
+ end
end
end
end
@@ -1830,6 +1841,27 @@ RSpec.describe Ci::Build do
end
context 'build is erasable' do
+ context 'when project is undergoing stats refresh' do
+ let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
+
+ describe '#erase' do
+ before do
+ allow(build.project).to receive(:refreshing_build_artifacts_size?).and_return(true)
+ end
+
+ it 'logs and continues with deleting the artifacts' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::Build#erase',
+ project_id: build.project.id
+ )
+
+ build.erase
+
+ expect(build.job_artifacts.count).to eq(0)
+ end
+ end
+ end
+
context 'new artifacts' do
let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
@@ -1924,6 +1956,23 @@ RSpec.describe Ci::Build do
expect(build.send("job_artifacts_#{file_type}")).not_to be_nil
end
end
+
+ context 'when the project is undergoing stats refresh' do
+ before do
+ allow(build.project).to receive(:refreshing_build_artifacts_size?).and_return(true)
+ end
+
+ it 'logs and continues with deleting the artifacts' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::Build#erase_erasable_artifacts!',
+ project_id: build.project.id
+ )
+
+ subject
+
+ expect(build.job_artifacts.erasable).to be_empty
+ end
+ end
end
describe '#first_pending' do
@@ -2757,6 +2806,7 @@ RSpec.describe Ci::Build do
{ key: 'CI_PROJECT_ID', value: project.id.to_s, public: true, masked: false },
{ key: 'CI_PROJECT_NAME', value: project.path, public: true, masked: false },
{ key: 'CI_PROJECT_TITLE', value: project.title, public: true, masked: false },
+ { key: 'CI_PROJECT_DESCRIPTION', value: project.description, public: true, masked: false },
{ key: 'CI_PROJECT_PATH', value: project.full_path, public: true, masked: false },
{ key: 'CI_PROJECT_PATH_SLUG', value: project.full_path_slug, public: true, masked: false },
{ key: 'CI_PROJECT_NAMESPACE', value: project.namespace.full_path, public: true, masked: false },
@@ -3486,7 +3536,7 @@ RSpec.describe Ci::Build do
]
end
- context 'when gitlab-deploy-token exists' do
+ context 'when gitlab-deploy-token exists for project' do
before do
project.deploy_tokens << deploy_token
end
@@ -3496,11 +3546,32 @@ RSpec.describe Ci::Build do
end
end
- context 'when gitlab-deploy-token does not exist' do
+ context 'when gitlab-deploy-token does not exist for project' do
it 'does not include deploy token variables' do
expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER'}).to be_nil
expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD'}).to be_nil
end
+
+ context 'when gitlab-deploy-token exists for group' do
+ before do
+ group.deploy_tokens << deploy_token
+ end
+
+ it 'includes deploy token variables' do
+ is_expected.to include(*deploy_token_variables)
+ end
+
+ context 'when the FF ci_variable_for_group_gitlab_deploy_token is disabled' do
+ before do
+ stub_feature_flags(ci_variable_for_group_gitlab_deploy_token: false)
+ end
+
+ it 'does not include deploy token variables' do
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD'}).to be_nil
+ end
+ end
+ end
end
end
@@ -4298,6 +4369,56 @@ RSpec.describe Ci::Build do
end
end
end
+
+ context 'when build is part of parallel build' do
+ let(:build_1) { create(:ci_build, name: 'build 1/2') }
+ let(:test_report) { Gitlab::Ci::Reports::TestReports.new }
+
+ before do
+ build_1.collect_test_reports!(test_report)
+ end
+
+ it 'uses the group name for test suite name' do
+ expect(test_report.test_suites.keys).to contain_exactly('build')
+ end
+
+ context 'when there are more than one parallel builds' do
+ let(:build_2) { create(:ci_build, name: 'build 2/2') }
+
+ before do
+ build_2.collect_test_reports!(test_report)
+ end
+
+ it 'merges the test suite from parallel builds' do
+ expect(test_report.test_suites.keys).to contain_exactly('build')
+ end
+ end
+ end
+
+ context 'when build is part of matrix build' do
+ let(:test_report) { Gitlab::Ci::Reports::TestReports.new }
+ let(:matrix_build_1) { create(:ci_build, :matrix) }
+
+ before do
+ matrix_build_1.collect_test_reports!(test_report)
+ end
+
+ it 'uses the job name for the test suite' do
+ expect(test_report.test_suites.keys).to contain_exactly(matrix_build_1.name)
+ end
+
+ context 'when there are more than one matrix builds' do
+ let(:matrix_build_2) { create(:ci_build, :matrix) }
+
+ before do
+ matrix_build_2.collect_test_reports!(test_report)
+ end
+
+ it 'keeps separate test suites' do
+ expect(test_report.test_suites.keys).to match_array([matrix_build_1.name, matrix_build_2.name])
+ end
+ end
+ end
end
describe '#collect_accessibility_reports!' do
@@ -4355,68 +4476,6 @@ RSpec.describe Ci::Build do
end
end
- describe '#collect_coverage_reports!' do
- subject { build.collect_coverage_reports!(coverage_report) }
-
- let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
-
- it { expect(coverage_report.files).to eq({}) }
-
- context 'when build has a coverage report' do
- context 'when there is a Cobertura coverage report from simplecov-cobertura' do
- before do
- create(:ci_job_artifact, :cobertura, job: build, project: build.project)
- end
-
- it 'parses blobs and add the results to the coverage report' do
- expect { subject }.not_to raise_error
-
- expect(coverage_report.files.keys).to match_array(['app/controllers/abuse_reports_controller.rb'])
- expect(coverage_report.files['app/controllers/abuse_reports_controller.rb'].count).to eq(23)
- end
- end
-
- context 'when there is a Cobertura coverage report from gocov-xml' do
- before do
- create(:ci_job_artifact, :coverage_gocov_xml, job: build, project: build.project)
- end
-
- it 'parses blobs and add the results to the coverage report' do
- expect { subject }.not_to raise_error
-
- expect(coverage_report.files.keys).to match_array(['auth/token.go', 'auth/rpccredentials.go'])
- expect(coverage_report.files['auth/token.go'].count).to eq(49)
- expect(coverage_report.files['auth/rpccredentials.go'].count).to eq(10)
- end
- end
-
- context 'when there is a Cobertura coverage report with class filename paths not relative to project root' do
- before do
- allow(build.project).to receive(:full_path).and_return('root/javademo')
- allow(build.pipeline).to receive(:all_worktree_paths).and_return(['src/main/java/com/example/javademo/User.java'])
-
- create(:ci_job_artifact, :coverage_with_paths_not_relative_to_project_root, job: build, project: build.project)
- end
-
- it 'parses blobs and add the results to the coverage report with corrected paths' do
- expect { subject }.not_to raise_error
-
- expect(coverage_report.files.keys).to match_array(['src/main/java/com/example/javademo/User.java'])
- end
- end
-
- context 'when there is a corrupted Cobertura coverage report' do
- before do
- create(:ci_job_artifact, :coverage_with_corrupted_data, job: build, project: build.project)
- end
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::InvalidLineInformationError)
- end
- end
- end
- end
-
describe '#collect_codequality_reports!' do
subject(:codequality_report) { build.collect_codequality_reports!(Gitlab::Ci::Reports::CodequalityReports.new) }
@@ -4506,6 +4565,18 @@ RSpec.describe Ci::Build do
end
end
+ describe '#each_report' do
+ let(:report_types) { Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES }
+
+ let!(:codequality) { create(:ci_job_artifact, :codequality, job: build) }
+ let!(:coverage) { create(:ci_job_artifact, :coverage_gocov_xml, job: build) }
+ let!(:junit) { create(:ci_job_artifact, :junit, job: build) }
+
+ it 'yields job artifact blob that matches the type' do
+ expect { |b| build.each_report(report_types, &b) }.to yield_with_args(coverage.file_type, String, coverage)
+ end
+ end
+
describe '#report_artifacts' do
subject { build.report_artifacts }
@@ -4947,6 +5018,18 @@ RSpec.describe Ci::Build do
build.execute_hooks
end
+
+ context 'with blocked users' do
+ before do
+ allow(build).to receive(:user) { FactoryBot.build(:user, :blocked) }
+ end
+
+ it 'does not call project.execute_hooks' do
+ expect(build.project).not_to receive(:execute_hooks)
+
+ build.execute_hooks
+ end
+ end
end
context 'without project hooks' do
@@ -5410,6 +5493,19 @@ RSpec.describe Ci::Build do
subject
end
+ context 'with deployment' do
+ let(:environment) { create(:environment) }
+ let(:build) { create(:ci_build, :with_deployment, environment: environment.name, pipeline: pipeline) }
+
+ it 'updates the deployment status', :aggregate_failures do
+ expect(build.deployment).to receive(:sync_status_with).with(build).and_call_original
+
+ subject
+
+ expect(build.deployment.reload.status).to eq("failed")
+ end
+ end
+
context 'with queued builds' do
let(:traits) { [:queued] }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 24265242172..b9cac6c3f99 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -33,10 +33,10 @@ RSpec.describe Ci::JobArtifact do
end
end
- describe '.with_reports' do
+ describe '.all_reports' do
let!(:artifact) { create(:ci_job_artifact, :archive) }
- subject { described_class.with_reports }
+ subject { described_class.all_reports }
it { is_expected.to be_empty }
@@ -302,6 +302,42 @@ RSpec.describe Ci::JobArtifact do
end
end
+ describe '.created_at_before' do
+ it 'returns artifacts' do
+ artifact1 = create(:ci_job_artifact, created_at: 1.day.ago)
+ _artifact2 = create(:ci_job_artifact, created_at: 1.day.from_now)
+
+ expect(described_class.created_at_before(Time.current)).to match_array([artifact1])
+ end
+ end
+
+ describe '.id_before' do
+ it 'returns artifacts' do
+ artifact1 = create(:ci_job_artifact)
+ artifact2 = create(:ci_job_artifact)
+
+ expect(described_class.id_before(artifact2.id)).to match_array([artifact1, artifact2])
+ end
+ end
+
+ describe '.id_after' do
+ it 'returns artifacts' do
+ artifact1 = create(:ci_job_artifact)
+ artifact2 = create(:ci_job_artifact)
+
+ expect(described_class.id_after(artifact1.id)).to match_array([artifact2])
+ end
+ end
+
+ describe '.ordered_by_id' do
+ it 'returns artifacts in asc order' do
+ artifact1 = create(:ci_job_artifact)
+ artifact2 = create(:ci_job_artifact)
+
+ expect(described_class.ordered_by_id).to eq([artifact1, artifact2])
+ end
+ end
+
describe 'callbacks' do
describe '#schedule_background_upload' do
subject { create(:ci_job_artifact, :archive) }
diff --git a/spec/models/ci/namespace_mirror_spec.rb b/spec/models/ci/namespace_mirror_spec.rb
index 9b4e86916b8..3e77c349ccb 100644
--- a/spec/models/ci/namespace_mirror_spec.rb
+++ b/spec/models/ci/namespace_mirror_spec.rb
@@ -151,10 +151,9 @@ RSpec.describe Ci::NamespaceMirror do
it_behaves_like 'changing the middle namespace'
- context 'when the FFs sync_traversal_ids, use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do
+ context 'when the FFs use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do
before do
- stub_feature_flags(sync_traversal_ids: false,
- use_traversal_ids: false,
+ stub_feature_flags(use_traversal_ids: false,
use_traversal_ids_for_ancestors: false)
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 8dc041814fa..31752f300f4 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -73,6 +73,17 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#latest_successful_builds' do
+ it 'has a one to many relationship with its latest successful builds' do
+ _old_build = create(:ci_build, :retried, pipeline: pipeline)
+ _expired_build = create(:ci_build, :expired, pipeline: pipeline)
+ _failed_builds = create_list(:ci_build, 2, :failed, pipeline: pipeline)
+ successful_builds = create_list(:ci_build, 2, :success, pipeline: pipeline)
+
+ expect(pipeline.latest_successful_builds).to contain_exactly(successful_builds.first, successful_builds.second)
+ end
+ end
+
describe '#downloadable_artifacts' do
let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
let_it_be(:downloadable_artifact) { create(:ci_job_artifact, :codequality, job: build) }
@@ -3045,7 +3056,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe 'hooks trigerring' do
- let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
+ let_it_be_with_reload(:pipeline) { create(:ci_empty_pipeline, :created) }
%i[
enqueue
@@ -3065,7 +3076,19 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it 'schedules a new PipelineHooksWorker job' do
expect(PipelineHooksWorker).to receive(:perform_async).with(pipeline.id)
- pipeline.reload.public_send(pipeline_action)
+ pipeline.public_send(pipeline_action)
+ end
+
+ context 'with blocked users' do
+ before do
+ allow(pipeline).to receive(:user) { build(:user, :blocked) }
+ end
+
+ it 'does not schedule a new PipelineHooksWorker job' do
+ expect(PipelineHooksWorker).not_to receive(:perform_async)
+
+ pipeline.public_send(pipeline_action)
+ end
end
end
end
@@ -3625,6 +3648,18 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
pipeline.succeed!
end
end
+
+ context 'when the user is blocked' do
+ before do
+ pipeline.user.block!
+ end
+
+ it 'does not enqueue PipelineNotificationWorker' do
+ expect(PipelineNotificationWorker).not_to receive(:perform_async)
+
+ pipeline.succeed
+ end
+ end
end
context 'with failed pipeline' do
@@ -3645,6 +3680,18 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
pipeline.drop
end
+
+ context 'when the user is blocked' do
+ before do
+ pipeline.user.block!
+ end
+
+ it 'does not enqueue PipelineNotificationWorker' do
+ expect(PipelineNotificationWorker).not_to receive(:perform_async)
+
+ pipeline.drop
+ end
+ end
end
context 'with skipped pipeline' do
@@ -3842,6 +3889,34 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#latest_report_builds_in_self_and_descendants' do
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let_it_be(:grandchild_pipeline) { create(:ci_pipeline, child_of: child_pipeline) }
+
+ it 'returns builds with reports artifacts from pipelines in the hierarcy' do
+ parent_build = create(:ci_build, :test_reports, pipeline: pipeline)
+ child_build = create(:ci_build, :coverage_reports, pipeline: child_pipeline)
+ grandchild_build = create(:ci_build, :codequality_reports, pipeline: grandchild_pipeline)
+
+ expect(pipeline.latest_report_builds_in_self_and_descendants).to contain_exactly(parent_build, child_build, grandchild_build)
+ end
+
+ it 'filters builds by scope' do
+ create(:ci_build, :test_reports, pipeline: pipeline)
+ grandchild_build = create(:ci_build, :codequality_reports, pipeline: grandchild_pipeline)
+
+ expect(pipeline.latest_report_builds_in_self_and_descendants(Ci::JobArtifact.codequality_reports)).to contain_exactly(grandchild_build)
+ end
+
+ it 'only returns builds that are not retried' do
+ create(:ci_build, :codequality_reports, :retried, pipeline: grandchild_pipeline)
+ grandchild_build = create(:ci_build, :codequality_reports, pipeline: grandchild_pipeline)
+
+ expect(pipeline.latest_report_builds_in_self_and_descendants).to contain_exactly(grandchild_build)
+ end
+ end
+
describe '#has_reports?' do
subject { pipeline.has_reports?(Ci::JobArtifact.test_reports) }
@@ -3900,38 +3975,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#can_generate_coverage_reports?' do
- subject { pipeline.can_generate_coverage_reports? }
-
- context 'when pipeline has builds with coverage reports' do
- before do
- create(:ci_build, :coverage_reports, pipeline: pipeline)
- end
-
- context 'when pipeline status is running' do
- let(:pipeline) { create(:ci_pipeline, :running) }
-
- it { expect(subject).to be_falsey }
- end
-
- context 'when pipeline status is success' do
- let(:pipeline) { create(:ci_pipeline, :success) }
-
- it { expect(subject).to be_truthy }
- end
- end
-
- context 'when pipeline does not have builds with coverage reports' do
- before do
- create(:ci_build, :artifacts, pipeline: pipeline)
- end
-
- let(:pipeline) { create(:ci_pipeline, :success) }
-
- it { expect(subject).to be_falsey }
- end
- end
-
describe '#has_codequality_mr_diff_report?' do
subject { pipeline.has_codequality_mr_diff_report? }
@@ -4082,55 +4125,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#coverage_reports' do
- subject { pipeline.coverage_reports }
-
- let_it_be(:pipeline) { create(:ci_pipeline) }
-
- context 'when pipeline has multiple builds with coverage reports' do
- let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline) }
- let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: pipeline) }
-
- before do
- create(:ci_job_artifact, :cobertura, job: build_rspec)
- create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
- end
-
- it 'returns coverage reports with collected data' do
- expect(subject.files.keys).to match_array([
- "auth/token.go",
- "auth/rpccredentials.go",
- "app/controllers/abuse_reports_controller.rb"
- ])
- end
-
- it 'does not execute N+1 queries' do
- single_build_pipeline = create(:ci_empty_pipeline, :created)
- single_rspec = create(:ci_build, :success, name: 'rspec', pipeline: single_build_pipeline)
- create(:ci_job_artifact, :cobertura, job: single_rspec, project: project)
-
- control = ActiveRecord::QueryRecorder.new { single_build_pipeline.coverage_reports }
-
- expect { subject }.not_to exceed_query_limit(control)
- end
-
- context 'when builds are retried' do
- let!(:build_rspec) { create(:ci_build, :retried, :success, name: 'rspec', pipeline: pipeline) }
- let!(:build_golang) { create(:ci_build, :retried, :success, name: 'golang', pipeline: pipeline) }
-
- it 'does not take retried builds into account' do
- expect(subject.files).to eql({})
- end
- end
- end
-
- context 'when pipeline does not have any builds with coverage reports' do
- it 'returns empty coverage reports' do
- expect(subject.files).to eql({})
- end
- end
- end
-
describe '#codequality_reports' do
subject(:codequality_reports) { pipeline.codequality_reports }
@@ -4839,9 +4833,9 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe '#has_expired_test_reports?' do
- subject { pipeline_with_test_report.has_expired_test_reports? }
+ subject { pipeline.has_expired_test_reports? }
- let(:pipeline_with_test_report) { create(:ci_pipeline, :with_test_reports) }
+ let(:pipeline) { create(:ci_pipeline, :success, :with_test_reports) }
context 'when artifacts are not expired' do
it { is_expected.to be_falsey }
@@ -4849,11 +4843,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'when artifacts are expired' do
before do
- pipeline_with_test_report.job_artifacts.first.update!(expire_at: Date.yesterday)
+ pipeline.job_artifacts.first.update!(expire_at: Date.yesterday)
end
it { is_expected.to be_truthy }
end
+
+ context 'when the pipeline is still running' do
+ let(:pipeline) { create(:ci_pipeline, :running) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the pipeline is completed without test reports' do
+ let(:pipeline) { create(:ci_pipeline, :success) }
+
+ it { is_expected.to be_falsey }
+ end
end
it_behaves_like 'it has loose foreign keys' do
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 8a1dcbfbdeb..74d8b012b29 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -1002,8 +1002,11 @@ RSpec.describe Ci::Runner do
describe '#heartbeat' do
let(:runner) { create(:ci_runner, :project) }
let(:executor) { 'shell' }
+ let(:version) { '15.0.1' }
- subject { runner.heartbeat(architecture: '18-bit', config: { gpus: "all" }, executor: executor) }
+ subject(:heartbeat) do
+ runner.heartbeat(architecture: '18-bit', config: { gpus: "all" }, executor: executor, version: version)
+ end
context 'when database was updated recently' do
before do
@@ -1013,7 +1016,7 @@ RSpec.describe Ci::Runner do
it 'updates cache' do
expect_redis_update
- subject
+ heartbeat
end
end
@@ -1047,7 +1050,7 @@ RSpec.describe Ci::Runner do
it 'updates with expected executor type' do
expect_redis_update
- subject
+ heartbeat
expect(runner.reload.read_attribute(:executor_type)).to eq(expected_executor_type)
end
@@ -1059,6 +1062,18 @@ RSpec.describe Ci::Runner do
end
end
end
+
+ context 'with updated version' do
+ before do
+ runner.version = '1.2.3'
+ end
+
+ it 'updates version components with new version' do
+ heartbeat
+
+ expect(runner.reload.read_attribute(:semver)).to eq '15.0.1'
+ end
+ end
end
def expect_redis_update
@@ -1069,10 +1084,11 @@ RSpec.describe Ci::Runner do
end
def does_db_update
- expect { subject }.to change { runner.reload.read_attribute(:contacted_at) }
+ expect { heartbeat }.to change { runner.reload.read_attribute(:contacted_at) }
.and change { runner.reload.read_attribute(:architecture) }
.and change { runner.reload.read_attribute(:config) }
.and change { runner.reload.read_attribute(:executor_type) }
+ .and change { runner.reload.read_attribute(:semver) }
end
end
@@ -1683,4 +1699,42 @@ RSpec.describe Ci::Runner do
end
end
end
+
+ describe '.save' do
+ context 'with initial value' do
+ let(:runner) { create(:ci_runner, version: 'v1.2.3') }
+
+ it 'updates semver column' do
+ expect(runner.semver).to eq '1.2.3'
+ end
+ end
+
+ context 'with no initial version value' do
+ let(:runner) { build(:ci_runner) }
+
+ context 'with version change' do
+ subject(:update_version) { runner.update!(version: new_version) }
+
+ context 'to invalid version' do
+ let(:new_version) { 'invalid version' }
+
+ it 'updates semver column to nil' do
+ update_version
+
+ expect(runner.reload.semver).to be_nil
+ end
+ end
+
+ context 'to v14.10.1' do
+ let(:new_version) { 'v14.10.1' }
+
+ it 'updates semver column' do
+ update_version
+
+ expect(runner.reload.semver).to eq '14.10.1'
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb
index 40ddafad013..a3f1c7b7ef7 100644
--- a/spec/models/ci/secure_file_spec.rb
+++ b/spec/models/ci/secure_file_spec.rb
@@ -48,6 +48,21 @@ RSpec.describe Ci::SecureFile do
end
end
+ describe 'ordered scope' do
+ it 'returns the newest item first' do
+ project = create(:project)
+ file1 = create(:ci_secure_file, created_at: 1.week.ago, project: project)
+ file2 = create(:ci_secure_file, created_at: 2.days.ago, project: project)
+ file3 = create(:ci_secure_file, created_at: 1.day.ago, project: project)
+
+ files = project.secure_files.order_by_created_at
+
+ expect(files[0]).to eq(file3)
+ expect(files[1]).to eq(file2)
+ expect(files[2]).to eq(file1)
+ end
+ end
+
describe '#checksum' do
it 'computes SHA256 checksum on the file before encrypted' do
expect(subject.checksum).to eq(Digest::SHA256.hexdigest(sample_file))
diff --git a/spec/models/ci/sources/pipeline_spec.rb b/spec/models/ci/sources/pipeline_spec.rb
index 73f7cfa739f..732dd5c3df3 100644
--- a/spec/models/ci/sources/pipeline_spec.rb
+++ b/spec/models/ci/sources/pipeline_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Ci::Sources::Pipeline do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:pipeline) }
- it { is_expected.to belong_to(:source_project) }
+ it { is_expected.to belong_to(:source_project).class_name('::Project') }
it { is_expected.to belong_to(:source_job) }
it { is_expected.to belong_to(:source_bridge) }
it { is_expected.to belong_to(:source_pipeline) }
diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb
index f10e0cc8fa7..de67bdb32aa 100644
--- a/spec/models/clusters/agent_spec.rb
+++ b/spec/models/clusters/agent_spec.rb
@@ -7,8 +7,7 @@ RSpec.describe Clusters::Agent do
it { is_expected.to belong_to(:created_by_user).class_name('User').optional }
it { is_expected.to belong_to(:project).class_name('::Project') }
- it { is_expected.to have_many(:agent_tokens).class_name('Clusters::AgentToken') }
- it { is_expected.to have_many(:last_used_agent_tokens).class_name('Clusters::AgentToken') }
+ it { is_expected.to have_many(:agent_tokens).class_name('Clusters::AgentToken').order(Clusters::AgentToken.arel_table[:last_used_at].desc.nulls_last) }
it { is_expected.to have_many(:group_authorizations).class_name('Clusters::Agents::GroupAuthorization') }
it { is_expected.to have_many(:authorized_groups).through(:group_authorizations) }
it { is_expected.to have_many(:project_authorizations).class_name('Clusters::Agents::ProjectAuthorization') }
@@ -41,6 +40,39 @@ RSpec.describe Clusters::Agent do
it { is_expected.to contain_exactly(matching_name) }
end
+
+ describe '.has_vulnerabilities' do
+ let_it_be(:without_vulnerabilities) { create(:cluster_agent, has_vulnerabilities: false) }
+ let_it_be(:with_vulnerabilities) { create(:cluster_agent, has_vulnerabilities: true) }
+
+ context 'when value is not provided' do
+ subject { described_class.has_vulnerabilities }
+
+ it 'returns agents which have vulnerabilities' do
+ is_expected.to contain_exactly(with_vulnerabilities)
+ end
+ end
+
+ context 'when value is provided' do
+ subject { described_class.has_vulnerabilities(value) }
+
+ context 'as true' do
+ let(:value) { true }
+
+ it 'returns agents which have vulnerabilities' do
+ is_expected.to contain_exactly(with_vulnerabilities)
+ end
+ end
+
+ context 'as false' do
+ let(:value) { false }
+
+ it 'returns agents which do not have vulnerabilities' do
+ is_expected.to contain_exactly(without_vulnerabilities)
+ end
+ end
+ end
+ end
end
describe 'validation' do
@@ -117,23 +149,6 @@ RSpec.describe Clusters::Agent do
end
end
- describe '#last_used_agent_tokens' do
- let_it_be(:agent) { create(:cluster_agent) }
-
- subject { agent.last_used_agent_tokens }
-
- context 'agent has no tokens' do
- it { is_expected.to be_empty }
- end
-
- context 'agent has active and inactive tokens' do
- let!(:active_token) { create(:cluster_agent_token, agent: agent, last_used_at: 1.minute.ago) }
- let!(:inactive_token) { create(:cluster_agent_token, agent: agent, last_used_at: 2.hours.ago) }
-
- it { is_expected.to contain_exactly(active_token, inactive_token) }
- end
- end
-
describe '#activity_event_deletion_cutoff' do
let_it_be(:agent) { create(:cluster_agent) }
let_it_be(:event1) { create(:agent_activity_event, agent: agent, recorded_at: 1.hour.ago) }
diff --git a/spec/models/clusters/cluster_enabled_grant_spec.rb b/spec/models/clusters/cluster_enabled_grant_spec.rb
new file mode 100644
index 00000000000..1418d854b41
--- /dev/null
+++ b/spec/models/clusters/cluster_enabled_grant_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::ClusterEnabledGrant do
+ it { is_expected.to belong_to :namespace }
+end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index d61bed80aaa..30591a3ff5d 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -50,6 +50,10 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to respond_to :project }
it { is_expected.to be_namespace_per_environment }
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :cluster }
+ end
+
describe 'applications have inverse_of: :cluster option' do
let(:cluster) { create(:cluster) }
let!(:helm) { create(:clusters_applications_helm, cluster: cluster) }
diff --git a/spec/models/clusters/integrations/prometheus_spec.rb b/spec/models/clusters/integrations/prometheus_spec.rb
index e529c751889..d1e40fffee0 100644
--- a/spec/models/clusters/integrations/prometheus_spec.rb
+++ b/spec/models/clusters/integrations/prometheus_spec.rb
@@ -21,11 +21,24 @@ RSpec.describe Clusters::Integrations::Prometheus do
let(:cluster) { create(:cluster, :with_installed_helm) }
it 'deactivates prometheus_integration' do
- expect(Clusters::Applications::DeactivateServiceWorker)
+ expect(Clusters::Applications::DeactivateIntegrationWorker)
.to receive(:perform_async).with(cluster.id, 'prometheus')
integration.destroy!
end
+
+ context 'when the FF :rename_integrations_workers is disabled' do
+ before do
+ stub_feature_flags(rename_integrations_workers: false)
+ end
+
+ it 'uses the old worker' do
+ expect(Clusters::Applications::DeactivateServiceWorker)
+ .to receive(:perform_async).with(cluster.id, 'prometheus')
+
+ integration.destroy!
+ end
+ end
end
describe 'after_save' do
@@ -38,10 +51,10 @@ RSpec.describe Clusters::Integrations::Prometheus do
it 'does not touch project integrations' do
integration # ensure integration exists before we set the expectations
- expect(Clusters::Applications::DeactivateServiceWorker)
+ expect(Clusters::Applications::DeactivateIntegrationWorker)
.not_to receive(:perform_async)
- expect(Clusters::Applications::ActivateServiceWorker)
+ expect(Clusters::Applications::ActivateIntegrationWorker)
.not_to receive(:perform_async)
integration.update!(enabled: enabled)
@@ -51,19 +64,32 @@ RSpec.describe Clusters::Integrations::Prometheus do
context 'when enabling' do
let(:enabled) { false }
- it 'deactivates prometheus_integration' do
- expect(Clusters::Applications::ActivateServiceWorker)
+ it 'activates prometheus_integration' do
+ expect(Clusters::Applications::ActivateIntegrationWorker)
.to receive(:perform_async).with(cluster.id, 'prometheus')
integration.update!(enabled: true)
end
+
+ context 'when the FF :rename_integrations_workers is disabled' do
+ before do
+ stub_feature_flags(rename_integrations_workers: false)
+ end
+
+ it 'uses the old worker' do
+ expect(Clusters::Applications::ActivateServiceWorker)
+ .to receive(:perform_async).with(cluster.id, 'prometheus')
+
+ integration.update!(enabled: true)
+ end
+ end
end
context 'when disabling' do
let(:enabled) { true }
it 'activates prometheus_integration' do
- expect(Clusters::Applications::DeactivateServiceWorker)
+ expect(Clusters::Applications::DeactivateIntegrationWorker)
.to receive(:perform_async).with(cluster.id, 'prometheus')
integration.update!(enabled: false)
diff --git a/spec/models/commit_signatures/gpg_signature_spec.rb b/spec/models/commit_signatures/gpg_signature_spec.rb
index 9646e974f40..6ae2a202b72 100644
--- a/spec/models/commit_signatures/gpg_signature_spec.rb
+++ b/spec/models/commit_signatures/gpg_signature_spec.rb
@@ -3,17 +3,26 @@
require 'spec_helper'
RSpec.describe CommitSignatures::GpgSignature do
+ # This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
+ # For instructions on how to add more seed data, see the project README
let(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
let!(:project) { create(:project, :repository, path: 'sample-project') }
let!(:commit) { create(:commit, project: project, sha: commit_sha) }
- let(:gpg_signature) { create(:gpg_signature, commit_sha: commit_sha) }
+ let(:signature) { create(:gpg_signature, commit_sha: commit_sha) }
let(:gpg_key) { create(:gpg_key) }
let(:gpg_key_subkey) { create(:gpg_key_subkey) }
+ let(:attributes) do
+ {
+ commit_sha: commit_sha,
+ project: project,
+ gpg_key_primary_keyid: gpg_key.keyid
+ }
+ end
it_behaves_like 'having unique enum values'
+ it_behaves_like 'commit signature'
describe 'associations' do
- it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:gpg_key) }
it { is_expected.to belong_to(:gpg_key_subkey) }
end
@@ -22,104 +31,56 @@ RSpec.describe CommitSignatures::GpgSignature do
subject { described_class.new }
it { is_expected.to validate_presence_of(:commit_sha) }
- it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_presence_of(:gpg_key_primary_keyid) }
end
- describe '.safe_create!' do
- let(:attributes) do
- {
- commit_sha: commit_sha,
- project: project,
- gpg_key_primary_keyid: gpg_key.keyid
- }
- end
-
- it 'finds a signature by commit sha if it existed' do
- gpg_signature
-
- expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(gpg_signature)
- end
-
- it 'creates a new signature if it was not found' do
- expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
- end
-
- it 'assigns the correct attributes when creating' do
- signature = described_class.safe_create!(attributes)
-
- expect(signature.project).to eq(project)
- expect(signature.commit_sha).to eq(commit_sha)
- expect(signature.gpg_key_primary_keyid).to eq(gpg_key.keyid)
- end
-
- it 'does not raise an error in case of a race condition' do
- expect(described_class).to receive(:find_by).and_return(nil, double(described_class, persisted?: true))
-
- expect(described_class).to receive(:create).and_raise(ActiveRecord::RecordNotUnique)
- allow(described_class).to receive(:create).and_call_original
-
- described_class.safe_create!(attributes)
- end
- end
-
describe '.by_commit_sha scope' do
let(:gpg_key) { create(:gpg_key, key: GpgHelpers::User2.public_key) }
let!(:another_gpg_signature) { create(:gpg_signature, gpg_key: gpg_key) }
it 'returns all gpg signatures by sha' do
- expect(described_class.by_commit_sha(commit_sha)).to eq([gpg_signature])
+ expect(described_class.by_commit_sha(commit_sha)).to match_array([signature])
expect(
described_class.by_commit_sha([commit_sha, another_gpg_signature.commit_sha])
- ).to contain_exactly(gpg_signature, another_gpg_signature)
- end
- end
-
- describe '#commit' do
- it 'fetches the commit through the project' do
- expect_next_instance_of(Project) do |instance|
- expect(instance).to receive(:commit).with(commit_sha).and_return(commit)
- end
-
- gpg_signature.commit
+ ).to contain_exactly(signature, another_gpg_signature)
end
end
describe '#gpg_key=' do
it 'supports the assignment of a GpgKey' do
- gpg_signature = create(:gpg_signature, gpg_key: gpg_key)
+ signature = create(:gpg_signature, gpg_key: gpg_key)
- expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKey)
+ expect(signature.gpg_key).to be_an_instance_of(GpgKey)
end
it 'supports the assignment of a GpgKeySubkey' do
- gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
+ signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
- expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
+ expect(signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
end
it 'clears gpg_key and gpg_key_subkey_id when passing nil' do
- gpg_signature.update_attribute(:gpg_key, nil)
+ signature.update_attribute(:gpg_key, nil)
- expect(gpg_signature.gpg_key_id).to be_nil
- expect(gpg_signature.gpg_key_subkey_id).to be_nil
+ expect(signature.gpg_key_id).to be_nil
+ expect(signature.gpg_key_subkey_id).to be_nil
end
end
describe '#gpg_commit' do
context 'when commit does not exist' do
it 'returns nil' do
- allow(gpg_signature).to receive(:commit).and_return(nil)
+ allow(signature).to receive(:commit).and_return(nil)
- expect(gpg_signature.gpg_commit).to be_nil
+ expect(signature.gpg_commit).to be_nil
end
end
context 'when commit exists' do
it 'returns an instance of Gitlab::Gpg::Commit' do
- allow(gpg_signature).to receive(:commit).and_return(commit)
+ allow(signature).to receive(:commit).and_return(commit)
- expect(gpg_signature.gpg_commit).to be_an_instance_of(Gitlab::Gpg::Commit)
+ expect(signature.gpg_commit).to be_an_instance_of(Gitlab::Gpg::Commit)
end
end
end
diff --git a/spec/models/commit_signatures/ssh_signature_spec.rb b/spec/models/commit_signatures/ssh_signature_spec.rb
new file mode 100644
index 00000000000..ac4496e9d8c
--- /dev/null
+++ b/spec/models/commit_signatures/ssh_signature_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CommitSignatures::SshSignature do
+ # This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
+ # For instructions on how to add more seed data, see the project README
+ let(:commit_sha) { '7b5160f9bb23a3d58a0accdbe89da13b96b1ece9' }
+ let!(:project) { create(:project, :repository, path: 'sample-project') }
+ let!(:commit) { create(:commit, project: project, sha: commit_sha) }
+ let(:signature) { create(:ssh_signature, commit_sha: commit_sha) }
+ let(:ssh_key) { create(:ed25519_key_256) }
+ let(:attributes) do
+ {
+ commit_sha: commit_sha,
+ project: project,
+ key: ssh_key
+ }
+ end
+
+ it_behaves_like 'having unique enum values'
+ it_behaves_like 'commit signature'
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:key).required }
+ end
+
+ describe '.by_commit_sha scope' do
+ let!(:another_signature) { create(:ssh_signature, commit_sha: '0000000000000000000000000000000000000001') }
+
+ it 'returns all signatures by sha' do
+ expect(described_class.by_commit_sha(commit_sha)).to match_array([signature])
+ expect(
+ described_class.by_commit_sha([commit_sha, another_signature.commit_sha])
+ ).to contain_exactly(signature, another_signature)
+ end
+ end
+end
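
Both new signature specs lean on the shared 'commit signature' examples. A plausible sketch of what those shared examples cover, reconstructed from the validation and `.safe_create!` specs this diff removes from the per-model files (the actual shared example lives in the spec support files and may differ):

    RSpec.shared_examples 'commit signature' do
      describe 'validation' do
        it { is_expected.to validate_presence_of(:commit_sha) }
        it { is_expected.to validate_presence_of(:project_id) }
      end

      describe '.safe_create!' do
        it 'finds a signature by commit sha if it existed' do
          signature

          expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(signature)
        end

        it 'creates a new signature if it was not found' do
          expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
        end
      end
    end
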
diff --git a/spec/models/commit_signatures/x509_commit_signature_spec.rb b/spec/models/commit_signatures/x509_commit_signature_spec.rb
index 076f209e1b7..beb101cdd89 100644
--- a/spec/models/commit_signatures/x509_commit_signature_spec.rb
+++ b/spec/models/commit_signatures/x509_commit_signature_spec.rb
@@ -3,11 +3,13 @@
require 'spec_helper'
RSpec.describe CommitSignatures::X509CommitSignature do
+ # This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
+ # For instructions on how to add more seed data, see the project README
let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' }
let(:project) { create(:project, :public, :repository) }
let!(:commit) { create(:commit, project: project, sha: commit_sha) }
let(:x509_certificate) { create(:x509_certificate) }
- let(:x509_signature) { create(:x509_commit_signature, commit_sha: commit_sha) }
+ let(:signature) { create(:x509_commit_signature, commit_sha: commit_sha) }
let(:attributes) do
{
@@ -19,38 +21,16 @@ RSpec.describe CommitSignatures::X509CommitSignature do
end
it_behaves_like 'having unique enum values'
+ it_behaves_like 'commit signature'
describe 'validation' do
- it { is_expected.to validate_presence_of(:commit_sha) }
- it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_presence_of(:x509_certificate_id) }
end
describe 'associations' do
- it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:x509_certificate).required }
end
- describe '.safe_create!' do
- it 'finds a signature by commit sha if it existed' do
- x509_signature
-
- expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(x509_signature)
- end
-
- it 'creates a new signature if it was not found' do
- expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
- end
-
- it 'assigns the correct attributes when creating' do
- signature = described_class.safe_create!(attributes)
-
- expect(signature.project).to eq(project)
- expect(signature.commit_sha).to eq(commit_sha)
- expect(signature.x509_certificate_id).to eq(x509_certificate.id)
- end
- end
-
describe '#user' do
context 'if email is assigned to a user' do
let!(:user) { create(:user, email: X509Helpers::User1.certificate_email) }
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 7c67b9a3d63..187be557064 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -746,7 +746,7 @@ eos
end
end
- describe '#work_in_progress?' do
+ describe '#draft?' do
[
'squash! ', 'fixup! ',
'draft: ', '[Draft] ', '(draft) ', 'Draft: '
@@ -754,21 +754,21 @@ eos
it "detects the '#{draft_prefix}' prefix" do
commit.message = "#{draft_prefix}#{commit.message}"
- expect(commit).to be_work_in_progress
+ expect(commit).to be_draft
end
end
- it "does not detect WIP for a commit just saying 'draft'" do
+ it "does not detect a commit just saying 'draft' as draft? == true" do
commit.message = "draft"
- expect(commit).not_to be_work_in_progress
+ expect(commit).not_to be_draft
end
["FIXUP!", "Draft - ", "Wipeout", "WIP: ", "[WIP] ", "wip: "].each do |draft_prefix|
it "doesn't detect '#{draft_prefix}' at the start of the title as a draft" do
commit.message = "#{draft_prefix} #{commit.message}"
- expect(commit).not_to be_work_in_progress
+ expect(commit).not_to be_draft
end
end
end
diff --git a/spec/models/compare_spec.rb b/spec/models/compare_spec.rb
index 86bab569ab0..0035fb8468a 100644
--- a/spec/models/compare_spec.rb
+++ b/spec/models/compare_spec.rb
@@ -35,6 +35,21 @@ RSpec.describe Compare do
end
end
+ describe '#commits' do
+ subject { compare.commits }
+
+ it 'returns a CommitCollection' do
+ is_expected.to be_kind_of(CommitCollection)
+ end
+
+ it 'returns a list of commits' do
+ commit_ids = subject.map(&:id)
+
+ expect(commit_ids).to include(head_commit.id)
+ expect(commit_ids.length).to eq(6)
+ end
+ end
+
describe '#commit' do
it 'returns raw compare head commit' do
expect(subject.commit.id).to eq(head_commit.id)
diff --git a/spec/models/concerns/as_cte_spec.rb b/spec/models/concerns/as_cte_spec.rb
new file mode 100644
index 00000000000..06d9650ec46
--- /dev/null
+++ b/spec/models/concerns/as_cte_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AsCte do
+ let(:klass) do
+ Class.new(ApplicationRecord) do
+ include AsCte
+
+ self.table_name = 'users'
+ end
+ end
+
+ let(:query) { klass.where(id: [1, 2, 3]) }
+ let(:name) { :klass_cte }
+
+ describe '.as_cte' do
+ subject { query.as_cte(name) }
+
+ it { expect(subject).to be_a(Gitlab::SQL::CTE) }
+ it { expect(subject.query).to eq(query) }
+ it { expect(subject.table.name).to eq(name.to_s) }
+
+ context 'with materialized parameter', if: Gitlab::Database::AsWithMaterialized.materialized_supported? do
+ subject { query.as_cte(name, materialized: materialized).to_arel.to_sql }
+
+ context 'as true' do
+ let(:materialized) { true }
+
+ it { expect(subject).to match /MATERIALIZE/ }
+ end
+
+ context 'as false' do
+ let(:materialized) { false }
+
+ it { expect(subject).not_to match /MATERIALIZE/ }
+ end
+ end
+ end
+end
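
As a quick orientation, the `.as_cte` API exercised above can be driven roughly like this (the CTE name is illustrative and the SQL shown is approximate):

    # Reusing the anonymous `klass` relation from the spec above:
    cte = klass.where(id: [1, 2, 3]).as_cte(:recent_rows)

    cte.query           # => the original ActiveRecord::Relation
    cte.table.name      # => "recent_rows"
    cte.to_arel.to_sql  # => roughly %("recent_rows" AS (SELECT "users".* FROM "users" WHERE ...))
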
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index d46f22b2216..a00129b3fdf 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -404,6 +404,16 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
it 'returns false when there are no changes' do
expect(thing.attribute_invalidated?(:description_html)).to eq(false)
end
+
+ it 'returns false if skip_markdown_cache_validation is true' do
+ # invalidates the attribute
+ thing.cached_markdown_version += 1
+ thing.description = updated_markdown
+
+ thing.skip_markdown_cache_validation = true
+
+ expect(thing.attribute_invalidated?(:description_html)).to eq(false)
+ end
end
context 'when cache version is updated' do
diff --git a/spec/models/concerns/ci/artifactable_spec.rb b/spec/models/concerns/ci/artifactable_spec.rb
index 62fc689a9ca..b27a4d0dcc1 100644
--- a/spec/models/concerns/ci/artifactable_spec.rb
+++ b/spec/models/concerns/ci/artifactable_spec.rb
@@ -68,8 +68,8 @@ RSpec.describe Ci::Artifactable do
end
describe '.expired' do
- it 'returns a limited number of expired artifacts' do
- expect(Ci::JobArtifact.expired(1).order_id_asc).to eq([recently_expired_artifact])
+ it 'returns all expired artifacts' do
+ expect(Ci::JobArtifact.expired).to contain_exactly(recently_expired_artifact, later_expired_artifact)
end
end
diff --git a/spec/models/concerns/integrations/has_data_fields_spec.rb b/spec/models/concerns/integrations/has_data_fields_spec.rb
index b28fef571c6..374c5c33b50 100644
--- a/spec/models/concerns/integrations/has_data_fields_spec.rb
+++ b/spec/models/concerns/integrations/has_data_fields_spec.rb
@@ -6,84 +6,84 @@ RSpec.describe Integrations::HasDataFields do
let(:url) { 'http://url.com' }
let(:username) { 'username_one' }
let(:properties) do
- { url: url, username: username }
+ { url: url, username: username, jira_issue_transition_automatic: false }
end
shared_examples 'data fields' do
describe '#arg' do
- it 'returns an argument correctly' do
- expect(service.url).to eq(url)
+ it 'returns the expected values' do
+ expect(integration).to have_attributes(properties)
end
end
describe '{arg}_changed?' do
it 'returns false when the property has not been assigned a new value' do
- service.username = 'new_username'
- service.validate
- expect(service.url_changed?).to be_falsy
+ integration.username = 'new_username'
+ integration.validate
+ expect(integration.url_changed?).to be_falsy
end
it 'returns true when the property has been assigned a different value' do
- service.url = "http://example.com"
- service.validate
- expect(service.url_changed?).to be_truthy
+ integration.url = "http://example.com"
+ integration.validate
+ expect(integration.url_changed?).to be_truthy
end
it 'returns true when the property has been assigned a different value twice' do
- service.url = "http://example.com"
- service.url = "http://example.com"
- service.validate
- expect(service.url_changed?).to be_truthy
+ integration.url = "http://example.com"
+ integration.url = "http://example.com"
+ integration.validate
+ expect(integration.url_changed?).to be_truthy
end
it 'returns false when the property has been re-assigned the same value' do
- service.url = 'http://url.com'
- service.validate
- expect(service.url_changed?).to be_falsy
+ integration.url = 'http://url.com'
+ integration.validate
+ expect(integration.url_changed?).to be_falsy
end
end
describe '{arg}_touched?' do
it 'returns false when the property has not been assigned a new value' do
- service.username = 'new_username'
- service.validate
- expect(service.url_changed?).to be_falsy
+ integration.username = 'new_username'
+ integration.validate
+ expect(integration.url_changed?).to be_falsy
end
it 'returns true when the property has been assigned a different value' do
- service.url = "http://example.com"
- service.validate
- expect(service.url_changed?).to be_truthy
+ integration.url = "http://example.com"
+ integration.validate
+ expect(integration.url_changed?).to be_truthy
end
it 'returns true when the property has been assigned a different value twice' do
- service.url = "http://example.com"
- service.url = "http://example.com"
- service.validate
- expect(service.url_changed?).to be_truthy
+ integration.url = "http://example.com"
+ integration.url = "http://example.com"
+ integration.validate
+ expect(integration.url_changed?).to be_truthy
end
it 'returns true when the property has been re-assigned the same value' do
- service.url = 'http://url.com'
- expect(service.url_touched?).to be_truthy
+ integration.url = 'http://url.com'
+ expect(integration.url_touched?).to be_truthy
end
it 'returns false when the property has been re-assigned the same value' do
- service.url = 'http://url.com'
- service.validate
- expect(service.url_changed?).to be_falsy
+ integration.url = 'http://url.com'
+ integration.validate
+ expect(integration.url_changed?).to be_falsy
end
end
describe 'data_fields_present?' do
- it 'returns true from the issue tracker service' do
- expect(service.data_fields_present?).to be true
+ it 'returns true from the issue tracker integration' do
+ expect(integration.data_fields_present?).to be true
end
end
end
context 'when data are stored in data_fields' do
- let(:service) do
+ let(:integration) do
create(:jira_integration, url: url, username: username)
end
@@ -91,21 +91,21 @@ RSpec.describe Integrations::HasDataFields do
describe '{arg}_was?' do
it 'returns nil' do
- service.url = 'http://example.com'
- service.validate
- expect(service.url_was).to be_nil
+ integration.url = 'http://example.com'
+ integration.validate
+ expect(integration.url_was).to be_nil
end
end
end
- context 'when service and data_fields are not persisted' do
- let(:service) do
+ context 'when integration and data_fields are not persisted' do
+ let(:integration) do
Integrations::Jira.new
end
describe 'data_fields_present?' do
it 'returns true' do
- expect(service.data_fields_present?).to be true
+ expect(integration.data_fields_present?).to be true
end
end
end
@@ -113,9 +113,7 @@ RSpec.describe Integrations::HasDataFields do
context 'when data are stored in properties' do
let(:integration) { create(:jira_integration, :without_properties_callback, properties: properties) }
- it_behaves_like 'data fields' do
- let(:service) { integration }
- end
+ it_behaves_like 'data fields'
describe '{arg}_was?' do
it 'returns nil when the property has not been assigned a new value' do
@@ -148,9 +146,7 @@ RSpec.describe Integrations::HasDataFields do
end
end
- it_behaves_like 'data fields' do
- let(:service) { integration }
- end
+ it_behaves_like 'data fields'
describe '{arg}_was?' do
it 'returns nil' do
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index e8e9c263d23..87821de3cf5 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -982,14 +982,6 @@ RSpec.describe Issuable do
subject { issuable.supports_escalation? }
it { is_expected.to eq(supports_escalation) }
-
- context 'with feature disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
end
diff --git a/spec/models/concerns/limitable_spec.rb b/spec/models/concerns/limitable_spec.rb
index 850282d54c7..c0a6aea2075 100644
--- a/spec/models/concerns/limitable_spec.rb
+++ b/spec/models/concerns/limitable_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Limitable do
it 'triggers scoped validations' do
instance = MinimalTestClass.new
- expect(instance).to receive(:validate_scoped_plan_limit_not_exceeded)
+ expect(instance).to receive(:scoped_plan_limits)
instance.valid?(:create)
end
@@ -94,7 +94,7 @@ RSpec.describe Limitable do
it 'triggers scoped validations' do
instance = MinimalTestClass.new
- expect(instance).to receive(:validate_global_plan_limit_not_exceeded)
+ expect(instance).to receive(:global_plan_limits)
instance.valid?(:create)
end
diff --git a/spec/models/concerns/pg_full_text_searchable_spec.rb b/spec/models/concerns/pg_full_text_searchable_spec.rb
index b6da481024a..84209999ab2 100644
--- a/spec/models/concerns/pg_full_text_searchable_spec.rb
+++ b/spec/models/concerns/pg_full_text_searchable_spec.rb
@@ -99,6 +99,17 @@ RSpec.describe PgFullTextSearchable do
it 'does not support searching by non-Latin characters' do
expect(model_class.pg_full_text_search('日本')).to be_empty
end
+
+ context 'when search term has a URL' do
+ let(:with_url) { model_class.create!(project: project, title: 'issue with url', description: 'sample url,https://gitlab.com/gitlab-org/gitlab') }
+
+ it 'allows searching by full URL, ignoring the scheme' do
+ with_url.update_search_data!
+
+ expect(model_class.pg_full_text_search('https://gitlab.com/gitlab-org/gitlab')).to contain_exactly(with_url)
+ expect(model_class.pg_full_text_search('gopher://gitlab.com/gitlab-org/gitlab')).to contain_exactly(with_url)
+ end
+ end
end
describe '#update_search_data!' do
diff --git a/spec/models/concerns/project_features_compatibility_spec.rb b/spec/models/concerns/project_features_compatibility_spec.rb
index 62c9a041a85..f2dc8464e86 100644
--- a/spec/models/concerns/project_features_compatibility_spec.rb
+++ b/spec/models/concerns/project_features_compatibility_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe ProjectFeaturesCompatibility do
let(:project) { create(:project) }
let(:features_enabled) { %w(issues wiki builds merge_requests snippets security_and_compliance) }
- let(:features) { features_enabled + %w(repository pages operations container_registry) }
+ let(:features) { features_enabled + %w(repository pages operations container_registry package_registry) }
# We had issues_enabled, snippets_enabled, builds_enabled, merge_requests_enabled and issues_enabled fields on projects table
# All those fields got moved to a new table called project_feature and are now integers instead of booleans
diff --git a/spec/models/concerns/sensitive_serializable_hash_spec.rb b/spec/models/concerns/sensitive_serializable_hash_spec.rb
index c864ecb4eec..3c9199ce18f 100644
--- a/spec/models/concerns/sensitive_serializable_hash_spec.rb
+++ b/spec/models/concerns/sensitive_serializable_hash_spec.rb
@@ -4,11 +4,15 @@ require 'spec_helper'
RSpec.describe SensitiveSerializableHash do
describe '.prevent_from_serialization' do
- let(:test_class) do
+ let(:base_class) do
Class.new do
include ActiveModel::Serialization
include SensitiveSerializableHash
+ end
+ end
+ let(:test_class) do
+ Class.new(base_class) do
attr_accessor :name, :super_secret
prevent_from_serialization :super_secret
@@ -19,6 +23,12 @@ RSpec.describe SensitiveSerializableHash do
end
end
+ let(:another_class) do
+ Class.new(base_class) do
+ prevent_from_serialization :sub_secret
+ end
+ end
+
let(:model) { test_class.new }
it 'does not include the field in serializable_hash' do
@@ -30,6 +40,11 @@ RSpec.describe SensitiveSerializableHash do
expect(model.serializable_hash(unsafe_serialization_hash: true)).to include('super_secret')
end
end
+
+ it 'does not change parent class attributes_exempt_from_serializable_hash' do
+ expect(test_class.attributes_exempt_from_serializable_hash).to contain_exactly(:super_secret)
+ expect(another_class.attributes_exempt_from_serializable_hash).to contain_exactly(:sub_secret)
+ end
end
describe '#serializable_hash' do
@@ -56,6 +71,9 @@ RSpec.describe SensitiveSerializableHash do
attributes.each do |attribute|
expect(model.attributes).to include(attribute) # double-check the attribute does exist
+ # Do not expect binary columns to appear in JSON
+ next if klass.columns_hash[attribute]&.type == :binary
+
expect(model.serializable_hash(unsafe_serialization_hash: true)).to include(attribute)
expect(model.to_json(unsafe_serialization_hash: true)).to include(attribute)
expect(model.as_json(unsafe_serialization_hash: true)).to include(attribute)
@@ -65,8 +83,12 @@ RSpec.describe SensitiveSerializableHash do
end
end
- it_behaves_like 'attr_encrypted attribute', WebHook, 'token' do
+ context 'for a web hook' do
let_it_be(:model) { create(:system_hook) }
+
+ it_behaves_like 'attr_encrypted attribute', WebHook, 'token'
+ it_behaves_like 'attr_encrypted attribute', WebHook, 'url'
+ it_behaves_like 'attr_encrypted attribute', WebHook, 'url_variables'
end
it_behaves_like 'attr_encrypted attribute', Ci::InstanceVariable, 'value' do
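
For readers skimming this spec, a minimal usage sketch of the concern, mirroring the anonymous test classes above (the class name here is made up):

    class Credential
      include ActiveModel::Serialization
      include SensitiveSerializableHash

      attr_accessor :name, :super_secret

      prevent_from_serialization :super_secret

      def attributes
        { 'name' => nil, 'super_secret' => nil }
      end
    end

    model = Credential.new
    model.serializable_hash                                   # "super_secret" is filtered out
    model.serializable_hash(unsafe_serialization_hash: true)  # "super_secret" is included
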
diff --git a/spec/models/container_registry/event_spec.rb b/spec/models/container_registry/event_spec.rb
index 6b544c95cc8..e0194a07f46 100644
--- a/spec/models/container_registry/event_spec.rb
+++ b/spec/models/container_registry/event_spec.rb
@@ -46,6 +46,12 @@ RSpec.describe ContainerRegistry::Event do
handle!
end
+ it 'clears the cache for the namespace container repositories size' do
+ expect(Rails.cache).to receive(:delete).with(group.container_repositories_size_cache_key)
+
+ handle!
+ end
+
shared_examples 'event without project statistics update' do
it 'does not queue a project statistics update' do
expect(ProjectCacheWorker).not_to receive(:perform_async)
@@ -54,14 +60,6 @@ RSpec.describe ContainerRegistry::Event do
end
end
- context 'with :container_registry_project_statistics feature flag disabled' do
- before do
- stub_feature_flags(container_registry_project_statistics: false)
- end
-
- it_behaves_like 'event without project statistics update'
- end
-
context 'with no target tag' do
let(:target) { super().without('tag') }
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index af4e40cecb7..7d0dfad91b2 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -208,23 +208,9 @@ RSpec.describe ContainerRepository, :aggregate_failures do
shared_examples 'queueing the next import' do
it 'starts the worker' do
expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_async)
- expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_in)
subject
end
-
- context 'enqueue_twice feature flag disabled' do
- before do
- stub_feature_flags(container_registry_migration_phase2_enqueue_twice: false)
- end
-
- it 'starts the worker only once' do
- expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_async)
- expect(::ContainerRegistry::Migration::EnqueuerWorker).not_to receive(:perform_in)
-
- subject
- end
- end
end
describe '#start_pre_import' do
diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb
index 86f868b269e..f91546f5240 100644
--- a/spec/models/customer_relations/contact_spec.rb
+++ b/spec/models/customer_relations/contact_spec.rb
@@ -142,4 +142,99 @@ RSpec.describe CustomerRelations::Contact, type: :model do
expect(issue_contact2.reload.contact_id).to eq(dupe_contact1.id)
end
end
+
+ describe '.search' do
+ let_it_be(:contact_a) do
+ create(
+ :contact,
+ group: group,
+ first_name: "ABC",
+ last_name: "DEF",
+ email: "ghi@test.com",
+ description: "LMNO",
+ state: "inactive"
+ )
+ end
+
+ let_it_be(:contact_b) do
+ create(
+ :contact,
+ group: group,
+ first_name: "PQR",
+ last_name: "STU",
+ email: "vwx@test.com",
+ description: "YZ",
+ state: "active"
+ )
+ end
+
+ subject(:found_contacts) { group.contacts.search(search_term) }
+
+ context 'when search term is empty' do
+ let(:search_term) { "" }
+
+ it 'returns all group contacts' do
+ expect(found_contacts).to contain_exactly(contact_a, contact_b)
+ end
+ end
+
+ context 'when search term is not empty' do
+ context 'when searching for first name ignoring casing' do
+ let(:search_term) { "aBc" }
+
+ it { is_expected.to contain_exactly(contact_a) }
+ end
+
+ context 'when searching for last name ignoring casing' do
+ let(:search_term) { "StU" }
+
+ it { is_expected.to contain_exactly(contact_b) }
+ end
+
+ context 'when searching for email' do
+ let(:search_term) { "ghi" }
+
+ it { is_expected.to contain_exactly(contact_a) }
+ end
+
+ context 'when searching description ignoring casing' do
+ let(:search_term) { "Yz" }
+
+ it { is_expected.to contain_exactly(contact_b) }
+ end
+
+ context 'when fuzzy searching for email and last name' do
+ let(:search_term) { "s" }
+
+ it { is_expected.to contain_exactly(contact_a, contact_b) }
+ end
+ end
+ end
+
+ describe '.search_by_state' do
+ let_it_be(:contact_a) { create(:contact, group: group, state: "inactive") }
+ let_it_be(:contact_b) { create(:contact, group: group, state: "active") }
+
+ context 'when searching for contacts state' do
+ it 'returns only inactive contacts' do
+ expect(group.contacts.search_by_state(:inactive)).to contain_exactly(contact_a)
+ end
+
+ it 'returns only active contacts' do
+ expect(group.contacts.search_by_state(:active)).to contain_exactly(contact_b)
+ end
+ end
+ end
+
+ describe '.sort_by_name' do
+ let_it_be(:contact_a) { create(:contact, group: group, first_name: "c", last_name: "d") }
+ let_it_be(:contact_b) { create(:contact, group: group, first_name: "a", last_name: "b") }
+ let_it_be(:contact_c) { create(:contact, group: group, first_name: "e", last_name: "d") }
+
+ context 'when sorting the contacts' do
+ it 'sorts them by last name then first name in ascending order' do
+ expect(group.contacts.sort_by_name).to eq([contact_b, contact_a, contact_c])
+ end
+ end
+ end
end
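
Taken together, the scopes covered above can be driven like this (using the let_it_be records from the spec):

    group.contacts.search("aBc")             # case-insensitive match on name, email or description
    group.contacts.search_by_state(:active)  # => [contact_b]
    group.contacts.sort_by_name              # ordered by last name, then first name
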
diff --git a/spec/models/customer_relations/organization_spec.rb b/spec/models/customer_relations/organization_spec.rb
index 06ba9c5b7ad..1833fcf5385 100644
--- a/spec/models/customer_relations/organization_spec.rb
+++ b/spec/models/customer_relations/organization_spec.rb
@@ -78,4 +78,83 @@ RSpec.describe CustomerRelations::Organization, type: :model do
expect(contact2.reload.organization_id).to eq(dupe_organization1.id)
end
end
+
+ describe '.search' do
+ let_it_be(:organization_a) do
+ create(
+ :organization,
+ group: group,
+ name: "DEF",
+ description: "ghi_st",
+ state: "inactive"
+ )
+ end
+
+ let_it_be(:organization_b) do
+ create(
+ :organization,
+ group: group,
+ name: "ABC_st",
+ description: "JKL",
+ state: "active"
+ )
+ end
+
+ subject(:found_organizations) { group.organizations.search(search_term) }
+
+ context 'when search term is empty' do
+ let(:search_term) { "" }
+
+ it 'returns all group organizations' do
+ expect(found_organizations).to contain_exactly(organization_a, organization_b)
+ end
+ end
+
+ context 'when search term is not empty' do
+ context 'when searching for name' do
+ let(:search_term) { "aBc" }
+
+ it { is_expected.to contain_exactly(organization_b) }
+ end
+
+ context 'when searching for description' do
+ let(:search_term) { "ghI" }
+
+ it { is_expected.to contain_exactly(organization_a) }
+ end
+
+ context 'when searching for name and description' do
+ let(:search_term) { "_st" }
+
+ it { is_expected.to contain_exactly(organization_a, organization_b) }
+ end
+ end
+ end
+
+ describe '.search_by_state' do
+ let_it_be(:organization_a) { create(:organization, group: group, state: "inactive") }
+ let_it_be(:organization_b) { create(:organization, group: group, state: "active") }
+
+ context 'when searching for organizations state' do
+ it 'returns only inactive organizations' do
+ expect(group.organizations.search_by_state(:inactive)).to contain_exactly(organization_a)
+ end
+
+ it 'returns only active organizations' do
+ expect(group.organizations.search_by_state(:active)).to contain_exactly(organization_b)
+ end
+ end
+ end
+
+ describe '.sort_by_name' do
+ let_it_be(:organization_a) { create(:organization, group: group, name: "c") }
+ let_it_be(:organization_b) { create(:organization, group: group, name: "a") }
+ let_it_be(:organization_c) { create(:organization, group: group, name: "b") }
+
+ context 'when sorting the organizations' do
+ it 'sorts them by name in ascending order' do
+ expect(group.organizations.sort_by_name).to eq([organization_b, organization_c, organization_a])
+ end
+ end
+ end
end
diff --git a/spec/models/data_list_spec.rb b/spec/models/data_list_spec.rb
index d2f15386808..67db2730a78 100644
--- a/spec/models/data_list_spec.rb
+++ b/spec/models/data_list_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe DataList do
end
def data_list(integration)
- DataList.new([integration], integration.to_data_fields_hash, integration.data_fields.class).to_array
+ DataList.new([integration], integration.to_database_hash, integration.data_fields.class).to_array
end
it 'returns current data' do
diff --git a/spec/models/deployment_cluster_spec.rb b/spec/models/deployment_cluster_spec.rb
index dc9cbe4b082..b0564def946 100644
--- a/spec/models/deployment_cluster_spec.rb
+++ b/spec/models/deployment_cluster_spec.rb
@@ -19,4 +19,11 @@ RSpec.describe DeploymentCluster do
kubernetes_namespace: kubernetes_namespace
)
end
+
+ context 'loose foreign key on deployment_clusters.cluster_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:cluster) }
+ let!(:model) { create(:deployment_cluster, cluster: parent) }
+ end
+ end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 409353bdbcf..a58d32dfe5d 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe Deployment do
it { is_expected.to delegate_method(:name).to(:environment).with_prefix }
it { is_expected.to delegate_method(:commit).to(:project) }
it { is_expected.to delegate_method(:commit_title).to(:commit).as(:try) }
- it { is_expected.to delegate_method(:manual_actions).to(:deployable).as(:try) }
it { is_expected.to delegate_method(:kubernetes_namespace).to(:deployment_cluster).as(:kubernetes_namespace) }
it { is_expected.to validate_presence_of(:ref) }
@@ -25,20 +24,23 @@ RSpec.describe Deployment do
it_behaves_like 'having unique enum values'
- describe '#scheduled_actions' do
- subject { deployment.scheduled_actions }
+ describe '#manual_actions' do
+ let(:deployment) { create(:deployment) }
- let(:project) { create(:project, :repository) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build) { create(:ci_build, :success, pipeline: pipeline) }
- let(:deployment) { create(:deployment, deployable: build) }
+ it 'delegates to environment_manual_actions' do
+ expect(deployment.deployable).to receive(:environment_manual_actions).and_call_original
- it 'delegates to other_scheduled_actions' do
- expect_next_instance_of(Ci::Build) do |instance|
- expect(instance).to receive(:other_scheduled_actions)
- end
+ deployment.manual_actions
+ end
+ end
- subject
+ describe '#scheduled_actions' do
+ let(:deployment) { create(:deployment) }
+
+ it 'delegates to environment_scheduled_actions' do
+ expect(deployment.deployable).to receive(:environment_scheduled_actions).and_call_original
+
+ deployment.scheduled_actions
end
end
@@ -137,15 +139,29 @@ RSpec.describe Deployment do
end
end
- it 'executes Deployments::HooksWorker asynchronously' do
+ it 'executes deployment hooks' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
deployment.run!
end
end
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ deployment.run!
+ end
+ end
+ end
+
it 'executes Deployments::DropOlderDeploymentsWorker asynchronously' do
expect(Deployments::DropOlderDeploymentsWorker)
.to receive(:perform_async).once.with(deployment.id)
@@ -173,14 +189,28 @@ RSpec.describe Deployment do
deployment.succeed!
end
- it 'executes Deployments::HooksWorker asynchronously' do
+ it 'executes deployment hooks' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
deployment.succeed!
end
end
+
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ deployment.succeed!
+ end
+ end
+ end
end
context 'when deployment failed' do
@@ -202,14 +232,28 @@ RSpec.describe Deployment do
deployment.drop!
end
- it 'executes Deployments::HooksWorker asynchronously' do
+ it 'executes deployment hooks' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
deployment.drop!
end
end
+
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ deployment.drop!
+ end
+ end
+ end
end
context 'when deployment was canceled' do
@@ -231,14 +275,28 @@ RSpec.describe Deployment do
deployment.cancel!
end
- it 'executes Deployments::HooksWorker asynchronously' do
+ it 'executes deployment hooks' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
deployment.cancel!
end
end
+
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ deployment.cancel!
+ end
+ end
+ end
end
context 'when deployment was skipped' do
@@ -266,6 +324,12 @@ RSpec.describe Deployment do
deployment.skip!
end
end
+
+ it 'does not execute deployment hooks' do
+ expect(deployment).not_to receive(:execute_hooks)
+
+ deployment.skip!
+ end
end
context 'when deployment is blocked' do
@@ -289,6 +353,12 @@ RSpec.describe Deployment do
deployment.block!
end
+
+ it 'does not execute deployment hooks' do
+ expect(deployment).not_to receive(:execute_hooks)
+
+ deployment.block!
+ end
end
describe 'synching status to Jira' do
@@ -550,6 +620,143 @@ RSpec.describe Deployment do
is_expected.to contain_exactly(deployment1, deployment2)
end
end
+
+ describe 'last_deployment_group_for_environment' do
+ def subject_method(environment)
+ described_class.last_deployment_group_for_environment(environment)
+ end
+
+ let!(:project) { create(:project, :repository) }
+ let!(:environment) { create(:environment, project: project) }
+
+ context 'when there are no deployments and builds' do
+ it do
+ expect(subject_method(environment)).to eq(Deployment.none)
+ end
+ end
+
+ context 'when there are no successful builds' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:ci_build) { create(:ci_build, :running, project: project, pipeline: pipeline) }
+
+ before do
+ create(:deployment, :success, environment: environment, project: project, deployable: ci_build)
+ end
+
+ it do
+ expect(subject_method(environment)).to eq(Deployment.none)
+ end
+ end
+
+ context 'when there are deployments for multiple pipelines' do
+ let(:pipeline_a) { create(:ci_pipeline, project: project) }
+ let(:pipeline_b) { create(:ci_pipeline, project: project) }
+ let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_b) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }
+ let(:ci_build_c) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_d) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }
+
+ # Successful deployments for pipeline_a
+ let!(:deployment_a) do
+ create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
+ end
+
+ let!(:deployment_b) do
+ create(:deployment, :success, project: project, environment: environment, deployable: ci_build_c)
+ end
+
+ before do
+ # Failed deployment for pipeline_a
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_d)
+
+ # Failed deployment for pipeline_b
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
+ end
+
+ it 'returns the successful deployment jobs for the last deployment pipeline' do
+ expect(subject_method(environment).pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
+ end
+ end
+
+ context 'when there are many environments' do
+ let(:environment_b) { create(:environment, project: project) }
+
+ let(:pipeline_a) { create(:ci_pipeline, project: project) }
+ let(:pipeline_b) { create(:ci_pipeline, project: project) }
+ let(:pipeline_c) { create(:ci_pipeline, project: project) }
+ let(:pipeline_d) { create(:ci_pipeline, project: project) }
+
+ # Builds for first environment: 'environment' with pipeline_a and pipeline_b
+ let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_b) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }
+ let(:ci_build_c) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_d) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }
+ let!(:stop_env_a) { create(:ci_build, :manual, project: project, pipeline: pipeline_a, name: 'stop_env_a') }
+
+ # Builds for second environment: 'environment_b' with pipeline_c and pipeline_d
+ let(:ci_build_e) { create(:ci_build, :success, project: project, pipeline: pipeline_c) }
+ let(:ci_build_f) { create(:ci_build, :failed, project: project, pipeline: pipeline_d) }
+ let(:ci_build_g) { create(:ci_build, :success, project: project, pipeline: pipeline_c) }
+ let(:ci_build_h) { create(:ci_build, :failed, project: project, pipeline: pipeline_c) }
+ let!(:stop_env_b) { create(:ci_build, :manual, project: project, pipeline: pipeline_c, name: 'stop_env_b') }
+
+ # Successful deployments for 'environment' from pipeline_a
+ let!(:deployment_a) do
+ create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
+ end
+
+ let!(:deployment_b) do
+ create(:deployment, :success,
+ project: project, environment: environment, deployable: ci_build_c, on_stop: 'stop_env_a')
+ end
+
+ # Successful deployments for 'environment_b' from pipeline_c
+ let!(:deployment_c) do
+ create(:deployment, :success, project: project, environment: environment_b, deployable: ci_build_e)
+ end
+
+ let!(:deployment_d) do
+ create(:deployment, :success,
+ project: project, environment: environment_b, deployable: ci_build_g, on_stop: 'stop_env_b')
+ end
+
+ before do
+ # Failed deployments for 'environment' from pipeline_a and pipeline_b
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_d)
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
+
+ # Failed deployments for 'environment_b' from pipeline_c and pipeline_d
+ create(:deployment, :failed, project: project, environment: environment_b, deployable: ci_build_h)
+ create(:deployment, :failed, project: project, environment: environment_b, deployable: ci_build_f)
+ end
+
+ it 'batch loads for environments' do
+ environments = [environment, environment_b]
+
+ # Prime the batch loader for each environment
+ environments.each do |env|
+ subject_method(env)
+ end
+
+ expect(subject_method(environments.first).pluck(:id))
+ .to contain_exactly(deployment_a.id, deployment_b.id)
+
+ expect { subject_method(environments.second).pluck(:id) }.not_to exceed_query_limit(0)
+
+ expect(subject_method(environments.second).pluck(:id))
+ .to contain_exactly(deployment_c.id, deployment_d.id)
+
+ expect(subject_method(environments.first).map(&:stop_action).compact)
+ .to contain_exactly(stop_env_a)
+
+ expect { subject_method(environments.second).map(&:stop_action) }
+ .not_to exceed_query_limit(0)
+
+ expect(subject_method(environments.second).map(&:stop_action).compact)
+ .to contain_exactly(stop_env_b)
+ end
+ end
+ end
end
describe 'latest_for_sha' do
@@ -845,11 +1052,30 @@ RSpec.describe Deployment do
expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
expect(Deployments::ArchiveInProjectWorker).to receive(:perform_async)
- expect(Deployments::HooksWorker).to receive(:perform_async)
expect(deploy.update_status('success')).to eq(true)
end
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'schedules `Deployments::HooksWorker` when finishing a deploy' do
+ expect(Deployments::HooksWorker).to receive(:perform_async)
+
+ deploy.update_status('success')
+ end
+ end
+
+ it 'executes deployment hooks when finishing a deploy' do
+ freeze_time do
+ expect(deploy).to receive(:execute_hooks).with(Time.current)
+
+ deploy.update_status('success')
+ end
+ end
+
it 'updates finished_at when transitioning to a finished status' do
freeze_time do
deploy.update_status('success')
@@ -1173,4 +1399,11 @@ RSpec.describe Deployment do
end
end
end
+
+ context 'loose foreign key on deployments.cluster_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:cluster) }
+ let!(:model) { create(:deployment, cluster: parent) }
+ end
+ end
end
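
The `last_deployment_group_for_environment` examples above assert that, once the batch loader has been primed, fetching the group for additional environments issues no extra queries. A rough sketch of that batch-loading pattern using the batch-loader gem's public API; the grouping below is simplified and is not the exact production scope:

    def self.last_deployment_group_for_environment(environment)
      BatchLoader.for(environment).batch(default_value: []) do |environments, loader|
        # One query covering every environment in the batch, fanned back out per key.
        # (The real scope additionally restricts this to successful deployments of
        # the latest deployment pipeline and preloads the stop actions.)
        groups = Deployment.where(environment: environments).group_by(&:environment_id)

        environments.each do |env|
          loader.call(env, groups.fetch(env.id, []))
        end
      end
    end
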
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 34dfc7a1fce..fd89a3a2e22 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -479,7 +479,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
context 'when matching action is defined' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, :success) }
let!(:deployment) do
create(:deployment, :success,
@@ -549,7 +549,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when matching action is defined' do
let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build_a) { create(:ci_build, pipeline: pipeline) }
+ let(:build_a) { create(:ci_build, :success, pipeline: pipeline) }
before do
create(:deployment, :success,
@@ -586,6 +586,12 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(action).to eq(close_action)
expect(action.user).to eq(user)
end
+
+ it 'does not stop the environment' do
+ subject
+
+ expect(environment).not_to be_stopped
+ end
end
context 'if action did finish' do
@@ -632,8 +638,8 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when there are more then one stop action for the environment' do
let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build_a) { create(:ci_build, pipeline: pipeline) }
- let(:build_b) { create(:ci_build, pipeline: pipeline) }
+ let(:build_a) { create(:ci_build, :success, pipeline: pipeline) }
+ let(:build_b) { create(:ci_build, :success, pipeline: pipeline) }
let!(:close_actions) do
[
@@ -666,9 +672,9 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(actions.pluck(:user)).to match_array(close_actions.pluck(:user))
end
- context 'when there are failed deployment jobs' do
+ context 'when there are failed builds' do
before do
- create(:ci_build, pipeline: pipeline, name: 'close_app_c')
+ create(:ci_build, :failed, pipeline: pipeline, name: 'close_app_c')
create(:deployment, :failed,
environment: environment,
@@ -676,11 +682,11 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
on_stop: 'close_app_c')
end
- it 'returns only stop actions from successful deployment jobs' do
+ it 'returns only stop actions from successful builds' do
actions = subject
expect(actions).to match_array(close_actions)
- expect(actions.count).to eq(environment.successful_deployments.count)
+ expect(actions.count).to eq(pipeline.latest_successful_builds.count)
end
end
end
@@ -697,8 +703,8 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when there are multiple deployments with actions' do
let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:ci_build_a) { create(:ci_build, project: project, pipeline: pipeline) }
- let(:ci_build_b) { create(:ci_build, project: project, pipeline: pipeline) }
+ let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline) }
+ let(:ci_build_b) { create(:ci_build, :success, project: project, pipeline: pipeline) }
let!(:ci_build_c) { create(:ci_build, :manual, project: project, pipeline: pipeline, name: 'close_app_a') }
let!(:ci_build_d) { create(:ci_build, :manual, project: project, pipeline: pipeline, name: 'close_app_b') }
@@ -714,7 +720,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
before do
# Create failed deployment without stop_action.
- build = create(:ci_build, project: project, pipeline: pipeline)
+ build = create(:ci_build, :failed, project: project, pipeline: pipeline)
create(:deployment, :failed, project: project, environment: environment, deployable: build)
end
@@ -736,10 +742,10 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when there are deployments for multiple pipelines' do
let(:pipeline_a) { create(:ci_pipeline, project: project) }
let(:pipeline_b) { create(:ci_pipeline, project: project) }
- let(:ci_build_a) { create(:ci_build, project: project, pipeline: pipeline_a) }
- let(:ci_build_b) { create(:ci_build, project: project, pipeline: pipeline_b) }
- let(:ci_build_c) { create(:ci_build, project: project, pipeline: pipeline_a) }
- let(:ci_build_d) { create(:ci_build, project: project, pipeline: pipeline_a) }
+ let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_b) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }
+ let(:ci_build_c) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_d) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }
# Successful deployments for pipeline_a
let!(:deployment_a) { create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a) }
@@ -756,6 +762,16 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it 'returns the successful deployment jobs for the last deployment pipeline' do
expect(subject.pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(batch_load_environment_last_deployment_group: false)
+ end
+
+ it 'returns the successful deployment jobs for the last deployment pipeline' do
+ expect(subject.pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
+ end
+ end
end
end
@@ -1730,17 +1746,17 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
let!(:environment3) { create(:environment, project: project, state: 'stopped') }
it 'returns the environments count grouped by state' do
- expect(project.environments.count_by_state).to eq({ stopped: 2, available: 1 })
+ expect(project.environments.count_by_state).to eq({ stopped: 2, available: 1, stopping: 0 })
end
it 'returns the environments count grouped by state with zero value' do
environment2.update!(state: 'stopped')
- expect(project.environments.count_by_state).to eq({ stopped: 3, available: 0 })
+ expect(project.environments.count_by_state).to eq({ stopped: 3, available: 0, stopping: 0 })
end
end
it 'returns zero state counts when environments are empty' do
- expect(project.environments.count_by_state).to eq({ stopped: 0, available: 0 })
+ expect(project.environments.count_by_state).to eq({ stopped: 0, available: 0, stopping: 0 })
end
end
diff --git a/spec/models/error_tracking/error_event_spec.rb b/spec/models/error_tracking/error_event_spec.rb
index 9cf5a405e74..6dab8fbf757 100644
--- a/spec/models/error_tracking/error_event_spec.rb
+++ b/spec/models/error_tracking/error_event_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe ErrorTracking::ErrorEvent, type: :model do
+RSpec.describe ErrorTracking::ErrorEvent do
+ include AfterNextHelpers
+
let_it_be(:event) { create(:error_tracking_error_event) }
describe 'relationships' do
@@ -18,44 +20,12 @@ RSpec.describe ErrorTracking::ErrorEvent, type: :model do
end
describe '#stacktrace' do
- it 'generates a correct stacktrace in expected format' do
- expected_context = [
- [132, " end\n"],
- [133, "\n"],
- [134, " begin\n"],
- [135, " block.call(work, *extra)\n"],
- [136, " rescue Exception => e\n"],
- [137, " STDERR.puts \"Error reached top of thread-pool: #\{e.message\} (#\{e.class\})\"\n"],
- [138, " end\n"]
- ]
-
- expected_entry = {
- 'lineNo' => 135,
- 'context' => expected_context,
- 'filename' => 'puma/thread_pool.rb',
- 'function' => 'block in spawn_thread',
- 'colNo' => 0
- }
+ it 'builds a stacktrace' do
+ expect_next(ErrorTracking::StacktraceBuilder, event.payload)
+ .to receive(:stacktrace).and_call_original
expect(event.stacktrace).to be_kind_of(Array)
- expect(event.stacktrace.first).to eq(expected_entry)
- end
-
- context 'error context is missing' do
- let(:event) { create(:error_tracking_error_event, :browser) }
-
- it 'generates a stacktrace without context' do
- expected_entry = {
- 'lineNo' => 6395,
- 'context' => [],
- 'filename' => 'webpack-internal:///./node_modules/vue/dist/vue.runtime.esm.js',
- 'function' => 'hydrate',
- 'colNo' => 0
- }
-
- expect(event.stacktrace).to be_kind_of(Array)
- expect(event.stacktrace.first).to eq(expected_entry)
- end
+ expect(event.stacktrace).not_to be_empty
end
end
diff --git a/spec/models/factories_spec.rb b/spec/models/factories_spec.rb
new file mode 100644
index 00000000000..45c3f93e6cf
--- /dev/null
+++ b/spec/models/factories_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'factories' do
+ include Database::DatabaseHelpers
+
+ # https://gitlab.com/groups/gitlab-org/-/epics/5464 tracks the remaining
+ # skipped traits.
+ #
+ # Consider adding a code comment if a trait cannot produce a valid object.
+ def skipped_traits
+ [
+ [:audit_event, :unauthenticated],
+ [:ci_build_trace_chunk, :fog_with_data],
+ [:ci_job_artifact, :remote_store],
+ [:ci_job_artifact, :raw],
+ [:ci_job_artifact, :gzip],
+ [:ci_job_artifact, :correct_checksum],
+ [:environment, :non_playable],
+ [:composer_cache_file, :object_storage],
+ [:debian_project_component_file, :object_storage],
+ [:debian_project_distribution, :object_storage],
+ [:debian_file_metadatum, :unknown],
+ [:issue_customer_relations_contact, :for_contact],
+ [:issue_customer_relations_contact, :for_issue],
+ [:package_file, :object_storage],
+ [:pages_domain, :without_certificate],
+ [:pages_domain, :without_key],
+ [:pages_domain, :with_missing_chain],
+ [:pages_domain, :with_trusted_chain],
+ [:pages_domain, :with_trusted_expired_chain],
+ [:pages_domain, :explicit_ecdsa],
+ [:project_member, :blocked],
+ [:remote_mirror, :ssh],
+ [:user_preference, :only_comments],
+ [:ci_pipeline_artifact, :remote_store]
+ ]
+ end
+
+ shared_examples 'factory' do |factory|
+ describe "#{factory.name} factory" do
+ it 'does not raise error when built' do
+ expect { build(factory.name) }.not_to raise_error
+ end
+
+ it 'does not raise error when created' do
+ expect { create(factory.name) }.not_to raise_error # rubocop:disable Rails/SaveBang
+ end
+
+ factory.definition.defined_traits.map(&:name).each do |trait_name|
+ describe "linting :#{trait_name} trait" do
+ it 'does not raise error when created' do
+ if skipped_traits.include?([factory.name, trait_name.to_sym])
+ pending("Trait skipped linting due to legacy error")
+ end
+
+ expect { create(factory.name, trait_name) }.not_to raise_error
+ end
+ end
+ end
+ end
+ end
+
+ # FactoryDefault speeds up specs by creating associations only once
+ # and reusing them in other factories.
+ #
+ # However, for some factories we cannot use FactoryDefault because the
+ # associations must be unique and cannot be reused, or the factory default
+ # is being mutated.
+ skip_factory_defaults = %i[
+ ci_job_token_project_scope_link
+ evidence
+ exported_protected_branch
+ fork_network_member
+ group_member
+ import_state
+ issue_customer_relations_contact
+ member_task
+ milestone_release
+ namespace
+ project_broken_repo
+ project_namespace
+ project_repository
+ prometheus_alert
+ prometheus_alert_event
+ prometheus_metric
+ protected_branch
+ protected_branch_merge_access_level
+ protected_branch_push_access_level
+ protected_tag
+ release
+ release_link
+ self_managed_prometheus_alert_event
+ shard
+ users_star_project
+ wiki_page
+ wiki_page_meta
+ ].to_set.freeze
+
+ # Some factories and their corresponding models are based on
+ # database views. In order to use those, we have to swap the
+ # view out with a table of the same structure.
+ factories_based_on_view = %i[
+ postgres_index
+ postgres_index_bloat_estimate
+ ].to_set.freeze
+
+ without_fd, with_fd = FactoryBot.factories
+ .partition { |factory| skip_factory_defaults.include?(factory.name) }
+
+ context 'with factory defaults', factory_default: :keep do
+ let_it_be(:namespace) { create_default(:namespace).freeze }
+ let_it_be(:project) { create_default(:project, :repository).freeze }
+ let_it_be(:user) { create_default(:user).freeze }
+
+ before do
+ factories_based_on_view.each do |factory|
+ view = build(factory).class.table_name
+ swapout_view_for_table(view)
+ end
+ end
+
+ with_fd.each do |factory|
+ it_behaves_like 'factory', factory
+ end
+ end
+
+ context 'without factory defaults' do
+ without_fd.each do |factory|
+ it_behaves_like 'factory', factory
+ end
+ end
+end
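
When one of these lint examples fails, the quickest way to reproduce it is to exercise the factory and trait by hand, which is all the shared example above does (the factory and trait names below are only placeholders):

    # In a spec or a Rails console with FactoryBot loaded:
    FactoryBot.build(:ci_build)             # must not raise when built in memory
    FactoryBot.create(:ci_build)            # must not raise when persisted
    FactoryBot.create(:ci_build, :success)  # and the same for each defined trait
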
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 0ca1fe1c8a6..d47f43a630d 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -991,6 +991,14 @@ RSpec.describe Group do
it { expect(group.last_owner?(@members[:owner])).to be_truthy }
+ context 'when there is also a project_bot owner' do
+ before do
+ group.add_user(create(:user, :project_bot), GroupMember::OWNER)
+ end
+
+ it { expect(group.last_owner?(@members[:owner])).to be_truthy }
+ end
+
context 'with two owners' do
before do
create(:group_member, :owner, group: group)
@@ -1116,35 +1124,58 @@ RSpec.describe Group do
end
end
- describe '#single_owner?' do
+ describe '#all_owners_excluding_project_bots' do
let_it_be(:user) { create(:user) }
context 'when there is only one owner' do
- before do
+ let!(:owner) do
group.add_user(user, GroupMember::OWNER)
end
- it 'returns true' do
- expect(group.single_owner?).to eq(true)
+ it 'returns the owner' do
+ expect(group.all_owners_excluding_project_bots).to contain_exactly(owner)
+ end
+
+ context 'and there is also a project_bot owner' do
+ before do
+ group.add_user(create(:user, :project_bot), GroupMember::OWNER)
+ end
+
+ it 'returns only the human owner' do
+ expect(group.all_owners_excluding_project_bots).to contain_exactly(owner)
+ end
end
end
context 'when there are multiple owners' do
let_it_be(:user_2) { create(:user) }
- before do
+ let!(:owner) do
group.add_user(user, GroupMember::OWNER)
+ end
+
+ let!(:owner2) do
group.add_user(user_2, GroupMember::OWNER)
end
- it 'returns true' do
- expect(group.single_owner?).to eq(false)
+ it 'returns both owners' do
+ expect(group.all_owners_excluding_project_bots).to contain_exactly(owner, owner2)
+ end
+
+ context 'and there is also a project_bot owner' do
+ before do
+ group.add_user(create(:user, :project_bot), GroupMember::OWNER)
+ end
+
+ it 'returns only the human owners' do
+ expect(group.all_owners_excluding_project_bots).to contain_exactly(owner, owner2)
+ end
end
end
context 'when there are no owners' do
it 'returns false' do
- expect(group.single_owner?).to eq(false)
+ expect(group.all_owners_excluding_project_bots).to be_empty
end
end
end
@@ -2393,19 +2424,6 @@ RSpec.describe Group do
fetch_config
end
-
- context 'when traversal ID feature flags are disabled' do
- before do
- stub_feature_flags(sync_traversal_ids: false)
- end
-
- it 'caches the parent config when group auto_devops_enabled is nil' do
- cache_key = "namespaces:{first_auto_devops_config}:#{group.id}"
- define_cache_expectations(cache_key)
-
- fetch_config
- end
- end
end
context 'cache expiration' do
@@ -2433,14 +2451,6 @@ RSpec.describe Group do
group.update!(auto_devops_enabled: true)
end
-
- it 'does not clear cache when the feature is disabled' do
- stub_feature_flags(namespaces_cache_first_auto_devops_config: false)
-
- expect(Rails.cache).not_to receive(:delete_multi)
-
- parent.update!(auto_devops_enabled: true)
- end
end
end
end
@@ -3417,4 +3427,42 @@ RSpec.describe Group do
end
end
end
+
+ describe '#gitlab_deploy_token' do
+ subject(:gitlab_deploy_token) { group.gitlab_deploy_token }
+
+ context 'when there is a gitlab deploy token associated' do
+ let!(:deploy_token) { create(:deploy_token, :group, :gitlab_deploy_token, groups: [group]) }
+
+ it { is_expected.to eq(deploy_token) }
+ end
+
+ context 'when there is no gitlab deploy token associated' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there is a gitlab deploy token associated but it has been revoked' do
+ let!(:deploy_token) { create(:deploy_token, :group, :gitlab_deploy_token, :revoked, groups: [group]) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there is a gitlab deploy token associated but it is expired' do
+ let!(:deploy_token) { create(:deploy_token, :group, :gitlab_deploy_token, :expired, groups: [group]) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there is a deploy token associated with a different name' do
+ let!(:deploy_token) { create(:deploy_token, :group, groups: [group]) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there is a gitlab deploy token associated with a different group' do
+ let!(:deploy_token) { create(:deploy_token, :group, :gitlab_deploy_token, groups: [create(:group)]) }
+
+ it { is_expected.to be_nil }
+ end
+ end
end
diff --git a/spec/models/hooks/project_hook_spec.rb b/spec/models/hooks/project_hook_spec.rb
index ec2eca96755..4253686b843 100644
--- a/spec/models/hooks/project_hook_spec.rb
+++ b/spec/models/hooks/project_hook_spec.rb
@@ -31,15 +31,6 @@ RSpec.describe ProjectHook do
end
end
- describe '#rate_limit' do
- let_it_be(:plan_limits) { create(:plan_limits, :default_plan, web_hook_calls: 100) }
- let_it_be(:hook) { create(:project_hook) }
-
- it 'returns the default limit' do
- expect(hook.rate_limit).to be(100)
- end
- end
-
describe '#parent' do
it 'returns the associated project' do
project = build(:project)
diff --git a/spec/models/hooks/service_hook_spec.rb b/spec/models/hooks/service_hook_spec.rb
index 0d65fe302e1..68c284a913c 100644
--- a/spec/models/hooks/service_hook_spec.rb
+++ b/spec/models/hooks/service_hook_spec.rb
@@ -23,14 +23,6 @@ RSpec.describe ServiceHook do
end
end
- describe '#rate_limit' do
- let(:hook) { build(:service_hook) }
-
- it 'returns nil' do
- expect(hook.rate_limit).to be_nil
- end
- end
-
describe '#parent' do
let(:hook) { build(:service_hook, integration: integration) }
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index bf69c7219a8..9f5f81dd6c0 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -185,14 +185,6 @@ RSpec.describe SystemHook do
end
end
- describe '#rate_limit' do
- let(:hook) { build(:system_hook) }
-
- it 'returns nil' do
- expect(hook.rate_limit).to be_nil
- end
- end
-
describe '#application_context' do
let(:hook) { build(:system_hook) }
diff --git a/spec/models/hooks/web_hook_log_spec.rb b/spec/models/hooks/web_hook_log_spec.rb
index 9cfbb14e087..e1fea3318f6 100644
--- a/spec/models/hooks/web_hook_log_spec.rb
+++ b/spec/models/hooks/web_hook_log_spec.rb
@@ -48,6 +48,62 @@ RSpec.describe WebHookLog do
end
end
+ describe '.delete_batch_for' do
+ let(:hook) { create(:project_hook) }
+
+ before do
+ create_list(:web_hook_log, 3, web_hook: hook)
+ create_list(:web_hook_log, 3)
+ end
+
+ subject { described_class.delete_batch_for(hook, batch_size: batch_size) }
+
+ shared_examples 'deletes batch of web hook logs' do
+ it { is_expected.to be(batch_size <= 3) }
+
+ it 'deletes min(batch_size, total) records' do
+ deleted = [batch_size, 3].min
+
+ expect { subject }.to change(described_class, :count).by(-deleted)
+ end
+ end
+
+ context 'when the batch size is less than one' do
+ let(:batch_size) { 0 }
+
+ it 'raises an argument error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when the batch size is smaller than the total' do
+ let(:batch_size) { 2 }
+
+ include_examples 'deletes batch of web hook logs'
+ end
+
+ context 'when the batch size is equal to the total' do
+ let(:batch_size) { 3 }
+
+ include_examples 'deletes batch of web hook logs'
+ end
+
+ context 'when the batch size is greater than the total' do
+ let(:batch_size) { 1000 }
+
+ include_examples 'deletes batch of web hook logs'
+ end
+
+ it 'does not loop forever' do
+ batches = 0
+ batches += 1 while described_class.delete_batch_for(hook, batch_size: 1)
+
+ expect(hook.web_hook_logs).to be_none
+ expect(described_class.count).to eq 3
+ expect(batches).to eq 3 # true three times, stops at first false
+ end
+ end
+
describe '#success?' do
let(:web_hook_log) { build(:web_hook_log, response_status: status) }
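
The `.delete_batch_for` examples pin down a small contract: reject batch sizes below one, delete at most `batch_size` logs belonging to the hook, and report whether a full batch was removed so callers can loop until it returns false. A sketch consistent with that contract (not necessarily the shipped code):

  # Sketch only: delete up to batch_size logs for the given hook and return
  # true when a full batch was deleted (i.e. more rows may remain).
  def self.delete_batch_for(web_hook, batch_size:)
    raise ArgumentError, 'batch_size must be at least 1' if batch_size < 1

    deleted = where(id: where(web_hook: web_hook).select(:id).limit(batch_size)).delete_all
    deleted == batch_size
  end
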
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index dd954e08156..fb4d1cee606 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -24,6 +24,29 @@ RSpec.describe WebHook do
describe 'validations' do
it { is_expected.to validate_presence_of(:url) }
+ describe 'url_variables' do
+ it { is_expected.to allow_value({}).for(:url_variables) }
+ it { is_expected.to allow_value({ 'foo' => 'bar' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'FOO' => 'bar' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'MY_TOKEN' => 'bar' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'foo2' => 'bar' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'x' => 'y' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'x' => ('a' * 100) }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'foo' => 'bar', 'bar' => 'baz' }).for(:url_variables) }
+ it { is_expected.to allow_value((1..20).to_h { ["k#{_1}", 'value'] }).for(:url_variables) }
+
+ it { is_expected.not_to allow_value([]).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'foo' => 1 }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'bar' => :baz }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'bar' => nil }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'foo' => '' }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'foo' => ('a' * 101) }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'has spaces' => 'foo' }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ '' => 'foo' }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ '1foo' => 'foo' }).for(:url_variables) }
+ it { is_expected.not_to allow_value((1..21).to_h { ["k#{_1}", 'value'] }).for(:url_variables) }
+ end
+
describe 'url' do
it { is_expected.to allow_value('http://example.com').for(:url) }
it { is_expected.to allow_value('https://example.com').for(:url) }
@@ -87,7 +110,7 @@ RSpec.describe WebHook do
describe 'encrypted attributes' do
subject { described_class.encrypted_attributes.keys }
- it { is_expected.to contain_exactly(:token, :url) }
+ it { is_expected.to contain_exactly(:token, :url, :url_variables) }
end
describe 'execute' do
@@ -130,11 +153,11 @@ RSpec.describe WebHook do
end
describe '#destroy' do
- it 'cascades to web_hook_logs' do
+ it 'does not cascade to web_hook_logs' do
web_hook = create(:project_hook)
create_list(:web_hook_log, 3, web_hook: web_hook)
- expect { web_hook.destroy! }.to change(web_hook.web_hook_logs, :count).by(-3)
+ expect { web_hook.destroy! }.not_to change(web_hook.web_hook_logs, :count)
end
end
@@ -470,31 +493,70 @@ RSpec.describe WebHook do
end
describe '#rate_limited?' do
- context 'when there are rate limits' do
- before do
- allow(hook).to receive(:rate_limit).and_return(3)
+ it 'is false when hook has not been rate limited' do
+ expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
+ expect(rate_limiter).to receive(:rate_limited?).and_return(false)
end
- it 'is false when hook has not been rate limited' do
- expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(false)
- expect(hook).not_to be_rate_limited
+ expect(hook).not_to be_rate_limited
+ end
+
+ it 'is true when hook has been rate limited' do
+ expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
+ expect(rate_limiter).to receive(:rate_limited?).and_return(true)
end
- it 'is true when hook has been rate limited' do
- expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(true)
- expect(hook).to be_rate_limited
+ expect(hook).to be_rate_limited
+ end
+ end
+
+ describe '#rate_limit' do
+ it 'returns the hook rate limit' do
+ expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
+ expect(rate_limiter).to receive(:limit).and_return(10)
end
+
+ expect(hook.rate_limit).to eq(10)
end
+ end
+
+ describe '#alert_status' do
+ subject(:status) { hook.alert_status }
- context 'when there are no rate limits' do
+ it { is_expected.to eq :executable }
+
+ context 'when hook has been disabled' do
before do
- allow(hook).to receive(:rate_limit).and_return(nil)
+ hook.disable!
end
- it 'does not call Gitlab::ApplicationRateLimiter, and is false' do
- expect(Gitlab::ApplicationRateLimiter).not_to receive(:peek)
- expect(hook).not_to be_rate_limited
+ it { is_expected.to eq :disabled }
+ end
+
+ context 'when hook has been backed off' do
+ before do
+ hook.disabled_until = 1.hour.from_now
end
+
+ it { is_expected.to eq :temporarily_disabled }
+ end
+ end
+
+ describe '#to_json' do
+ it 'does not error' do
+ expect { hook.to_json }.not_to raise_error
+ end
+
+ it 'does not error, when serializing unsafe attributes' do
+ expect { hook.to_json(unsafe_serialization_hash: true) }.not_to raise_error
+ end
+
+ it 'does not contain binary attributes' do
+ expect(hook.to_json).not_to include('encrypted_url_variables')
+ end
+
+ it 'does not contain binary attributes, even when serializing unsafe attributes' do
+ expect(hook.to_json(unsafe_serialization_hash: true)).not_to include('encrypted_url_variables')
end
end
end
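
The new `url_variables` examples imply a validation roughly like the sketch below: a hash of at most 20 identifier-like string keys, each mapping to a string of 1 to 100 characters. The constant and method names here are illustrative only, not taken from this diff.

  # Sketch of a url_variables validation consistent with the examples above.
  KEY_FORMAT = /\A[A-Za-z_][A-Za-z0-9_]*\z/.freeze

  validate :validate_url_variables

  def validate_url_variables
    return if url_variables.is_a?(Hash) &&
      url_variables.size <= 20 &&
      url_variables.all? { |key, value| valid_url_variable_pair?(key, value) }

    errors.add(:url_variables, 'must be up to 20 identifier-like keys mapping to 1..100 character strings')
  end

  def valid_url_variable_pair?(key, value)
    key.is_a?(String) && key.match?(KEY_FORMAT) &&
      value.is_a?(String) && value.length.between?(1, 100)
  end
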
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 0567a8bd386..038018fbd0c 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -250,7 +250,7 @@ RSpec.describe Integration do
context 'with all existing instances' do
def integration_hash(type)
- Integration.new(instance: true, type: type).to_integration_hash
+ Integration.new(instance: true, type: type).to_database_hash
end
before do
@@ -812,14 +812,14 @@ RSpec.describe Integration do
Class.new(Integration) do
def fields
[
- { name: 'token' },
- { name: 'api_token' },
- { name: 'token_api' },
- { name: 'safe_token' },
- { name: 'key' },
- { name: 'api_key' },
- { name: 'password' },
- { name: 'password_field' },
+ { name: 'token', type: 'password' },
+ { name: 'api_token', type: 'password' },
+ { name: 'token_api', type: 'password' },
+ { name: 'safe_token', type: 'password' },
+ { name: 'key', type: 'password' },
+ { name: 'api_key', type: 'password' },
+ { name: 'password', type: 'password' },
+ { name: 'password_field', type: 'password' },
{ name: 'some_safe_field' },
{ name: 'safe_field' },
{ name: 'url' },
@@ -837,15 +837,14 @@ RSpec.describe Integration do
context 'when the class uses the field DSL' do
let(:fake_integration) do
Class.new(described_class) do
- field :token
- field :token
- field :api_token
- field :token_api
- field :safe_token
- field :key
- field :api_key
- field :password
- field :password_field
+ field :token, type: 'password'
+ field :api_token, type: 'password'
+ field :token_api, type: 'password'
+ field :safe_token, type: 'password'
+ field :key, type: 'password'
+ field :api_key, type: 'password'
+ field :password, type: 'password'
+ field :password_field, type: 'password'
field :some_safe_field
field :safe_field
field :url
@@ -977,19 +976,25 @@ RSpec.describe Integration do
end
end
- describe '#to_integration_hash' do
+ describe '#to_database_hash' do
let(:properties) { { foo: 1, bar: true } }
let(:db_props) { properties.stringify_keys }
let(:record) { create(:integration, :instance, properties: properties) }
it 'does not include the properties key' do
- hash = record.to_integration_hash
+ hash = record.to_database_hash
expect(hash).not_to have_key('properties')
end
+  it 'does not include identity, ownership, or timestamp attributes' do
+ hash = record.to_database_hash
+
+ expect(hash.keys).not_to include('id', 'instance', 'project_id', 'group_id', 'created_at', 'updated_at')
+ end
+
it 'saves correctly using insert_all' do
- hash = record.to_integration_hash
+ hash = record.to_database_hash
hash[:project_id] = project.id
expect do
@@ -999,8 +1004,8 @@ RSpec.describe Integration do
expect(described_class.last).to have_attributes(properties: db_props)
end
- it 'is part of the to_integration_hash' do
- hash = record.to_integration_hash
+ it 'decrypts encrypted properties correctly' do
+ hash = record.to_database_hash
expect(hash).to include('encrypted_properties' => be_present, 'encrypted_properties_iv' => be_present)
expect(hash['encrypted_properties']).not_to eq(record.encrypted_properties)
@@ -1016,14 +1021,14 @@ RSpec.describe Integration do
context 'when the properties are empty' do
let(:properties) { {} }
- it 'is part of the to_integration_hash' do
- hash = record.to_integration_hash
+ it 'is part of the to_database_hash' do
+ hash = record.to_database_hash
expect(hash).to include('encrypted_properties' => be_nil, 'encrypted_properties_iv' => be_nil)
end
it 'saves correctly using insert_all' do
- hash = record.to_integration_hash
+ hash = record.to_database_hash
hash[:project_id] = project
expect do
@@ -1199,4 +1204,46 @@ RSpec.describe Integration do
end
end
end
+
+ describe '#async_execute' do
+ let(:integration) { described_class.new(id: 123) }
+ let(:data) { { object_kind: 'push' } }
+ let(:supported_events) { %w[push] }
+
+ subject(:async_execute) { integration.async_execute(data) }
+
+ before do
+ allow(integration).to receive(:supported_events).and_return(supported_events)
+ end
+
+  it 'queues an Integrations::ExecuteWorker' do
+ expect(Integrations::ExecuteWorker).to receive(:perform_async).with(integration.id, data)
+ expect(ProjectServiceWorker).not_to receive(:perform_async)
+
+ async_execute
+ end
+
+ context 'when the event is not supported' do
+ let(:supported_events) { %w[issue] }
+
+ it 'does not queue a worker' do
+ expect(Integrations::ExecuteWorker).not_to receive(:perform_async)
+
+ async_execute
+ end
+ end
+
+  context 'when the FF :rename_integrations_workers is disabled' do
+ before do
+ stub_feature_flags(rename_integrations_workers: false)
+ end
+
+ it 'queues a ProjectServiceWorker' do
+ expect(ProjectServiceWorker).to receive(:perform_async).with(integration.id, data)
+ expect(Integrations::ExecuteWorker).not_to receive(:perform_async)
+
+ async_execute
+ end
+ end
+ end
end
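
The `#async_execute` examples describe a simple dispatch rule: skip unsupported events, and pick the worker class based on the `rename_integrations_workers` flag. A sketch of that rule follows; the exact flag lookup and event check are assumptions.

  # Sketch: queue the integration payload on the appropriate worker.
  def async_execute(data)
    return unless supported_events.include?(data[:object_kind])

    worker =
      if Feature.enabled?(:rename_integrations_workers)
        Integrations::ExecuteWorker
      else
        ProjectServiceWorker # legacy worker kept behind the flag
      end

    worker.perform_async(id, data)
  end
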
diff --git a/spec/models/integrations/campfire_spec.rb b/spec/models/integrations/campfire_spec.rb
index 0044e6fae21..405a9ff4b3f 100644
--- a/spec/models/integrations/campfire_spec.rb
+++ b/spec/models/integrations/campfire_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Integrations::Campfire do
it "calls Campfire API to get a list of rooms and speak in a room" do
# make sure a valid list of rooms is returned
- body = File.read(Rails.root + 'spec/fixtures/project_services/campfire/rooms.json')
+ body = File.read(Rails.root + 'spec/fixtures/integrations/campfire/rooms.json')
stub_full_request(@rooms_url).with(basic_auth: @auth).to_return(
body: body,
@@ -65,7 +65,7 @@ RSpec.describe Integrations::Campfire do
it "calls Campfire API to get a list of rooms but shouldn't speak in a room" do
# return a list of rooms that do not contain a room named 'test-room'
- body = File.read(Rails.root + 'spec/fixtures/project_services/campfire/rooms2.json')
+ body = File.read(Rails.root + 'spec/fixtures/integrations/campfire/rooms2.json')
stub_full_request(@rooms_url).with(basic_auth: @auth).to_return(
body: body,
status: 200,
diff --git a/spec/models/integrations/field_spec.rb b/spec/models/integrations/field_spec.rb
index c8caf831191..6b1ce7fcbde 100644
--- a/spec/models/integrations/field_spec.rb
+++ b/spec/models/integrations/field_spec.rb
@@ -5,7 +5,14 @@ require 'spec_helper'
RSpec.describe ::Integrations::Field do
subject(:field) { described_class.new(**attrs) }
- let(:attrs) { { name: nil } }
+ let(:attrs) { { name: nil, integration_class: test_integration } }
+ let(:test_integration) do
+ Class.new(Integration) do
+ def self.default_placeholder
+ 'my placeholder'
+ end
+ end
+ end
describe '#name' do
before do
@@ -68,11 +75,8 @@ RSpec.describe ::Integrations::Field do
end
context 'when set to a dynamic value' do
- before do
- attrs[name] = -> { Time.current }
- end
-
it 'is computed' do
+ attrs[name] = -> { Time.current }
start = Time.current
travel_to(start + 1.minute) do
@@ -80,6 +84,13 @@ RSpec.describe ::Integrations::Field do
expect(field.send(name)).to be_after(start)
end
end
+
+ it 'is executed in the class scope' do
+ attrs[name] = -> { default_placeholder }
+
+ expect(field[name]).to eq('my placeholder')
+ expect(field.send(name)).to eq('my placeholder')
+ end
end
end
end
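
The dynamic-value examples show lazy field attributes being evaluated in the integration class's scope, which is how `default_placeholder` resolves. One way that could look is sketched below; the instance variable names are invented for the sketch.

  # Sketch: resolve lazy (callable) field values against the integration class.
  def [](key)
    value = @attrs[key]
    return value unless value.respond_to?(:call)

    # Evaluating with class_exec makes class-level helpers such as
    # `default_placeholder` available inside the lambda.
    @integration_class.class_exec(&value)
  end
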
diff --git a/spec/models/integrations/harbor_spec.rb b/spec/models/integrations/harbor_spec.rb
index 4a6eb27d63a..9e3d4b524a6 100644
--- a/spec/models/integrations/harbor_spec.rb
+++ b/spec/models/integrations/harbor_spec.rb
@@ -67,6 +67,16 @@ RSpec.describe Integrations::Harbor do
harbor_integration.update!(active: false)
expect(harbor_integration.ci_variables).to match_array([])
end
+
+ context 'with robot username' do
+ it 'returns username variable with $$' do
+ harbor_integration.username = 'robot$project+user'
+
+ expect(harbor_integration.ci_variables).to include(
+ { key: 'HARBOR_USERNAME', value: 'robot$$project+user' }
+ )
+ end
+ end
end
describe 'before_validation :reset_username_and_password' do
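
The robot-username example exists because CI treats `$` as variable expansion, so Harbor robot accounts such as `robot$project+user` must be exposed with `$` doubled. A sketch of the escaping, showing only the username variable:

  # Sketch: expose the Harbor username to CI with `$` escaped as `$$`.
  def ci_variables
    return [] unless active

    [
      { key: 'HARBOR_USERNAME', value: username.gsub('$', '$$') }
      # remaining HARBOR_* variables (URL, project, password) omitted from this sketch
    ]
  end
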
diff --git a/spec/models/integrations/irker_spec.rb b/spec/models/integrations/irker_spec.rb
index 8aea2c26dc5..16487aa36e7 100644
--- a/spec/models/integrations/irker_spec.rb
+++ b/spec/models/integrations/irker_spec.rb
@@ -25,9 +25,11 @@ RSpec.describe Integrations::Irker do
end
describe 'Execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:irker) { described_class.new }
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let(:irker_server) { TCPServer.new('localhost', 0) }
let(:sample_data) do
Gitlab::DataBuilder::Push.build_sample(project, user)
end
@@ -36,15 +38,13 @@ RSpec.describe Integrations::Irker do
let(:colorize_messages) { '1' }
before do
- @irker_server = TCPServer.new 'localhost', 0
-
allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(true)
allow(irker).to receive_messages(
active: true,
project: project,
project_id: project.id,
- server_host: @irker_server.addr[2],
- server_port: @irker_server.addr[1],
+ server_host: irker_server.addr[2],
+ server_port: irker_server.addr[1],
default_irc_uri: 'irc://chat.freenode.net/',
recipients: recipients,
colorize_messages: colorize_messages)
@@ -53,18 +53,22 @@ RSpec.describe Integrations::Irker do
end
after do
- @irker_server.close
+ irker_server.close
end
it 'sends valid JSON messages to an Irker listener', :sidekiq_might_not_need_inline do
+ expect(Integrations::IrkerWorker).to receive(:perform_async)
+ .with(project.id, irker.channels, colorize_messages, sample_data, irker.settings)
+ .and_call_original
+
irker.execute(sample_data)
- conn = @irker_server.accept
+ conn = irker_server.accept
Timeout.timeout(5) do
conn.each_line do |line|
msg = Gitlab::Json.parse(line.chomp("\n"))
- expect(msg.keys).to match_array(%w(to privmsg))
+ expect(msg.keys).to match_array(%w[to privmsg])
expect(msg['to']).to match_array(["irc://chat.freenode.net/#commits",
"irc://test.net/#test"])
end
@@ -72,5 +76,19 @@ RSpec.describe Integrations::Irker do
ensure
conn.close if conn
end
+
+ context 'when the FF :rename_integrations_workers is disabled' do
+ before do
+ stub_feature_flags(rename_integrations_workers: false)
+ end
+
+  it 'queues an IrkerWorker' do
+ expect(::IrkerWorker).to receive(:perform_async)
+ .with(project.id, irker.channels, colorize_messages, sample_data, irker.settings)
+ expect(Integrations::IrkerWorker).not_to receive(:perform_async)
+
+ irker.execute(sample_data)
+ end
+ end
end
end
diff --git a/spec/models/integrations/issue_tracker_data_spec.rb b/spec/models/integrations/issue_tracker_data_spec.rb
index 597df237c67..233ed7b8475 100644
--- a/spec/models/integrations/issue_tracker_data_spec.rb
+++ b/spec/models/integrations/issue_tracker_data_spec.rb
@@ -3,7 +3,11 @@
require 'spec_helper'
RSpec.describe Integrations::IssueTrackerData do
- describe 'associations' do
- it { is_expected.to belong_to :integration }
+ it_behaves_like Integrations::BaseDataFields
+
+ describe 'encrypted attributes' do
+ subject { described_class.encrypted_attributes.keys }
+
+ it { is_expected.to contain_exactly(:issues_url, :new_issue_url, :project_url) }
end
end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 061c770a61a..28d97b74adb 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -31,6 +31,61 @@ RSpec.describe Integrations::Jira do
let(:integration) { jira_integration }
end
+ describe 'validations' do
+ subject { jira_integration }
+
+ context 'when integration is active' do
+ before do
+ jira_integration.active = true
+
+ # Don't auto-fill URLs from gitlab.yml
+ stub_config(issues_tracker: {})
+ end
+
+ it { is_expected.to be_valid }
+ it { is_expected.to validate_presence_of(:url) }
+ it { is_expected.to validate_presence_of(:username) }
+ it { is_expected.to validate_presence_of(:password) }
+
+ it_behaves_like 'issue tracker integration URL attribute', :url
+ it_behaves_like 'issue tracker integration URL attribute', :api_url
+ end
+
+ context 'when integration is inactive' do
+ before do
+ jira_integration.active = false
+ end
+
+ it { is_expected.to be_valid }
+ it { is_expected.not_to validate_presence_of(:url) }
+ it { is_expected.not_to validate_presence_of(:username) }
+ it { is_expected.not_to validate_presence_of(:password) }
+ end
+
+ describe 'jira_issue_transition_id' do
+ it 'accepts a blank value' do
+ jira_integration.jira_issue_transition_id = ' '
+
+ expect(jira_integration).to be_valid
+ end
+
+ it 'accepts any string containing numbers' do
+ jira_integration.jira_issue_transition_id = 'foo 23 bar'
+
+ expect(jira_integration).to be_valid
+ end
+
+ it 'does not accept a string without numbers' do
+ jira_integration.jira_issue_transition_id = 'foo bar'
+
+ expect(jira_integration).not_to be_valid
+ expect(jira_integration.errors.full_messages).to eq([
+ 'Jira issue transition IDs must be a list of numbers that can be split with , or ;'
+ ])
+ end
+ end
+ end
+
describe '#options' do
let(:options) do
{
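
The `jira_issue_transition_id` examples allow blank values and any string containing a number, and reject strings without one. A validation sketch consistent with that behaviour; the real model may use a stricter format and scope the validation to active integrations.

  # Sketch: require at least one digit unless the value is blank.
  validates :jira_issue_transition_id,
            format: {
              with: /\d/,
              message: 'must be a list of numbers that can be split with , or ;'
            },
            allow_blank: true
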
diff --git a/spec/models/integrations/jira_tracker_data_spec.rb b/spec/models/integrations/jira_tracker_data_spec.rb
index 5430dd2eb52..d9f91527fbb 100644
--- a/spec/models/integrations/jira_tracker_data_spec.rb
+++ b/spec/models/integrations/jira_tracker_data_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
RSpec.describe Integrations::JiraTrackerData do
- describe 'associations' do
- it { is_expected.to belong_to(:integration) }
- end
+ it_behaves_like Integrations::BaseDataFields
describe 'deployment_type' do
- it { is_expected.to define_enum_for(:deployment_type).with_values([:unknown, :server, :cloud]).with_prefix(:deployment) }
+ specify do
+ is_expected.to define_enum_for(:deployment_type).with_values([:unknown, :server, :cloud]).with_prefix(:deployment)
+ end
end
describe 'encrypted attributes' do
diff --git a/spec/models/integrations/prometheus_spec.rb b/spec/models/integrations/prometheus_spec.rb
index a7495cb9574..fbeaebfd807 100644
--- a/spec/models/integrations/prometheus_spec.rb
+++ b/spec/models/integrations/prometheus_spec.rb
@@ -145,6 +145,17 @@ RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching,
expect(req_stub).to have_been_requested
end
end
+
+ context 'when configuration is not valid' do
+ before do
+ integration.api_url = nil
+ end
+
+  it 'returns a failure message' do
+ expect(integration.test[:success]).to be_falsy
+ expect(integration.test[:result]).to eq('Prometheus configuration error')
+ end
+ end
end
describe '#prometheus_client' do
diff --git a/spec/models/integrations/zentao_tracker_data_spec.rb b/spec/models/integrations/zentao_tracker_data_spec.rb
index b078c57830b..dca5c4d79ae 100644
--- a/spec/models/integrations/zentao_tracker_data_spec.rb
+++ b/spec/models/integrations/zentao_tracker_data_spec.rb
@@ -3,16 +3,14 @@
require 'spec_helper'
RSpec.describe Integrations::ZentaoTrackerData do
+ it_behaves_like Integrations::BaseDataFields
+
describe 'factory available' do
let(:zentao_tracker_data) { create(:zentao_tracker_data) }
it { expect(zentao_tracker_data.valid?).to eq true }
end
- describe 'associations' do
- it { is_expected.to belong_to(:integration) }
- end
-
describe 'encrypted attributes' do
subject { described_class.encrypted_attributes.keys }
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index c77c0a5504a..d45a23a7ef8 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -455,7 +455,7 @@ RSpec.describe Issue do
end
end
- describe '#related_issues' do
+  describe '#related_issues, including related incidents' do
let_it_be(:authorized_project) { create(:project) }
let_it_be(:authorized_project2) { create(:project) }
let_it_be(:unauthorized_project) { create(:project) }
@@ -463,12 +463,14 @@ RSpec.describe Issue do
let_it_be(:authorized_issue_a) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_b) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_c) { create(:issue, project: authorized_project2) }
+  let_it_be(:authorized_incident_a) { create(:incident, project: authorized_project) }
let_it_be(:unauthorized_issue) { create(:issue, project: unauthorized_project) }
let_it_be(:issue_link_a) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_b) }
let_it_be(:issue_link_b) { create(:issue_link, source: authorized_issue_a, target: unauthorized_issue) }
let_it_be(:issue_link_c) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_c) }
+ let_it_be(:issue_incident_link_a) { create(:issue_link, source: authorized_issue_a, target: authorized_incident_a) }
before_all do
authorized_project.add_developer(user)
@@ -477,7 +479,7 @@ RSpec.describe Issue do
it 'returns only authorized related issues for given user' do
expect(authorized_issue_a.related_issues(user))
- .to contain_exactly(authorized_issue_b, authorized_issue_c)
+ .to contain_exactly(authorized_issue_b, authorized_issue_c, authorized_incident_a)
end
it 'returns issues with valid issue_link_type' do
@@ -507,7 +509,7 @@ RSpec.describe Issue do
expect(Ability).to receive(:allowed?).with(user, :read_cross_project).and_return(false)
expect(authorized_issue_a.related_issues(user))
- .to contain_exactly(authorized_issue_b)
+ .to contain_exactly(authorized_issue_b, authorized_incident_a)
end
end
end
@@ -1565,4 +1567,64 @@ RSpec.describe Issue do
expect(issue.escalation_status).to eq(escalation_status)
end
end
+
+ describe '#expire_etag_cache' do
+ let_it_be(:issue) { create(:issue) }
+
+ subject(:expire_cache) { issue.expire_etag_cache }
+
+ it 'touches the etag cache store' do
+ key = Gitlab::Routing.url_helpers.realtime_changes_project_issue_path(issue.project, issue)
+
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |cache_store|
+ expect(cache_store).to receive(:touch).with(key)
+ end
+
+ expire_cache
+ end
+ end
+
+ describe '#link_reference_pattern' do
+ let(:match_data) { described_class.link_reference_pattern.match(link_reference_url) }
+
+ context 'with issue url' do
+ let(:link_reference_url) { 'http://localhost/namespace/project/-/issues/1' }
+
+ it 'matches with expected attributes' do
+ expect(match_data['namespace']).to eq('namespace')
+ expect(match_data['project']).to eq('project')
+ expect(match_data['issue']).to eq('1')
+ end
+ end
+
+ context 'with incident url' do
+ let(:link_reference_url) { 'http://localhost/namespace1/project1/-/issues/incident/2' }
+
+ it 'matches with expected attributes' do
+ expect(match_data['namespace']).to eq('namespace1')
+ expect(match_data['project']).to eq('project1')
+ expect(match_data['issue']).to eq('2')
+ end
+ end
+ end
+
+ context 'order by closed_at' do
+ let!(:issue_a) { create(:issue, closed_at: 1.day.ago) }
+ let!(:issue_b) { create(:issue, closed_at: 5.days.ago) }
+ let!(:issue_c_nil) { create(:issue, closed_at: nil) }
+ let!(:issue_d) { create(:issue, closed_at: 3.days.ago) }
+ let!(:issue_e_nil) { create(:issue, closed_at: nil) }
+
+ describe '.order_closed_at_asc' do
+ it 'orders on closed at' do
+ expect(described_class.order_closed_at_asc.to_a).to eq([issue_b, issue_d, issue_a, issue_c_nil, issue_e_nil])
+ end
+ end
+
+ describe '.order_closed_at_desc' do
+ it 'orders on closed at' do
+ expect(described_class.order_closed_at_desc.to_a).to eq([issue_a, issue_d, issue_b, issue_c_nil, issue_e_nil])
+ end
+ end
+ end
end
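
The `order_closed_at_asc`/`order_closed_at_desc` examples require issues without a `closed_at` to sort last in both directions, which points at NULLS LAST ordering. A sketch of matching scopes; the raw SQL ordering is an assumption and the shipped code may build it differently.

  # Sketch: closed_at ordering with NULL values always last.
  scope :order_closed_at_asc,  -> { order(Arel.sql('closed_at ASC NULLS LAST')) }
  scope :order_closed_at_desc, -> { order(Arel.sql('closed_at DESC NULLS LAST')) }
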
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index 225c9714187..a9d1a8a5ef2 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -45,6 +45,157 @@ RSpec.describe Key, :mailer do
end
end
end
+
+ describe 'validation of banned keys' do
+ let_it_be(:user) { create(:user) }
+
+ let(:key) { build(:key) }
+ let(:banned_keys) do
+ [
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2y' \
+ 'x5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW' \
+ '26s2bBXWgUPiC93T3TA6L2KOxhVcl7mljEOIYACRHPpJNYVGhinCxDUH9LxMrdNXgP5Ok= mateidu@localhost',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIBnZQ+6nhlPX/JnX5i5hXpljJ89bSnnrsSs51' \
+ 'hSPuoJGmoKowBddISK7s10AIpO0xAWGcr8PUr2FOjEBbDHqlRxoXF0Ocms9xv3ql9EYUQ5' \
+ '+U+M6BymWhNTFPOs6gFHUl8Bw3t6c+SRKBpfRFB0yzBj9d093gSdfTAFoz+yLo4vRw==',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAvIhC5skTzxyHif/7iy3yhxuK6/OB13hjPq' \
+ 'rskogkYFrcW8OK4VJT+5+Fx7wd4sQCnVn8rNqahw/x6sfcOMDI/Xvn4yKU4t8TnYf2MpUV' \
+ 'r4ndz39L5Ds1n7Si1m2suUNxWbKv58I8+NMhlt2ITraSuTU0NGymWOc8+LNi+MHXdLk= SCCP Superuser',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr' \
+ '+kz4TjGYe7gHzIw+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6' \
+ 'IedplqoPkcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5' \
+ 'y2hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NOTd0j' \
+ 'MZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcWyLbIbEgE98' \
+ 'OHlnVYCzRdK8jlqm8tehUc9c9WhQ== vagrant insecure public key',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2yx' \
+ '5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW26' \
+ 's2bBXWgUPiC93T3TA6L2KOxhVcl7mljEOIYACRHPpJNYVGhinCxDUH9LxMrdNXgP5Ok= mateidu@localhost',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAn8LoId2N5i28cNKuEWWea3yt0I/LdT/NRO' \
+ 'rF44WZewtxch+DIwteQhM1qL6EKUSqz3Q2geX1crpOsNnyh67xy5lNo086u/QewOCSRAUG' \
+ 'rQCXqFQ4JU8ny/qugWALQHjbIaPHj/3zMK09r4cpTSeAU7CW5nQyTKGmh7v9CAfWfcs= adam@localhost.localdomain',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAJTDsX+8olPZeyr58g9XE0L8PKT5030NZBPlE7np4h' \
+ 'Bqx36HoWarWq1Csn8M57dWN9StKbs03k2ggY6sYJK5AW2EWar70um3pYjKQHiZq7mITmit' \
+ 'sozFN/K7wu2e2iKRgquUwH5SuYoOJ29n7uhaILXiKZP4/H/dDudqPRSY6tJPAAAAFQDtuW' \
+ 'H90mDbU2L/Ms2lfl/cja/wHwAAAIAMBwSHZt2ysOHCFe1WLUvdwVDHUqk3QHTskuuAnMlw' \
+ 'MtSvCaUxSatdHahsMZ9VCHjoQUx6j+TcgRLDbMlRLnwUlb6wpniehLBFk+qakGcREqks5N' \
+ 'xYzFTJXwROzP72jPvVgQyOZHWq81gCild/ljL7hmrduCqYwxDIz4o7U92UKQAAAIBmhSl9' \
+ 'CVPgVMv1xO8DAHVhM1huIIK8mNFrzMJz+JXzBx81ms1kWSeQOC/nraaXFTBlqiQsvB8tzr' \
+ '4xZdbaI/QzVLKNAF5C8BJ4ScNlTIx1aZJwyMil8Nzb+0YAsw5Ja+bEZZvEVlAYnd10qRWr' \
+ 'PeEY1txLMmX3wDa+JvJL7fmuBg==',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAMq5EcIFdfCjJakyQnP/BBp9oc6mpaZVguf0Znp5C4' \
+ '0twiG1lASQJZlM1qOB/hkBWYeBCHUkcOLEnVXSZzB62L+W/LGKodqnsiQPRr57AA6jPc6m' \
+ 'NBnejHai8cSdAl9n/0s2IQjdcrxM8CPq2uEyfm0J3AV6Lrbbxr5NgE5xxM+DAAAAFQCmFk' \
+ '/M7Rx2jexsJ9COpHkHwUjcNQAAAIAdg18oByp/tjjDKhWhmmv+HbVIROkRqSxBvuEZEmcW' \
+ 'lg38mLIT1bydfpSou/V4rI5ctxwCfJ1rRr66pw6GwCrz4fXmyVlhrj7TrktyQ9+zRXhynF' \
+ '4wdNPWErhNHb8tGlSOFiOBcUTlouX3V/ka6Dkd6ZQrZLQFaH+gjfyTZZ82HQAAAIEArsJg' \
+ 'p7RLPOsCeLqoia/eljseBFVDazO5Q0ysUotTw9wgXGGVWREwm8wNggFNb9eCiBAAUfVZVf' \
+ 'hVAtFT0pBf/eIVLPXyaMw3prBt7LqeBrbagODc3WAAdMTPIdYYcOKgv+YvTXa51zG64v6p' \
+ 'QOfS8WXgKCzDl44puXfYeDk5lVQ=',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAKwKBw7D4OA1H/uD4htdh04TBIHdbSjeXUSnWJsce8' \
+ 'C0tvoB01Yarjv9TFj+tfeDYVWtUK1DA1JkyqSuoAtDANJzF4I6Isyd0KPrW3dHFTcg6Xlz' \
+ '8d3KEaHokY93NOmB/xWEkhme8b7Q0U2iZie2pgWbTLXV0FA+lhskTtPHW3+VAAAAFQDRya' \
+ 'yUlVZKXEweF3bUe03zt9e8VQAAAIAEPK1k3Y6ErAbIl96dnUCnZjuWQ7xXy062pf63QuRW' \
+ 'I6LYSscm3f1pEknWUNFr/erQ02pkfi2eP9uHl1TI1ql+UmJX3g3frfssLNZwWXAW0m8PbY' \
+ '3HZSs+f5hevM3ua32pnKDmbQ2WpvKNyycKHi81hSI14xMcdblJolhN5iY8/wAAAIAjEe5+' \
+ '0m/TlBtVkqQbUit+s/g+eB+PFQ+raaQdL1uztW3etntXAPH1MjxsAC/vthWYSTYXORkDFM' \
+ 'hrO5ssE2rfg9io0NDyTIZt+VRQMGdi++dH8ptU+ldl2ZejLFdTJFwFgcfXz+iQ1mx6h9TP' \
+ 'X1crE1KoMAVOj3yKVfKpLB1EkA== root@lbslave',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAN3AITryJMQyOKZjAky+mQ/8pOHIlu4q8pzmR0qotK' \
+ 'aLm2yye5a0PY2rOaQRAzi7EPheBXbqTb8a8TrHhGXI5P7GUHaJho5HhEnw+5TwAvP72L7L' \
+ 'cPwxMxj/rLcR/jV+uLMsVeJVWjwJcUv83yzPXoVjK0hrIm+RLLeuTM+gTylHAAAAFQD5gB' \
+ 'dXsXAiTz1atzMg3xDFF1zlowAAAIAlLy6TCMlOBM0IcPsvP/9bEjDj0M8YZazdqt4amO2I' \
+ 'aNUPYt9/sIsLOQfxIj8myDK1TOp8NyRJep7V5aICG4f3Q+XktlmLzdWn3sjvbWuIAXe1op' \
+ 'jG2T69YhxfHZr8Wn7P4tpCgyqM4uHmUKrfnBzQQ9vkUUWsZoUXM2Z7vUXVfQAAAIAU6eNl' \
+ 'phQWDwx0KOBiiYhF9BM6kDbQlyw8333rAG3G4CcjI2G8eYGtpBNliaD185UjCEsjPiudhG' \
+ 'il/j4Zt/+VY3aGOLoi8kqXBBc8ZAML9bbkXpyhQhMgwiywx3ciFmvSn2UAin8yurStYPQx' \
+ 'tXauZN5PYbdwCHPS7ApIStdpMA== wood@endec1',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAISAE3CAX4hsxTw0dRc0gx8nQ41r3Vkj9OmG6LGeKW' \
+ 'Rmpy7C6vaExuupjxid76fd4aS56lCUEEoRlJ3zE93qoK9acI6EGqGQFLuDZ0fqMyRSX+il' \
+ 'f+1HDo/TRyuraggxp9Hj9LMpZVbpFATMm0+d9Xs7eLmaJjuMsowNlOf8NFdHAAAAFQCwdv' \
+ 'qOAkR6QhuiAapQ/9iVuR0UAQAAAIBpLMo4dhSeWkChfv659WLPftxRrX/HR8YMD/jqa3R4' \
+ 'PsVM2g6dQ1191nHugtdV7uaMeOqOJ/QRWeYM+UYwT0Zgx2LqvgVSjNDfdjk+ZRY8x3SmEx' \
+ 'Fi62mKFoTGSOCXfcAfuanjaoF+sepnaiLUd+SoJShGYHoqR2QWiysTRqknlwAAAIBLEgYm' \
+ 'r9XCSqjENFDVQPFELYKT7Zs9J87PjPS1AP0qF1OoRGZ5mefK6X/6VivPAUWmmmev/BuAs8' \
+ 'M1HtfGeGGzMzDIiU/WZQ3bScLB1Ykrcjk7TOFD6xrnk/inYAp5l29hjidoAONcXoHmUAMY' \
+ 'OKqn63Q2AsDpExVcmfj99/BlpQ=='
+ ]
+ end
+
+ context 'when ssh_banned_key feature flag is enabled with a user' do
+ before do
+ stub_feature_flags(ssh_banned_key: user)
+ end
+
+ where(:key_content) { banned_keys }
+
+ with_them do
+ it 'does not allow banned keys' do
+ key.key = key_content
+ key.user = user
+
+ expect(key).to be_invalid
+ expect(key.errors[:key]).to include(
+ _('cannot be used because it belongs to a compromised private key. Stop using this key and generate a new one.'))
+ end
+
+ it 'allows when the user is a ghost user' do
+ key.key = key_content
+ key.user = User.ghost
+
+ expect(key).to be_valid
+ end
+
+ it 'allows when the user is nil' do
+ key.key = key_content
+ key.user = nil
+
+ expect(key).to be_valid
+ end
+ end
+
+ it 'allows other keys' do
+ key.user = user
+
+ expect(key).to be_valid
+ end
+
+ it 'allows other users' do
+ key.user = User.ghost
+
+ expect(key).to be_valid
+ end
+ end
+
+ context 'when ssh_banned_key feature flag is disabled' do
+ before do
+ stub_feature_flags(ssh_banned_key: false)
+ end
+
+ where(:key_content) { banned_keys }
+
+ with_them do
+ it 'allows banned keys' do
+ key.key = key_content
+
+ expect(key).to be_valid
+ end
+ end
+
+ it 'allows other keys' do
+ expect(key).to be_valid
+ end
+ end
+ end
end
describe "Methods" do
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 92f9099d04d..f93c2d36966 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -47,6 +47,16 @@ RSpec.describe GroupMember do
end
end
+ describe '#permissible_access_level_roles' do
+ let_it_be(:group) { create(:group) }
+
+ it 'returns Gitlab::Access.options_with_owner' do
+ result = described_class.permissible_access_level_roles(group.first_owner, group)
+
+ expect(result).to eq(Gitlab::Access.options_with_owner)
+ end
+ end
+
it_behaves_like 'members notifications', :group
describe '#namespace_id' do
@@ -148,4 +158,135 @@ RSpec.describe GroupMember do
end
end
end
+
+  context 'authorization refresh on addition/update/deletion' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project_a) { create(:project, group: group) }
+ let_it_be(:project_b) { create(:project, group: group) }
+ let_it_be(:project_c) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:affected_project_ids) { Project.id_in([project_a, project_b, project_c]).ids }
+
+ before do
+ stub_const(
+ "#{described_class.name}::THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS",
+ affected_project_ids.size - 1)
+ end
+
+ shared_examples_for 'calls UserProjectAccessChangedService to recalculate authorizations' do
+ it 'calls UserProjectAccessChangedService to recalculate authorizations' do
+ expect_next_instance_of(UserProjectAccessChangedService, user.id) do |service|
+ expect(service).to receive(:execute).with(blocking: blocking)
+ end
+
+ action
+ end
+ end
+
+ shared_examples_for 'tries to update permissions via refreshing authorizations for the affected projects' do
+ context 'when the number of affected projects exceeds the set threshold' do
+ it 'updates permissions via refreshing authorizations for the affected projects asynchronously' do
+ expect_next_instance_of(
+ AuthorizedProjectUpdate::ProjectAccessChangedService, affected_project_ids
+ ) do |service|
+ expect(service).to receive(:execute).with(blocking: false)
+ end
+
+ action
+ end
+
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay as a safety net' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
+
+ action
+ end
+ end
+
+ context 'when the number of affected projects does not exceed the set threshold' do
+ before do
+ stub_const(
+ "#{described_class.name}::THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS",
+ affected_project_ids.size + 1)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+ end
+
+ context 'on create' do
+ let(:action) { group.add_user(user, Gitlab::Access::GUEST) }
+ let(:blocking) { true }
+
+ it 'changes access level', :sidekiq_inline do
+ expect { action }.to change { user.can?(:guest_access, project_a) }.from(false).to(true)
+ .and change { user.can?(:guest_access, project_b) }.from(false).to(true)
+ .and change { user.can?(:guest_access, project_c) }.from(false).to(true)
+ end
+
+ it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
+
+ context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
+ before do
+ stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+ end
+
+ context 'on update' do
+ before do
+ group.add_user(user, Gitlab::Access::GUEST)
+ end
+
+ let(:action) { group.members.find_by(user: user).update!(access_level: Gitlab::Access::DEVELOPER) }
+ let(:blocking) { true }
+
+ it 'changes access level', :sidekiq_inline do
+ expect { action }.to change { user.can?(:developer_access, project_a) }.from(false).to(true)
+ .and change { user.can?(:developer_access, project_b) }.from(false).to(true)
+ .and change { user.can?(:developer_access, project_c) }.from(false).to(true)
+ end
+
+ it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
+
+ context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
+ before do
+ stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+ end
+
+ context 'on destroy' do
+ before do
+ group.add_user(user, Gitlab::Access::GUEST)
+ end
+
+ let(:action) { group.members.find_by(user: user).destroy! }
+ let(:blocking) { false }
+
+ it 'changes access level', :sidekiq_inline do
+ expect { action }.to change { user.can?(:guest_access, project_a) }.from(true).to(false)
+ .and change { user.can?(:guest_access, project_b) }.from(true).to(false)
+ .and change { user.can?(:guest_access, project_c) }.from(true).to(false)
+ end
+
+ it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
+
+ context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
+ before do
+ stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+ end
+ end
end
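
These membership examples describe a threshold: when a change touches more projects than `THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS` (and the feature flag is on), authorizations are refreshed per affected project with a delayed per-user safety net; otherwise the per-user service runs. A sketch of that branching; the callback wiring and the `all_projects` lookup are assumptions.

  # Sketch: choose between project-scoped and user-scoped authorization refresh.
  def refresh_member_authorized_projects(blocking:)
    project_ids = group.all_projects.ids

    if Feature.enabled?(:refresh_authorizations_via_affected_projects_on_group_membership) &&
        project_ids.size > THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS
      AuthorizedProjectUpdate::ProjectAccessChangedService.new(project_ids).execute(blocking: false)

      # Safety net: recalculate the user from a replica later in case of drift.
      AuthorizedProjectUpdate::UserRefreshFromReplicaWorker.bulk_perform_in(
        1.hour, [[user_id]], batch_delay: 30.seconds, batch_size: 100)
    else
      UserProjectAccessChangedService.new(user_id).execute(blocking: blocking)
    end
  end
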
diff --git a/spec/models/members/last_group_owner_assigner_spec.rb b/spec/models/members/last_group_owner_assigner_spec.rb
index bb0f751e7d5..429cf4190cf 100644
--- a/spec/models/members/last_group_owner_assigner_spec.rb
+++ b/spec/models/members/last_group_owner_assigner_spec.rb
@@ -94,5 +94,18 @@ RSpec.describe LastGroupOwnerAssigner do
end
end
end
+
+ context 'when there are bot members' do
+ context 'with a bot owner' do
+ specify do
+ create(:group_member, :owner, source: group, user: create(:user, :project_bot))
+
+ expect { assigner.execute }.to change(group_member, :last_owner)
+ .from(nil).to(true)
+ .and change(group_member, :last_blocked_owner)
+ .from(nil).to(false)
+ end
+ end
+ end
end
end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 3923f4161cc..8c989f5aaca 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -23,6 +23,30 @@ RSpec.describe ProjectMember do
end
end
+ describe '#permissible_access_level_roles' do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ before do
+ project.add_owner(owner)
+ project.add_maintainer(maintainer)
+ end
+
+ context 'when member can manage owners' do
+ it 'returns Gitlab::Access.options_with_owner' do
+ expect(described_class.permissible_access_level_roles(owner, project)).to eq(Gitlab::Access.options_with_owner)
+ end
+ end
+
+ context 'when member cannot manage owners' do
+ it 'returns Gitlab::Access.options' do
+ expect(described_class.permissible_access_level_roles(maintainer, project)).to eq(Gitlab::Access.options)
+ end
+ end
+ end
+
describe '#real_source_type' do
subject { create(:project_member).real_source_type }
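
Both member specs agree on the rule behind `permissible_access_level_roles`: users who may manage owners see `Gitlab::Access.options_with_owner`, everyone else sees `Gitlab::Access.options`. A sketch of that rule; the `:manage_owners` ability name is an assumption.

  # Sketch: only users allowed to manage owners can assign the Owner role.
  def self.permissible_access_level_roles(current_user, source)
    if current_user.can?(:manage_owners, source)
      Gitlab::Access.options_with_owner
    else
      Gitlab::Access.options
    end
  end
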
diff --git a/spec/models/merge_request/cleanup_schedule_spec.rb b/spec/models/merge_request/cleanup_schedule_spec.rb
index 85208f901fd..9c50b64f2bd 100644
--- a/spec/models/merge_request/cleanup_schedule_spec.rb
+++ b/spec/models/merge_request/cleanup_schedule_spec.rb
@@ -120,6 +120,41 @@ RSpec.describe MergeRequest::CleanupSchedule do
end
end
+ describe '.stuck' do
+ let!(:cleanup_schedule_1) { create(:merge_request_cleanup_schedule, updated_at: 1.day.ago) }
+ let!(:cleanup_schedule_2) { create(:merge_request_cleanup_schedule, :running, updated_at: 5.hours.ago) }
+ let!(:cleanup_schedule_3) { create(:merge_request_cleanup_schedule, :running, updated_at: 7.hours.ago) }
+ let!(:cleanup_schedule_4) { create(:merge_request_cleanup_schedule, :completed, updated_at: 1.day.ago) }
+ let!(:cleanup_schedule_5) { create(:merge_request_cleanup_schedule, :failed, updated_at: 1.day.ago) }
+
+  it 'returns records that have been in the running state for more than 6 hours' do
+ expect(described_class.stuck).to match_array([cleanup_schedule_3])
+ end
+ end
+
+ describe '.stuck_retry!' do
+ let!(:cleanup_schedule_1) { create(:merge_request_cleanup_schedule, :running, updated_at: 5.hours.ago) }
+ let!(:cleanup_schedule_2) { create(:merge_request_cleanup_schedule, :running, updated_at: 7.hours.ago) }
+
+ it 'sets stuck records to unstarted' do
+ expect { described_class.stuck_retry! }.to change { cleanup_schedule_2.reload.unstarted? }.from(false).to(true)
+ end
+
+ context 'when there are more than 5 stuck schedules' do
+ before do
+ create_list(:merge_request_cleanup_schedule, 5, :running, updated_at: 1.day.ago)
+ end
+
+ it 'only retries 5 stuck schedules at once' do
+ expect(described_class.stuck.count).to eq 6
+
+ described_class.stuck_retry!
+
+ expect(described_class.stuck.count).to eq 1
+ end
+ end
+ end
+
describe '.start_next' do
let!(:cleanup_schedule_1) { create(:merge_request_cleanup_schedule, :completed, scheduled_at: 1.day.ago) }
let!(:cleanup_schedule_2) { create(:merge_request_cleanup_schedule, scheduled_at: 2.days.ago) }
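
The `.stuck` and `.stuck_retry!` examples define stuck as running for more than six hours, with retries resetting at most five schedules back to unstarted per call. A sketch consistent with those expectations; the state names come from the factory traits and the update mechanism is assumed.

  # Sketch: find long-running schedules and reset a small batch of them.
  scope :stuck, -> { running.where('updated_at <= ?', 6.hours.ago) }

  def self.stuck_retry!
    stuck.limit(5).each { |schedule| schedule.update!(status: :unstarted) }
  end
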
diff --git a/spec/models/merge_request_diff_file_spec.rb b/spec/models/merge_request_diff_file_spec.rb
index 5a48438adab..c9bcb900eca 100644
--- a/spec/models/merge_request_diff_file_spec.rb
+++ b/spec/models/merge_request_diff_file_spec.rb
@@ -85,5 +85,13 @@ RSpec.describe MergeRequestDiffFile do
expect { subject.utf8_diff }.not_to raise_error
end
+
+ it 'calls #diff once' do
+ allow(subject).to receive(:diff).and_return('test')
+
+ expect(subject).to receive(:diff).once
+
+ subject.utf8_diff
+ end
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index d40c78b5b60..381eccf2376 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1381,7 +1381,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe "#work_in_progress?" do
+ describe "#draft?" do
subject { build_stubbed(:merge_request) }
[
@@ -1390,102 +1390,89 @@ RSpec.describe MergeRequest, factory_default: :keep do
it "detects the '#{draft_prefix}' prefix" do
subject.title = "#{draft_prefix}#{subject.title}"
- expect(subject.work_in_progress?).to eq true
+ expect(subject.draft?).to eq true
end
end
- [
- 'WIP:', 'WIP: ', '[WIP]', '[WIP] ', ' [WIP] WIP: [WIP] WIP:',
- "WIP ", "(WIP)",
- "draft", "Draft", "Draft -", "draft - ", "Draft ", "draft "
- ].each do |draft_prefix|
- it "doesn't detect '#{draft_prefix}' at the start of the title as a draft" do
- subject.title = "#{draft_prefix}#{subject.title}"
+ context "returns false" do
+ # We have removed support for variations of "WIP", and additionally need
+ # to test unsupported variations of "Draft" that we have seen users
+ # attempt.
+ #
+ [
+ 'WIP:', 'WIP: ', '[WIP]', '[WIP] ', ' [WIP] WIP: [WIP] WIP:',
+ "WIP ", "(WIP)",
+ "draft", "Draft", "Draft -", "draft - ", "Draft ", "draft "
+ ].each do |trigger|
+ it "when '#{trigger}' prefixes the title" do
+ subject.title = "#{trigger}#{subject.title}"
- expect(subject.work_in_progress?).to eq false
+ expect(subject.draft?).to eq false
+ end
end
- end
-
- it "doesn't detect merge request title just saying 'wip'" do
- subject.title = "wip"
- expect(subject.work_in_progress?).to eq false
- end
+ ["WIP", "Draft"].each do |trigger| # rubocop:disable Style/WordArray
+ it "when merge request title is simply '#{trigger}'" do
+ subject.title = trigger
- it "does not detect merge request title just saying 'draft'" do
- subject.title = "draft"
-
- expect(subject.work_in_progress?).to eq false
- end
-
- it 'does not detect WIP in the middle of the title' do
- subject.title = 'Something with WIP in the middle'
-
- expect(subject.work_in_progress?).to eq false
- end
-
- it 'does not detect Draft in the middle of the title' do
- subject.title = 'Something with Draft in the middle'
+ expect(subject.draft?).to eq false
+ end
- expect(subject.work_in_progress?).to eq false
- end
+ it "when #{trigger} is in the middle of the title" do
+ subject.title = "Something with #{trigger} in the middle"
- it 'does not detect Draft: in the middle of the title' do
- subject.title = 'Something with Draft: in the middle'
+ expect(subject.draft?).to eq false
+ end
- expect(subject.work_in_progress?).to eq false
- end
+ it "when #{trigger} is at the end of the title" do
+ subject.title = "Something ends with #{trigger}"
- it 'does not detect WIP at the end of the title' do
- subject.title = 'Something ends with WIP'
+ expect(subject.draft?).to eq false
+ end
- expect(subject.work_in_progress?).to eq false
- end
+ it "when title contains words starting with #{trigger}" do
+ subject.title = "#{trigger}foo #{subject.title}"
- it 'does not detect Draft at the end of the title' do
- subject.title = 'Something ends with Draft'
+ expect(subject.draft?).to eq false
+ end
- expect(subject.work_in_progress?).to eq false
- end
+ it "when title contains words containing with #{trigger}" do
+ subject.title = "Foo#{trigger}Bar #{subject.title}"
- it "doesn't detect WIP for words starting with WIP" do
- subject.title = "Wipwap #{subject.title}"
- expect(subject.work_in_progress?).to eq false
- end
+ expect(subject.draft?).to eq false
+ end
+ end
- it "doesn't detect WIP for words containing with WIP" do
- subject.title = "WupWipwap #{subject.title}"
- expect(subject.work_in_progress?).to eq false
- end
+      it 'when Draft: is in the middle of the title' do
+ subject.title = 'Something with Draft: in the middle'
- it "doesn't detect draft for words containing with draft" do
- subject.title = "Drafting #{subject.title}"
- expect(subject.work_in_progress?).to eq false
- end
+ expect(subject.draft?).to eq false
+ end
- it "doesn't detect WIP by default" do
- expect(subject.work_in_progress?).to eq false
- end
+ it "when the title does not contain draft" do
+ expect(subject.draft?).to eq false
+ end
- it "is aliased to #draft?" do
- expect(subject.method(:work_in_progress?)).to eq(subject.method(:draft?))
+ it "is aliased to #draft?" do
+ expect(subject.method(:work_in_progress?)).to eq(subject.method(:draft?))
+ end
end
end
- describe "#wipless_title" do
+ describe "#draftless_title" do
subject { build_stubbed(:merge_request) }
['draft:', 'Draft: ', '[Draft]', '[DRAFT] '].each do |draft_prefix|
it "removes a '#{draft_prefix}' prefix" do
- wipless_title = subject.title
+ draftless_title = subject.title
subject.title = "#{draft_prefix}#{subject.title}"
- expect(subject.wipless_title).to eq wipless_title
+ expect(subject.draftless_title).to eq draftless_title
end
it "is satisfies the #work_in_progress? method" do
subject.title = "#{draft_prefix}#{subject.title}"
- subject.title = subject.wipless_title
+ subject.title = subject.draftless_title
expect(subject.work_in_progress?).to eq false
end
@@ -1497,58 +1484,58 @@ RSpec.describe MergeRequest, factory_default: :keep do
it "doesn't remove a '#{wip_prefix}' prefix" do
subject.title = "#{wip_prefix}#{subject.title}"
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
end
it 'removes only draft prefix from the MR title' do
subject.title = 'Draft: Implement feature called draft'
- expect(subject.wipless_title).to eq 'Implement feature called draft'
+ expect(subject.draftless_title).to eq 'Implement feature called draft'
end
it 'does not remove WIP in the middle of the title' do
subject.title = 'Something with WIP in the middle'
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
it 'does not remove Draft in the middle of the title' do
subject.title = 'Something with Draft in the middle'
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
it 'does not remove WIP at the end of the title' do
subject.title = 'Something ends with WIP'
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
it 'does not remove Draft at the end of the title' do
subject.title = 'Something ends with Draft'
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
end
- describe "#wip_title" do
+ describe "#draft_title" do
it "adds the Draft: prefix to the title" do
- wip_title = "Draft: #{subject.title}"
+ draft_title = "Draft: #{subject.title}"
- expect(subject.wip_title).to eq wip_title
+ expect(subject.draft_title).to eq draft_title
end
it "does not add the Draft: prefix multiple times" do
- wip_title = "Draft: #{subject.title}"
- subject.title = subject.wip_title
- subject.title = subject.wip_title
+ draft_title = "Draft: #{subject.title}"
+ subject.title = subject.draft_title
+ subject.title = subject.draft_title
- expect(subject.wip_title).to eq wip_title
+ expect(subject.draft_title).to eq draft_title
end
it "is satisfies the #work_in_progress? method" do
- subject.title = subject.wip_title
+ subject.title = subject.draft_title
expect(subject.work_in_progress?).to eq true
end
@@ -5077,4 +5064,75 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(assignees).to match_array([subject.merge_request_assignees[0]])
end
end
+
+ describe '#recent_diff_head_shas' do
+ let_it_be(:merge_request_with_diffs) do
+ params = {
+ target_project: project,
+ source_project: project,
+ target_branch: 'master',
+ source_branch: 'feature'
+ }
+
+ create(:merge_request, params).tap do |mr|
+ 4.times { mr.merge_request_diffs.create! }
+ mr.create_merge_head_diff
+ end
+ end
+
+ let(:shas) do
+ # re-find to avoid caching the association
+ described_class.find(merge_request_with_diffs.id).merge_request_diffs.order(id: :desc).pluck(:head_commit_sha)
+ end
+
+ shared_examples 'correctly sorted and limited diff_head_shas' do
+ it 'has up to MAX_RECENT_DIFF_HEAD_SHAS, ordered most recent first' do
+ stub_const('MergeRequest::MAX_RECENT_DIFF_HEAD_SHAS', 3)
+
+ expect(subject.recent_diff_head_shas).to eq(shas.first(3))
+ end
+
+ it 'supports limits' do
+ expect(subject.recent_diff_head_shas(2)).to eq(shas.first(2))
+ end
+ end
+
+ context 'when the association is not loaded' do
+ subject(:mr) { merge_request_with_diffs }
+
+ include_examples 'correctly sorted and limited diff_head_shas'
+ end
+
+ context 'when the association is loaded' do
+ subject(:mr) do
+ described_class.where(id: merge_request_with_diffs.id).preload(:merge_request_diffs).first
+ end
+
+ include_examples 'correctly sorted and limited diff_head_shas'
+
+ it 'does not issue any queries' do
+ expect(subject).to be_a(described_class) # preload here
+
+ expect { subject.recent_diff_head_shas }.not_to exceed_query_limit(0)
+ end
+ end
+ end
+
+ describe '#target_default_branch?' do
+ let_it_be(:merge_request) { build(:merge_request, project: project) }
+
+ it 'returns false' do
+ expect(merge_request.target_default_branch?).to be false
+ end
+
+    context 'with target_branch equal to the project default branch' do
+ before do
+ merge_request.target_branch = "master"
+ end
+
+      it 'returns true' do
+ expect(merge_request.target_default_branch?).to be true
+ end
+ end
+ end
end
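
The closing `#target_default_branch?` examples reduce to a single comparison. A sketch, assuming the target project's `default_branch` helper:

  # Sketch: true only when the MR targets the project's default branch.
  def target_default_branch?
    target_branch == target_project.default_branch
  end
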
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 06044cf53cc..72a57b6076a 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -82,14 +82,16 @@ RSpec.describe Milestone do
context 'when it is tied to a release for another project' do
it 'creates a validation error' do
other_project = create(:project)
- milestone.releases << build(:release, project: other_project)
+ milestone.releases << build(:release,
+ project: other_project, author_id: other_project.members.first.user_id)
expect(milestone).not_to be_valid
end
end
context 'when it is tied to a release for the same project' do
it 'is valid' do
- milestone.releases << build(:release, project: project)
+ milestone.releases << build(:release,
+ project: project, author_id: project.members.first.user_id)
expect(milestone).to be_valid
end
end
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index c399a0084fb..d2ee0b40ed6 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -58,6 +58,19 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
expect(root_storage_statistics.uploads_size).to eq(total_uploads_size)
end
+ it 'aggregates container_repositories_size and storage_size' do
+ allow(namespace).to receive(:container_repositories_size).and_return(999)
+
+ root_storage_statistics.recalculate!
+
+ root_storage_statistics.reload
+
+ total_storage_size = project_stat1.storage_size + project_stat2.storage_size + 999
+
+ expect(root_storage_statistics.container_registry_size).to eq(999)
+ expect(root_storage_statistics.storage_size).to eq(total_storage_size)
+ end
+
it 'works when there are no projects' do
Project.delete_all
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index c9f8a1bcdc2..25234db5734 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe NamespaceSetting, type: :model do
end
it { is_expected.to define_enum_for(:jobs_to_be_done).with_values([:basics, :move_repository, :code_storage, :exploring, :ci, :other]).with_suffix }
+ it { is_expected.to define_enum_for(:enabled_git_access_protocol).with_values([:all, :ssh, :http]).with_suffix }
describe "validations" do
describe "#default_branch_name_content" do
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 4373d9a0b24..96e06e617d5 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe Namespace do
it { is_expected.to have_many :pending_builds }
it { is_expected.to have_one :namespace_route }
it { is_expected.to have_many :namespace_members }
+ it { is_expected.to have_one :cluster_enabled_grant }
it do
is_expected.to have_one(:ci_cd_settings).class_name('NamespaceCiCdSetting').inverse_of(:namespace).autosave(true)
@@ -374,6 +375,14 @@ RSpec.describe Namespace do
context 'linear' do
it_behaves_like 'namespace traversal scopes'
+
+ context 'without inner join ancestors query' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestor_scopes_with_inner_join: false)
+ end
+
+ it_behaves_like 'namespace traversal scopes'
+ end
end
shared_examples 'makes recursive queries' do
@@ -574,6 +583,107 @@ RSpec.describe Namespace do
end
end
+ describe '#container_repositories_size_cache_key' do
+ it 'returns the correct cache key' do
+ expect(namespace.container_repositories_size_cache_key).to eq "namespaces:#{namespace.id}:container_repositories_size"
+ end
+ end
+
+ describe '#container_repositories_size', :clean_gitlab_redis_cache do
+ let(:project_namespace) { create(:namespace) }
+
+ subject { project_namespace.container_repositories_size }
+
+ context 'on gitlab.com' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:gitlab_api_supported, :no_container_repositories, :all_migrated, :returned_size, :expected_result) do
+ nil | nil | nil | nil | nil
+ false | nil | nil | nil | nil
+ true | true | nil | nil | 0
+ true | false | false | nil | nil
+ true | false | true | 555 | 555
+ true | false | true | nil | nil
+ end
+
+ with_them do
+ before do
+ stub_container_registry_config(enabled: true, api_url: 'http://container-registry', key: 'spec/fixtures/x509_certificate_pk.key')
+ allow(Gitlab).to receive(:com?).and_return(true)
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(gitlab_api_supported)
+ allow(project_namespace).to receive_message_chain(:all_container_repositories, :empty?).and_return(no_container_repositories)
+ allow(project_namespace).to receive_message_chain(:all_container_repositories, :all_migrated?).and_return(all_migrated)
+ allow(ContainerRegistry::GitlabApiClient).to receive(:deduplicated_size).with(project_namespace.full_path).and_return(returned_size)
+ end
+
+ it { is_expected.to eq(expected_result) }
+
+ it 'caches the result when all migrated' do
+ if all_migrated
+ expect(Rails.cache)
+ .to receive(:fetch)
+ .with(project_namespace.container_repositories_size_cache_key, expires_in: 7.days)
+
+ subject
+ end
+ end
+ end
+ end
+
+ context 'not on gitlab.com' do
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'for a sub-group' do
+ let(:parent_namespace) { create(:group) }
+ let(:project_namespace) { create(:group, parent: parent_namespace) }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+
+ describe '#all_container_repositories' do
+ context 'with personal namespace' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_namespace) { user.namespace }
+
+ context 'with no project' do
+ it { expect(project_namespace.all_container_repositories).to match_array([]) }
+ end
+
+ context 'with projects' do
+ it "returns container repositories" do
+ project = create(:project, namespace: project_namespace)
+ rep = create(:container_repository, project: project)
+
+ expect(project_namespace.all_container_repositories).to match_array([rep])
+ end
+ end
+ end
+
+ context 'with subgroups' do
+ let_it_be(:project_namespace) { create(:group) }
+ let_it_be(:subgroup1) { create(:group, parent: project_namespace) }
+ let_it_be(:subgroup2) { create(:group, parent: subgroup1) }
+
+ context 'with no project' do
+ it { expect(project_namespace.all_container_repositories).to match_array([]) }
+ end
+
+ context 'with projects' do
+ it "returns container repositories" do
+ subgrp1_project = create(:project, namespace: subgroup1)
+ rep1 = create(:container_repository, project: subgrp1_project)
+
+ subgrp2_project = create(:project, namespace: subgroup2)
+ rep2 = create(:container_repository, project: subgrp2_project)
+
+ expect(project_namespace.all_container_repositories).to match_array([rep1, rep2])
+ end
+ end
+ end
+ end
+
describe '.search' do
let_it_be(:first_group) { create(:group, name: 'my first namespace', path: 'old-path') }
let_it_be(:parent_group) { create(:group, name: 'my parent namespace', path: 'parent-path') }
@@ -894,24 +1004,6 @@ RSpec.describe Namespace do
expect_project_directories_at('parent/renamed', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- child.update!(path: 'renamed')
-
- expect_project_directories_at('parent/renamed')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('rename_namespace', ['parent/child', 'parent/renamed'])
-
- child.update!(path: 'renamed')
- end
- end
end
context 'renaming parent' do
@@ -923,24 +1015,6 @@ RSpec.describe Namespace do
expect_project_directories_at('renamed/child', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- parent.update!(path: 'renamed')
-
- expect_project_directories_at('renamed/child')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('rename_namespace', %w(parent renamed))
-
- parent.update!(path: 'renamed')
- end
- end
end
context 'moving from one parent to another' do
@@ -952,24 +1026,6 @@ RSpec.describe Namespace do
expect_project_directories_at('new_parent/child', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- child.update!(parent: new_parent)
-
- expect_project_directories_at('new_parent/child')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('move_namespace', %w(child parent new_parent))
-
- child.update!(parent: new_parent)
- end
- end
end
context 'moving from having a parent to root' do
@@ -981,24 +1037,6 @@ RSpec.describe Namespace do
expect_project_directories_at('child', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- child.update!(parent: nil)
-
- expect_project_directories_at('child')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('move_namespace', ['child', 'parent', nil])
-
- child.update!(parent: nil)
- end
- end
end
context 'moving from root to having a parent' do
@@ -1010,24 +1048,6 @@ RSpec.describe Namespace do
expect_project_directories_at('new_parent/parent/child', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- parent.update!(parent: new_parent)
-
- expect_project_directories_at('new_parent/parent/child')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('move_namespace', ['parent', nil, 'new_parent'])
-
- parent.update!(parent: new_parent)
- end
- end
end
end
end
@@ -2238,10 +2258,24 @@ RSpec.describe Namespace do
describe 'storage_enforcement_date' do
let_it_be(:namespace) { create(:group) }
+ before do
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
+ end
+
# Date TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
- it 'returns false' do
+ it 'returns nil' do
expect(namespace.storage_enforcement_date).to be(nil)
end
+
+ context 'when :namespace_storage_limit_bypass_date_check is enabled' do
+ before do
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: true)
+ end
+
+ it 'returns the current date', :freeze_time do
+ expect(namespace.storage_enforcement_date).to eq(Date.current)
+ end
+ end
end
describe 'serialization' do
@@ -2251,27 +2285,23 @@ RSpec.describe Namespace do
end
describe '#certificate_based_clusters_enabled?' do
- it 'does not call Feature.enabled? twice with request_store', :request_store do
- expect(Feature).to receive(:enabled?).once
-
- namespace.certificate_based_clusters_enabled?
- namespace.certificate_based_clusters_enabled?
- end
-
- it 'call Feature.enabled? twice without request_store' do
- expect(Feature).to receive(:enabled?).twice
-
- namespace.certificate_based_clusters_enabled?
- namespace.certificate_based_clusters_enabled?
- end
-
context 'with ff disabled' do
before do
stub_feature_flags(certificate_based_clusters: false)
end
- it 'is truthy' do
- expect(namespace.certificate_based_clusters_enabled?).to be_falsy
+ context 'with a cluster_enabled_grant' do
+ it 'is truthy' do
+ create(:cluster_enabled_grant, namespace: namespace)
+
+ expect(namespace.certificate_based_clusters_enabled?).to be_truthy
+ end
+ end
+
+ context 'without a cluster_enabled_grant' do
+ it 'is falsy' do
+ expect(namespace.certificate_based_clusters_enabled?).to be_falsy
+ end
end
end
@@ -2280,8 +2310,18 @@ RSpec.describe Namespace do
stub_feature_flags(certificate_based_clusters: true)
end
- it 'is truthy' do
- expect(namespace.certificate_based_clusters_enabled?).to be_truthy
+ context 'with a cluster_enabled_grant' do
+ it 'is truthy' do
+ create(:cluster_enabled_grant, namespace: namespace)
+
+ expect(namespace.certificate_based_clusters_enabled?).to be_truthy
+ end
+ end
+
+ context 'without a cluster_enabled_grant' do
+ it 'is truthy' do
+ expect(namespace.certificate_based_clusters_enabled?).to be_truthy
+ end
end
end
end
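Note: the table-driven examples for #container_repositories_size above encode a short-circuit order: nil unless the registry's GitLab API is supported, 0 when the namespace has no container repositories, nil until every repository is migrated, and otherwise the deduplicated size cached for 7 days. The following is a minimal sketch of that flow, with method names mirroring the spec's stubs; it is an assumption, not the actual model code (the sub-group example implies an extra guard not shown here).

# Sketch only: approximates the behaviour the table-driven spec stubs out.
# Names mirror the spec's doubles and are assumptions, not the implementation.
def container_repositories_size
  return unless Gitlab.com?
  return unless ContainerRegistry::GitlabApiClient.supports_gitlab_api?
  return 0 if all_container_repositories.empty?
  return unless all_container_repositories.all_migrated?

  Rails.cache.fetch(container_repositories_size_cache_key, expires_in: 7.days) do
    ContainerRegistry::GitlabApiClient.deduplicated_size(full_path)
  end
end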
diff --git a/spec/models/packages/cleanup/policy_spec.rb b/spec/models/packages/cleanup/policy_spec.rb
index 972071aa0ad..c08ae4aa7e7 100644
--- a/spec/models/packages/cleanup/policy_spec.rb
+++ b/spec/models/packages/cleanup/policy_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Packages::Cleanup::Policy, type: :model do
is_expected
.to validate_inclusion_of(:keep_n_duplicated_package_files)
.in_array(described_class::KEEP_N_DUPLICATED_PACKAGE_FILES_VALUES)
- .with_message('keep_n_duplicated_package_files is invalid')
+ .with_message('is invalid')
end
end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index a9ed811e77d..06f02f021cf 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -1336,4 +1336,24 @@ RSpec.describe Packages::Package, type: :model do
end
end
end
+
+ describe '#normalized_pypi_name' do
+ let_it_be(:package) { create(:pypi_package) }
+
+ subject { package.normalized_pypi_name }
+
+ where(:package_name, :normalized_name) do
+ 'ASDF' | 'asdf'
+ 'a.B_c-d' | 'a-b-c-d'
+ 'a-------b....c___d' | 'a-b-c-d'
+ end
+
+ with_them do
+ before do
+ package.update_column(:name, package_name)
+ end
+
+ it { is_expected.to eq(normalized_name) }
+ end
+ end
end
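Note: the #normalized_pypi_name examples follow PEP 503 normalization: lowercase the name and collapse every run of '-', '_' or '.' into a single hyphen. A standalone sketch of that rule (illustrative, not the model method itself):

# PEP 503 normalization, as exercised by the table above.
def normalize_pypi_name(name)
  name.gsub(/[-_.]+/, '-').downcase
end

normalize_pypi_name('ASDF')               # => "asdf"
normalize_pypi_name('a.B_c-d')            # => "a-b-c-d"
normalize_pypi_name('a-------b....c___d') # => "a-b-c-d"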
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index 381e42978f4..f9c458b2c80 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -213,8 +213,11 @@ RSpec.describe PlanLimits do
storage_size_limit
daily_invites
web_hook_calls
+ web_hook_calls_mid
+ web_hook_calls_low
ci_daily_pipeline_schedule_triggers
repository_size
+ security_policy_scan_execution_schedules
] + disabled_max_artifact_size_columns
end
diff --git a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
index eefe5bfc6c4..7d4268f74e9 100644
--- a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
@@ -7,8 +7,10 @@ RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
let_it_be(:project_1) { create(:project) }
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
+ let_it_be(:project_4) { create(:project) }
+ let_it_be(:project_5) { create(:project) }
- let(:projects) { [project_1, project_2, project_3] }
+ let(:projects) { [project_1, project_2, project_3, project_4, project_5] }
let(:query) { projects.each { |project| user.can?(:read_project, project) } }
before do
@@ -17,8 +19,11 @@ RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
end
context 'without preloader' do
- it 'runs N queries' do
- expect { query }.to make_queries(projects.size)
+ it 'runs some queries' do
+ # we have an existing N+1: one query for each project the user is not a member of
+ # in this spec, project_3, project_4, project_5
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/362890
+ expect { query }.to make_queries(projects.size + 3)
end
end
@@ -34,7 +39,7 @@ RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
end
context 'when projects is an array of IDs' do
- let(:projects_arg) { [project_1.id, project_2.id, project_3.id] }
+ let(:projects_arg) { projects.map(&:id) }
it 'avoids N+1 queries' do
expect { query }.not_to make_queries
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index 941f6c0a49d..dae0f84eda3 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe ProjectFeature do
end
end
- it_behaves_like 'access level validation', ProjectFeature::FEATURES - %i(pages) do
+ it_behaves_like 'access level validation', ProjectFeature::FEATURES - %i(pages package_registry) do
let(:container_features) { project.project_feature }
end
@@ -170,6 +170,10 @@ RSpec.describe ProjectFeature do
expect(described_class.required_minimum_access_level(:repository)).to eq(Gitlab::Access::GUEST)
end
+ it 'handles package registry' do
+ expect(described_class.required_minimum_access_level(:package_registry)).to eq(Gitlab::Access::REPORTER)
+ end
+
it 'raises error if feature is invalid' do
expect do
described_class.required_minimum_access_level(:foos)
@@ -243,6 +247,50 @@ RSpec.describe ProjectFeature do
end
end
+ describe 'package_registry_access_level' do
+ context 'with default value' do
+ where(:config_packages_enabled, :expected_result) do
+ false | ProjectFeature::DISABLED
+ true | ProjectFeature::ENABLED
+ nil | ProjectFeature::DISABLED
+ end
+
+ with_them do
+ it 'creates project_feature with correct package_registry_access_level' do
+ stub_packages_setting(enabled: config_packages_enabled)
+ project = Project.new
+
+ expect(project.project_feature.package_registry_access_level).to eq(expected_result)
+ end
+ end
+ end
+
+ context 'sync packages_enabled' do
+ # rubocop:disable Lint/BinaryOperatorWithIdenticalOperands
+ where(:initial_value, :new_value, :expected_result) do
+ ProjectFeature::DISABLED | ProjectFeature::DISABLED | false
+ ProjectFeature::DISABLED | ProjectFeature::ENABLED | true
+ ProjectFeature::DISABLED | ProjectFeature::PUBLIC | true
+ ProjectFeature::ENABLED | ProjectFeature::DISABLED | false
+ ProjectFeature::ENABLED | ProjectFeature::ENABLED | true
+ ProjectFeature::ENABLED | ProjectFeature::PUBLIC | true
+ ProjectFeature::PUBLIC | ProjectFeature::DISABLED | false
+ ProjectFeature::PUBLIC | ProjectFeature::ENABLED | true
+ ProjectFeature::PUBLIC | ProjectFeature::PUBLIC | true
+ end
+ # rubocop:enable Lint/BinaryOperatorWithIdenticalOperands
+
+ with_them do
+ it 'sets the correct value' do
+ project = create(:project, package_registry_access_level: initial_value)
+
+ project.project_feature.update!(package_registry_access_level: new_value)
+
+ expect(project.packages_enabled).to eq(expected_result)
+ end
+ end
+ end
+ end
# rubocop:disable Gitlab/FeatureAvailableUsage
describe '#feature_available?' do
let(:features) { ProjectFeature::FEATURES }
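Note: the 'sync packages_enabled' table above reduces to one rule: the legacy boolean mirrors whether package_registry_access_level is anything other than DISABLED. A hedged sketch of that mapping (illustrative, not the actual sync callback):

# Illustrative only: every non-DISABLED access level in the table above
# yields packages_enabled == true.
def packages_enabled_for(access_level)
  access_level != ProjectFeature::DISABLED
end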
diff --git a/spec/models/project_group_link_spec.rb b/spec/models/project_group_link_spec.rb
index c925d87170c..8b95b86b14b 100644
--- a/spec/models/project_group_link_spec.rb
+++ b/spec/models/project_group_link_spec.rb
@@ -30,6 +30,12 @@ RSpec.describe ProjectGroupLink do
expect(project_group_link).not_to be_valid
end
+
+ it 'does not allow a project to be shared with `OWNER` access level' do
+ project_group_link.group_access = Gitlab::Access::OWNER
+
+ expect(project_group_link).not_to be_valid
+ end
end
describe 'scopes' do
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index ed5b3d4e0be..2d84c1b843e 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -147,6 +147,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:job_artifacts).dependent(:restrict_with_error) }
it { is_expected.to have_many(:build_trace_chunks).through(:builds).dependent(:restrict_with_error) }
it { is_expected.to have_many(:secure_files).class_name('Ci::SecureFile').dependent(:restrict_with_error) }
+ it { is_expected.to have_one(:build_artifacts_size_refresh).class_name('Projects::BuildArtifactsSizeRefresh') }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -287,41 +288,35 @@ RSpec.describe Project, factory_default: :keep do
end
context 'updating a project' do
- shared_examples 'project update' do
- let_it_be(:project_namespace) { create(:project_namespace) }
- let_it_be(:project) { project_namespace.project }
+ let_it_be(:project_namespace) { create(:project_namespace) }
+ let_it_be(:project) { project_namespace.project }
- context 'when project namespace is not set' do
- before do
- project.update_column(:project_namespace_id, nil)
- project.reload
- end
+ context 'when project has an associated project namespace' do
+ # when FF is disabled, creating a project does not create a project_namespace, so we create one
+ it 'project is INVALID when trying to remove project namespace' do
+ project.reload
+ # check that project actually has an associated project namespace
+ expect(project.project_namespace_id).to eq(project_namespace.id)
- it 'updates the project successfully' do
- # pre-check that project does not have a project namespace
- expect(project.project_namespace).to be_nil
+ expect do
+ project.update!(project_namespace_id: nil, path: 'hopefully-valid-path1')
+ end.to raise_error(ActiveRecord::RecordInvalid)
+ expect(project).to be_invalid
+ expect(project.errors.full_messages).to include("Project namespace can't be blank")
+ expect(project.reload.project_namespace).to be_in_sync_with_project(project)
+ end
- project.update!(path: 'hopefully-valid-path2')
+ context 'when same project is being updated in 2 instances' do
+ it 'syncs only changed attributes' do
+ project1 = Project.last
+ project2 = Project.last
- expect(project).to be_persisted
- expect(project).to be_valid
- expect(project.path).to eq('hopefully-valid-path2')
- expect(project.project_namespace).to be_nil
- end
- end
+ project_name = project1.name
+ project_path = project1.path
- context 'when project has an associated project namespace' do
- # when FF is disabled creating a project does not create a project_namespace, so we create one
- it 'project is INVALID when trying to remove project namespace' do
- project.reload
- # check that project actually has an associated project namespace
- expect(project.project_namespace_id).to eq(project_namespace.id)
+ project1.update!(name: project_name + "-1")
+ project2.update!(path: project_path + "-1")
- expect do
- project.update!(project_namespace_id: nil, path: 'hopefully-valid-path1')
- end.to raise_error(ActiveRecord::RecordInvalid)
- expect(project).to be_invalid
- expect(project.errors.full_messages).to include("Project namespace can't be blank")
expect(project.reload.project_namespace).to be_in_sync_with_project(project)
end
end
@@ -4744,8 +4739,7 @@ RSpec.describe Project, factory_default: :keep do
shared_examples 'filter respects visibility' do
it 'respects visibility' do
enable_admin_mode!(user) if admin_mode
- project.update!(visibility_level: Gitlab::VisibilityLevel.level_value(project_level.to_s))
- update_feature_access_level(project, feature_access_level)
+ update_feature_access_level(project, feature_access_level, visibility_level: Gitlab::VisibilityLevel.level_value(project_level.to_s))
expected_objects = expected_count == 1 ? [project] : []
@@ -6225,7 +6219,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#gitlab_deploy_token' do
let(:project) { create(:project) }
- subject { project.gitlab_deploy_token }
+ subject(:gitlab_deploy_token) { project.gitlab_deploy_token }
context 'when there is a gitlab deploy token associated' do
let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, projects: [project]) }
@@ -6257,10 +6251,43 @@ RSpec.describe Project, factory_default: :keep do
context 'when there is a deploy token associated to a different project' do
let(:project_2) { create(:project) }
- let!(:deploy_token) { create(:deploy_token, projects: [project_2]) }
+ let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, projects: [project_2]) }
it { is_expected.to be_nil }
end
+
+ context 'when the project group has a gitlab deploy token associated' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+ let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, :group, groups: [group]) }
+
+ it { is_expected.to eq(deploy_token) }
+
+ context 'when the FF ci_variable_for_group_gitlab_deploy_token is disabled' do
+ before do
+ stub_feature_flags(ci_variable_for_group_gitlab_deploy_token: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ context 'when the project and its group has a gitlab deploy token associated' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+ let!(:project_deploy_token) { create(:deploy_token, :gitlab_deploy_token, projects: [project]) }
+ let!(:group_deploy_token) { create(:deploy_token, :gitlab_deploy_token, :group, groups: [group]) }
+
+ it { is_expected.to eq(project_deploy_token) }
+
+ context 'when the FF ci_variable_for_group_gitlab_deploy_token is disabled' do
+ before do
+ stub_feature_flags(ci_variable_for_group_gitlab_deploy_token: false)
+ end
+
+ it { is_expected.to eq(project_deploy_token) }
+ end
+ end
end
context 'with uploads' do
@@ -6824,50 +6851,46 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#access_request_approvers_to_be_notified' do
- context 'for a personal project' do
- let_it_be(:project) { create(:project) }
- let_it_be(:maintainer) { create(:user) }
+ shared_examples 'returns active, non_invited, non_requested owners/maintainers of the project' do
+ specify do
+ maintainer = create(:project_member, :maintainer, source: project)
- let(:owner_membership) { project.members.owners.find_by(user_id: project.namespace.owner_id) }
+ create(:project_member, :developer, project: project)
+ create(:project_member, :maintainer, :invited, project: project)
+ create(:project_member, :maintainer, :access_request, project: project)
+ create(:project_member, :maintainer, :blocked, project: project)
+ create(:project_member, :owner, :blocked, project: project)
- it 'includes only the owner of the personal project' do
- expect(project.access_request_approvers_to_be_notified.to_a).to eq([owner_membership])
+ expect(project.access_request_approvers_to_be_notified.to_a).to match_array([maintainer, owner])
end
+ end
- it 'includes the maintainers of the personal project, if any' do
- project.add_maintainer(maintainer)
- maintainer_membership = project.members.maintainers.find_by(user_id: maintainer.id)
+ context 'for a personal project' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:owner) { project.members.find_by(user_id: project.first_owner.id) }
- expect(project.access_request_approvers_to_be_notified.to_a).to match_array([owner_membership, maintainer_membership])
- end
+ it_behaves_like 'returns active, non_invited, non_requested owners/maintainers of the project'
end
- let_it_be(:project) { create(:project, group: create(:group, :public)) }
+ context 'for a project in a group' do
+ let_it_be(:project) { create(:project, group: create(:group, :public)) }
+ let_it_be(:owner) { create(:project_member, :owner, source: project) }
- it 'returns a maximum of ten maintainers of the project in recent_sign_in descending order' do
- limit = 2
- stub_const("Member::ACCESS_REQUEST_APPROVERS_TO_BE_NOTIFIED_LIMIT", limit)
- users = create_list(:user, limit + 1, :with_sign_ins)
- active_maintainers = users.map do |user|
- create(:project_member, :maintainer, user: user, project: project)
- end
-
- active_maintainers_in_recent_sign_in_desc_order = project.members_and_requesters
- .id_in(active_maintainers)
- .order_recent_sign_in.limit(limit)
+ it 'returns a maximum of ten maintainers/owners of the project in recent_sign_in descending order' do
+ users = create_list(:user, 11, :with_sign_ins)
- expect(project.access_request_approvers_to_be_notified).to eq(active_maintainers_in_recent_sign_in_desc_order)
- end
+ active_maintainers_and_owners = users.map do |user|
+ create(:project_member, [:maintainer, :owner].sample, user: user, project: project)
+ end
- it 'returns active, non_invited, non_requested maintainers of the project' do
- maintainer = create(:project_member, :maintainer, source: project)
+ active_maintainers_and_owners_in_recent_sign_in_desc_order = project.members
+ .id_in(active_maintainers_and_owners)
+ .order_recent_sign_in.limit(10)
- create(:project_member, :developer, project: project)
- create(:project_member, :maintainer, :invited, project: project)
- create(:project_member, :maintainer, :access_request, project: project)
- create(:project_member, :maintainer, :blocked, project: project)
+ expect(project.access_request_approvers_to_be_notified).to eq(active_maintainers_and_owners_in_recent_sign_in_desc_order)
+ end
- expect(project.access_request_approvers_to_be_notified.to_a).to eq([maintainer])
+ it_behaves_like 'returns active, non_invited, non_requested owners/maintainers of the project'
end
end
@@ -7353,6 +7376,44 @@ RSpec.describe Project, factory_default: :keep do
subject { create(:project).packages_enabled }
it { is_expected.to be true }
+
+ context 'when packages_enabled is enabled' do
+ where(:project_visibility, :expected_result) do
+ Gitlab::VisibilityLevel::PRIVATE | ProjectFeature::PRIVATE
+ Gitlab::VisibilityLevel::INTERNAL | ProjectFeature::ENABLED
+ Gitlab::VisibilityLevel::PUBLIC | ProjectFeature::PUBLIC
+ end
+
+ with_them do
+ it 'sets package_registry_access_level to the correct value' do
+ project = create(:project,
+ visibility_level: project_visibility,
+ packages_enabled: false,
+ package_registry_access_level: ProjectFeature::DISABLED
+ )
+
+ project.update!(packages_enabled: true)
+
+ expect(project.package_registry_access_level).to eq(expected_result)
+ end
+ end
+ end
+
+ context 'when packages_enabled is disabled' do
+ Gitlab::VisibilityLevel.options.values.each do |project_visibility|
+ it 'sets package_registry_access_level to DISABLED' do
+ project = create(:project,
+ visibility_level: project_visibility,
+ packages_enabled: true,
+ package_registry_access_level: ProjectFeature::PUBLIC
+ )
+
+ project.update!(packages_enabled: false)
+
+ expect(project.package_registry_access_level).to eq(ProjectFeature::DISABLED)
+ end
+ end
+ end
end
describe '#related_group_ids' do
@@ -8290,6 +8351,46 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe "#refreshing_build_artifacts_size?" do
+ let_it_be(:project) { create(:project) }
+
+ subject { project.refreshing_build_artifacts_size? }
+
+ context 'when project has no existing refresh record' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when project has existing refresh record' do
+ context 'and refresh has not yet started' do
+ before do
+ allow(project)
+ .to receive_message_chain(:build_artifacts_size_refresh, :started?)
+ .and_return(false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'and refresh has started' do
+ before do
+ allow(project)
+ .to receive_message_chain(:build_artifacts_size_refresh, :started?)
+ .and_return(true)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+
+ describe '#security_training_available?' do
+ subject { build(:project) }
+
+ it 'returns false' do
+ expect(subject.security_training_available?).to eq false
+ end
+ end
+
private
def finish_job(export_job)
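Note: going the other way, the #packages_enabled tables above describe how writing the boolean picks an access level: disabling always forces DISABLED, while enabling chooses a level from project visibility. A sketch under those assumptions (read off the tables, not the actual callback):

# Assumed mapping, derived from the two #packages_enabled tables above.
def package_registry_access_level_for(packages_enabled, visibility_level)
  return ProjectFeature::DISABLED unless packages_enabled

  case visibility_level
  when Gitlab::VisibilityLevel::PRIVATE  then ProjectFeature::PRIVATE
  when Gitlab::VisibilityLevel::INTERNAL then ProjectFeature::ENABLED
  else ProjectFeature::PUBLIC
  end
end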
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 2c29d4c42f4..53175a2f840 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -54,6 +54,18 @@ RSpec.describe ProjectStatistics do
end
end
+ describe 'namespace relatable columns' do
+ it 'treats the correct columns as namespace relatable' do
+ expect(described_class::NAMESPACE_RELATABLE_COLUMNS).to match_array %i[
+ repository_size
+ wiki_size
+ lfs_objects_size
+ uploads_size
+ container_registry_size
+ ]
+ end
+ end
+
describe '#total_repository_size' do
it "sums repository and LFS object size" do
statistics.repository_size = 2
@@ -346,20 +358,6 @@ RSpec.describe ProjectStatistics do
expect(statistics.container_registry_size).to eq(0)
end
-
- context 'with container_registry_project_statistics FF disabled' do
- before do
- stub_feature_flags(container_registry_project_statistics: false)
- end
-
- it 'does not update the container_registry_size' do
- expect(project).not_to receive(:container_repositories_size)
-
- update_container_registry_size
-
- expect(statistics.container_registry_size).to eq(0)
- end
- end
end
describe '#update_storage_size' do
diff --git a/spec/models/projects/build_artifacts_size_refresh_spec.rb b/spec/models/projects/build_artifacts_size_refresh_spec.rb
index a55e4b31d21..052e654af76 100644
--- a/spec/models/projects/build_artifacts_size_refresh_spec.rb
+++ b/spec/models/projects/build_artifacts_size_refresh_spec.rb
@@ -30,6 +30,12 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
expect(described_class.remaining).to match_array([refresh_1, refresh_3, refresh_4])
end
end
+
+ describe 'processing_queue' do
+ it 'prioritizes pending -> stale -> created' do
+ expect(described_class.processing_queue).to eq([refresh_3, refresh_1, refresh_4])
+ end
+ end
end
describe 'state machine', :clean_gitlab_redis_shared_state do
@@ -49,7 +55,7 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
describe '#process!' do
context 'when refresh state is created' do
- let!(:refresh) do
+ let_it_be_with_reload(:refresh) do
create(
:project_build_artifacts_size_refresh,
:created,
@@ -59,25 +65,31 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
)
end
+ let!(:last_job_artifact_id_on_refresh_start) { create(:ci_job_artifact, project: refresh.project) }
+
before do
stats = create(:project_statistics, project: refresh.project, build_artifacts_size: 120)
stats.increment_counter(:build_artifacts_size, 30)
end
it 'transitions the state to running' do
- expect { refresh.process! }.to change { refresh.reload.state }.to(described_class::STATES[:running])
+ expect { refresh.process! }.to change { refresh.state }.to(described_class::STATES[:running])
end
it 'sets the refresh_started_at' do
- expect { refresh.process! }.to change { refresh.reload.refresh_started_at.to_i }.to(now.to_i)
+ expect { refresh.process! }.to change { refresh.refresh_started_at.to_i }.to(now.to_i)
+ end
+
+ it 'sets last_job_artifact_id_on_refresh_start' do
+ expect { refresh.process! }.to change { refresh.last_job_artifact_id_on_refresh_start.to_i }.to(last_job_artifact_id_on_refresh_start.id)
end
it 'bumps the updated_at' do
- expect { refresh.process! }.to change { refresh.reload.updated_at.to_i }.to(now.to_i)
+ expect { refresh.process! }.to change { refresh.updated_at.to_i }.to(now.to_i)
end
it 'resets the build artifacts size stats' do
- expect { refresh.process! }.to change { refresh.project.statistics.reload.build_artifacts_size }.to(0)
+ expect { refresh.process! }.to change { refresh.project.statistics.build_artifacts_size }.to(0)
end
it 'resets the counter attribute to zero' do
@@ -159,15 +171,13 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
end
describe '.process_next_refresh!' do
- let!(:refresh_running) { create(:project_build_artifacts_size_refresh, :running) }
let!(:refresh_created) { create(:project_build_artifacts_size_refresh, :created) }
- let!(:refresh_stale) { create(:project_build_artifacts_size_refresh, :stale) }
let!(:refresh_pending) { create(:project_build_artifacts_size_refresh, :pending) }
subject(:processed_refresh) { described_class.process_next_refresh! }
it 'picks the first record from the remaining work' do
- expect(processed_refresh).to eq(refresh_created)
+ expect(processed_refresh).to eq(refresh_pending)
expect(processed_refresh.reload).to be_running
end
end
@@ -214,7 +224,8 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
project: project,
updated_at: 2.days.ago,
refresh_started_at: 10.days.ago,
- last_job_artifact_id: artifact_1.id
+ last_job_artifact_id: artifact_1.id,
+ last_job_artifact_id_on_refresh_start: artifact_3.id
)
end
@@ -223,5 +234,35 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
it 'returns the job artifact records that were created not later than the refresh_started_at and IDs greater than the last_job_artifact_id' do
expect(batch).to eq([artifact_2, artifact_3])
end
+
+ context 'when created_at is set before artifact id is persisted' do
+ it 'returns ordered job artifacts' do
+ artifact_3.update!(created_at: artifact_2.created_at)
+
+ expect(batch).to eq([artifact_2, artifact_3])
+ end
+ end
+ end
+
+ describe '#started?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:project) { create(:project) }
+
+ subject { refresh.started? }
+
+ where(:refresh_state, :result) do
+ :created | false
+ :pending | true
+ :running | true
+ end
+
+ with_them do
+ let(:refresh) do
+ create(:project_build_artifacts_size_refresh, refresh_state, project: project)
+ end
+
+ it { is_expected.to eq(result) }
+ end
end
end
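Note: two behaviours added here are easy to miss: .processing_queue drains pending refreshes first, then stale, then created ones, and #started? is true for any state past created. A tiny sketch of the predicate the table exercises (state names assumed from the spec):

# Assumed from the '#started?' table: pending and running count as started.
STARTED_STATES = %i[pending running].freeze

def started?(state)
  STARTED_STATES.include?(state)
end

started?(:created) # => false
started?(:pending) # => true
started?(:running) # => true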
diff --git a/spec/models/protected_tag_spec.rb b/spec/models/protected_tag_spec.rb
index e5cee6f18cd..b97954c055d 100644
--- a/spec/models/protected_tag_spec.rb
+++ b/spec/models/protected_tag_spec.rb
@@ -11,4 +11,56 @@ RSpec.describe ProtectedTag do
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:name) }
end
+
+ describe '#protected?' do
+ let(:project) { create(:project, :repository) }
+
+ it 'returns true when the tag matches a protected tag via direct match' do
+ create(:protected_tag, project: project, name: 'foo')
+
+ expect(described_class.protected?(project, 'foo')).to eq(true)
+ end
+
+ it 'returns true when the tag matches a protected tag via wildcard match' do
+ create(:protected_tag, project: project, name: 'production/*')
+
+ expect(described_class.protected?(project, 'production/some-tag')).to eq(true)
+ end
+
+ it 'returns false when the tag does not match a protected tag via direct match' do
+ expect(described_class.protected?(project, 'foo')).to eq(false)
+ end
+
+ it 'returns false when the tag does not match a protected tag via wildcard match' do
+ create(:protected_tag, project: project, name: 'production/*')
+
+ expect(described_class.protected?(project, 'staging/some-tag')).to eq(false)
+ end
+
+ it 'returns false when tag name is nil' do
+ expect(described_class.protected?(project, nil)).to eq(false)
+ end
+
+ context 'with caching', :request_store do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:protected_tag) { create(:protected_tag, project: project, name: 'foo') }
+
+ it 'correctly invalidates a cache' do
+ expect(described_class.protected?(project, 'foo')).to eq(true)
+ expect(described_class.protected?(project, 'bar')).to eq(false)
+
+ create(:protected_tag, project: project, name: 'bar')
+
+ expect(described_class.protected?(project, 'bar')).to eq(true)
+ end
+
+ it 'correctly uses the cached version' do
+ expect(project).to receive(:protected_tags).once.and_call_original
+
+ 2.times do
+ expect(described_class.protected?(project, protected_tag.name)).to eq(true)
+ end
+ end
+ end
+ end
end
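Note: the wildcard cases above behave like shell globs: 'production/*' protects 'production/some-tag' but not 'staging/some-tag', and a nil tag name is never protected. In plain Ruby that check can be sketched with File.fnmatch (not necessarily how the model implements it):

# Glob-style check mirroring the spec's expectations; a sketch only.
def wildcard_protected?(pattern, tag_name)
  return false if tag_name.nil?

  File.fnmatch(pattern, tag_name)
end

wildcard_protected?('production/*', 'production/some-tag') # => true
wildcard_protected?('production/*', 'staging/some-tag')    # => false
wildcard_protected?('foo', nil)                            # => false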
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 4ae1927dcca..83d7596ff51 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -66,6 +66,32 @@ RSpec.describe Release do
expect { release.milestones << milestone }.to change { MilestoneRelease.count }.by(1)
end
end
+
+ context 'when creating new release' do
+ subject { build(:release, project: project, name: 'Release 1.0') }
+
+ it { is_expected.to validate_presence_of(:author_id) }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(validate_release_with_author: false)
+ end
+
+ it { is_expected.not_to validate_presence_of(:author_id) }
+ end
+ end
+
+ # Mimic releases created before 11.7
+ # See: https://gitlab.com/gitlab-org/gitlab/-/blob/8e5a110b01f842d8b6a702197928757a40ce9009/app/models/release.rb#L14
+ context 'when updating existing release without author' do
+ let(:release) { create(:release, :legacy) }
+
+ it 'updates successfully' do
+ release.description += 'Update'
+
+ expect { release.save! }.not_to raise_error
+ end
+ end
end
describe '#assets_count' do
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index 9f1d1c84da3..d2d7859e726 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -254,8 +254,6 @@ RSpec.describe RemoteMirror, :mailer do
it 'does not remove the remote' do
mirror = create_mirror(url: 'http://foo:bar@test.com')
- expect(RepositoryRemoveRemoteWorker).not_to receive(:perform_async)
-
mirror.destroy!
end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 215f83adf5d..e1d903a40cf 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -418,46 +418,31 @@ RSpec.describe Repository do
end
describe '#new_commits' do
- shared_examples '#new_commits' do
- let_it_be(:project) { create(:project, :repository) }
-
- let(:repository) { project.repository }
-
- subject { repository.new_commits(rev, allow_quarantine: allow_quarantine) }
-
- context 'when there are no new commits' do
- let(:rev) { repository.commit.id }
+ let_it_be(:project) { create(:project, :repository) }
- it 'returns an empty array' do
- expect(subject).to eq([])
- end
- end
+ let(:repository) { project.repository }
- context 'when new commits are found' do
- let(:branch) { 'orphaned-branch' }
- let!(:rev) { repository.commit(branch).id }
- let(:allow_quarantine) { false }
+ subject { repository.new_commits(rev) }
- it 'returns the commits' do
- repository.delete_branch(branch)
+ context 'when there are no new commits' do
+ let(:rev) { repository.commit.id }
- expect(subject).not_to be_empty
- expect(subject).to all( be_a(::Commit) )
- expect(subject.size).to eq(1)
- end
+ it 'returns an empty array' do
+ expect(subject).to eq([])
end
end
- context 'with quarantine' do
- let(:allow_quarantine) { true }
+ context 'when new commits are found' do
+ let(:branch) { 'orphaned-branch' }
+ let!(:rev) { repository.commit(branch).id }
- it_behaves_like '#new_commits'
- end
-
- context 'without quarantine' do
- let(:allow_quarantine) { false }
+ it 'returns the commits' do
+ repository.delete_branch(branch)
- it_behaves_like '#new_commits'
+ expect(subject).not_to be_empty
+ expect(subject).to all( be_a(::Commit) )
+ expect(subject.size).to eq(1)
+ end
end
end
diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb
index 0489a4fb995..929eaca85f7 100644
--- a/spec/models/route_spec.rb
+++ b/spec/models/route_spec.rb
@@ -22,13 +22,6 @@ RSpec.describe Route do
end
describe 'callbacks' do
- context 'before validation' do
- it 'calls #delete_conflicting_orphaned_routes' do
- expect(route).to receive(:delete_conflicting_orphaned_routes)
- route.valid?
- end
- end
-
context 'after update' do
it 'calls #create_redirect_for_old_path' do
expect(route).to receive(:create_redirect_for_old_path)
@@ -44,7 +37,7 @@ RSpec.describe Route do
context 'after create' do
it 'calls #delete_conflicting_redirects' do
route.destroy!
- new_route = described_class.new(source: group, path: group.path)
+ new_route = described_class.new(source: group, path: group.path, namespace: group)
expect(new_route).to receive(:delete_conflicting_redirects)
new_route.save!
end
@@ -275,7 +268,7 @@ RSpec.describe Route do
end
end
- describe '#delete_conflicting_orphaned_routes' do
+ describe 'conflicting routes validation' do
context 'when there is a conflicting route' do
let!(:conflicting_group) { create(:group, path: 'foo') }
@@ -283,47 +276,31 @@ RSpec.describe Route do
route.path = conflicting_group.route.path
end
- context 'when the route is orphaned' do
+ context 'when deleting the conflicting route' do
let!(:offending_route) { conflicting_group.route }
- before do
- Group.delete(conflicting_group) # Orphan the route
- end
+ it 'does not delete the original route' do
+ # before deleting the route, check it's there
+ expect(Route.where(path: offending_route.path).count).to eq(1)
- it 'deletes the orphaned route' do
expect do
- route.valid?
- end.to change { described_class.count }.from(2).to(1)
- end
+ Group.delete(conflicting_group) # delete group with conflicting route
+ end.to change { described_class.count }.by(-1)
- it 'passes validation, as usual' do
+ # check the conflicting route is gone
+ expect(Route.where(path: offending_route.path).count).to eq(0)
+ expect(route.path).to eq(offending_route.path)
expect(route.valid?).to be_truthy
end
end
- context 'when the route is not orphaned' do
- it 'does not delete the conflicting route' do
- expect do
- route.valid?
- end.not_to change { described_class.count }
- end
-
- it 'fails validation, as usual' do
- expect(route.valid?).to be_falsey
- end
+ it 'fails validation' do
+ expect(route.valid?).to be_falsey
end
end
context 'when there are no conflicting routes' do
- it 'does not delete any routes' do
- route
-
- expect do
- route.valid?
- end.not_to change { described_class.count }
- end
-
- it 'passes validation, as usual' do
+ it 'passes validation' do
expect(route.valid?).to be_truthy
end
end
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index a113ae37203..a484952bfe9 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -11,8 +11,6 @@ RSpec.describe Terraform::State do
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:project_id) }
- it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
-
describe 'scopes' do
describe '.ordered_by_name' do
let_it_be(:project) { create(:project) }
@@ -40,22 +38,6 @@ RSpec.describe Terraform::State do
end
end
- describe '#destroy' do
- let(:terraform_state) { create(:terraform_state) }
- let(:user) { terraform_state.project.creator }
-
- it 'deletes when the state is unlocked' do
- expect(terraform_state.destroy).to be_truthy
- end
-
- it 'fails to delete when the state is locked', :aggregate_failures do
- terraform_state.update!(lock_xid: SecureRandom.uuid, locked_by_user: user, locked_at: Time.current)
-
- expect(terraform_state.destroy).to be_falsey
- expect(terraform_state.errors.full_messages).to eq(["You cannot remove the State file because it's locked. Unlock the State file first before removing it."])
- end
- end
-
describe '#latest_file' do
let(:terraform_state) { create(:terraform_state, :with_version) }
let(:latest_version) { terraform_state.latest_version }
diff --git a/spec/models/terraform/state_version_spec.rb b/spec/models/terraform/state_version_spec.rb
index 7af9b7897ff..22b1397f30a 100644
--- a/spec/models/terraform/state_version_spec.rb
+++ b/spec/models/terraform/state_version_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Terraform::StateVersion do
it { is_expected.to be_a FileStoreMounter }
+ it { is_expected.to be_a EachBatch }
it { is_expected.to belong_to(:terraform_state).required }
it { is_expected.to belong_to(:created_by_user).class_name('User').optional }
diff --git a/spec/models/time_tracking/timelog_category_spec.rb b/spec/models/time_tracking/timelog_category_spec.rb
new file mode 100644
index 00000000000..d8b938e9d68
--- /dev/null
+++ b/spec/models/time_tracking/timelog_category_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe TimeTracking::TimelogCategory, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:namespace).with_foreign_key('namespace_id') }
+ end
+
+ describe 'validations' do
+ subject { create(:timelog_category) }
+
+ it { is_expected.to validate_presence_of(:namespace) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_uniqueness_of(:name).case_insensitive.scoped_to([:namespace_id]) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_length_of(:description).is_at_most(1024) }
+ it { is_expected.to validate_length_of(:color).is_at_most(7) }
+ end
+
+ describe 'validations when billable' do
+ subject { create(:timelog_category, billable: true, billing_rate: 10.5) }
+
+ it { is_expected.to validate_presence_of(:billing_rate) }
+ it { is_expected.to validate_numericality_of(:billing_rate).is_greater_than(0) }
+ end
+
+ describe '#name' do
+ it 'strips name' do
+ timelog_category = described_class.new(name: ' TimelogCategoryTest ')
+ timelog_category.valid?
+
+ expect(timelog_category.name).to eq('TimelogCategoryTest')
+ end
+ end
+
+ describe '#color' do
+ it 'strips color' do
+ timelog_category = described_class.new(name: 'TimelogCategoryTest', color: ' #fafafa ')
+ timelog_category.valid?
+
+ expect(timelog_category.color).to eq(::Gitlab::Color.of('#fafafa'))
+ end
+ end
+
+ describe '#find_by_name' do
+ let_it_be(:namespace_a) { create(:namespace) }
+ let_it_be(:namespace_b) { create(:namespace) }
+ let_it_be(:timelog_category_a) { create(:timelog_category, namespace: namespace_a, name: 'TimelogCategoryTest') }
+
+ it 'finds the correct timelog category' do
+ expect(described_class.find_by_name(namespace_a.id, 'TIMELOGCATEGORYTest')).to match_array([timelog_category_a])
+ end
+
+ it 'returns empty if not found' do
+ expect(described_class.find_by_name(namespace_b.id, 'TIMELOGCATEGORYTest')).to be_empty
+ end
+ end
+end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index f087fab1ef3..dcf6b224009 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -4374,24 +4374,6 @@ RSpec.describe User do
it_behaves_like '#ci_owned_runners'
end
-
- context 'when FF ci_owned_runners_cross_joins_fix is disabled' do
- before do
- skip_if_multiple_databases_are_setup
-
- stub_feature_flags(ci_owned_runners_cross_joins_fix: false)
- end
-
- it_behaves_like '#ci_owned_runners'
- end
-
- context 'when FF ci_owned_runners_unnest_index is disabled uses GIN index' do
- before do
- stub_feature_flags(ci_owned_runners_unnest_index: false)
- end
-
- it_behaves_like '#ci_owned_runners'
- end
end
describe '#projects_with_reporter_access_limited_to' do
@@ -6655,8 +6637,10 @@ RSpec.describe User do
describe '.with_no_activity' do
it 'returns users with no activity' do
freeze_time do
- not_that_long_ago = (described_class::MINIMUM_INACTIVE_DAYS - 1).days.ago.to_date
- too_long_ago = described_class::MINIMUM_INACTIVE_DAYS.days.ago.to_date
+ active_not_that_long_ago = (described_class::MINIMUM_INACTIVE_DAYS - 1).days.ago.to_date
+ active_too_long_ago = described_class::MINIMUM_INACTIVE_DAYS.days.ago.to_date
+ created_recently = (described_class::MINIMUM_DAYS_CREATED - 1).days.ago.to_date
+ created_not_recently = described_class::MINIMUM_DAYS_CREATED.days.ago.to_date
create(:user, :deactivated, last_activity_on: nil)
@@ -6664,12 +6648,13 @@ RSpec.describe User do
create(:user, state: :active, user_type: user_type, last_activity_on: nil)
end
- create(:user, last_activity_on: not_that_long_ago)
- create(:user, last_activity_on: too_long_ago)
+ create(:user, last_activity_on: active_not_that_long_ago)
+ create(:user, last_activity_on: active_too_long_ago)
+ create(:user, last_activity_on: nil, created_at: created_recently)
- user_with_no_activity = create(:user, last_activity_on: nil)
+ old_enough_user_with_no_activity = create(:user, last_activity_on: nil, created_at: created_not_recently)
- expect(described_class.with_no_activity).to contain_exactly(user_with_no_activity)
+ expect(described_class.with_no_activity).to contain_exactly(old_enough_user_with_no_activity)
end
end
end
diff --git a/spec/models/users/callout_spec.rb b/spec/models/users/callout_spec.rb
index 293f0279e79..14f555863ec 100644
--- a/spec/models/users/callout_spec.rb
+++ b/spec/models/users/callout_spec.rb
@@ -11,4 +11,16 @@ RSpec.describe Users::Callout do
it { is_expected.to validate_presence_of(:feature_name) }
it { is_expected.to validate_uniqueness_of(:feature_name).scoped_to(:user_id).ignoring_case_sensitivity }
end
+
+ describe 'scopes' do
+ describe '.with_feature_name' do
+ let_it_be(:feature_name) { described_class.feature_names.keys.last }
+ let_it_be(:user_callouts_for_feature_name) { create_list(:callout, 2, feature_name: feature_name) }
+ let_it_be(:another_user_callout) { create(:callout, feature_name: described_class.feature_names.each_key.first) }
+
+ it 'returns user callouts for the given feature name only' do
+ expect(described_class.with_feature_name(feature_name)).to eq(user_callouts_for_feature_name)
+ end
+ end
+ end
end
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index e92ae746911..5e757c11f99 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -3,6 +3,28 @@
require 'spec_helper'
RSpec.describe WorkItem do
+ describe 'associations' do
+ it { is_expected.to have_one(:work_item_parent).class_name('WorkItem') }
+
+ it 'has one `parent_link`' do
+ is_expected.to have_one(:parent_link)
+ .class_name('::WorkItems::ParentLink')
+ .with_foreign_key('work_item_id')
+ end
+
+ it 'has many `work_item_children`' do
+ is_expected.to have_many(:work_item_children)
+ .class_name('WorkItem')
+ .with_foreign_key('work_item_id')
+ end
+
+ it 'has many `child_links`' do
+ is_expected.to have_many(:child_links)
+ .class_name('::WorkItems::ParentLink')
+ .with_foreign_key('work_item_parent_id')
+ end
+ end
+
describe '#noteable_target_type_name' do
it 'returns `issue` as the target name' do
work_item = build(:work_item)
@@ -11,6 +33,15 @@ RSpec.describe WorkItem do
end
end
+ describe '#widgets' do
+ subject { build(:work_item).widgets }
+
+ it 'returns instances of supported widgets' do
+ is_expected.to match_array([instance_of(WorkItems::Widgets::Description),
+ instance_of(WorkItems::Widgets::Hierarchy)])
+ end
+ end
+
describe 'callbacks' do
describe 'record_create_action' do
it 'records the creation action after saving' do
diff --git a/spec/models/work_items/parent_link_spec.rb b/spec/models/work_items/parent_link_spec.rb
new file mode 100644
index 00000000000..9516baa7340
--- /dev/null
+++ b/spec/models/work_items/parent_link_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::ParentLink do
+ describe 'associations' do
+ it { is_expected.to belong_to(:work_item) }
+ it { is_expected.to belong_to(:work_item_parent).class_name('WorkItem') }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:work_item) }
+ it { is_expected.to validate_presence_of(:work_item_parent) }
+
+ describe 'hierarchy' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { build(:work_item, project: project) }
+ let_it_be(:task1) { build(:work_item, :task, project: project) }
+ let_it_be(:task2) { build(:work_item, :task, project: project) }
+
+ it 'is valid if not-task parent has task child' do
+ expect(build(:parent_link, work_item: task1, work_item_parent: issue)).to be_valid
+ end
+
+ it 'is not valid if child is not task' do
+ link = build(:parent_link, work_item: issue)
+
+ expect(link).not_to be_valid
+ expect(link.errors[:work_item]).to include('Only Task can be assigned as a child in hierarchy.')
+ end
+
+ it 'is not valid if parent is task' do
+ link = build(:parent_link, work_item_parent: task1)
+
+ expect(link).not_to be_valid
+ expect(link.errors[:work_item_parent]).to include('Only Issue can be parent of Task.')
+ end
+
+ it 'is not valid if parent is in other project' do
+ link = build(:parent_link, work_item_parent: task1, work_item: build(:work_item))
+
+ expect(link).not_to be_valid
+ expect(link.errors[:work_item_parent]).to include('Parent must be in the same project as child.')
+ end
+
+ context 'when parent already has maximum number of links' do
+ let_it_be(:link1) { create(:parent_link, work_item_parent: issue, work_item: task1) }
+
+ before do
+ stub_const("#{described_class}::MAX_CHILDREN", 1)
+ end
+
+ it 'is not valid when another link is added' do
+ link2 = build(:parent_link, work_item_parent: issue, work_item: task2)
+
+ expect(link2).not_to be_valid
+ expect(link2.errors[:work_item_parent]).to include('Parent already has maximum number of children.')
+ end
+
+ it 'existing link is still valid' do
+ expect(link1).to be_valid
+ end
+ end
+ end
+ end
+end
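Note: the hierarchy validations above boil down to four rules: only a Task may be a child, only an Issue may be a parent, parent and child must share a project, and a parent may not exceed MAX_CHILDREN links. A condensed sketch of those checks (assumed shape; the `task?`/`issue?` predicates are hypothetical stand-ins for the type check):

# Condensed, assumed shape of the hierarchy checks exercised above.
def hierarchy_errors(parent:, child:, child_count:, max_children:)
  errors = []
  errors << 'Only Task can be assigned as a child in hierarchy.' unless child.task?
  errors << 'Only Issue can be parent of Task.' unless parent.issue?
  errors << 'Parent must be in the same project as child.' unless parent.project_id == child.project_id
  errors << 'Parent already has maximum number of children.' if child_count >= max_children
  errors
end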
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index 6e9f3210e65..81663d0eb41 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -60,7 +60,16 @@ RSpec.describe WorkItems::Type do
it { is_expected.not_to allow_value('s' * 256).for(:icon_name) }
end
- describe 'default?' do
+ describe '.available_widgets' do
+ subject { described_class.available_widgets }
+
+ it 'returns list of all possible widgets' do
+ is_expected.to match_array([::WorkItems::Widgets::Description,
+ ::WorkItems::Widgets::Hierarchy])
+ end
+ end
+
+ describe '#default?' do
subject { build(:work_item_type, namespace: namespace).default? }
context 'when namespace is nil' do
diff --git a/spec/models/work_items/widgets/base_spec.rb b/spec/models/work_items/widgets/base_spec.rb
new file mode 100644
index 00000000000..9b4b4d9e98f
--- /dev/null
+++ b/spec/models/work_items/widgets/base_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Base do
+ let_it_be(:work_item) { create(:work_item, description: '# Title') }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:base) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:base) }
+ end
+end
diff --git a/spec/models/work_items/widgets/description_spec.rb b/spec/models/work_items/widgets/description_spec.rb
new file mode 100644
index 00000000000..8359db31bff
--- /dev/null
+++ b/spec/models/work_items/widgets/description_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Description do
+ let_it_be(:work_item) { create(:work_item, description: '# Title') }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:description) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:description) }
+ end
+
+ describe '#description' do
+ subject { described_class.new(work_item).description }
+
+ it { is_expected.to eq(work_item.description) }
+ end
+end
diff --git a/spec/models/work_items/widgets/hierarchy_spec.rb b/spec/models/work_items/widgets/hierarchy_spec.rb
new file mode 100644
index 00000000000..0141731529b
--- /dev/null
+++ b/spec/models/work_items/widgets/hierarchy_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Hierarchy do
+ let_it_be(:work_item) { create(:work_item) }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:hierarchy) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:hierarchy) }
+ end
+
+ describe '#parent' do
+ let_it_be(:parent_link) { create(:parent_link) }
+
+ subject { described_class.new(parent_link.work_item).parent }
+
+ it { is_expected.to eq parent_link.work_item_parent }
+
+ context 'when work_items_hierarchy flag is disabled' do
+ before do
+ stub_feature_flags(work_items_hierarchy: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#children' do
+ let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item) }
+ let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item) }
+
+ subject { described_class.new(work_item).children }
+
+ it { is_expected.to match_array([parent_link1.work_item, parent_link2.work_item]) }
+
+ context 'when work_items_hierarchy flag is disabled' do
+ before do
+ stub_feature_flags(work_items_hierarchy: false)
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
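Note: read together with the work_item_spec association changes, the hierarchy widget resolves its parent through parent_link and its children through child_links, and both lookups return nothing when the work_items_hierarchy flag is disabled. A rough sketch under those assumptions (association and flag names follow the specs; this is not the widget's actual code):

# Rough sketch only; names are taken from the specs above.
class HierarchyWidgetSketch
  def initialize(work_item)
    @work_item = work_item
  end

  def parent
    return unless Feature.enabled?(:work_items_hierarchy)

    @work_item.parent_link&.work_item_parent
  end

  def children
    return WorkItem.none unless Feature.enabled?(:work_items_hierarchy)

    WorkItem.where(id: @work_item.child_links.select(:work_item_id))
  end
end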