Diffstat (limited to 'spec/models')
-rw-r--r--  spec/models/abuse_report_spec.rb | 2
-rw-r--r--  spec/models/alert_management/alert_spec.rb | 11
-rw-r--r--  spec/models/alert_management/metric_image_spec.rb | 8
-rw-r--r--  spec/models/analytics/cycle_analytics/aggregation_spec.rb | 31
-rw-r--r--  spec/models/application_setting_spec.rb | 30
-rw-r--r--  spec/models/ci/bridge_spec.rb | 30
-rw-r--r--  spec/models/ci/build_spec.rb | 124
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 13
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 87
-rw-r--r--  spec/models/ci/processable_spec.rb | 223
-rw-r--r--  spec/models/ci/runner_spec.rb | 32
-rw-r--r--  spec/models/ci/secure_file_spec.rb | 7
-rw-r--r--  spec/models/clusters/platforms/kubernetes_spec.rb | 52
-rw-r--r--  spec/models/commit_status_spec.rb | 50
-rw-r--r--  spec/models/concerns/cache_markdown_field_spec.rb | 2
-rw-r--r--  spec/models/concerns/integrations/reset_secret_fields_spec.rb | 19
-rw-r--r--  spec/models/concerns/issuable_spec.rb | 28
-rw-r--r--  spec/models/concerns/pg_full_text_searchable_spec.rb | 13
-rw-r--r--  spec/models/concerns/reactive_caching_spec.rb | 2
-rw-r--r--  spec/models/concerns/schedulable_spec.rb | 10
-rw-r--r--  spec/models/concerns/sha256_attribute_spec.rb | 91
-rw-r--r--  spec/models/concerns/sha_attribute_spec.rb | 135
-rw-r--r--  spec/models/container_registry/event_spec.rb | 60
-rw-r--r--  spec/models/container_repository_spec.rb | 68
-rw-r--r--  spec/models/deploy_token_spec.rb | 10
-rw-r--r--  spec/models/deployment_spec.rb | 38
-rw-r--r--  spec/models/design_management/action_spec.rb | 9
-rw-r--r--  spec/models/environment_spec.rb | 35
-rw-r--r--  spec/models/event_collection_spec.rb | 248
-rw-r--r--  spec/models/event_spec.rb | 49
-rw-r--r--  spec/models/incident_management/timeline_event_spec.rb | 84
-rw-r--r--  spec/models/instance_configuration_spec.rb | 64
-rw-r--r--  spec/models/integration_spec.rb | 72
-rw-r--r--  spec/models/integrations/bamboo_spec.rb | 2
-rw-r--r--  spec/models/integrations/base_chat_notification_spec.rb | 19
-rw-r--r--  spec/models/integrations/buildkite_spec.rb | 2
-rw-r--r--  spec/models/integrations/drone_ci_spec.rb | 2
-rw-r--r--  spec/models/integrations/jenkins_spec.rb | 15
-rw-r--r--  spec/models/integrations/jira_spec.rb | 192
-rw-r--r--  spec/models/integrations/microsoft_teams_spec.rb | 2
-rw-r--r--  spec/models/integrations/prometheus_spec.rb | 28
-rw-r--r--  spec/models/integrations/teamcity_spec.rb | 4
-rw-r--r--  spec/models/issue_spec.rb | 19
-rw-r--r--  spec/models/key_spec.rb | 40
-rw-r--r--  spec/models/member_spec.rb | 78
-rw-r--r--  spec/models/merge_request/metrics_spec.rb | 39
-rw-r--r--  spec/models/merge_request_assignee_spec.rb | 13
-rw-r--r--  spec/models/merge_request_reviewer_spec.rb | 13
-rw-r--r--  spec/models/merge_request_spec.rb | 46
-rw-r--r--  spec/models/namespace_ci_cd_setting_spec.rb | 9
-rw-r--r--  spec/models/namespace_spec.rb | 49
-rw-r--r--  spec/models/packages/cleanup/policy_spec.rb | 28
-rw-r--r--  spec/models/packages/package_spec.rb | 2
-rw-r--r--  spec/models/pages_domain_spec.rb | 2
-rw-r--r--  spec/models/performance_monitoring/prometheus_dashboard_spec.rb | 2
-rw-r--r--  spec/models/personal_access_token_spec.rb | 8
-rw-r--r--  spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb | 42
-rw-r--r--  spec/models/project_import_state_spec.rb | 39
-rw-r--r--  spec/models/project_setting_spec.rb | 18
-rw-r--r--  spec/models/project_spec.rb | 61
-rw-r--r--  spec/models/project_statistics_spec.rb | 61
-rw-r--r--  spec/models/project_team_spec.rb | 16
-rw-r--r--  spec/models/projects/topic_spec.rb | 14
-rw-r--r--  spec/models/protected_branch/push_access_level_spec.rb | 2
-rw-r--r--  spec/models/protected_branch_spec.rb | 6
-rw-r--r--  spec/models/raw_usage_data_spec.rb | 25
-rw-r--r--  spec/models/release_spec.rb | 5
-rw-r--r--  spec/models/shard_spec.rb | 4
-rw-r--r--  spec/models/system_note_metadata_spec.rb | 12
-rw-r--r--  spec/models/user_custom_attribute_spec.rb | 57
-rw-r--r--  spec/models/user_spec.rb | 35
-rw-r--r--  spec/models/users/in_product_marketing_email_spec.rb | 53
-rw-r--r--  spec/models/users/merge_request_interaction_spec.rb | 1
73 files changed, 1991 insertions, 811 deletions
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index e87996fc1f0..3871b18fdd5 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe AbuseReport do
end
it 'lets a worker delete the user' do
- expect(DeleteUserWorker).to receive(:perform_async).with(user.id, subject.user.id, hard_delete: true)
+ expect(DeleteUserWorker).to receive(:perform_async).with(user.id, subject.user.id, { hard_delete: true })
subject.remove_user(deleted_by: user)
end
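
The only change here is wrapping the trailing options in an explicit hash. Under Ruby 3's keyword-argument separation, Sidekiq's perform_async takes positional arguments only, so the options arrive as a plain Hash and the `with` matcher has to compare against a Hash literal rather than keywords. A minimal standalone sketch of that behaviour (plain Ruby, not GitLab code):

def perform_async(*args) # Sidekiq-style signature: positional arguments only
  args
end

perform_async(1, 2, hard_delete: true)
# => [1, 2, {:hard_delete=>true}] -- the options travel as a positional Hash,
# which is what `.with(user.id, subject.user.id, { hard_delete: true })` asserts.
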
diff --git a/spec/models/alert_management/alert_spec.rb b/spec/models/alert_management/alert_spec.rb
index 40bdfd4bc92..685ed81ec84 100644
--- a/spec/models/alert_management/alert_spec.rb
+++ b/spec/models/alert_management/alert_spec.rb
@@ -233,6 +233,17 @@ RSpec.describe AlertManagement::Alert do
end
end
+ describe '.find_unresolved_alert' do
+ let_it_be(:fingerprint) { SecureRandom.hex }
+ let_it_be(:resolved_alert_with_fingerprint) { create(:alert_management_alert, :resolved, project: project, fingerprint: fingerprint) }
+ let_it_be(:alert_with_fingerprint_in_other_project) { create(:alert_management_alert, project: project2, fingerprint: fingerprint) }
+ let_it_be(:alert_with_fingerprint) { create(:alert_management_alert, project: project, fingerprint: fingerprint) }
+
+ subject { described_class.find_unresolved_alert(project, fingerprint) }
+
+ it { is_expected.to eq(alert_with_fingerprint) }
+ end
+
describe '.last_prometheus_alert_by_project_id' do
subject { described_class.last_prometheus_alert_by_project_id }
diff --git a/spec/models/alert_management/metric_image_spec.rb b/spec/models/alert_management/metric_image_spec.rb
index dedbd6e501e..ca910474423 100644
--- a/spec/models/alert_management/metric_image_spec.rb
+++ b/spec/models/alert_management/metric_image_spec.rb
@@ -15,12 +15,4 @@ RSpec.describe AlertManagement::MetricImage do
it { is_expected.to validate_length_of(:url).is_at_most(255) }
it { is_expected.to validate_length_of(:url_text).is_at_most(128) }
end
-
- describe '.available_for?' do
- subject { described_class.available_for?(issue.project) }
-
- let_it_be_with_refind(:issue) { create(:issue) }
-
- it { is_expected.to eq(true) }
- end
end
diff --git a/spec/models/analytics/cycle_analytics/aggregation_spec.rb b/spec/models/analytics/cycle_analytics/aggregation_spec.rb
index 6071e4b3d21..2fb40852791 100644
--- a/spec/models/analytics/cycle_analytics/aggregation_spec.rb
+++ b/spec/models/analytics/cycle_analytics/aggregation_spec.rb
@@ -43,6 +43,37 @@ RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model do
end
end
+ describe '#consistency_check_cursor_for' do
+ it 'returns empty cursor' do
+ expect(aggregation.consistency_check_cursor_for(Analytics::CycleAnalytics::IssueStageEvent)).to eq({})
+ expect(aggregation.consistency_check_cursor_for(Analytics::CycleAnalytics::MergeRequestStageEvent)).to eq({})
+ end
+
+ it 'returns the cursor value for IssueStageEvent' do
+ aggregation.last_consistency_check_issues_start_event_timestamp = 2.weeks.ago
+ aggregation.last_consistency_check_issues_end_event_timestamp = 1.week.ago
+ aggregation.last_consistency_check_issues_issuable_id = 42
+
+ expect(aggregation.consistency_check_cursor_for(Analytics::CycleAnalytics::IssueStageEvent)).to eq({
+ start_event_timestamp: aggregation.last_consistency_check_issues_start_event_timestamp,
+ end_event_timestamp: aggregation.last_consistency_check_issues_end_event_timestamp,
+ issue_id: aggregation.last_consistency_check_issues_issuable_id
+ })
+ end
+
+ it 'returns the cursor value for MergeRequestStageEvent' do
+ aggregation.last_consistency_check_merge_requests_start_event_timestamp = 2.weeks.ago
+ aggregation.last_consistency_check_merge_requests_end_event_timestamp = 1.week.ago
+ aggregation.last_consistency_check_merge_requests_issuable_id = 42
+
+ expect(aggregation.consistency_check_cursor_for(Analytics::CycleAnalytics::MergeRequestStageEvent)).to eq({
+ start_event_timestamp: aggregation.last_consistency_check_merge_requests_start_event_timestamp,
+ end_event_timestamp: aggregation.last_consistency_check_merge_requests_end_event_timestamp,
+ merge_request_id: aggregation.last_consistency_check_merge_requests_issuable_id
+ })
+ end
+ end
+
describe '#refresh_last_run' do
it 'updates the run_at column' do
freeze_time do
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 541fa1ac77a..20cd96e831c 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -83,10 +83,14 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:container_registry_import_max_retries).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_start_max_retries).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_max_step_duration).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to validate_numericality_of(:container_registry_pre_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to validate_numericality_of(:container_registry_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_tags_count) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_retries) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_start_max_retries) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_step_duration) }
+ it { is_expected.not_to allow_value(nil).for(:container_registry_pre_import_timeout) }
+ it { is_expected.not_to allow_value(nil).for(:container_registry_import_timeout) }
it { is_expected.to validate_presence_of(:container_registry_import_target_plan) }
it { is_expected.to validate_presence_of(:container_registry_import_created_before) }
@@ -132,6 +136,12 @@ RSpec.describe ApplicationSetting do
it { is_expected.not_to allow_value(10.5).for(:raw_blob_request_limit) }
it { is_expected.not_to allow_value(-1).for(:raw_blob_request_limit) }
+ it { is_expected.to allow_value(0).for(:pipeline_limit_per_project_user_sha) }
+ it { is_expected.not_to allow_value('abc').for(:pipeline_limit_per_project_user_sha) }
+ it { is_expected.not_to allow_value(nil).for(:pipeline_limit_per_project_user_sha) }
+ it { is_expected.not_to allow_value(10.5).for(:pipeline_limit_per_project_user_sha) }
+ it { is_expected.not_to allow_value(-1).for(:pipeline_limit_per_project_user_sha) }
+
it { is_expected.not_to allow_value(false).for(:hashed_storage_enabled) }
it { is_expected.to allow_value('default' => 0).for(:repository_storages_weighted) }
@@ -417,6 +427,14 @@ RSpec.describe ApplicationSetting do
.is_greater_than(0)
end
+ it { is_expected.to validate_presence_of(:max_export_size) }
+
+ specify do
+ is_expected.to validate_numericality_of(:max_export_size)
+ .only_integer
+ .is_greater_than_or_equal_to(0)
+ end
+
it { is_expected.to validate_presence_of(:max_import_size) }
specify do
@@ -1336,5 +1354,17 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:inactive_projects_delete_after_months).is_greater_than(0) }
it { is_expected.to validate_numericality_of(:inactive_projects_min_size_mb).is_greater_than_or_equal_to(0) }
+
+ it "deletes the redis key used for tracking inactive projects deletion warning emails when setting is updated",
+ :clean_gitlab_redis_shared_state do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.hset("inactive_projects_deletion_warning_email_notified", "project:1", "2020-01-01")
+ end
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect { setting.update!(inactive_projects_delete_after_months: 6) }
+ .to change { redis.hgetall('inactive_projects_deletion_warning_email_notified') }.to({})
+ end
+ end
end
end
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 5ee560c4925..6409ea9fc3d 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -31,7 +31,37 @@ RSpec.describe Ci::Bridge do
end
describe '#retryable?' do
+ let(:bridge) { create(:ci_bridge, :success) }
+
+ it 'returns true' do
+ expect(bridge.retryable?).to eq(true)
+ end
+
+ context 'without ci_recreate_downstream_pipeline ff' do
+ before do
+ stub_feature_flags(ci_recreate_downstream_pipeline: false)
+ end
+
+ it 'returns false' do
+ expect(bridge.retryable?).to eq(false)
+ end
+ end
+ end
+
+ context 'when there is a pipeline loop detected' do
+ let(:bridge) { create(:ci_bridge, :failed, failure_reason: :pipeline_loop_detected) }
+
+ it 'returns false' do
+ expect(bridge.failure_reason).to eq('pipeline_loop_detected')
+ expect(bridge.retryable?).to eq(false)
+ end
+ end
+
+ context 'when the pipeline depth has reached the max descendents' do
+ let(:bridge) { create(:ci_bridge, :failed, failure_reason: :reached_max_descendant_pipelines_depth) }
+
it 'returns false' do
+ expect(bridge.failure_reason).to eq('reached_max_descendant_pipelines_depth')
expect(bridge.retryable?).to eq(false)
end
end
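
The new bridge examples pin down two separate conditions: retrying a trigger job is gated on the ci_recreate_downstream_pipeline feature flag, and it is refused outright when the failure reason is a terminal loop or depth error. A minimal sketch of that guard, with illustrative names only (not the actual Ci::Bridge implementation):

TERMINAL_FAILURE_REASONS = %w[pipeline_loop_detected reached_max_descendant_pipelines_depth].freeze

def retryable?(flag_enabled:, failure_reason: nil)
  flag_enabled && !TERMINAL_FAILURE_REASONS.include?(failure_reason.to_s)
end

retryable?(flag_enabled: true)                                          # => true
retryable?(flag_enabled: false)                                         # => false
retryable?(flag_enabled: true, failure_reason: :pipeline_loop_detected) # => false
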
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 12e65974270..dcf6915a01e 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -743,7 +743,7 @@ RSpec.describe Ci::Build do
it { is_expected.to be_falsey }
end
- context 'when there are runners' do
+ context 'when there is a runner' do
let(:runner) { create(:ci_runner, :project, projects: [build.project]) }
before do
@@ -752,19 +752,28 @@ RSpec.describe Ci::Build do
it { is_expected.to be_truthy }
- it 'that is inactive' do
- runner.update!(active: false)
- is_expected.to be_falsey
+ context 'that is inactive' do
+ before do
+ runner.update!(active: false)
+ end
+
+ it { is_expected.to be_falsey }
end
- it 'that is not online' do
- runner.update!(contacted_at: nil)
- is_expected.to be_falsey
+ context 'that is not online' do
+ before do
+ runner.update!(contacted_at: nil)
+ end
+
+ it { is_expected.to be_falsey }
end
- it 'that cannot handle build' do
- expect_any_instance_of(Ci::Runner).to receive(:matches_build?).with(build).and_return(false)
- is_expected.to be_falsey
+ context 'that cannot handle build' do
+ before do
+ expect_any_instance_of(Gitlab::Ci::Matching::RunnerMatcher).to receive(:matches?).with(build.build_matcher).and_return(false)
+ end
+
+ it { is_expected.to be_falsey }
end
end
@@ -1069,6 +1078,32 @@ RSpec.describe Ci::Build do
is_expected.to all(a_hash_including(key: a_string_matching(/-non_protected$/)))
end
end
+
+ context 'when separated caches are disabled' do
+ before do
+ allow_any_instance_of(Project).to receive(:ci_separated_caches).and_return(false)
+ end
+
+ context 'running on protected ref' do
+ before do
+ allow(build.pipeline).to receive(:protected_ref?).and_return(true)
+ end
+
+ it 'is expected to have no type suffix' do
+ is_expected.to match([a_hash_including(key: 'key-1'), a_hash_including(key: 'key2-1')])
+ end
+ end
+
+ context 'running on not protected ref' do
+ before do
+ allow(build.pipeline).to receive(:protected_ref?).and_return(false)
+ end
+
+ it 'is expected to have no type suffix' do
+ is_expected.to match([a_hash_including(key: 'key-1'), a_hash_including(key: 'key2-1')])
+ end
+ end
+ end
end
context 'when project has jobs_cache_index' do
@@ -1123,36 +1158,6 @@ RSpec.describe Ci::Build do
end
end
- describe '#coverage_regex' do
- subject { build.coverage_regex }
-
- context 'when project has build_coverage_regex set' do
- let(:project_regex) { '\(\d+\.\d+\) covered' }
-
- before do
- project.update_column(:build_coverage_regex, project_regex)
- end
-
- context 'and coverage_regex attribute is not set' do
- it { is_expected.to eq(project_regex) }
- end
-
- context 'but coverage_regex attribute is also set' do
- let(:build_regex) { 'Code coverage: \d+\.\d+' }
-
- before do
- build.coverage_regex = build_regex
- end
-
- it { is_expected.to eq(build_regex) }
- end
- end
-
- context 'when neither project nor build has coverage regex set' do
- it { is_expected.to be_nil }
- end
- end
-
describe '#update_coverage' do
context "regarding coverage_regex's value," do
before do
@@ -1476,6 +1481,44 @@ RSpec.describe Ci::Build do
expect(deployment).to be_canceled
end
end
+
+ # Mimic playing a manual job that needs another job.
+ # `needs + when:manual` scenario, see: https://gitlab.com/gitlab-org/gitlab/-/issues/347502
+ context 'when transits from skipped to created to running' do
+ before do
+ build.skip!
+ end
+
+ context 'during skipped to created' do
+ let(:event) { :process! }
+
+ it 'transitions to created' do
+ subject
+
+ expect(deployment).to be_created
+ end
+ end
+
+ context 'during created to running' do
+ let(:event) { :run! }
+
+ before do
+ build.process!
+ build.enqueue!
+ end
+
+ it 'transitions to running and calls webhook' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ subject
+ end
+
+ expect(deployment).to be_running
+ end
+ end
+ end
end
describe '#on_stop' do
@@ -3755,6 +3798,7 @@ RSpec.describe Ci::Build do
context 'for pipeline ref existence' do
it 'ensures pipeline ref creation' do
+ expect(job.pipeline).to receive(:ensure_persistent_ref).once.and_call_original
expect(job.pipeline.persistent_ref).to receive(:create).once
run_job_without_exception
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 24c318d0218..24265242172 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -206,8 +206,8 @@ RSpec.describe Ci::JobArtifact do
end
end
- describe '#archived_trace_exists?' do
- subject { artifact.archived_trace_exists? }
+ describe '#stored?' do
+ subject { artifact.stored? }
context 'when the file exists' do
it { is_expected.to be_truthy }
@@ -270,15 +270,6 @@ RSpec.describe Ci::JobArtifact do
end
end
- describe '.order_expired_desc' do
- let_it_be(:first_artifact) { create(:ci_job_artifact, expire_at: 2.days.ago) }
- let_it_be(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
-
- it 'returns ordered artifacts' do
- expect(described_class.order_expired_desc).to eq([second_artifact, first_artifact])
- end
- end
-
describe '.order_expired_asc' do
let_it_be(:first_artifact) { create(:ci_job_artifact, expire_at: 2.days.ago) }
let_it_be(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 45b51d5bf44..8dc041814fa 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let_it_be(:namespace) { create_default(:namespace).freeze }
let_it_be(:project) { create_default(:project, :repository).freeze }
- it 'paginates 15 pipeleines per page' do
+ it 'paginates 15 pipelines per page' do
expect(described_class.default_per_page).to eq(15)
end
@@ -552,7 +552,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to be_truthy }
end
- context 'when both sha and source_sha do not matche' do
+ context 'when both sha and source_sha do not match' do
let(:pipeline) { build(:ci_pipeline, sha: 'test', source_sha: 'test') }
it { is_expected.to be_falsy }
@@ -1423,7 +1423,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let(:build_b) { create_build('build2', queued_at: 0) }
let(:build_c) { create_build('build3', queued_at: 0) }
- %w[succeed! drop! cancel! skip!].each do |action|
+ %w[succeed! drop! cancel! skip! block! delay!].each do |action|
context "when the pipeline recieved #{action} event" do
it 'deletes a persistent ref' do
expect(pipeline.persistent_ref).to receive(:delete).once
@@ -1534,6 +1534,21 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
expect(pipeline.started_at).to be_nil
end
end
+
+ context 'from success' do
+ let(:started_at) { 2.days.ago }
+ let(:from_status) { :success }
+
+ before do
+ pipeline.update!(started_at: started_at)
+ end
+
+ it 'does not update on transitioning to running' do
+ pipeline.run
+
+ expect(pipeline.started_at).to eq started_at
+ end
+ end
end
describe '#finished_at' do
@@ -1813,6 +1828,32 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#ensure_persistent_ref' do
+ subject { pipeline.ensure_persistent_ref }
+
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when the persistent ref does not exist' do
+ it 'creates a ref' do
+ expect(pipeline.persistent_ref).to receive(:create).once
+
+ subject
+ end
+ end
+
+ context 'when the persistent ref exists' do
+ before do
+ pipeline.persistent_ref.create # rubocop:disable Rails/SaveBang
+ end
+
+ it 'does not create a ref' do
+ expect(pipeline.persistent_ref).not_to receive(:create)
+
+ subject
+ end
+ end
+ end
+
describe '#branch?' do
subject { pipeline.branch? }
@@ -3428,6 +3469,46 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#upstream_root' do
+ subject { pipeline.upstream_root }
+
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
+ context 'when pipeline is child of child pipeline' do
+ let!(:root_ancestor) { create(:ci_pipeline) }
+ let!(:parent_pipeline) { create(:ci_pipeline, child_of: root_ancestor) }
+ let!(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
+
+ it 'returns the root ancestor' do
+ expect(subject).to eq(root_ancestor)
+ end
+ end
+
+ context 'when pipeline is root ancestor' do
+ let!(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+
+ it 'returns itself' do
+ expect(subject).to eq(pipeline)
+ end
+ end
+
+ context 'when pipeline is standalone' do
+ it 'returns itself' do
+ expect(subject).to eq(pipeline)
+ end
+ end
+
+ context 'when pipeline is multi-project downstream pipeline' do
+ let!(:upstream_pipeline) do
+ create(:ci_pipeline, project: create(:project), upstream_of: pipeline)
+ end
+
+ it 'returns the upstream pipeline' do
+ expect(subject).to eq(upstream_pipeline)
+ end
+ end
+ end
+
describe '#stuck?' do
let(:pipeline) { create(:ci_empty_pipeline, :created) }
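
Two of the additions above describe traversal and idempotency guarantees: #upstream_root walks parent and upstream links until it reaches the top-level pipeline (returning the pipeline itself when it has no ancestors), and #ensure_persistent_ref creates the persistent ref only when it does not already exist, so callers such as a job about to run can invoke it unconditionally. A small sketch of the idempotent ensure, using stand-in classes rather than GitLab's internals:

class PersistentRef
  def initialize
    @exists = false
  end

  def exist?
    @exists
  end

  def create
    @exists = true
  end
end

def ensure_persistent_ref(ref)
  ref.create unless ref.exist?
end

ref = PersistentRef.new
ensure_persistent_ref(ref) # creates the ref
ensure_persistent_ref(ref) # second call is a no-op
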
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 71fef3c1b5b..cdd96d45561 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -14,6 +14,223 @@ RSpec.describe Ci::Processable do
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
end
+ describe '#clone' do
+ let(:user) { create(:user) }
+
+ let(:new_processable) do
+ new_proc = processable.clone(current_user: user)
+ new_proc.save!
+
+ new_proc
+ end
+
+ let_it_be(:stage) { create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'test') }
+
+ shared_context 'processable bridge' do
+ let_it_be(:downstream_project) { create(:project, :repository) }
+
+ let_it_be_with_refind(:processable) do
+ create(
+ :ci_bridge, :success, pipeline: pipeline, downstream: downstream_project,
+ description: 'a trigger job', stage_id: stage.id
+ )
+ end
+
+ let(:clone_accessors) { ::Ci::Bridge.clone_accessors }
+ let(:reject_accessors) { [] }
+ let(:ignore_accessors) { [] }
+ end
+
+ shared_context 'processable build' do
+ let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
+
+ let_it_be_with_refind(:processable) do
+ create(:ci_build, :failed, :picked, :expired, :erased, :queued, :coverage, :tags,
+ :allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group,
+ description: 'my-job', stage: 'test', stage_id: stage.id,
+ pipeline: pipeline, auto_canceled_by: another_pipeline,
+ scheduled_at: 10.seconds.since)
+ end
+
+ let_it_be(:internal_job_variable) { create(:ci_job_variable, job: processable) }
+
+ let(:clone_accessors) { ::Ci::Build.clone_accessors.without(::Ci::Build.extra_accessors) }
+
+ let(:reject_accessors) do
+ %i[id status user token token_encrypted coverage trace runner
+ artifacts_expire_at
+ created_at updated_at started_at finished_at queued_at erased_by
+ erased_at auto_canceled_by job_artifacts job_artifacts_archive
+ job_artifacts_metadata job_artifacts_trace job_artifacts_junit
+ job_artifacts_sast job_artifacts_secret_detection job_artifacts_dependency_scanning
+ job_artifacts_container_scanning job_artifacts_cluster_image_scanning job_artifacts_dast
+ job_artifacts_license_scanning
+ job_artifacts_performance job_artifacts_browser_performance job_artifacts_load_performance
+ job_artifacts_lsif job_artifacts_terraform job_artifacts_cluster_applications
+ job_artifacts_codequality job_artifacts_metrics scheduled_at
+ job_variables waiting_for_resource_at job_artifacts_metrics_referee
+ job_artifacts_network_referee job_artifacts_dotenv
+ job_artifacts_cobertura needs job_artifacts_accessibility
+ job_artifacts_requirements job_artifacts_coverage_fuzzing
+ job_artifacts_api_fuzzing terraform_state_versions].freeze
+ end
+
+ let(:ignore_accessors) do
+ %i[type namespace lock_version target_url base_tags trace_sections
+ commit_id deployment erased_by_id project_id
+ runner_id tag_taggings taggings tags trigger_request_id
+ user_id auto_canceled_by_id retried failure_reason
+ sourced_pipelines artifacts_file_store artifacts_metadata_store
+ metadata runner_session trace_chunks upstream_pipeline_id
+ artifacts_file artifacts_metadata artifacts_size commands
+ resource resource_group_id processed security_scans author
+ pipeline_id report_results pending_state pages_deployments
+ queuing_entry runtime_metadata trace_metadata
+ dast_site_profile dast_scanner_profile].freeze
+ end
+
+ before_all do
+ # Create artifacts to check that the associations are rejected when cloning
+ Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.each do |file_type, file_format|
+ create(:ci_job_artifact, file_format,
+ file_type: file_type, job: processable, expire_at: processable.artifacts_expire_at)
+ end
+
+ create(:ci_job_variable, :dotenv_source, job: processable)
+ create(:terraform_state_version, build: processable)
+ end
+
+ before do
+ processable.update!(retried: false, status: :success)
+ end
+ end
+
+ shared_examples_for 'clones the processable' do
+ before_all do
+ processable.update!(stage: 'test', stage_id: stage.id)
+
+ create(:ci_build_need, build: processable)
+ end
+
+ describe 'clone accessors' do
+ let(:forbidden_associations) do
+ Ci::Build.reflect_on_all_associations.each_with_object(Set.new) do |assoc, memo|
+ memo << assoc.name unless assoc.macro == :belongs_to
+ end
+ end
+
+ it 'clones the processable attributes', :aggregate_failures do
+ clone_accessors.each do |attribute|
+ expect(attribute).not_to be_in(forbidden_associations), "association #{attribute} must be `belongs_to`"
+ expect(processable.send(attribute)).not_to be_nil, "old processable attribute #{attribute} should not be nil"
+ expect(new_processable.send(attribute)).not_to be_nil, "new processable attribute #{attribute} should not be nil"
+ expect(new_processable.send(attribute)).to eq(processable.send(attribute)), "new processable attribute #{attribute} should match old processable"
+ end
+ end
+
+ it 'clones only the needs attributes' do
+ expect(new_processable.needs.size).to be(1)
+ expect(processable.needs.exists?).to be_truthy
+
+ expect(new_processable.needs_attributes).to match(processable.needs_attributes)
+ expect(new_processable.needs).not_to match(processable.needs)
+ end
+
+ context 'when the processable has protected: nil' do
+ before do
+ processable.update_attribute(:protected, nil)
+ end
+
+ it 'clones the protected job attribute' do
+ expect(new_processable.protected).to be_nil
+ expect(new_processable.protected).to eq processable.protected
+ end
+ end
+ end
+
+ describe 'reject accessors' do
+ it 'does not clone rejected attributes' do
+ reject_accessors.each do |attribute|
+ expect(new_processable.send(attribute)).not_to eq(processable.send(attribute)), "processable attribute #{attribute} should not have been cloned"
+ end
+ end
+ end
+
+ it 'creates a new processable that represents the old processable' do
+ expect(new_processable.name).to eq processable.name
+ end
+ end
+
+ context 'when the processable to be cloned is a bridge' do
+ include_context 'processable bridge'
+
+ it_behaves_like 'clones the processable'
+ end
+
+ context 'when the processable to be cloned is a build' do
+ include_context 'processable build'
+
+ it_behaves_like 'clones the processable'
+
+ it 'has the correct number of known attributes', :aggregate_failures do
+ processed_accessors = clone_accessors + reject_accessors
+ known_accessors = processed_accessors + ignore_accessors
+
+ current_accessors =
+ Ci::Build.attribute_names.map(&:to_sym) +
+ Ci::Build.attribute_aliases.keys.map(&:to_sym) +
+ Ci::Build.reflect_on_all_associations.map(&:name) +
+ [:tag_list, :needs_attributes, :job_variables_attributes] -
+ # ToDo: Move EE accessors to ee/
+ ::Ci::Build.extra_accessors -
+ [:dast_site_profiles_build, :dast_scanner_profiles_build]
+
+ current_accessors.uniq!
+
+ expect(current_accessors).to include(*processed_accessors)
+ expect(known_accessors).to include(*current_accessors)
+ end
+
+ context 'when it has a deployment' do
+ let!(:processable) do
+ create(:ci_build, :with_deployment, :deploy_to_production,
+ pipeline: pipeline, stage_id: stage.id, project: project)
+ end
+
+ it 'persists the expanded environment name' do
+ expect(new_processable.metadata.expanded_environment_name).to eq('production')
+ end
+ end
+
+ context 'when it has a dynamic environment' do
+ let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(u) } }
+
+ let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
+
+ let!(:processable) do
+ create(:ci_build, :with_deployment, environment: environment_name,
+ options: { environment: { name: environment_name } },
+ pipeline: pipeline, stage_id: stage.id, project: project,
+ user: other_developer)
+ end
+
+ it 're-uses the previous persisted environment' do
+ expect(processable.persisted_environment.name).to eq("review/#{processable.ref}-#{other_developer.id}")
+
+ expect(new_processable.persisted_environment.name).to eq("review/#{processable.ref}-#{other_developer.id}")
+ end
+ end
+
+ context 'when the processable has job variables' do
+ it 'only clones the internal job variables' do
+ expect(new_processable.job_variables.size).to eq(1)
+ expect(new_processable.job_variables.first.key).to eq(internal_job_variable.key)
+ expect(new_processable.job_variables.first.value).to eq(internal_job_variable.value)
+ end
+ end
+ end
+ end
+
describe '#retryable' do
shared_examples_for 'retryable processable' do
context 'when processable is successful' do
@@ -69,6 +286,12 @@ RSpec.describe Ci::Processable do
end
end
+ context 'when the processable is a bridge' do
+ subject(:processable) { create(:ci_bridge, pipeline: pipeline) }
+
+ it_behaves_like 'retryable processable'
+ end
+
context 'when the processable is a build' do
subject(:processable) { create(:ci_build, pipeline: pipeline) }
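
The accessor bookkeeping in the clone specs follows a coverage pattern worth noting: every attribute of Ci::Build must appear in exactly one of the clone, reject, or ignore lists, and the 'has the correct number of known attributes' example asserts that their union matches what the model currently exposes, so adding a column forces an explicit decision about whether it is cloned on retry. A stripped-down illustration of that check, with made-up attribute names:

clone_accessors  = %i[name ref tag_list options]
reject_accessors = %i[id status token created_at]
ignore_accessors = %i[lock_version type]

current_accessors = %i[id name ref tag_list options status token created_at lock_version type]

known        = clone_accessors + reject_accessors + ignore_accessors
unclassified = current_accessors - known
stale        = known - current_accessors

raise "unclassified attributes: #{unclassified.inspect}" unless unclassified.empty?
raise "stale attributes: #{stale.inspect}" unless stale.empty?
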
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 05b7bc39a74..8a1dcbfbdeb 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -412,12 +412,9 @@ RSpec.describe Ci::Runner do
context 'with shared_runner' do
let(:runner) { create(:ci_runner, :instance) }
- it 'transitions shared runner to project runner and assigns project' do
- expect(subject).to be_truthy
-
- expect(runner).to be_project_type
- expect(runner.runner_projects.pluck(:project_id)).to match_array([project.id])
- expect(runner.only_for?(project)).to be_truthy
+ it 'raises an error' do
+ expect { subject }
+ .to raise_error(ArgumentError, 'Transitioning an instance runner to a project runner is not supported')
end
end
@@ -430,6 +427,18 @@ RSpec.describe Ci::Runner do
.to raise_error(ArgumentError, 'Transitioning a group runner to a project runner is not supported')
end
end
+
+ context 'with project runner' do
+ let(:other_project) { create(:project) }
+ let(:runner) { create(:ci_runner, :project, projects: [other_project]) }
+
+ it 'assigns runner to project' do
+ expect(subject).to be_truthy
+
+ expect(runner).to be_project_type
+ expect(runner.runner_projects.pluck(:project_id)).to contain_exactly(project.id, other_project.id)
+ end
+ end
end
describe '.recent' do
@@ -829,7 +838,7 @@ RSpec.describe Ci::Runner do
context 'with legacy_mode enabled' do
let(:legacy_mode) { '14.5' }
- it { is_expected.to eq(:not_connected) }
+ it { is_expected.to eq(:stale) }
end
context 'with legacy_mode disabled' do
@@ -886,7 +895,7 @@ RSpec.describe Ci::Runner do
context 'with legacy_mode enabled' do
let(:legacy_mode) { '14.5' }
- it { is_expected.to eq(:offline) }
+ it { is_expected.to eq(:stale) }
end
context 'with legacy_mode disabled' do
@@ -896,7 +905,7 @@ RSpec.describe Ci::Runner do
end
describe '#deprecated_rest_status' do
- let(:runner) { build(:ci_runner, :instance, contacted_at: 1.second.ago) }
+ let(:runner) { create(:ci_runner, :instance, contacted_at: 1.second.ago) }
subject { runner.deprecated_rest_status }
@@ -905,7 +914,7 @@ RSpec.describe Ci::Runner do
runner.contacted_at = nil
end
- it { is_expected.to eq(:not_connected) }
+ it { is_expected.to eq(:never_contacted) }
end
context 'contacted 1s ago' do
@@ -918,10 +927,11 @@ RSpec.describe Ci::Runner do
context 'contacted long time ago' do
before do
+ runner.created_at = 1.year.ago
runner.contacted_at = 1.year.ago
end
- it { is_expected.to eq(:offline) }
+ it { is_expected.to eq(:stale) }
end
context 'inactive' do
diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb
index f92db3fe8db..40ddafad013 100644
--- a/spec/models/ci/secure_file_spec.rb
+++ b/spec/models/ci/secure_file_spec.rb
@@ -25,7 +25,6 @@ RSpec.describe Ci::SecureFile do
it { is_expected.to validate_presence_of(:checksum) }
it { is_expected.to validate_presence_of(:file_store) }
it { is_expected.to validate_presence_of(:name) }
- it { is_expected.to validate_presence_of(:permissions) }
it { is_expected.to validate_presence_of(:project_id) }
context 'unique filename' do
let_it_be(:project1) { create(:project) }
@@ -49,12 +48,6 @@ RSpec.describe Ci::SecureFile do
end
end
- describe '#permissions' do
- it 'defaults to read_only file permssions' do
- expect(subject.permissions).to eq('read_only')
- end
- end
-
describe '#checksum' do
it 'computes SHA256 checksum on the file before encrypted' do
expect(subject.checksum).to eq(Digest::SHA256.hexdigest(sample_file))
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index b298bf2c8bb..a0ede9fb0d9 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -450,6 +450,42 @@ RSpec.describe Clusters::Platforms::Kubernetes do
it { is_expected.to be_nil }
end
+ context 'when there are ignored K8s connections errors' do
+ described_class::IGNORED_CONNECTION_EXCEPTIONS.each do |exception|
+ context "#{exception}" do
+ before do
+ exception_args = ['arg1']
+ exception_args.push('arg2', 'arg3') if exception.name == 'Kubeclient::HttpError'
+ exception_instance = exception.new(*exception_args)
+
+ allow_next_instance_of(Gitlab::Kubernetes::KubeClient) do |kube_client|
+ allow(kube_client).to receive(:get_pods).with(namespace: namespace).and_raise(exception_instance)
+ allow(kube_client).to receive(:get_deployments).with(namespace: namespace).and_raise(exception_instance)
+ allow(kube_client).to receive(:get_ingresses).with(namespace: namespace).and_raise(exception_instance)
+ end
+ end
+
+ it 'does not raise error' do
+ expect { subject }.not_to raise_error
+ end
+
+ it 'returns empty array for the K8s component keys' do
+ expect(subject).to include({ pods: [], deployments: [], ingresses: [] })
+ end
+
+ it 'logs the error' do
+ expect_next_instance_of(Gitlab::Kubernetes::Logger) do |logger|
+ expect(logger).to receive(:error)
+ .with(hash_including(event: :kube_connection_error))
+ .and_call_original
+ end
+
+ subject
+ end
+ end
+ end
+ end
+
context 'when kubernetes responds with 500s' do
before do
stub_kubeclient_pods(namespace, status: 500)
@@ -457,7 +493,9 @@ RSpec.describe Clusters::Platforms::Kubernetes do
stub_kubeclient_ingresses(namespace, status: 500)
end
- it { expect { subject }.to raise_error(Kubeclient::HttpError) }
+ it 'does not raise kubeclient http error' do
+ expect { subject }.not_to raise_error
+ end
end
context 'when kubernetes responds with 404s' do
@@ -755,6 +793,18 @@ RSpec.describe Clusters::Platforms::Kubernetes do
expect(rollout_status.instances.map { |p| p[:pod_name] }).to eq(['pod-a-1', 'pod-a-2', 'pod-b-1', 'pod-b-2'])
end
end
+
+ # Scenario when there are K8s connection errors.
+ context 'when cache keys are defaulted' do
+ let(:cache_data) { Hash(deployments: [], pods: [], ingresses: []) }
+
+ it 'does not raise error' do
+ expect { rollout_status }.not_to raise_error
+
+ expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
+ expect(rollout_status).to be_not_found
+ end
+ end
end
describe '#ingresses' do
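
The new Kubernetes examples describe a deliberate degradation path: when the cluster is unreachable (any of the IGNORED_CONNECTION_EXCEPTIONS, including the Kubeclient::HttpError raised for 500s), the platform logs a kube_connection_error event and reports empty pod, deployment, and ingress collections instead of raising, which is also why the rollout status falls back to not_found when the cache holds only empty defaults. A hedged sketch of that shape, with placeholder names rather than GitLab's:

IGNORED_CONNECTION_EXCEPTIONS = [Errno::ECONNREFUSED, Errno::EHOSTUNREACH].freeze

def read_cluster_state(logger: nil)
  { pods: fetch_pods, deployments: [], ingresses: [] }
rescue *IGNORED_CONNECTION_EXCEPTIONS => e
  logger&.call({ event: :kube_connection_error, error: e.class.name })
  { pods: [], deployments: [], ingresses: [] }
end

def fetch_pods
  raise Errno::ECONNREFUSED # simulate an unreachable cluster
end

read_cluster_state(logger: ->(payload) { warn payload.inspect })
# => { pods: [], deployments: [], ingresses: [] }
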
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index d158a99ef9f..dbb15fad246 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -142,6 +142,26 @@ RSpec.describe CommitStatus do
end
end
+ describe '.cancelable' do
+ subject { described_class.cancelable }
+
+ %i[running pending waiting_for_resource preparing created scheduled].each do |status|
+ context "when #{status} commit status" do
+ let!(:commit_status) { create(:commit_status, status, pipeline: pipeline) }
+
+ it { is_expected.to contain_exactly(commit_status) }
+ end
+ end
+
+ %i[failed success skipped canceled manual].each do |status|
+ context "when #{status} commit status" do
+ let!(:commit_status) { create(:commit_status, status, pipeline: pipeline) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+ end
+
describe '#started?' do
subject { commit_status.started? }
@@ -150,26 +170,28 @@ RSpec.describe CommitStatus do
commit_status.started_at = nil
end
- it { is_expected.to be_falsey }
+ it { is_expected.to be(false) }
end
- %w[running success failed].each do |status|
- context "if commit status is #{status}" do
- before do
- commit_status.status = status
- end
+ context 'with started_at' do
+ described_class::STARTED_STATUSES.each do |status|
+ context "if commit status is #{status}" do
+ before do
+ commit_status.status = status
+ end
- it { is_expected.to be_truthy }
+ it { is_expected.to eq(true) }
+ end
end
- end
- %w[pending canceled].each do |status|
- context "if commit status is #{status}" do
- before do
- commit_status.status = status
- end
+ (described_class::AVAILABLE_STATUSES - described_class::STARTED_STATUSES).each do |status|
+ context "if commit status is #{status}" do
+ before do
+ commit_status.status = status
+ end
- it { is_expected.to be_falsey }
+ it { is_expected.to be(false) }
+ end
end
end
end
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 1c1efab2889..d46f22b2216 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -149,7 +149,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
it 'saves the changes' do
expect(thing)
.to receive(:save_markdown)
- .with("description_html" => updated_html, "title_html" => "", "cached_markdown_version" => cache_version)
+ .with({ "description_html" => updated_html, "title_html" => "", "cached_markdown_version" => cache_version })
thing.refresh_markdown_cache!
end
diff --git a/spec/models/concerns/integrations/reset_secret_fields_spec.rb b/spec/models/concerns/integrations/reset_secret_fields_spec.rb
new file mode 100644
index 00000000000..a372550c70f
--- /dev/null
+++ b/spec/models/concerns/integrations/reset_secret_fields_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::ResetSecretFields do
+ let(:described_class) do
+ Class.new(Integration) do
+ field :username, type: 'text'
+ field :url, type: 'text', exposes_secrets: true
+ field :api_url, type: 'text', exposes_secrets: true
+ field :password, type: 'password'
+ field :token, type: 'password'
+ end
+ end
+
+ let(:integration) { described_class.new }
+
+ it_behaves_like Integrations::ResetSecretFields
+end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index b38135fc0b2..e8e9c263d23 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -474,11 +474,11 @@ RSpec.describe Issuable do
issue.update!(labels: [label])
issue.assignees << user
issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.current)
- expect(Gitlab::HookData::IssuableBuilder)
+ expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
- it 'delegates to Gitlab::HookData::IssuableBuilder#build and does not set labels, assignees, nor total_time_spent' do
+ it 'delegates to Gitlab::DataBuilder::Issuable#build and does not set labels, assignees, nor total_time_spent' do
expect(builder).to receive(:build).with(
user: user,
changes: {})
@@ -493,11 +493,11 @@ RSpec.describe Issuable do
before do
issue.update!(labels: [labels[1]])
- expect(Gitlab::HookData::IssuableBuilder)
+ expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
- it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
+ it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
@@ -512,11 +512,11 @@ RSpec.describe Issuable do
before do
issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.current)
issue.save!
- expect(Gitlab::HookData::IssuableBuilder)
+ expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
- it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
+ it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
@@ -532,11 +532,11 @@ RSpec.describe Issuable do
before do
issue.assignees << user << user2
- expect(Gitlab::HookData::IssuableBuilder)
+ expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
- it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
+ it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
@@ -554,11 +554,11 @@ RSpec.describe Issuable do
before do
merge_request.update!(assignees: [user])
merge_request.update!(assignees: [user, user2])
- expect(Gitlab::HookData::IssuableBuilder)
+ expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(merge_request).and_return(builder)
end
- it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
+ it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
@@ -574,11 +574,11 @@ RSpec.describe Issuable do
before do
issue.update!(issuable_severity_attributes: { severity: 'low' })
- expect(Gitlab::HookData::IssuableBuilder)
+ expect(Gitlab::DataBuilder::Issuable)
.to receive(:new).with(issue).and_return(builder)
end
- it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
+ it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
@@ -596,10 +596,10 @@ RSpec.describe Issuable do
before do
issue.escalation_status.update!(status: acknowledged)
- expect(Gitlab::HookData::IssuableBuilder).to receive(:new).with(issue).and_return(builder)
+ expect(Gitlab::DataBuilder::Issuable).to receive(:new).with(issue).and_return(builder)
end
- it 'delegates to Gitlab::HookData::IssuableBuilder#build' do
+ it 'delegates to Gitlab::DataBuilder::Issuable#build' do
expect(builder).to receive(:build).with(
user: user,
changes: hash_including(
diff --git a/spec/models/concerns/pg_full_text_searchable_spec.rb b/spec/models/concerns/pg_full_text_searchable_spec.rb
index db7f652f494..b6da481024a 100644
--- a/spec/models/concerns/pg_full_text_searchable_spec.rb
+++ b/spec/models/concerns/pg_full_text_searchable_spec.rb
@@ -54,12 +54,23 @@ RSpec.describe PgFullTextSearchable do
end
context 'when specified columns are not changed' do
- it 'does not enqueue worker' do
+ it 'does not call update_search_data!' do
expect(model).not_to receive(:update_search_data!)
model.update!(description: 'A new description')
end
end
+
+ context 'when model is updated twice within a transaction' do
+ it 'calls update_search_data!' do
+ expect(model).to receive(:update_search_data!)
+
+ model.transaction do
+ model.update!(title: 'A new title')
+ model.update!(updated_at: Time.current)
+ end
+ end
+ end
end
describe '.pg_full_text_search' do
diff --git a/spec/models/concerns/reactive_caching_spec.rb b/spec/models/concerns/reactive_caching_spec.rb
index 4f3b95e43cd..5468699f9dd 100644
--- a/spec/models/concerns/reactive_caching_spec.rb
+++ b/spec/models/concerns/reactive_caching_spec.rb
@@ -237,7 +237,7 @@ RSpec.describe ReactiveCaching, :use_clean_rails_memory_store_caching do
end
it 'does not raise the exception' do
- expect { go! }.not_to raise_exception(ReactiveCaching::ExceededReactiveCacheLimit)
+ expect { go! }.not_to raise_exception
end
end
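
The matcher change above reflects an RSpec guideline rather than a behaviour change: negating raise_error with a specific class still passes when a different exception is raised, which can mask real failures (RSpec itself warns that the negated, class-specific form risks false positives), whereas the bare not_to raise_error fails on any exception. A self-contained illustration, assuming only rspec is installed:

require 'rspec/autorun'

RSpec.describe 'negated raise_error' do
  it 'can hide an unrelated failure when given a class' do
    # Passes even though an ArgumentError was raised, because it only
    # checks that a RuntimeError was not raised.
    expect { raise ArgumentError, 'boom' }.not_to raise_error(RuntimeError)
  end

  it 'fails on any exception when written without a class' do
    expect { 2 + 2 }.not_to raise_error
  end
end
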
diff --git a/spec/models/concerns/schedulable_spec.rb b/spec/models/concerns/schedulable_spec.rb
index 62acd12e267..b98dcf1c174 100644
--- a/spec/models/concerns/schedulable_spec.rb
+++ b/spec/models/concerns/schedulable_spec.rb
@@ -57,6 +57,16 @@ RSpec.describe Schedulable do
it_behaves_like '.runnable_schedules'
end
+ context 'for a packages cleanup policy' do
+ # let! is used to reset the next_run_at value before each spec
+ let(:object) { create(:packages_cleanup_policy, :runnable) }
+ let(:non_runnable_object) { create(:packages_cleanup_policy) }
+
+ it_behaves_like '#schedule_next_run!'
+ it_behaves_like 'before_save callback'
+ it_behaves_like '.runnable_schedules'
+ end
+
describe '#next_run_at' do
let(:schedulable_instance) do
Class.new(ActiveRecord::Base) do
diff --git a/spec/models/concerns/sha256_attribute_spec.rb b/spec/models/concerns/sha256_attribute_spec.rb
deleted file mode 100644
index 02947325bf4..00000000000
--- a/spec/models/concerns/sha256_attribute_spec.rb
+++ /dev/null
@@ -1,91 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sha256Attribute do
- let(:model) { Class.new(ApplicationRecord) { include Sha256Attribute } }
-
- before do
- columns = [
- double(:column, name: 'name', type: :text),
- double(:column, name: 'sha256', type: :binary)
- ]
-
- allow(model).to receive(:columns).and_return(columns)
- end
-
- describe '#sha_attribute' do
- context 'when in non-production' do
- before do
- stub_rails_env('development')
- end
-
- context 'when the table exists' do
- before do
- allow(model).to receive(:table_exists?).and_return(true)
- end
-
- it 'defines a SHA attribute for a binary column' do
- expect(model).to receive(:attribute)
- .with(:sha256, an_instance_of(Gitlab::Database::Sha256Attribute))
-
- model.sha256_attribute(:sha256)
- end
-
- it 'raises ArgumentError when the column type is not :binary' do
- expect { model.sha256_attribute(:name) }.to raise_error(ArgumentError)
- end
- end
-
- context 'when the table does not exist' do
- it 'allows the attribute to be added and issues a warning' do
- allow(model).to receive(:table_exists?).and_return(false)
-
- expect(model).not_to receive(:columns)
- expect(model).to receive(:attribute)
- expect(model).to receive(:warn)
-
- model.sha256_attribute(:name)
- end
- end
-
- context 'when the column does not exist' do
- it 'allows the attribute to be added and issues a warning' do
- allow(model).to receive(:table_exists?).and_return(true)
-
- expect(model).to receive(:columns)
- expect(model).to receive(:attribute)
- expect(model).to receive(:warn)
-
- model.sha256_attribute(:no_name)
- end
- end
-
- context 'when other execeptions are raised' do
- it 'logs and re-rasises the error' do
- allow(model).to receive(:table_exists?).and_raise(ActiveRecord::NoDatabaseError.new('does not exist'))
-
- expect(model).not_to receive(:columns)
- expect(model).not_to receive(:attribute)
- expect(Gitlab::AppLogger).to receive(:error)
-
- expect { model.sha256_attribute(:name) }.to raise_error(ActiveRecord::NoDatabaseError)
- end
- end
- end
-
- context 'when in production' do
- before do
- stub_rails_env('production')
- end
-
- it 'defines a SHA attribute' do
- expect(model).not_to receive(:table_exists?)
- expect(model).not_to receive(:columns)
- expect(model).to receive(:attribute).with(:sha256, an_instance_of(Gitlab::Database::Sha256Attribute))
-
- model.sha256_attribute(:sha256)
- end
- end
- end
-end
diff --git a/spec/models/concerns/sha_attribute_spec.rb b/spec/models/concerns/sha_attribute_spec.rb
index 1bcf3dc8b61..790e6936803 100644
--- a/spec/models/concerns/sha_attribute_spec.rb
+++ b/spec/models/concerns/sha_attribute_spec.rb
@@ -3,86 +3,101 @@
require 'spec_helper'
RSpec.describe ShaAttribute do
- let(:model) { Class.new(ActiveRecord::Base) { include ShaAttribute } }
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ include ShaAttribute
- before do
- columns = [
- double(:column, name: 'name', type: :text),
- double(:column, name: 'sha1', type: :binary)
- ]
-
- allow(model).to receive(:columns).and_return(columns)
+ self.table_name = 'merge_requests'
+ end
end
- describe '#sha_attribute' do
- context 'when in development' do
- before do
- stub_rails_env('development')
- end
+ let(:binary_column) { :merge_ref_sha }
+ let(:text_column) { :target_branch }
- context 'when the table exists' do
- before do
- allow(model).to receive(:table_exists?).and_return(true)
- end
+ describe '.sha_attribute' do
+ it 'defines a SHA attribute with Gitlab::Database::ShaAttribute type' do
+ expect(model).to receive(:attribute)
+ .with(binary_column, an_instance_of(Gitlab::Database::ShaAttribute))
+ .and_call_original
- it 'defines a SHA attribute for a binary column' do
- expect(model).to receive(:attribute)
- .with(:sha1, an_instance_of(Gitlab::Database::ShaAttribute))
+ model.sha_attribute(binary_column)
+ end
+ end
- model.sha_attribute(:sha1)
- end
+ describe '.sha256_attribute' do
+ it 'defines a SHA256 attribute with Gitlab::Database::ShaAttribute type' do
+ expect(model).to receive(:attribute)
+ .with(binary_column, an_instance_of(Gitlab::Database::Sha256Attribute))
+ .and_call_original
- it 'raises ArgumentError when the column type is not :binary' do
- expect { model.sha_attribute(:name) }.to raise_error(ArgumentError)
- end
- end
+ model.sha256_attribute(binary_column)
+ end
+ end
- context 'when the table does not exist' do
- it 'allows the attribute to be added' do
- allow(model).to receive(:table_exists?).and_return(false)
+ describe '.load_schema!' do
+ # load_schema! is not a documented class method, so use a documented method
+ # that we know will call load_schema!
+ def load_schema!
+ expect(model).to receive(:load_schema!).and_call_original
- expect(model).not_to receive(:columns)
- expect(model).to receive(:attribute)
+ model.new
+ end
- model.sha_attribute(:name)
- end
- end
+ using RSpec::Parameterized::TableSyntax
- context 'when the column does not exist' do
- it 'allows the attribute to be added' do
- allow(model).to receive(:table_exists?).and_return(true)
+ where(:column_name, :environment, :expected_error) do
+ ref(:binary_column) | 'development' | :no_error
+ ref(:binary_column) | 'production' | :no_error
+ ref(:text_column) | 'development' | :sha_mismatch_error
+ ref(:text_column) | 'production' | :no_error
+ :__non_existent_column | 'development' | :no_error
+ :__non_existent_column | 'production' | :no_error
+ end
- expect(model).to receive(:columns)
- expect(model).to receive(:attribute)
+ let(:sha_mismatch_error) do
+ [
+ described_class::ShaAttributeTypeMismatchError,
+ /#{column_name}.* should be a :binary column/
+ ]
+ end
- model.sha_attribute(:no_name)
- end
+ with_them do
+ before do
+ stub_rails_env(environment)
end
- context 'when other execeptions are raised' do
- it 'logs and re-rasises the error' do
- allow(model).to receive(:table_exists?).and_raise(ActiveRecord::NoDatabaseError.new('does not exist'))
-
- expect(model).not_to receive(:columns)
- expect(model).not_to receive(:attribute)
- expect(Gitlab::AppLogger).to receive(:error)
-
- expect { model.sha_attribute(:name) }.to raise_error(ActiveRecord::NoDatabaseError)
+ context 'with sha_attribute' do
+ before do
+ model.sha_attribute(column_name)
end
- end
- end
- context 'when in production' do
- before do
- stub_rails_env('production')
+ it 'validates column type' do
+ if expected_error == :no_error
+ expect { load_schema! }.not_to raise_error
+ elsif expected_error == :sha_mismatch_error
+ expect { load_schema! }.to raise_error(
+ described_class::ShaAttributeTypeMismatchError,
+ /sha_attribute.*#{column_name}.* should be a :binary column/
+ )
+ end
+ end
end
- it 'defines a SHA attribute' do
- expect(model).not_to receive(:table_exists?)
- expect(model).not_to receive(:columns)
- expect(model).to receive(:attribute).with(:sha1, an_instance_of(Gitlab::Database::ShaAttribute))
+ context 'with sha256_attribute' do
+ before do
+ model.sha256_attribute(column_name)
+ end
- model.sha_attribute(:sha1)
+ it 'validates column type' do
+ if expected_error == :no_error
+ expect { load_schema! }.not_to raise_error
+ elsif expected_error == :sha_mismatch_error
+ expect { load_schema! }.to raise_error(
+ described_class::Sha256AttributeTypeMismatchError,
+ /sha256_attribute.*#{column_name}.* should be a :binary column/
+ )
+ end
+ end
end
end
end
diff --git a/spec/models/container_registry/event_spec.rb b/spec/models/container_registry/event_spec.rb
index 21a3ab5363a..6b544c95cc8 100644
--- a/spec/models/container_registry/event_spec.rb
+++ b/spec/models/container_registry/event_spec.rb
@@ -26,11 +26,65 @@ RSpec.describe ContainerRegistry::Event do
end
describe '#handle!' do
- let(:raw_event) { { 'action' => 'push', 'target' => { 'mediaType' => ContainerRegistry::Client::DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE } } }
+ let(:action) { 'push' }
+ let(:repository) { project.full_path }
+ let(:target) do
+ {
+ 'mediaType' => ContainerRegistry::Client::DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE,
+ 'tag' => 'latest',
+ 'repository' => repository
+ }
+ end
+
+ let(:raw_event) { { 'action' => action, 'target' => target } }
+
+ subject(:handle!) { described_class.new(raw_event).handle! }
+
+ it 'enqueues a project statistics update' do
+ expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:container_registry_size])
+
+ handle!
+ end
- subject { described_class.new(raw_event).handle! }
+ shared_examples 'event without project statistics update' do
+ it 'does not queue a project statistics update' do
+ expect(ProjectCacheWorker).not_to receive(:perform_async)
- it { is_expected.to eq nil }
+ handle!
+ end
+ end
+
+ context 'with :container_registry_project_statistics feature flag disabled' do
+ before do
+ stub_feature_flags(container_registry_project_statistics: false)
+ end
+
+ it_behaves_like 'event without project statistics update'
+ end
+
+ context 'with no target tag' do
+ let(:target) { super().without('tag') }
+
+ it_behaves_like 'event without project statistics update'
+ end
+
+ context 'with an unsupported action' do
+ let(:action) { 'pull' }
+
+ it_behaves_like 'event without project statistics update'
+ end
+
+ context 'with an invalid project repository path' do
+ let(:repository) { 'does/not/exist' }
+
+ it_behaves_like 'event without project statistics update'
+ end
+
+ context 'with no project repository path' do
+ let(:repository) { nil }
+
+ it_behaves_like 'event without project statistics update'
+ end
end
describe '#track!' do
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 2ea042fb767..af4e40cecb7 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -208,9 +208,23 @@ RSpec.describe ContainerRepository, :aggregate_failures do
shared_examples 'queueing the next import' do
it 'starts the worker' do
expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_async)
+ expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_in)
subject
end
+
+ context 'enqueue_twice feature flag disabled' do
+ before do
+ stub_feature_flags(container_registry_migration_phase2_enqueue_twice: false)
+ end
+
+ it 'starts the worker only once' do
+ expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_async)
+ expect(::ContainerRegistry::Migration::EnqueuerWorker).not_to receive(:perform_in)
+
+ subject
+ end
+ end
end
describe '#start_pre_import' do
@@ -354,6 +368,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
subject { repository.skip_import(reason: :too_many_retries) }
it_behaves_like 'transitioning from allowed states', ContainerRepository::SKIPPABLE_MIGRATION_STATES
+ it_behaves_like 'queueing the next import'
it 'sets migration_skipped_at and migration_skipped_reason' do
expect { subject }.to change { repository.reload.migration_skipped_at }
@@ -630,10 +645,15 @@ RSpec.describe ContainerRepository, :aggregate_failures do
describe '#start_expiration_policy!' do
subject { repository.start_expiration_policy! }
+ before do
+ repository.update_column(:last_cleanup_deleted_tags_count, 10)
+ end
+
it 'sets the expiration policy started at to now' do
freeze_time do
expect { subject }
.to change { repository.expiration_policy_started_at }.from(nil).to(Time.zone.now)
+ .and change { repository.last_cleanup_deleted_tags_count }.from(10).to(nil)
end
end
end
@@ -690,22 +710,6 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
- describe '#reset_expiration_policy_started_at!' do
- subject { repository.reset_expiration_policy_started_at! }
-
- before do
- repository.start_expiration_policy!
- end
-
- it 'resets the expiration policy started at' do
- started_at = repository.expiration_policy_started_at
-
- expect(started_at).not_to be_nil
- expect { subject }
- .to change { repository.expiration_policy_started_at }.from(started_at).to(nil)
- end
- end
-
context 'registry migration' do
before do
allow(repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(true)
@@ -1307,6 +1311,38 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
+ describe '#nearing_or_exceeded_retry_limit?' do
+ subject { repository.nearing_or_exceeded_retry_limit? }
+
+ before do
+ stub_application_setting(container_registry_import_max_retries: 3)
+ end
+
+ context 'migration_retries_count is 1 less than max_retries' do
+ before do
+ repository.update_column(:migration_retries_count, 2)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+  context 'migration_retries_count is more than 1 below max_retries' do
+ before do
+ repository.update_column(:migration_retries_count, 1)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'migration_retries_count equal to or higher than max_retries' do
+ before do
+ repository.update_column(:migration_retries_count, 3)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
context 'with repositories' do
let_it_be_with_reload(:repository) { create(:container_repository, :cleanup_unscheduled) }
let_it_be(:other_repository) { create(:container_repository, :cleanup_unscheduled) }
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index c48f1fab3c6..635326eeadc 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -73,10 +73,10 @@ RSpec.describe DeployToken do
describe '#ensure_token' do
it 'ensures a token' do
- deploy_token.token = nil
+ deploy_token.token_encrypted = nil
deploy_token.save!
- expect(deploy_token.token).not_to be_empty
+ expect(deploy_token.token_encrypted).not_to be_empty
end
end
@@ -469,4 +469,10 @@ RSpec.describe DeployToken do
end
end
end
+
+ describe '.impersonated?' do
+ it 'returns false' do
+ expect(subject.impersonated?).to be(false)
+ end
+ end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 705b9b4cc65..409353bdbcf 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -858,12 +858,24 @@ RSpec.describe Deployment do
end
end
- it 'tracks an exception if an invalid status transition is detected' do
- expect(Gitlab::ErrorTracking)
+ context 'tracks an exception if an invalid status transition is detected' do
+ it do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(instance_of(described_class::StatusUpdateError), deployment_id: deploy.id)
+
+ expect(deploy.update_status('running')).to eq(false)
+ end
+
+ it do
+ deploy.update_status('success')
+
+ expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(instance_of(described_class::StatusUpdateError), deployment_id: deploy.id)
- expect(deploy.update_status('running')).to eq(false)
+ expect(deploy.update_status('created')).to eq(false)
+ end
end
it 'tracks an exception if an invalid argument' do
@@ -871,7 +883,7 @@ RSpec.describe Deployment do
.to receive(:track_exception)
.with(instance_of(described_class::StatusUpdateError), deployment_id: deploy.id)
- expect(deploy.update_status('created')).to eq(false)
+ expect(deploy.update_status('recreate')).to eq(false)
end
context 'mapping status to event' do
@@ -893,6 +905,16 @@ RSpec.describe Deployment do
deploy.update_status(status)
end
end
+
+ context 'for created status update' do
+ let(:deploy) { create(:deployment, status: :created) }
+
+ it 'calls the correct method' do
+ expect(deploy).to receive(:create!)
+
+ deploy.update_status('created')
+ end
+ end
end
end
@@ -974,7 +996,9 @@ RSpec.describe Deployment do
context 'with created build' do
let(:build_status) { :created }
- it_behaves_like 'ignoring build'
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %Q{Status cannot transition via \"create\"} }
+ end
end
context 'with running build' do
@@ -1002,7 +1026,9 @@ RSpec.describe Deployment do
context 'with created build' do
let(:build_status) { :created }
- it_behaves_like 'ignoring build'
+ it_behaves_like 'gracefully handling error' do
+ let(:error_message) { %Q{Status cannot transition via \"create\"} }
+ end
end
context 'with running build' do
diff --git a/spec/models/design_management/action_spec.rb b/spec/models/design_management/action_spec.rb
index 0a8bbc8d26e..f2b8fcaa256 100644
--- a/spec/models/design_management/action_spec.rb
+++ b/spec/models/design_management/action_spec.rb
@@ -49,6 +49,15 @@ RSpec.describe DesignManagement::Action do
end
end
+ describe '.with_version' do
+ it 'preloads the version' do
+ actions = described_class.with_version
+
+ expect { actions.map(&:version) }.not_to exceed_query_limit(2)
+ expect(actions.count).to be > 2
+ end
+ end
+
describe '.by_event' do
it 'returns the actions by event type' do
expect(described_class.by_event(:deletion)).to match_array([action_a_2, action_c])
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index b42e73e6d93..34dfc7a1fce 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -621,7 +621,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(close_action.processed).to be_falsey
# it encounters the StaleObjectError at first, but reloads the object and runs `build.play`
- expect { subject }.not_to raise_error(ActiveRecord::StaleObjectError)
+ expect { subject }.not_to raise_error
# Now the build should be processed.
expect(close_action.reload.processed).to be_truthy
@@ -683,19 +683,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(actions.count).to eq(environment.successful_deployments.count)
end
end
-
- context 'when the feature is disabled' do
- before do
- stub_feature_flags(environment_multiple_stop_actions: false)
- end
-
- it 'returns the last deployment job stop action' do
- stop_actions = subject
-
- expect(stop_actions.first).to eq(close_actions[1])
- expect(stop_actions.count).to eq(1)
- end
- end
end
end
@@ -886,22 +873,10 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
is_expected.to eq(deployment)
end
- context 'env_last_deployment_by_finished_at feature flag' do
- it 'when enabled it returns the deployment with the latest finished_at' do
- stub_feature_flags(env_last_deployment_by_finished_at: true)
+ it 'returns the deployment with the latest finished_at' do
+ expect(old_deployment.finished_at < deployment.finished_at).to be_truthy
- expect(old_deployment.finished_at < deployment.finished_at).to be_truthy
-
- is_expected.to eq(deployment)
- end
-
- it 'when disabled it returns the deployment with the highest id' do
- stub_feature_flags(env_last_deployment_by_finished_at: false)
-
- expect(old_deployment.finished_at < deployment.finished_at).to be_truthy
-
- is_expected.to eq(old_deployment)
- end
+ is_expected.to eq(deployment)
end
end
end
@@ -1845,7 +1820,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it 'fetches the rollout status from the deployment platform' do
expect(environment.deployment_platform).to receive(:rollout_status)
- .with(environment, pods: pods, deployments: deployments)
+ .with(environment, { pods: pods, deployments: deployments })
.and_return(:mock_rollout_status)
is_expected.to eq(:mock_rollout_status)
diff --git a/spec/models/event_collection_spec.rb b/spec/models/event_collection_spec.rb
index 036072aab76..67b58c7bf6f 100644
--- a/spec/models/event_collection_spec.rb
+++ b/spec/models/event_collection_spec.rb
@@ -5,138 +5,188 @@ require 'spec_helper'
RSpec.describe EventCollection do
include DesignManagementTestHelpers
- describe '#to_a' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project_empty_repo, group: group) }
- let_it_be(:projects) { Project.where(id: project.id) }
- let_it_be(:user) { create(:user) }
- let_it_be(:merge_request) { create(:merge_request) }
-
- before do
- enable_design_management
- end
+ shared_examples 'EventCollection examples' do
+ describe '#to_a' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project_empty_repo, group: group) }
+ let_it_be(:projects) { Project.where(id: project.id) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ before do
+ enable_design_management
+ end
- context 'with project events' do
- let_it_be(:push_event_payloads) do
- Array.new(9) do
- create(:push_event_payload,
- event: create(:push_event, project: project, author: user))
+ context 'with project events' do
+ let_it_be(:push_event_payloads) do
+ Array.new(9) do
+ create(:push_event_payload,
+ event: create(:push_event, project: project, author: user))
+ end
end
- end
- let_it_be(:merge_request_events) { create_list(:event, 10, :commented, project: project, target: merge_request) }
- let_it_be(:closed_issue_event) { create(:closed_issue_event, project: project, author: user) }
- let_it_be(:wiki_page_event) { create(:wiki_page_event, project: project) }
- let_it_be(:design_event) { create(:design_event, project: project) }
-
- let(:push_events) { push_event_payloads.map(&:event) }
-
- it 'returns an Array of events', :aggregate_failures do
- most_recent_20_events = [
- wiki_page_event,
- design_event,
- closed_issue_event,
- *push_events,
- *merge_request_events
- ].sort_by(&:id).reverse.take(20)
- events = described_class.new(projects).to_a
-
- expect(events).to be_an_instance_of(Array)
- expect(events).to match_array(most_recent_20_events)
- end
+ let_it_be(:merge_request_events) { create_list(:event, 10, :merged, project: project, target: merge_request) }
+ let_it_be(:closed_issue_event) { create(:closed_issue_event, project: project, author: user) }
+ let_it_be(:wiki_page_event) { create(:wiki_page_event, project: project) }
+ let_it_be(:design_event) { create(:design_event, project: project) }
+
+ let(:push_events) { push_event_payloads.map(&:event) }
+
+ it 'returns an Array of all event types when no filter is passed', :aggregate_failures do
+ most_recent_20_events = [
+ wiki_page_event,
+ design_event,
+ closed_issue_event,
+ *push_events,
+ *merge_request_events
+ ].sort_by(&:id).reverse.take(20)
+ events = described_class.new(projects).to_a
+
+ expect(events).to be_an_instance_of(Array)
+ expect(events).to match_array(most_recent_20_events)
+ end
- it 'includes the wiki page events when using to_a' do
- events = described_class.new(projects).to_a
+ it 'includes the wiki page events when using to_a' do
+ events = described_class.new(projects).to_a
- expect(events).to include(wiki_page_event)
- end
+ expect(events).to include(wiki_page_event)
+ end
- it 'includes the design events' do
- collection = described_class.new(projects)
+ it 'includes the design events' do
+ collection = described_class.new(projects)
- expect(collection.to_a).to include(design_event)
- expect(collection.all_project_events).to include(design_event)
- end
+ expect(collection.to_a).to include(design_event)
+ expect(collection.all_project_events).to include(design_event)
+ end
- it 'includes the wiki page events when using all_project_events' do
- events = described_class.new(projects).all_project_events
+ it 'includes the wiki page events when using all_project_events' do
+ events = described_class.new(projects).all_project_events
- expect(events).to include(wiki_page_event)
- end
+ expect(events).to include(wiki_page_event)
+ end
- it 'applies a limit to the number of events' do
- events = described_class.new(projects).to_a
+ it 'applies a limit to the number of events' do
+ events = described_class.new(projects).to_a
- expect(events.length).to eq(20)
- end
+ expect(events.length).to eq(20)
+ end
- it 'can paginate through events' do
- events = described_class.new(projects, limit: 5, offset: 15).to_a
+ it 'can paginate through events' do
+ events = described_class.new(projects, limit: 5, offset: 15).to_a
- expect(events.length).to eq(5)
- end
+ expect(events.length).to eq(5)
+ end
- it 'returns an empty Array when crossing the maximum page number' do
- events = described_class.new(projects, limit: 1, offset: 15).to_a
+ it 'returns an empty Array when crossing the maximum page number' do
+ events = described_class.new(projects, limit: 1, offset: 15).to_a
- expect(events).to be_empty
- end
+ expect(events).to be_empty
+ end
- it 'allows filtering of events using an EventFilter, returning single item' do
- filter = EventFilter.new(EventFilter::ISSUE)
- events = described_class.new(projects, filter: filter).to_a
+ it 'allows filtering of events using an EventFilter, returning single item' do
+ filter = EventFilter.new(EventFilter::ISSUE)
+ events = described_class.new(projects, filter: filter).to_a
- expect(events).to contain_exactly(closed_issue_event)
- end
+ expect(events).to contain_exactly(closed_issue_event)
+ end
- it 'allows filtering of events using an EventFilter, returning several items' do
- filter = EventFilter.new(EventFilter::COMMENTS)
- events = described_class.new(projects, filter: filter).to_a
+ it 'allows filtering of events using an EventFilter, returning several items' do
+ filter = EventFilter.new(EventFilter::MERGED)
+ events = described_class.new(projects, filter: filter).to_a
- expect(events).to match_array(merge_request_events)
- end
+ expect(events).to match_array(merge_request_events)
+ end
- it 'allows filtering of events using an EventFilter, returning pushes' do
- filter = EventFilter.new(EventFilter::PUSH)
- events = described_class.new(projects, filter: filter).to_a
+ it 'allows filtering of events using an EventFilter, returning pushes' do
+ filter = EventFilter.new(EventFilter::PUSH)
+ events = described_class.new(projects, filter: filter).to_a
- expect(events).to match_array(push_events)
+ expect(events).to match_array(push_events)
+ end
end
- end
- context 'with group events' do
- let(:groups) { group.self_and_descendants.public_or_visible_to_user(user) }
- let(:subject) { described_class.new(projects, groups: groups).to_a }
+ context 'with group events' do
+ let(:groups) { group.self_and_descendants.public_or_visible_to_user(user) }
+ let(:subject) { described_class.new(projects, groups: groups).to_a }
- it 'includes also group events' do
- subgroup = create(:group, parent: group)
- event1 = create(:event, project: project, author: user)
- event2 = create(:event, project: nil, group: group, author: user)
- event3 = create(:event, project: nil, group: subgroup, author: user)
+ it 'includes also group events' do
+ subgroup = create(:group, parent: group)
+ event1 = create(:event, project: project, author: user)
+ event2 = create(:event, project: nil, group: group, author: user)
+ event3 = create(:event, project: nil, group: subgroup, author: user)
- expect(subject).to eq([event3, event2, event1])
- end
+ expect(subject).to eq([event3, event2, event1])
+ end
- it 'does not include events from inaccessible groups' do
- subgroup = create(:group, :private, parent: group)
- event1 = create(:event, project: nil, group: group, author: user)
- create(:event, project: nil, group: subgroup, author: user)
+ it 'does not include events from inaccessible groups' do
+ subgroup = create(:group, :private, parent: group)
+ event1 = create(:event, project: nil, group: group, author: user)
+ create(:event, project: nil, group: subgroup, author: user)
- expect(subject).to eq([event1])
- end
+ expect(subject).to match_array([event1])
+ end
+
+ context 'pagination through events' do
+ let_it_be(:project_events) { create_list(:event, 10, project: project) }
+ let_it_be(:group_events) { create_list(:event, 10, group: group, author: user) }
- context 'pagination through events' do
- let_it_be(:project_events) { create_list(:event, 10, project: project) }
- let_it_be(:group_events) { create_list(:event, 10, group: group, author: user) }
+ let(:subject) { described_class.new(projects, limit: 10, offset: 5, groups: groups).to_a }
- let(:subject) { described_class.new(projects, limit: 10, offset: 5, groups: groups).to_a }
+ it 'returns recent groups and projects events' do
+ recent_events_with_offset = (project_events[5..] + group_events[..4]).reverse
- it 'returns recent groups and projects events' do
- recent_events_with_offset = (project_events[5..] + group_events[..4]).reverse
+ expect(subject).to eq(recent_events_with_offset)
+ end
+ end
- expect(subject).to eq(recent_events_with_offset)
+ context 'project exclusive event types' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:filter, :event) do
+ EventFilter::PUSH | lazy { create(:push_event, project: project) }
+ EventFilter::MERGED | lazy { create(:event, :merged, project: project, target: merge_request) }
+ EventFilter::TEAM | lazy { create(:event, :joined, project: project) }
+ EventFilter::ISSUE | lazy { create(:closed_issue_event, project: project) }
+ EventFilter::DESIGNS | lazy { create(:design_event, project: project) }
+ end
+
+ with_them do
+ let(:subject) do
+ described_class.new(projects, groups: Group.where(id: group.id), filter: EventFilter.new(filter))
+ end
+
+ it "queries only project events" do
+ expected_event = event # Forcing lazy evaluation
+ expect(subject).to receive(:project_events).with(no_args).and_call_original
+ expect(subject).not_to receive(:group_events)
+
+ expect(subject.to_a).to match_array(expected_event)
+ end
+ end
end
end
end
end
+
+ context 'when the optimized_project_and_group_activity_queries FF is on' do
+ before do
+ stub_feature_flags(optimized_project_and_group_activity_queries: true)
+ end
+
+ it_behaves_like 'EventCollection examples'
+
+ it 'returns no events if no projects are passed' do
+ events = described_class.new(Project.none).to_a
+
+ expect(events).to be_empty
+ end
+ end
+
+ context 'when the optimized_project_and_group_activity_queries FF is off' do
+ before do
+ stub_feature_flags(optimized_project_and_group_activity_queries: false)
+ end
+
+ it_behaves_like 'EventCollection examples'
+ end
end
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index f099015e63e..2c1bbfcb35f 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -834,7 +834,13 @@ RSpec.describe Event do
end
end
- context 'when a project was updated more than 1 hour ago' do
+ context 'when a project was updated more than 1 hour ago', :clean_gitlab_redis_shared_state do
+ before do
+ ::Gitlab::Redis::SharedState.with do |redis|
+ redis.hset('inactive_projects_deletion_warning_email_notified', "project:#{project.id}", Date.current)
+ end
+ end
+
it 'updates the project' do
project.touch(:last_activity_at, time: 1.year.ago) # rubocop: disable Rails/SkipsModelValidations
@@ -845,6 +851,17 @@ RSpec.describe Event do
expect(project.last_activity_at).to be_like_time(event.created_at)
expect(project.updated_at).to be_like_time(event.created_at)
end
+
+      it "deletes the redis key if the project was inactive" do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:hdel).with('inactive_projects_deletion_warning_email_notified',
+ "project:#{project.id}")
+ end
+
+ project.touch(:last_activity_at, time: 1.year.ago) # rubocop: disable Rails/SkipsModelValidations
+
+ create_push_event(project, project.first_owner)
+ end
end
end
@@ -1040,6 +1057,36 @@ RSpec.describe Event do
end
end
+ describe '#has_no_project_and_group' do
+ context 'with project event' do
+      it 'returns false when the event has a project' do
+ event = build(:event, project: create(:project))
+
+ expect(event.has_no_project_and_group?).to be false
+ end
+
+ it 'returns true when the event has no project' do
+ event = build(:event, project: nil)
+
+ expect(event.has_no_project_and_group?).to be true
+ end
+ end
+
+ context 'with group event' do
+      it 'returns false when the event has a group' do
+ event = build(:event, group: create(:group))
+
+ expect(event.has_no_project_and_group?).to be false
+ end
+
+ it 'returns true when the event has no group' do
+ event = build(:event, group: nil)
+
+ expect(event.has_no_project_and_group?).to be true
+ end
+ end
+ end
+
def create_push_event(project, user)
event = create(:push_event, project: project, author: user)
diff --git a/spec/models/incident_management/timeline_event_spec.rb b/spec/models/incident_management/timeline_event_spec.rb
new file mode 100644
index 00000000000..17150fc9266
--- /dev/null
+++ b/spec/models/incident_management/timeline_event_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::TimelineEvent do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:incident) { create(:incident, project: project) }
+ let_it_be(:timeline_event) { create(:incident_management_timeline_event, project: project, incident: incident) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:author) }
+ it { is_expected.to belong_to(:incident) }
+ it { is_expected.to belong_to(:updated_by_user) }
+ it { is_expected.to belong_to(:promoted_from_note) }
+ end
+
+ describe 'validations' do
+ subject { build(:incident_management_timeline_event) }
+
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:incident) }
+ it { is_expected.to validate_presence_of(:note) }
+ it { is_expected.to validate_length_of(:note).is_at_most(10_000) }
+ it { is_expected.to validate_presence_of(:note_html) }
+ it { is_expected.to validate_length_of(:note_html).is_at_most(10_000) }
+ it { is_expected.to validate_presence_of(:occurred_at) }
+ it { is_expected.to validate_presence_of(:action) }
+ it { is_expected.to validate_length_of(:action).is_at_most(128) }
+ end
+
+ describe '.order_occurred_at_asc' do
+ let_it_be(:occurred_3mins_ago) do
+ create(:incident_management_timeline_event, project: project, occurred_at: 3.minutes.ago)
+ end
+
+ let_it_be(:occurred_2mins_ago) do
+ create(:incident_management_timeline_event, project: project, occurred_at: 2.minutes.ago)
+ end
+
+ subject(:order) { described_class.order_occurred_at_asc }
+
+ it 'sorts timeline events by occurred_at' do
+ is_expected.to eq([occurred_3mins_ago, occurred_2mins_ago, timeline_event])
+ end
+ end
+
+ describe '#cache_markdown_field' do
+ let(:note) { 'note **bold** _italic_ `code` ![image](/path/img.png) :+1:👍' }
+ let(:expected_note_html) do
+ # rubocop:disable Layout/LineLength
+ '<p>note <strong>bold</strong> <em>italic</em> <code>code</code> <a class="with-attachment-icon" href="/path/img.png" target="_blank" rel="noopener noreferrer">image</a> 👍👍</p>'
+ # rubocop:enable Layout/LineLength
+ end
+
+ before do
+ allow(Banzai::Renderer).to receive(:cacheless_render_field).and_call_original
+ end
+
+ context 'on create' do
+ let(:timeline_event) do
+ build(:incident_management_timeline_event, project: project, incident: incident, note: note)
+ end
+
+ it 'updates note_html', :aggregate_failures do
+ expect(Banzai::Renderer).to receive(:cacheless_render_field)
+ .with(timeline_event, :note, { skip_project_check: false })
+
+ expect { timeline_event.save! }.to change { timeline_event.note_html }.to(expected_note_html)
+ end
+ end
+
+ context 'on update' do
+ let(:timeline_event) { create(:incident_management_timeline_event, project: project, incident: incident) }
+
+ it 'updates note_html', :aggregate_failures do
+ expect(Banzai::Renderer).to receive(:cacheless_render_field)
+ .with(timeline_event, :note, { skip_project_check: false })
+
+ expect { timeline_event.update!(note: note) }.to change { timeline_event.note_html }.to(expected_note_html)
+ end
+ end
+ end
+end
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index 3af717798c3..f57667cc5d6 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -99,6 +99,7 @@ RSpec.describe InstanceConfiguration do
max_attachment_size: 10,
receive_max_input_size: 20,
max_import_size: 30,
+ max_export_size: 40,
diff_max_patch_bytes: 409600,
max_artifacts_size: 50,
max_pages_size: 60,
@@ -112,6 +113,7 @@ RSpec.describe InstanceConfiguration do
expect(size_limits[:max_attachment_size]).to eq(10.megabytes)
expect(size_limits[:receive_max_input_size]).to eq(20.megabytes)
expect(size_limits[:max_import_size]).to eq(30.megabytes)
+ expect(size_limits[:max_export_size]).to eq(40.megabytes)
expect(size_limits[:diff_max_patch_bytes]).to eq(400.kilobytes)
expect(size_limits[:max_artifacts_size]).to eq(50.megabytes)
expect(size_limits[:max_pages_size]).to eq(60.megabytes)
@@ -127,11 +129,16 @@ RSpec.describe InstanceConfiguration do
end
it 'returns nil if set to 0 (unlimited)' do
- Gitlab::CurrentSettings.current_application_settings.update!(max_import_size: 0, max_pages_size: 0)
+ Gitlab::CurrentSettings.current_application_settings.update!(
+ max_import_size: 0,
+ max_export_size: 0,
+ max_pages_size: 0
+ )
size_limits = subject.settings[:size_limits]
expect(size_limits[:max_import_size]).to be_nil
+ expect(size_limits[:max_export_size]).to be_nil
expect(size_limits[:max_pages_size]).to be_nil
end
end
@@ -173,6 +180,61 @@ RSpec.describe InstanceConfiguration do
end
end
+ describe '#ci_cd_limits' do
+ let_it_be(:plan1) { create(:plan, name: 'plan1', title: 'Plan 1') }
+ let_it_be(:plan2) { create(:plan, name: 'plan2', title: 'Plan 2') }
+
+ before do
+ create(:plan_limits,
+ plan: plan1,
+ ci_pipeline_size: 1001,
+ ci_active_jobs: 1002,
+ ci_active_pipelines: 1003,
+ ci_project_subscriptions: 1004,
+ ci_pipeline_schedules: 1005,
+ ci_needs_size_limit: 1006,
+ ci_registered_group_runners: 1007,
+ ci_registered_project_runners: 1008
+ )
+ create(:plan_limits,
+ plan: plan2,
+ ci_pipeline_size: 1101,
+ ci_active_jobs: 1102,
+ ci_active_pipelines: 1103,
+ ci_project_subscriptions: 1104,
+ ci_pipeline_schedules: 1105,
+ ci_needs_size_limit: 1106,
+ ci_registered_group_runners: 1107,
+ ci_registered_project_runners: 1108
+ )
+ end
+
+ it 'returns CI/CD limits' do
+ ci_cd_size_limits = subject.settings[:ci_cd_limits]
+
+ expect(ci_cd_size_limits[:Plan1]).to eq({
+ ci_active_jobs: 1002,
+ ci_active_pipelines: 1003,
+ ci_needs_size_limit: 1006,
+ ci_pipeline_schedules: 1005,
+ ci_pipeline_size: 1001,
+ ci_project_subscriptions: 1004,
+ ci_registered_group_runners: 1007,
+ ci_registered_project_runners: 1008
+ })
+ expect(ci_cd_size_limits[:Plan2]).to eq({
+ ci_active_jobs: 1102,
+ ci_active_pipelines: 1103,
+ ci_needs_size_limit: 1106,
+ ci_pipeline_schedules: 1105,
+ ci_pipeline_size: 1101,
+ ci_project_subscriptions: 1104,
+ ci_registered_group_runners: 1107,
+ ci_registered_project_runners: 1108
+ })
+ end
+ end
+
describe '#rate_limits' do
before do
Gitlab::CurrentSettings.current_application_settings.update!(
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 0f596d3908d..0567a8bd386 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -249,18 +249,24 @@ RSpec.describe Integration do
it_behaves_like 'integration instances'
context 'with all existing instances' do
+ def integration_hash(type)
+ Integration.new(instance: true, type: type).to_integration_hash
+ end
+
before do
- Integration.insert_all(
- Integration.available_integration_types(include_project_specific: false).map { |type| { instance: true, type: type } }
- )
+ attrs = Integration.available_integration_types(include_project_specific: false).map do
+ integration_hash(_1)
+ end
+
+ Integration.insert_all(attrs)
end
it_behaves_like 'integration instances'
- context 'with a previous existing integration (MockCiService) and a new integration (Asana)' do
+ context 'with a previous existing integration (:mock_ci) and a new integration (:asana)' do
before do
- Integration.insert({ type: 'MockCiService', instance: true })
- Integration.delete_by(type: 'AsanaService', instance: true)
+ Integration.insert(integration_hash(:mock_ci))
+ Integration.delete_by(**integration_hash(:asana))
end
it_behaves_like 'integration instances'
@@ -681,7 +687,7 @@ RSpec.describe Integration do
integration.properties = { foo: 1, bar: 2 }
- expect { integration.properties[:foo] = 3 }.to raise_error
+ expect { integration.properties[:foo] = 3 }.to raise_error(FrozenError)
end
end
@@ -782,8 +788,16 @@ RSpec.describe Integration do
end
end
- describe '#api_field_names' do
- shared_examples 'api field names' do
+ describe 'field definitions' do
+ shared_examples '#fields' do
+ it 'does not return the same array' do
+ integration = fake_integration.new
+
+ expect(integration.fields).not_to be(integration.fields)
+ end
+ end
+
+ shared_examples '#api_field_names' do
it 'filters out secret fields' do
safe_fields = %w[some_safe_field safe_field url trojan_gift]
@@ -816,7 +830,8 @@ RSpec.describe Integration do
end
end
- it_behaves_like 'api field names'
+ it_behaves_like '#fields'
+ it_behaves_like '#api_field_names'
end
context 'when the class uses the field DSL' do
@@ -839,7 +854,8 @@ RSpec.describe Integration do
end
end
- it_behaves_like 'api field names'
+ it_behaves_like '#fields'
+ it_behaves_like '#api_field_names'
end
end
@@ -848,7 +864,8 @@ RSpec.describe Integration do
let(:test_message) { "test message" }
let(:arguments) do
{
- service_class: integration.class.name,
+ integration_class: integration.class.name,
+ integration_id: integration.id,
project_path: project.full_path,
project_id: project.id,
message: test_message,
@@ -857,13 +874,13 @@ RSpec.describe Integration do
end
it 'logs info messages using json logger' do
- expect(Gitlab::JsonLogger).to receive(:info).with(arguments)
+ expect(Gitlab::IntegrationsLogger).to receive(:info).with(arguments)
integration.log_info(test_message, additional_argument: 'some argument')
end
it 'logs error messages using json logger' do
- expect(Gitlab::JsonLogger).to receive(:error).with(arguments)
+ expect(Gitlab::IntegrationsLogger).to receive(:error).with(arguments)
integration.log_error(test_message, additional_argument: 'some argument')
end
@@ -872,7 +889,8 @@ RSpec.describe Integration do
let(:project) { nil }
let(:arguments) do
{
- service_class: integration.class.name,
+ integration_class: integration.class.name,
+ integration_id: integration.id,
project_path: nil,
project_id: nil,
message: test_message,
@@ -881,11 +899,33 @@ RSpec.describe Integration do
end
it 'logs info messages using json logger' do
- expect(Gitlab::JsonLogger).to receive(:info).with(arguments)
+ expect(Gitlab::IntegrationsLogger).to receive(:info).with(arguments)
integration.log_info(test_message, additional_argument: 'some argument')
end
end
+
+ context 'logging exceptions' do
+ let(:error) { RuntimeError.new('exception message') }
+ let(:arguments) do
+ super().merge(
+ 'exception.class' => 'RuntimeError',
+ 'exception.message' => 'exception message'
+ )
+ end
+
+ it 'logs exceptions using json logger' do
+ expect(Gitlab::IntegrationsLogger).to receive(:error).with(arguments.merge(message: 'exception message'))
+
+ integration.log_exception(error, additional_argument: 'some argument')
+ end
+
+ it 'logs exceptions using json logger with a custom message' do
+ expect(Gitlab::IntegrationsLogger).to receive(:error).with(arguments.merge(message: 'custom message'))
+
+ integration.log_exception(error, message: 'custom message', additional_argument: 'some argument')
+ end
+ end
end
describe '.available_integration_names' do
diff --git a/spec/models/integrations/bamboo_spec.rb b/spec/models/integrations/bamboo_spec.rb
index b5684d153f2..574b87d6c60 100644
--- a/spec/models/integrations/bamboo_spec.rb
+++ b/spec/models/integrations/bamboo_spec.rb
@@ -227,7 +227,7 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
- .with(instance_of(http_error), project_id: project.id)
+ .with(instance_of(http_error), { project_id: project.id })
is_expected.to eq(:error)
end
diff --git a/spec/models/integrations/base_chat_notification_spec.rb b/spec/models/integrations/base_chat_notification_spec.rb
index ac4031a9b7d..672d8de1e14 100644
--- a/spec/models/integrations/base_chat_notification_spec.rb
+++ b/spec/models/integrations/base_chat_notification_spec.rb
@@ -3,15 +3,14 @@
require 'spec_helper'
RSpec.describe Integrations::BaseChatNotification do
- describe 'Associations' do
+ describe 'validations' do
before do
allow(subject).to receive(:activated?).and_return(true)
+ allow(subject).to receive(:default_channel_placeholder).and_return('placeholder')
+ allow(subject).to receive(:webhook_placeholder).and_return('placeholder')
end
it { is_expected.to validate_presence_of :webhook }
- end
-
- describe 'validations' do
it { is_expected.to validate_inclusion_of(:labels_to_be_notified_behavior).in_array(%w[match_any match_all]).allow_blank }
end
@@ -274,4 +273,16 @@ RSpec.describe Integrations::BaseChatNotification do
it_behaves_like 'with channel specified', 'slack-integration, #slack-test, @UDLP91W0A', ['slack-integration', '#slack-test', '@UDLP91W0A']
end
end
+
+ describe '#default_channel_placeholder' do
+ it 'raises an error' do
+ expect { subject.default_channel_placeholder }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#webhook_placeholder' do
+ it 'raises an error' do
+ expect { subject.webhook_placeholder }.to raise_error(NotImplementedError)
+ end
+ end
end
diff --git a/spec/models/integrations/buildkite_spec.rb b/spec/models/integrations/buildkite_spec.rb
index 4207ae0d555..af2e587dc7b 100644
--- a/spec/models/integrations/buildkite_spec.rb
+++ b/spec/models/integrations/buildkite_spec.rb
@@ -129,7 +129,7 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
- .with(instance_of(http_error), project_id: project.id)
+ .with(instance_of(http_error), { project_id: project.id })
is_expected.to eq(:error)
end
diff --git a/spec/models/integrations/drone_ci_spec.rb b/spec/models/integrations/drone_ci_spec.rb
index dd64dcfc52c..78d55c49e7b 100644
--- a/spec/models/integrations/drone_ci_spec.rb
+++ b/spec/models/integrations/drone_ci_spec.rb
@@ -163,7 +163,7 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
- .with(instance_of(http_error), project_id: project.id)
+ .with(instance_of(http_error), { project_id: project.id })
is_expected.to eq(:error)
end
diff --git a/spec/models/integrations/jenkins_spec.rb b/spec/models/integrations/jenkins_spec.rb
index 3d6393f2793..200de1305e2 100644
--- a/spec/models/integrations/jenkins_spec.rb
+++ b/spec/models/integrations/jenkins_spec.rb
@@ -4,11 +4,12 @@ require 'spec_helper'
RSpec.describe Integrations::Jenkins do
let(:project) { create(:project) }
+ let(:jenkins_integration) { described_class.new(jenkins_params) }
let(:jenkins_url) { 'http://jenkins.example.com/' }
let(:jenkins_hook_url) { jenkins_url + 'project/my_project' }
let(:jenkins_username) { 'u$er name%2520' }
let(:jenkins_password) { 'pas$ word' }
-
+ let(:jenkins_authorization) { 'Basic ' + ::Base64.strict_encode64(jenkins_username + ':' + jenkins_password) }
let(:jenkins_params) do
{
active: true,
@@ -22,17 +23,21 @@ RSpec.describe Integrations::Jenkins do
}
end
- let(:jenkins_authorization) { "Basic " + ::Base64.strict_encode64(jenkins_username + ':' + jenkins_password) }
-
include_context Integrations::EnableSslVerification do
- let(:integration) { described_class.new(jenkins_params) }
+ let(:integration) { jenkins_integration }
end
it_behaves_like Integrations::HasWebHook do
- let(:integration) { described_class.new(jenkins_params) }
+ let(:integration) { jenkins_integration }
let(:hook_url) { "http://#{ERB::Util.url_encode jenkins_username}:#{ERB::Util.url_encode jenkins_password}@jenkins.example.com/project/my_project" }
end
+ it 'sets the default values', :aggregate_failures do
+ expect(jenkins_integration.push_events).to eq(true)
+ expect(jenkins_integration.merge_requests_events).to eq(false)
+ expect(jenkins_integration.tag_push_events).to eq(false)
+ end
+
describe 'username validation' do
let(:jenkins_integration) do
described_class.create!(
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index d244b1d33d5..061c770a61a 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -27,6 +27,10 @@ RSpec.describe Integrations::Jira do
WebMock.stub_request(:get, /serverInfo/).to_return(body: server_info_results.to_json )
end
+ it_behaves_like Integrations::ResetSecretFields do
+ let(:integration) { jira_integration }
+ end
+
describe '#options' do
let(:options) do
{
@@ -122,6 +126,11 @@ RSpec.describe Integrations::Jira do
it 'includes SECTION_TYPE_JIRA_ISSUES' do
expect(sections).to include(described_class::SECTION_TYPE_JIRA_ISSUES)
end
+
+ it 'section SECTION_TYPE_JIRA_ISSUES has `plan` attribute' do
+ jira_issues_section = integration.sections.find { |s| s[:type] == described_class::SECTION_TYPE_JIRA_ISSUES }
+ expect(jira_issues_section[:plan]).to eq('premium')
+ end
end
context 'when project_level? is false' do
@@ -301,7 +310,7 @@ RSpec.describe Integrations::Jira do
let_it_be(:new_url) { 'http://jira-new.example.com' }
before do
- integration.update!(username: new_username, url: new_url)
+ integration.update!(username: new_username, url: new_url, password: password)
end
it 'stores updated data in jira_tracker_data table' do
@@ -318,7 +327,7 @@ RSpec.describe Integrations::Jira do
context 'when updating the url, api_url, username, or password' do
context 'when updating the integration' do
it 'updates deployment type' do
- integration.update!(url: 'http://first.url')
+ integration.update!(url: 'http://first.url', password: password)
integration.jira_tracker_data.update!(deployment_type: 'server')
expect(integration.jira_tracker_data.deployment_server?).to be_truthy
@@ -376,135 +385,6 @@ RSpec.describe Integrations::Jira do
expect(WebMock).not_to have_requested(:get, /serverInfo/)
end
end
-
- context 'stored password invalidation' do
- context 'when a password was previously set' do
- context 'when only web url present' do
- let(:data_params) do
- {
- url: url, api_url: nil,
- username: username, password: password,
- jira_issue_transition_id: transition_id
- }
- end
-
- it 'resets password if url changed' do
- integration
- integration.url = 'http://jira_edited.example.com'
-
- expect(integration).not_to be_valid
- expect(integration.url).to eq('http://jira_edited.example.com')
- expect(integration.password).to be_nil
- end
-
- it 'does not reset password if url "changed" to the same url as before' do
- integration.url = 'http://jira.example.com'
-
- expect(integration).to be_valid
- expect(integration.url).to eq('http://jira.example.com')
- expect(integration.password).not_to be_nil
- end
-
- it 'resets password if url not changed but api url added' do
- integration.api_url = 'http://jira_edited.example.com/rest/api/2'
-
- expect(integration).not_to be_valid
- expect(integration.api_url).to eq('http://jira_edited.example.com/rest/api/2')
- expect(integration.password).to be_nil
- end
-
- it 'does not reset password if new url is set together with password, even if it\'s the same password' do
- integration.url = 'http://jira_edited.example.com'
- integration.password = password
-
- expect(integration).to be_valid
- expect(integration.password).to eq(password)
- expect(integration.url).to eq('http://jira_edited.example.com')
- end
-
- it 'resets password if url changed, even if setter called multiple times' do
- integration.url = 'http://jira1.example.com/rest/api/2'
- integration.url = 'http://jira1.example.com/rest/api/2'
-
- expect(integration).not_to be_valid
- expect(integration.password).to be_nil
- end
-
- it 'does not reset password if username changed' do
- integration.username = 'some_name'
-
- expect(integration).to be_valid
- expect(integration.password).to eq(password)
- end
-
- it 'does not reset password if password changed' do
- integration.url = 'http://jira_edited.example.com'
- integration.password = 'new_password'
-
- expect(integration).to be_valid
- expect(integration.password).to eq('new_password')
- end
-
- it 'does not reset password if the password is touched and same as before' do
- integration.url = 'http://jira_edited.example.com'
- integration.password = password
-
- expect(integration).to be_valid
- expect(integration.password).to eq(password)
- end
- end
-
- context 'when both web and api url present' do
- let(:data_params) do
- {
- url: url, api_url: 'http://jira.example.com/rest/api/2',
- username: username, password: password,
- jira_issue_transition_id: transition_id
- }
- end
-
- it 'resets password if api url changed' do
- integration.api_url = 'http://jira_edited.example.com/rest/api/2'
-
- expect(integration).not_to be_valid
- expect(integration.password).to be_nil
- end
-
- it 'does not reset password if url changed' do
- integration.url = 'http://jira_edited.example.com'
-
- expect(integration).to be_valid
- expect(integration.password).to eq(password)
- end
-
- it 'resets password if api url set to empty' do
- integration.api_url = ''
-
- expect(integration).not_to be_valid
- expect(integration.password).to be_nil
- end
- end
- end
-
- context 'when no password was previously set' do
- let(:data_params) do
- {
- url: url, username: username
- }
- end
-
- it 'saves password if new url is set together with password' do
- integration.url = 'http://jira_edited.example.com/rest/api/2'
- integration.password = 'password'
- integration.save!
-
- expect(integration.reload).to have_attributes(
- url: 'http://jira_edited.example.com/rest/api/2',
- password: 'password'
- )
- end
- end
- end
end
end
@@ -539,8 +419,7 @@ RSpec.describe Integrations::Jira do
end
describe '#client' do
- it 'uses the default GitLab::HTTP timeouts' do
- timeouts = Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS
+ subject do
stub_request(:get, 'http://jira.example.com/foo')
expect(Gitlab::HTTP).to receive(:httparty_perform_request)
@@ -548,6 +427,32 @@ RSpec.describe Integrations::Jira do
jira_integration.client.get('/foo')
end
+
+ context 'when the FF :jira_raise_timeouts is enabled' do
+ let(:timeouts) do
+ {
+ open_timeout: 2.minutes,
+ read_timeout: 2.minutes,
+ write_timeout: 2.minutes
+ }
+ end
+
+ it 'uses custom timeouts' do
+ subject
+ end
+ end
+
+ context 'when the FF :jira_raise_timeouts is disabled' do
+ before do
+ stub_feature_flags(jira_raise_timeouts: false)
+ end
+
+ let(:timeouts) { Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS }
+
+ it 'uses the default GitLab::HTTP timeouts' do
+ subject
+ end
+ end
end
describe '#find_issue' do
@@ -746,17 +651,14 @@ RSpec.describe Integrations::Jira do
end
it 'logs exception when transition id is not valid' do
- allow(jira_integration).to receive(:log_error)
+ allow(jira_integration).to receive(:log_exception)
WebMock.stub_request(:post, transitions_url).with(basic_auth: %w(jira-username jira-password)).and_raise("Bad Request")
close_issue
- expect(jira_integration).to have_received(:log_error).with(
- "Issue transition failed",
- error: hash_including(
- exception_class: 'StandardError',
- exception_message: "Bad Request"
- ),
+ expect(jira_integration).to have_received(:log_exception).with(
+ kind_of(StandardError),
+ message: 'Issue transition failed',
client_url: "http://jira.example.com"
)
end
@@ -1054,12 +956,10 @@ RSpec.describe Integrations::Jira do
WebMock.stub_request(:get, test_url).with(basic_auth: [username, password])
.to_raise(JIRA::HTTPError.new(double(message: error_message)))
- expect(jira_integration).to receive(:log_error).with(
- 'Error sending message',
- client_url: 'http://jira.example.com',
- 'exception.class' => anything,
- 'exception.message' => error_message,
- 'exception.backtrace' => anything
+ expect(jira_integration).to receive(:log_exception).with(
+ kind_of(JIRA::HTTPError),
+ message: 'Error sending message',
+ client_url: 'http://jira.example.com'
)
expect(jira_integration.test(nil)).to eq(success: false, result: error_message)
diff --git a/spec/models/integrations/microsoft_teams_spec.rb b/spec/models/integrations/microsoft_teams_spec.rb
index 06b285a855c..af6c142525c 100644
--- a/spec/models/integrations/microsoft_teams_spec.rb
+++ b/spec/models/integrations/microsoft_teams_spec.rb
@@ -123,7 +123,7 @@ RSpec.describe Integrations::MicrosoftTeams do
{
title: "Awesome wiki_page",
content: "Some text describing some thing or another",
- format: "md",
+ format: :markdown,
message: "user created page: Awesome wiki_page"
}
end
diff --git a/spec/models/integrations/prometheus_spec.rb b/spec/models/integrations/prometheus_spec.rb
index 76e20f20a00..a7495cb9574 100644
--- a/spec/models/integrations/prometheus_spec.rb
+++ b/spec/models/integrations/prometheus_spec.rb
@@ -122,34 +122,6 @@ RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching,
end
end
- describe 'callbacks' do
- context 'after_create' do
- let(:project) { create(:project) }
- let(:integration) { build(:prometheus_integration, project: project) }
-
- subject(:create_integration) { integration.save! }
-
- it 'creates default alerts' do
- expect(Prometheus::CreateDefaultAlertsWorker)
- .to receive(:perform_async)
- .with(project.id)
-
- create_integration
- end
-
- context 'no project exists' do
- let(:integration) { build(:prometheus_integration, :instance) }
-
- it 'does not create default alerts' do
- expect(Prometheus::CreateDefaultAlertsWorker)
- .not_to receive(:perform_async)
-
- create_integration
- end
- end
- end
- end
-
describe '#test' do
before do
integration.manual_configuration = true
diff --git a/spec/models/integrations/teamcity_spec.rb b/spec/models/integrations/teamcity_spec.rb
index e1f4e577503..046476225a6 100644
--- a/spec/models/integrations/teamcity_spec.rb
+++ b/spec/models/integrations/teamcity_spec.rb
@@ -210,7 +210,7 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
- .with(instance_of(Errno::ECONNREFUSED), project_id: project.id)
+ .with(instance_of(Errno::ECONNREFUSED), { project_id: project.id })
is_expected.to eq(teamcity_url)
end
@@ -260,7 +260,7 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
- .with(instance_of(Errno::ECONNREFUSED), project_id: project.id)
+ .with(instance_of(Errno::ECONNREFUSED), { project_id: project.id })
is_expected.to eq(:error)
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index bd75d95080f..c77c0a5504a 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe Issue do
it { is_expected.to have_one(:incident_management_issuable_escalation_status) }
it { is_expected.to have_many(:issue_customer_relations_contacts) }
it { is_expected.to have_many(:customer_relations_contacts) }
+ it { is_expected.to have_many(:incident_management_timeline_events) }
describe 'versions.most_recent' do
it 'returns the most recent version' do
@@ -1257,23 +1258,11 @@ RSpec.describe Issue do
end
describe '.public_only' do
- let_it_be(:banned_user) { create(:user, :banned) }
- let_it_be(:public_issue) { create(:issue, project: reusable_project) }
- let_it_be(:confidential_issue) { create(:issue, project: reusable_project, confidential: true) }
- let_it_be(:hidden_issue) { create(:issue, project: reusable_project, author: banned_user) }
-
it 'only returns public issues' do
- expect(described_class.public_only).to eq([public_issue])
- end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
+ public_issue = create(:issue, project: reusable_project)
+ create(:issue, project: reusable_project, confidential: true)
- it 'returns public and hidden issues' do
- expect(described_class.public_only).to contain_exactly(public_issue, hidden_issue)
- end
+ expect(described_class.public_only).to eq([public_issue])
end
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index e1135aa440b..225c9714187 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -102,15 +102,15 @@ RSpec.describe Key, :mailer do
context 'expiration scopes' do
let_it_be(:user) { create(:user) }
- let_it_be(:expired_today_not_notified) { create(:key, expires_at: Time.current, user: user) }
- let_it_be(:expired_today_already_notified) { create(:key, expires_at: Time.current, user: user, expiry_notification_delivered_at: Time.current) }
- let_it_be(:expired_yesterday) { create(:key, expires_at: 1.day.ago, user: user) }
+ let_it_be(:expired_today_not_notified) { create(:key, :expired_today, user: user) }
+ let_it_be(:expired_today_already_notified) { create(:key, :expired_today, user: user, expiry_notification_delivered_at: Time.current) }
+ let_it_be(:expired_yesterday) { create(:key, :expired, user: user) }
let_it_be(:expiring_soon_unotified) { create(:key, expires_at: 3.days.from_now, user: user) }
let_it_be(:expiring_soon_notified) { create(:key, expires_at: 4.days.from_now, user: user, before_expiry_notification_delivered_at: Time.current) }
let_it_be(:future_expiry) { create(:key, expires_at: 1.month.from_now, user: user) }
describe '.expired_today_and_not_notified' do
- it 'returns keys that expire today and in the past' do
+ it 'returns keys that expire today and have not been notified' do
expect(described_class.expired_today_and_not_notified).to contain_exactly(expired_today_not_notified)
end
end
@@ -126,32 +126,22 @@ RSpec.describe Key, :mailer do
context 'validation of uniqueness (based on fingerprint uniqueness)' do
let(:user) { create(:user) }
- shared_examples 'fingerprint uniqueness' do
- it 'accepts the key once' do
- expect(build(:rsa_key_4096, user: user)).to be_valid
- end
-
- it 'does not accept the exact same key twice' do
- first_key = create(:rsa_key_4096, user: user)
-
- expect(build(:key, user: user, key: first_key.key)).not_to be_valid
- end
+ it 'accepts the key once' do
+ expect(build(:rsa_key_4096, user: user)).to be_valid
+ end
- it 'does not accept a duplicate key with a different comment' do
- first_key = create(:rsa_key_4096, user: user)
- duplicate = build(:key, user: user, key: first_key.key)
- duplicate.key << ' extra comment'
+ it 'does not accept the exact same key twice' do
+ first_key = create(:rsa_key_4096, user: user)
- expect(duplicate).not_to be_valid
- end
+ expect(build(:key, user: user, key: first_key.key)).not_to be_valid
end
- context 'with FIPS mode off' do
- it_behaves_like 'fingerprint uniqueness'
- end
+ it 'does not accept a duplicate key with a different comment' do
+ first_key = create(:rsa_key_4096, user: user)
+ duplicate = build(:key, user: user, key: first_key.key)
+ duplicate.key << ' extra comment'
- context 'with FIPS mode', :fips_mode do
- it_behaves_like 'fingerprint uniqueness'
+ expect(duplicate).not_to be_valid
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 4ab17ee1e6d..286167c918f 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -383,6 +383,75 @@ RSpec.describe Member do
end
end
+ describe '.by_access_level' do
+ subject { described_class.by_access_level(access_levels) }
+
+ context 'by owner' do
+ let(:access_levels) { [Gitlab::Access::OWNER] }
+
+ it { is_expected.to include @owner }
+ it { is_expected.not_to include @maintainer }
+ it { is_expected.not_to include @invited_member }
+ it { is_expected.not_to include @accepted_invite_member }
+ it { is_expected.not_to include @requested_member }
+ it { is_expected.not_to include @accepted_requested_member }
+ it { is_expected.not_to include @blocked_maintainer }
+ it { is_expected.not_to include @blocked_developer }
+ end
+
+ context 'by maintainer' do
+ let(:access_levels) { [Gitlab::Access::MAINTAINER] }
+
+ it { is_expected.not_to include @owner }
+ it { is_expected.to include @maintainer }
+ it { is_expected.not_to include @invited_member }
+ it { is_expected.not_to include @accepted_invite_member }
+ it { is_expected.not_to include @requested_member }
+ it { is_expected.not_to include @accepted_requested_member }
+ it { is_expected.not_to include @blocked_maintainer }
+ it { is_expected.not_to include @blocked_developer }
+ end
+
+ context 'by developer' do
+ let(:access_levels) { [Gitlab::Access::DEVELOPER] }
+
+ it { is_expected.not_to include @owner }
+ it { is_expected.not_to include @maintainer }
+ it { is_expected.to include @invited_member }
+ it { is_expected.to include @accepted_invite_member }
+ it { is_expected.not_to include @requested_member }
+ it { is_expected.not_to include @accepted_requested_member }
+ it { is_expected.not_to include @blocked_maintainer }
+ it { is_expected.not_to include @blocked_developer }
+ end
+
+ context 'by owner and maintainer' do
+ let(:access_levels) { [Gitlab::Access::OWNER, Gitlab::Access::MAINTAINER] }
+
+ it { is_expected.to include @owner }
+ it { is_expected.to include @maintainer }
+ it { is_expected.not_to include @invited_member }
+ it { is_expected.not_to include @accepted_invite_member }
+ it { is_expected.not_to include @requested_member }
+ it { is_expected.not_to include @accepted_requested_member }
+ it { is_expected.not_to include @blocked_maintainer }
+ it { is_expected.not_to include @blocked_developer }
+ end
+
+ context 'by owner, maintainer and developer' do
+ let(:access_levels) { [Gitlab::Access::OWNER, Gitlab::Access::MAINTAINER, Gitlab::Access::DEVELOPER] }
+
+ it { is_expected.to include @owner }
+ it { is_expected.to include @maintainer }
+ it { is_expected.to include @invited_member }
+ it { is_expected.to include @accepted_invite_member }
+ it { is_expected.not_to include @requested_member }
+ it { is_expected.not_to include @accepted_requested_member }
+ it { is_expected.not_to include @blocked_maintainer }
+ it { is_expected.not_to include @blocked_developer }
+ end
+ end
+
describe '.developers' do
subject { described_class.developers.to_a }
@@ -582,6 +651,15 @@ RSpec.describe Member do
expect(project.members.active_state).not_to include awaiting_project_member
end
end
+
+ describe '.excluding_users' do
+ let_it_be(:active_group_member) { create(:group_member, group: group) }
+
+ it 'excludes members with given user ids' do
+ expect(group.members.excluding_users([])).to include active_group_member
+ expect(group.members.excluding_users(active_group_member.user_id)).not_to include active_group_member
+ end
+ end
end
describe 'Delegate methods' do
diff --git a/spec/models/merge_request/metrics_spec.rb b/spec/models/merge_request/metrics_spec.rb
index a4bdac39074..8d1d503b323 100644
--- a/spec/models/merge_request/metrics_spec.rb
+++ b/spec/models/merge_request/metrics_spec.rb
@@ -54,4 +54,43 @@ RSpec.describe MergeRequest::Metrics do
let!(:parent) { create(:ci_pipeline, project: merge_request.target_project) }
let!(:model) { merge_request.metrics.tap { |metrics| metrics.update!(pipeline: parent) } }
end
+
+ describe 'update' do
+ let(:merge_request) { create(:merge_request) }
+ let(:metrics) { merge_request.metrics }
+
+ before do
+ metrics.update!(
+ pipeline_id: 1,
+ latest_build_started_at: Time.current,
+ latest_build_finished_at: Time.current
+ )
+ end
+
+ context 'when pipeline_id is nullified' do
+ before do
+ metrics.update!(pipeline_id: nil)
+ end
+
+ it 'nullifies build related columns via DB trigger' do
+ metrics.reload
+
+ expect(metrics.latest_build_started_at).to be_nil
+ expect(metrics.latest_build_finished_at).to be_nil
+ end
+ end
+
+ context 'when updated but pipeline_id is not nullified' do
+ before do
+ metrics.update!(latest_closed_at: Time.current)
+ end
+
+ it 'does not nullify build-related columns' do
+ metrics.reload
+
+ expect(metrics.latest_build_started_at).not_to be_nil
+ expect(metrics.latest_build_finished_at).not_to be_nil
+ end
+ end
+ end
end
diff --git a/spec/models/merge_request_assignee_spec.rb b/spec/models/merge_request_assignee_spec.rb
index 1591c517049..387d17d7823 100644
--- a/spec/models/merge_request_assignee_spec.rb
+++ b/spec/models/merge_request_assignee_spec.rb
@@ -41,22 +41,11 @@ RSpec.describe MergeRequestAssignee do
it_behaves_like 'having unique enum values'
- it_behaves_like 'having reviewer state'
-
- describe 'syncs to reviewer state' do
- before do
- reviewer = merge_request.merge_request_reviewers.build(reviewer: assignee)
- reviewer.update!(state: :reviewed)
- end
-
- it { is_expected.to have_attributes(state: 'reviewed') }
- end
-
describe '#attention_requested_by' do
let(:current_user) { create(:user) }
before do
- subject.update!(updated_state_by: current_user)
+ subject.update!(updated_state_by: current_user, state: :attention_requested)
end
context 'attention requested' do
diff --git a/spec/models/merge_request_reviewer_spec.rb b/spec/models/merge_request_reviewer_spec.rb
index dd00c4d8627..4df2dba3a7d 100644
--- a/spec/models/merge_request_reviewer_spec.rb
+++ b/spec/models/merge_request_reviewer_spec.rb
@@ -10,17 +10,6 @@ RSpec.describe MergeRequestReviewer do
it_behaves_like 'having unique enum values'
- it_behaves_like 'having reviewer state'
-
- describe 'syncs to assignee state' do
- before do
- assignee = merge_request.merge_request_assignees.build(assignee: reviewer)
- assignee.update!(state: :reviewed)
- end
-
- it { is_expected.to have_attributes(state: 'reviewed') }
- end
-
describe 'associations' do
it { is_expected.to belong_to(:merge_request).class_name('MergeRequest') }
it { is_expected.to belong_to(:reviewer).class_name('User').inverse_of(:merge_request_reviewers) }
@@ -30,7 +19,7 @@ RSpec.describe MergeRequestReviewer do
let(:current_user) { create(:user) }
before do
- subject.update!(updated_state_by: current_user)
+ subject.update!(updated_state_by: current_user, state: :attention_requested)
end
context 'attention requested' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 8545c7bc6c7..d40c78b5b60 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -151,6 +151,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
before do
assignee = merge_request6.find_assignee(user2)
assignee.update!(state: :reviewed)
+ merge_request2.find_reviewer(user2).update!(state: :attention_requested)
+ merge_request5.find_assignee(user2).update!(state: :attention_requested)
end
it 'returns MRs that have any attention requests' do
@@ -3538,50 +3540,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe "#legacy_environments" do
- subject { merge_request.legacy_environments }
-
- let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') }
- let(:project) { merge_request.project }
-
- let(:pipeline) do
- create(:ci_pipeline,
- source: :merge_request_event,
- merge_request: merge_request, project: project,
- sha: merge_request.diff_head_sha,
- merge_requests_as_head_pipeline: [merge_request])
- end
-
- let!(:job) { create(:ci_build, :with_deployment, :start_review_app, pipeline: pipeline, project: project) }
-
- it 'returns environments' do
- is_expected.to eq(pipeline.environments_in_self_and_descendants.to_a)
- expect(subject.count).to be(1)
- end
-
- context 'when pipeline is not associated with environments' do
- let!(:job) { create(:ci_build, pipeline: pipeline, project: project) }
-
- it 'returns empty array' do
- is_expected.to be_empty
- end
- end
-
- context 'when pipeline is not a pipeline for merge request' do
- let(:pipeline) do
- create(:ci_pipeline,
- project: project,
- ref: 'feature',
- sha: merge_request.diff_head_sha,
- merge_requests_as_head_pipeline: [merge_request])
- end
-
- it 'returns empty relation' do
- is_expected.to be_empty
- end
- end
- end
-
describe "#reload_diff" do
it 'calls MergeRequests::ReloadDiffsService#execute with correct params' do
user = create(:user)
diff --git a/spec/models/namespace_ci_cd_setting_spec.rb b/spec/models/namespace_ci_cd_setting_spec.rb
new file mode 100644
index 00000000000..9031d45221a
--- /dev/null
+++ b/spec/models/namespace_ci_cd_setting_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe NamespaceCiCdSetting do
+ describe "associations" do
+ it { is_expected.to belong_to(:namespace).inverse_of(:ci_cd_settings) }
+ end
+end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 09ac15429a5..4373d9a0b24 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -32,6 +32,10 @@ RSpec.describe Namespace do
it { is_expected.to have_one :namespace_route }
it { is_expected.to have_many :namespace_members }
+ it do
+ is_expected.to have_one(:ci_cd_settings).class_name('NamespaceCiCdSetting').inverse_of(:namespace).autosave(true)
+ end
+
describe '#children' do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
@@ -334,6 +338,15 @@ RSpec.describe Namespace do
describe 'delegate' do
it { is_expected.to delegate_method(:name).to(:owner).with_prefix.with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:avatar_url).to(:owner).with_arguments(allow_nil: true) }
+ it do
+ is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy)
+ .to(:namespace_settings).with_arguments(allow_nil: true)
+ end
+
+ it do
+ is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy=)
+ .to(:namespace_settings).with_arguments(allow_nil: true)
+ end
end
describe "Respond to" do
@@ -2236,4 +2249,40 @@ RSpec.describe Namespace do
it_behaves_like 'blocks unsafe serialization'
end
+
+ describe '#certificate_based_clusters_enabled?' do
+ it 'does not call Feature.enabled? twice with request_store', :request_store do
+ expect(Feature).to receive(:enabled?).once
+
+ namespace.certificate_based_clusters_enabled?
+ namespace.certificate_based_clusters_enabled?
+ end
+
+ it 'calls Feature.enabled? twice without request_store' do
+ expect(Feature).to receive(:enabled?).twice
+
+ namespace.certificate_based_clusters_enabled?
+ namespace.certificate_based_clusters_enabled?
+ end
+
+ context 'with ff disabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: false)
+ end
+
+ it 'is falsy' do
+ expect(namespace.certificate_based_clusters_enabled?).to be_falsy
+ end
+ end
+
+ context 'with ff enabled' do
+ before do
+ stub_feature_flags(certificate_based_clusters: true)
+ end
+
+ it 'is truthy' do
+ expect(namespace.certificate_based_clusters_enabled?).to be_truthy
+ end
+ end
+ end
end
diff --git a/spec/models/packages/cleanup/policy_spec.rb b/spec/models/packages/cleanup/policy_spec.rb
new file mode 100644
index 00000000000..972071aa0ad
--- /dev/null
+++ b/spec/models/packages/cleanup/policy_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Cleanup::Policy, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it do
+ is_expected
+ .to validate_inclusion_of(:keep_n_duplicated_package_files)
+ .in_array(described_class::KEEP_N_DUPLICATED_PACKAGE_FILES_VALUES)
+ .with_message('keep_n_duplicated_package_files is invalid')
+ end
+ end
+
+ describe '.active' do
+ let_it_be(:active_policy) { create(:packages_cleanup_policy) }
+ let_it_be(:inactive_policy) { create(:packages_cleanup_policy, keep_n_duplicated_package_files: 'all') }
+
+ subject { described_class.active }
+
+ it { is_expected.to contain_exactly(active_policy) }
+ end
+end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 6c86db1197f..a9ed811e77d 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -1182,7 +1182,7 @@ RSpec.describe Packages::Package, type: :model do
it "plan_limits includes column #{plan_limit_name}" do
expect { package.project.actual_limits.send(plan_limit_name) }
- .not_to raise_error(NoMethodError)
+ .not_to raise_error
end
end
end
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 2ebc9864d9b..7fde8d63947 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe PagesDomain do
'0123123' => true,
'a-reserved.com' => true,
'a.b-reserved.com' => true,
- 'reserved.com' => false,
+ 'reserved.com' => true,
'_foo.com' => false,
'a.reserved.com' => false,
'a.b.reserved.com' => false,
diff --git a/spec/models/performance_monitoring/prometheus_dashboard_spec.rb b/spec/models/performance_monitoring/prometheus_dashboard_spec.rb
index 634690d5d0b..ee2407f21b6 100644
--- a/spec/models/performance_monitoring/prometheus_dashboard_spec.rb
+++ b/spec/models/performance_monitoring/prometheus_dashboard_spec.rb
@@ -185,7 +185,7 @@ RSpec.describe PerformanceMonitoring::PrometheusDashboard do
context 'dashboard has been found' do
it 'uses dashboard finder to find and load dashboard data and returns dashboard instance', :aggregate_failures do
- expect(Gitlab::Metrics::Dashboard::Finder).to receive(:find).with(project, user, environment: environment, dashboard_path: path).and_return(status: :success, dashboard: json_content)
+ expect(Gitlab::Metrics::Dashboard::Finder).to receive(:find).with(project, user, { environment: environment, dashboard_path: path }).and_return(status: :success, dashboard: json_content)
dashboard_instance = described_class.find_for(project: project, user: user, path: path, options: { environment: environment })
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 125ac7fb102..69866d497a1 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -94,14 +94,6 @@ RSpec.describe PersonalAccessToken do
end
end
- describe '#expired_but_not_enforced?' do
- let(:token) { build(:personal_access_token) }
-
- it 'returns false', :aggregate_failures do
- expect(token).not_to be_expired_but_not_enforced
- end
- end
-
describe 'Redis storage' do
let(:user_id) { 123 }
let(:token) { 'KS3wegQYXBLYhQsciwsj' }
diff --git a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
index 16e699b7e0e..eefe5bfc6c4 100644
--- a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
@@ -9,29 +9,47 @@ RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
let_it_be(:project_3) { create(:project) }
let(:projects) { [project_1, project_2, project_3] }
+ let(:query) { projects.each { |project| user.can?(:read_project, project) } }
before do
project_1.add_developer(user)
project_2.add_developer(user)
end
- context 'preload maximum access level to avoid querying project_authorizations', :request_store do
- it 'avoids N+1 queries', :request_store do
- Preloaders::UserMaxAccessLevelInProjectsPreloader.new(projects, user).execute
+ context 'without preloader' do
+ it 'runs N queries' do
+ expect { query }.to make_queries(projects.size)
+ end
+ end
+
+ describe '#execute', :request_store do
+ let(:projects_arg) { projects }
+
+ before do
+ Preloaders::UserMaxAccessLevelInProjectsPreloader.new(projects_arg, user).execute
+ end
+
+ it 'avoids N+1 queries' do
+ expect { query }.not_to make_queries
+ end
- query_count = ActiveRecord::QueryRecorder.new do
- projects.each { |project| user.can?(:read_project, project) }
- end.count
+ context 'when projects is an array of IDs' do
+ let(:projects_arg) { [project_1.id, project_2.id, project_3.id] }
- expect(query_count).to eq(0)
+ it 'avoids N+1 queries' do
+ expect { query }.not_to make_queries
+ end
end
- it 'runs N queries without preloading' do
- query_count = ActiveRecord::QueryRecorder.new do
- projects.each { |project| user.can?(:read_project, project) }
- end.count
+ # Test for handling of SQL table name clashes.
+ context 'when projects is a relation including project_authorizations' do
+ let(:projects_arg) do
+ Project.where(id: ProjectAuthorization.where(project_id: projects).select(:project_id))
+ end
- expect(query_count).to eq(projects.size)
+ it 'avoids N+1 queries' do
+ expect { query }.not_to make_queries
+ end
end
end
end
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
index 42ca8130734..f6e398bd23c 100644
--- a/spec/models/project_import_state_spec.rb
+++ b/spec/models/project_import_state_spec.rb
@@ -65,9 +65,11 @@ RSpec.describe ProjectImportState, type: :model do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:error).with(
- error: 'ActiveRecord::ActiveRecordError',
- message: 'Error setting import status to failed',
- original_error: error_message
+ {
+ error: 'ActiveRecord::ActiveRecordError',
+ message: 'Error setting import status to failed',
+ original_error: error_message
+ }
)
end
@@ -131,20 +133,6 @@ RSpec.describe ProjectImportState, type: :model do
describe 'import state transitions' do
context 'state transition: [:started] => [:finished]' do
- let(:after_import_service) { spy(:after_import_service) }
- let(:housekeeping_service) { spy(:housekeeping_service) }
-
- before do
- allow(Projects::AfterImportService)
- .to receive(:new) { after_import_service }
-
- allow(after_import_service)
- .to receive(:execute) { housekeeping_service.execute }
-
- allow(Repositories::HousekeepingService)
- .to receive(:new) { housekeeping_service }
- end
-
it 'resets last_error' do
error_message = 'Some error'
import_state = create(:import_state, :started, last_error: error_message)
@@ -152,29 +140,28 @@ RSpec.describe ProjectImportState, type: :model do
expect { import_state.finish }.to change { import_state.last_error }.from(error_message).to(nil)
end
- it 'performs housekeeping when an import of a fresh project is completed' do
+ it 'enqueues housekeeping when an import of a fresh project is completed' do
project = create(:project_empty_repo, :import_started, import_type: :github)
- project.import_state.finish
+ expect(Projects::AfterImportWorker).to receive(:perform_async).with(project.id)
- expect(after_import_service).to have_received(:execute)
- expect(housekeeping_service).to have_received(:execute)
+ project.import_state.finish
end
it 'does not perform housekeeping when project repository does not exist' do
project = create(:project, :import_started, import_type: :github)
- project.import_state.finish
+ expect(Projects::AfterImportWorker).not_to receive(:perform_async)
- expect(housekeeping_service).not_to have_received(:execute)
+ project.import_state.finish
end
- it 'does not perform housekeeping when project does not have a valid import type' do
+ it 'does not enqueue housekeeping when project does not have a valid import type' do
project = create(:project, :import_started, import_type: nil)
- project.import_state.finish
+ expect(Projects::AfterImportWorker).not_to receive(:perform_async)
- expect(housekeeping_service).not_to have_received(:execute)
+ project.import_state.finish
end
end
end
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
index d03eb3c8bfe..867ad843406 100644
--- a/spec/models/project_setting_spec.rb
+++ b/spec/models/project_setting_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe ProjectSetting, type: :model do
+ using RSpec::Parameterized::TableSyntax
it { is_expected.to belong_to(:project) }
describe 'validations' do
@@ -27,6 +28,23 @@ RSpec.describe ProjectSetting, type: :model do
end
end
+ describe '#human_squash_option' do
+ where(:squash_option, :human_squash_option) do
+ 'never' | 'Do not allow'
+ 'always' | 'Require'
+ 'default_on' | 'Encourage'
+ 'default_off' | 'Allow'
+ end
+
+ with_them do
+ let(:project_setting) { create(:project_setting, squash_option: ProjectSetting.squash_options[squash_option]) }
+
+ subject { project_setting.human_squash_option }
+
+ it { is_expected.to eq(human_squash_option) }
+ end
+ end
+
def valid_target_platform_combinations
target_platforms = described_class::ALLOWED_TARGET_PLATFORMS
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 0bb584845c2..ed5b3d4e0be 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Project, factory_default: :keep do
include ExternalAuthorizationServiceHelpers
include ReloadHelpers
include StubGitlabCalls
+ include ProjectHelpers
using RSpec::Parameterized::TableSyntax
let_it_be(:namespace) { create_default(:namespace).freeze }
@@ -135,10 +136,10 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:packages).class_name('Packages::Package') }
it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile') }
it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::ProjectDistribution').dependent(:destroy) }
+ it { is_expected.to have_one(:packages_cleanup_policy).class_name('Packages::Cleanup::Policy').inverse_of(:project) }
it { is_expected.to have_many(:pipeline_artifacts).dependent(:restrict_with_error) }
it { is_expected.to have_many(:terraform_states).class_name('Terraform::State').inverse_of(:project) }
it { is_expected.to have_many(:timelogs) }
- it { is_expected.to have_many(:error_tracking_errors).class_name('ErrorTracking::Error') }
it { is_expected.to have_many(:error_tracking_client_keys).class_name('ErrorTracking::ClientKey') }
it { is_expected.to have_many(:pending_builds).class_name('Ci::PendingBuild') }
it { is_expected.to have_many(:ci_feature_usages).class_name('Projects::CiFeatureUsage') }
@@ -832,6 +833,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to delegate_method(:members).to(:team).with_prefix(true) }
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:root_ancestor).to(:namespace).with_arguments(allow_nil: true) }
+ it { is_expected.to delegate_method(:certificate_based_clusters_enabled?).to(:namespace).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:last_pipeline).to(:commit).with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:container_registry_enabled?).to(:project_feature) }
it { is_expected.to delegate_method(:container_registry_access_level).to(:project_feature) }
@@ -844,6 +846,9 @@ RSpec.describe Project, factory_default: :keep do
warn_about_potentially_unwanted_characters
warn_about_potentially_unwanted_characters=
warn_about_potentially_unwanted_characters?
+ enforce_auth_checks_on_uploads
+ enforce_auth_checks_on_uploads=
+ enforce_auth_checks_on_uploads?
).each do |method|
it { is_expected.to delegate_method(method).to(:project_setting).with_arguments(allow_nil: true) }
end
@@ -2025,7 +2030,7 @@ RSpec.describe Project, factory_default: :keep do
it 'returns nil if the path detection throws an error' do
expect(Rails.application.routes).to receive(:recognize_path).with(url) { raise ActionController::RoutingError, 'test' }
- expect { subject }.not_to raise_error(ActionController::RoutingError)
+ expect { subject }.not_to raise_error
expect(subject).to be_nil
end
end
@@ -7153,11 +7158,33 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#add_export_job' do
- context 'if not already present' do
- it 'starts project export job' do
- user = create(:user)
- project = build(:project)
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ context 'when project storage_size does not exceed the application setting max_export_size' do
+ it 'starts project export worker' do
+ stub_application_setting(max_export_size: 1)
+ allow(project.statistics).to receive(:storage_size).and_return(0.megabytes)
+
+ expect(ProjectExportWorker).to receive(:perform_async).with(user.id, project.id, nil, {})
+
+ project.add_export_job(current_user: user)
+ end
+ end
+
+ context 'when project storage_size exceeds the application setting max_export_size' do
+ it 'raises Project::ExportLimitExceeded' do
+ stub_application_setting(max_export_size: 1)
+ allow(project.statistics).to receive(:storage_size).and_return(2.megabytes)
+ expect(ProjectExportWorker).not_to receive(:perform_async).with(user.id, project.id, nil, {})
+ expect { project.add_export_job(current_user: user) }.to raise_error(Project::ExportLimitExceeded)
+ end
+ end
+
+ context 'when application setting max_export_size is not set' do
+ it 'starts project export worker' do
+ allow(project.statistics).to receive(:storage_size).and_return(2.megabytes)
expect(ProjectExportWorker).to receive(:perform_async).with(user.id, project.id, nil, {})
project.add_export_job(current_user: user)
@@ -8241,6 +8268,28 @@ RSpec.describe Project, factory_default: :keep do
it_behaves_like 'returns true if project is inactive'
end
+ describe '.inactive' do
+ before do
+ stub_application_setting(inactive_projects_min_size_mb: 5)
+ stub_application_setting(inactive_projects_send_warning_email_after_months: 12)
+ end
+
+ it 'returns projects that are inactive' do
+ create_project_with_statistics.tap do |project|
+ project.update!(last_activity_at: Time.current)
+ end
+ create_project_with_statistics.tap do |project|
+ project.update!(last_activity_at: 13.months.ago)
+ end
+ inactive_large_project = create_project_with_statistics(with_data: true, size_multiplier: 2.gigabytes)
+ .tap { |project| project.update!(last_activity_at: 2.years.ago) }
+ create_project_with_statistics(with_data: true, size_multiplier: 2.gigabytes)
+ .tap { |project| project.update!(last_activity_at: 1.month.ago) }
+
+ expect(described_class.inactive).to contain_exactly(inactive_large_project)
+ end
+ end
+
private
def finish_job(export_job)
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 20fc14113ef..2c29d4c42f4 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe ProjectStatistics do
snippets_size: 1.exabyte,
pipeline_artifacts_size: 512.petabytes - 1,
uploads_size: 512.petabytes,
- container_registry_size: 8.exabytes - 1
+ container_registry_size: 12.petabytes
)
statistics.reload
@@ -50,7 +50,7 @@ RSpec.describe ProjectStatistics do
expect(statistics.snippets_size).to eq(1.exabyte)
expect(statistics.pipeline_artifacts_size).to eq(512.petabytes - 1)
expect(statistics.uploads_size).to eq(512.petabytes)
- expect(statistics.container_registry_size).to eq(8.exabytes - 1)
+ expect(statistics.container_registry_size).to eq(12.petabytes)
end
end
@@ -62,6 +62,7 @@ RSpec.describe ProjectStatistics do
statistics.build_artifacts_size = 4
statistics.snippets_size = 5
statistics.uploads_size = 3
+ statistics.container_registry_size = 8
expect(statistics.total_repository_size).to eq 5
end
@@ -104,6 +105,7 @@ RSpec.describe ProjectStatistics do
allow(statistics).to receive(:update_snippets_size)
allow(statistics).to receive(:update_storage_size)
allow(statistics).to receive(:update_uploads_size)
+ allow(statistics).to receive(:update_container_registry_size)
end
context "without arguments" do
@@ -118,6 +120,7 @@ RSpec.describe ProjectStatistics do
expect(statistics).to have_received(:update_lfs_objects_size)
expect(statistics).to have_received(:update_snippets_size)
expect(statistics).to have_received(:update_uploads_size)
+ expect(statistics).to have_received(:update_container_registry_size)
end
end
@@ -133,6 +136,7 @@ RSpec.describe ProjectStatistics do
expect(statistics).not_to have_received(:update_wiki_size)
expect(statistics).not_to have_received(:update_snippets_size)
expect(statistics).not_to have_received(:update_uploads_size)
+ expect(statistics).not_to have_received(:update_container_registry_size)
end
end
@@ -148,11 +152,13 @@ RSpec.describe ProjectStatistics do
expect(statistics).to have_received(:update_wiki_size)
expect(statistics).to have_received(:update_snippets_size)
expect(statistics).to have_received(:update_uploads_size)
+ expect(statistics).to have_received(:update_container_registry_size)
expect(statistics.repository_size).to eq(0)
expect(statistics.commit_count).to eq(0)
expect(statistics.wiki_size).to eq(0)
expect(statistics.snippets_size).to eq(0)
expect(statistics.uploads_size).to eq(0)
+ expect(statistics.container_registry_size).to eq(0)
end
end
@@ -174,11 +180,13 @@ RSpec.describe ProjectStatistics do
expect(statistics).to have_received(:update_wiki_size)
expect(statistics).to have_received(:update_snippets_size)
expect(statistics).to have_received(:update_uploads_size)
+ expect(statistics).to have_received(:update_container_registry_size)
expect(statistics.repository_size).to eq(0)
expect(statistics.commit_count).to eq(0)
expect(statistics.wiki_size).to eq(0)
expect(statistics.snippets_size).to eq(0)
expect(statistics.uploads_size).to eq(0)
+ expect(statistics.container_registry_size).to eq(0)
end
end
@@ -224,6 +232,7 @@ RSpec.describe ProjectStatistics do
expect(statistics).not_to receive(:update_lfs_objects_size)
expect(statistics).not_to receive(:update_snippets_size)
expect(statistics).not_to receive(:update_uploads_size)
+ expect(statistics).not_to receive(:update_container_registry_size)
expect(statistics).not_to receive(:save!)
expect(Namespaces::ScheduleAggregationWorker)
.not_to receive(:perform_async)
@@ -319,8 +328,42 @@ RSpec.describe ProjectStatistics do
end
end
+ describe '#update_container_registry_size' do
+ subject(:update_container_registry_size) { statistics.update_container_registry_size }
+
+ it 'stores the project container registry repositories size' do
+ allow(project).to receive(:container_repositories_size).and_return(10)
+
+ update_container_registry_size
+
+ expect(statistics.container_registry_size).to eq(10)
+ end
+
+ it 'handles nil values for the repositories size' do
+ allow(project).to receive(:container_repositories_size).and_return(nil)
+
+ update_container_registry_size
+
+ expect(statistics.container_registry_size).to eq(0)
+ end
+
+ context 'with container_registry_project_statistics FF disabled' do
+ before do
+ stub_feature_flags(container_registry_project_statistics: false)
+ end
+
+ it 'does not update the container_registry_size' do
+ expect(project).not_to receive(:container_repositories_size)
+
+ update_container_registry_size
+
+ expect(statistics.container_registry_size).to eq(0)
+ end
+ end
+ end
+
describe '#update_storage_size' do
- it "sums all storage counters" do
+ it "sums the relevant storage counters" do
statistics.update!(
repository_size: 2,
wiki_size: 4,
@@ -337,6 +380,18 @@ RSpec.describe ProjectStatistics do
expect(statistics.storage_size).to eq 28
end
+ it 'excludes the container_registry_size' do
+ statistics.update!(
+ repository_size: 2,
+ uploads_size: 5,
+ container_registry_size: 10
+ )
+
+ statistics.reload
+
+ expect(statistics.storage_size).to eq 7
+ end
+
it 'works during wiki_size backfill' do
statistics.update!(
repository_size: 2,
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index 5b11f9d828a..2ddbab7779e 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -410,6 +410,22 @@ RSpec.describe ProjectTeam do
end
end
+ describe '#purge_member_access_cache_for_user_id', :request_store do
+ let(:project) { create(:project) }
+ let(:user_id) { 1 }
+ let(:resource_data) { { user_id => 50, 42 => 50 } }
+
+ before do
+ Gitlab::SafeRequestStore[project.max_member_access_for_resource_key(User)] = resource_data
+ end
+
+ it 'removes cached max access for user from store' do
+ project.team.purge_member_access_cache_for_user_id(user_id)
+
+ expect(Gitlab::SafeRequestStore[project.max_member_access_for_resource_key(User)]).to eq({ 42 => 50 })
+ end
+ end
+
describe '#member?' do
let(:group) { create(:group) }
let(:developer) { create(:user) }
diff --git a/spec/models/projects/topic_spec.rb b/spec/models/projects/topic_spec.rb
index 8fc4d11f0d9..fc9d9bef437 100644
--- a/spec/models/projects/topic_spec.rb
+++ b/spec/models/projects/topic_spec.rb
@@ -25,6 +25,8 @@ RSpec.describe Projects::Topic do
it { is_expected.to validate_uniqueness_of(:name).case_insensitive }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
it { is_expected.to validate_length_of(:description).is_at_most(1024) }
+ it { expect(Projects::Topic.new).to validate_presence_of(:title) }
+ it { expect(Projects::Topic.new).to validate_length_of(:title).is_at_most(255) }
end
describe 'scopes' do
@@ -104,4 +106,16 @@ RSpec.describe Projects::Topic do
end
end
end
+
+ describe '#title_or_name' do
+ it 'returns title if set' do
+ topic.title = 'My title'
+ expect(topic.title_or_name).to eq('My title')
+ end
+
+ it 'returns name if title not set' do
+ topic.title = nil
+ expect(topic.title_or_name).to eq('topic')
+ end
+ end
end
diff --git a/spec/models/protected_branch/push_access_level_spec.rb b/spec/models/protected_branch/push_access_level_spec.rb
index 13d33b95b16..008ae6275f0 100644
--- a/spec/models/protected_branch/push_access_level_spec.rb
+++ b/spec/models/protected_branch/push_access_level_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe ProtectedBranch::PushAccessLevel do
it 'checks that a deploy key is enabled for the same project as the protected branch\'s' do
level = build(:protected_branch_push_access_level, deploy_key: create(:deploy_key))
- expect { level.save! }.to raise_error
+ expect { level.save! }.to raise_error(ActiveRecord::RecordInvalid)
expect(level.errors.full_messages).to contain_exactly('Deploy key is not enabled for this project')
end
end
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index f7c723cd134..366de809bed 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -324,4 +324,10 @@ RSpec.describe ProtectedBranch do
.to match_array([branch_id])
end
end
+
+ describe '.downcase_humanized_name' do
+ it 'returns downcase humanized name' do
+ expect(described_class.downcase_humanized_name).to eq 'protected branch'
+ end
+ end
end
diff --git a/spec/models/raw_usage_data_spec.rb b/spec/models/raw_usage_data_spec.rb
index 6ff4c6eb19b..95b98279a27 100644
--- a/spec/models/raw_usage_data_spec.rb
+++ b/spec/models/raw_usage_data_spec.rb
@@ -3,6 +3,31 @@
require 'spec_helper'
RSpec.describe RawUsageData do
+ context 'scopes' do
+ describe '.for_current_reporting_cycle' do
+ subject(:recent_service_ping_reports) { described_class.for_current_reporting_cycle }
+
+ before_all do
+ create(:raw_usage_data, created_at: (described_class::REPORTING_CADENCE + 1.day).ago)
+ end
+
+ it 'is empty when no records match filter criteria' do
+ expect(recent_service_ping_reports).to be_empty
+ end
+
+ context 'with records matching filtering criteria' do
+ let_it_be(:fresh_record) { create(:raw_usage_data) }
+ let_it_be(:record_at_edge_of_time_range) do
+ create(:raw_usage_data, created_at: described_class::REPORTING_CADENCE.ago)
+ end
+
+ it 'returns records within the reporting cycle time range ordered by creation time' do
+ expect(recent_service_ping_reports).to eq [fresh_record, record_at_edge_of_time_range]
+ end
+ end
+ end
+ end
+
describe 'validations' do
it { is_expected.to validate_presence_of(:payload) }
it { is_expected.to validate_presence_of(:recorded_at) }
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 125fec61d72..4ae1927dcca 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -53,7 +53,10 @@ RSpec.describe Release do
context 'when a release is tied to a milestone for another project' do
it 'creates a validation error' do
milestone = build(:milestone, project: create(:project))
- expect { release.milestones << milestone }.to raise_error
+
+ expect { release.milestones << milestone }
+ .to raise_error(ActiveRecord::RecordInvalid,
+ 'Validation failed: Release does not have the same project as the milestone')
end
end
diff --git a/spec/models/shard_spec.rb b/spec/models/shard_spec.rb
index 38729fa1758..298441fb4c4 100644
--- a/spec/models/shard_spec.rb
+++ b/spec/models/shard_spec.rb
@@ -38,12 +38,12 @@ RSpec.describe Shard do
expect(described_class)
.to receive(:find_by)
- .with(name: 'new_shard')
+ .with({ name: 'new_shard' })
.and_return(nil, shard_created_by_others)
expect(described_class)
.to receive(:create)
- .with(name: 'new_shard')
+ .with({ name: 'new_shard' })
.and_raise(ActiveRecord::RecordNotUnique, 'fail')
.once
diff --git a/spec/models/system_note_metadata_spec.rb b/spec/models/system_note_metadata_spec.rb
index 144c65d2f62..36bcb3499b3 100644
--- a/spec/models/system_note_metadata_spec.rb
+++ b/spec/models/system_note_metadata_spec.rb
@@ -19,12 +19,14 @@ RSpec.describe SystemNoteMetadata do
it { is_expected.to be_invalid }
end
- context 'when action type is valid' do
- subject do
- build(:system_note_metadata, note: build(:note), action: 'merge')
- end
+ %i[merge timeline_event].each do |action|
+ context 'when action type is valid' do
+ subject do
+ build(:system_note_metadata, note: build(:note), action: action)
+ end
- it { is_expected.to be_valid }
+ it { is_expected.to be_valid }
+ end
end
context 'when importing' do
diff --git a/spec/models/user_custom_attribute_spec.rb b/spec/models/user_custom_attribute_spec.rb
index 1a51ad662b0..67c144d7caa 100644
--- a/spec/models/user_custom_attribute_spec.rb
+++ b/spec/models/user_custom_attribute_spec.rb
@@ -15,4 +15,61 @@ RSpec.describe UserCustomAttribute do
it { is_expected.to validate_presence_of(:value) }
it { is_expected.to validate_uniqueness_of(:key).scoped_to(:user_id) }
end
+
+ describe 'scopes' do
+ let(:user) { create(:user) }
+ let(:blocked_at) { DateTime.now }
+ let(:custom_attribute) { create(:user_custom_attribute, key: 'blocked_at', value: blocked_at, user_id: user.id) }
+
+ describe '.by_user_id' do
+ subject { UserCustomAttribute.by_user_id(user.id) }
+
+ it { is_expected.to match_array([custom_attribute]) }
+ end
+
+ describe '.by_updated_at' do
+ subject { UserCustomAttribute.by_updated_at(Date.today.all_day) }
+
+ it { is_expected.to match_array([custom_attribute]) }
+ end
+
+ describe '.by_key' do
+ subject { UserCustomAttribute.by_key('blocked_at') }
+
+ it { is_expected.to match_array([custom_attribute]) }
+ end
+ end
+
+ describe '#upsert_custom_attributes' do
+ subject { UserCustomAttribute.upsert_custom_attributes(custom_attributes) }
+
+ let_it_be_with_reload(:user) { create(:user) }
+
+ let(:arkose_session) { '22612c147bb418c8.2570749403' }
+ let(:risk_band) { 'Low' }
+ let(:global_score) { '0' }
+ let(:custom_score) { '0' }
+
+ let(:custom_attributes) do
+ custom_attributes = []
+ custom_attributes.push({ key: 'arkose_session', value: arkose_session })
+ custom_attributes.push({ key: 'arkose_risk_band', value: risk_band })
+ custom_attributes.push({ key: 'arkose_global_score', value: global_score })
+ custom_attributes.push({ key: 'arkose_custom_score', value: custom_score })
+
+ custom_attributes.map! { |custom_attribute| custom_attribute.merge({ user_id: user.id }) }
+ custom_attributes
+ end
+
+ it 'adds arkose data to custom attributes' do
+ subject
+
+ expect(user.custom_attributes.count).to eq(4)
+
+ expect(user.custom_attributes.find_by(key: 'arkose_session').value).to eq(arkose_session)
+ expect(user.custom_attributes.find_by(key: 'arkose_risk_band').value).to eq(risk_band)
+ expect(user.custom_attributes.find_by(key: 'arkose_global_score').value).to eq(global_score)
+ expect(user.custom_attributes.find_by(key: 'arkose_custom_score').value).to eq(custom_score)
+ end
+ end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index bc425b15c6e..71171f98492 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -1048,8 +1048,8 @@ RSpec.describe User do
context 'SSH key expiration scopes' do
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
- let_it_be(:expired_today_not_notified) { create(:key, expires_at: Time.current, user: user1) }
- let_it_be(:expired_today_already_notified) { create(:key, expires_at: Time.current, user: user2, expiry_notification_delivered_at: Time.current) }
+ let_it_be(:expired_today_not_notified) { create(:key, :expired_today, user: user1) }
+ let_it_be(:expired_today_already_notified) { create(:key, :expired_today, user: user2, expiry_notification_delivered_at: Time.current) }
let_it_be(:expiring_soon_not_notified) { create(:key, expires_at: 2.days.from_now, user: user2) }
let_it_be(:expiring_soon_notified) { create(:key, expires_at: 2.days.from_now, user: user1, before_expiry_notification_delivered_at: Time.current) }
@@ -5006,9 +5006,13 @@ RSpec.describe User do
let(:archived_project) { create(:project, :public, :archived) }
before do
- create(:merge_request, source_project: project, author: user, reviewers: [user])
- create(:merge_request, :closed, source_project: project, author: user, reviewers: [user])
- create(:merge_request, source_project: archived_project, author: user, reviewers: [user])
+ mr1 = create(:merge_request, source_project: project, author: user, reviewers: [user])
+ mr2 = create(:merge_request, :closed, source_project: project, author: user, reviewers: [user])
+ mr3 = create(:merge_request, source_project: archived_project, author: user, reviewers: [user])
+
+ mr1.find_reviewer(user).update!(state: :attention_requested)
+ mr2.find_reviewer(user).update!(state: :attention_requested)
+ mr3.find_reviewer(user).update!(state: :attention_requested)
end
it 'returns number of open merge requests from non-archived projects' do
@@ -5335,7 +5339,7 @@ RSpec.describe User do
let(:deleted_by) { create(:user) }
it 'blocks the user then schedules them for deletion if a hard delete is specified' do
- expect(DeleteUserWorker).to receive(:perform_async).with(deleted_by.id, user.id, hard_delete: true)
+ expect(DeleteUserWorker).to receive(:perform_async).with(deleted_by.id, user.id, { hard_delete: true })
user.delete_async(deleted_by: deleted_by, params: { hard_delete: true })
@@ -6817,4 +6821,23 @@ RSpec.describe User do
it_behaves_like 'it has loose foreign keys' do
let(:factory_name) { :user }
end
+
+ describe '#mr_attention_requests_enabled?' do
+ let(:user) { create(:user) }
+
+ before do
+ stub_feature_flags(mr_attention_requests: false)
+ end
+
+ it { expect(user.mr_attention_requests_enabled?).to be(false) }
+
+ it 'returns true when the feature flag is enabled for the user' do
+ stub_feature_flags(mr_attention_requests: user)
+
+ another_user = create(:user)
+
+ expect(user.mr_attention_requests_enabled?).to be(true)
+ expect(another_user.mr_attention_requests_enabled?).to be(false)
+ end
+ end
end
diff --git a/spec/models/users/in_product_marketing_email_spec.rb b/spec/models/users/in_product_marketing_email_spec.rb
index ca03c3e645d..7796b54babc 100644
--- a/spec/models/users/in_product_marketing_email_spec.rb
+++ b/spec/models/users/in_product_marketing_email_spec.rb
@@ -14,9 +14,35 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
subject { build(:in_product_marketing_email) }
it { is_expected.to validate_presence_of(:user) }
- it { is_expected.to validate_presence_of(:track) }
- it { is_expected.to validate_presence_of(:series) }
- it { is_expected.to validate_uniqueness_of(:user_id).scoped_to([:track, :series]).with_message('has already been sent') }
+
+ context 'for a track+series email' do
+ it { is_expected.to validate_presence_of(:track) }
+ it { is_expected.to validate_presence_of(:series) }
+ it {
+ is_expected.to validate_uniqueness_of(:user_id)
+ .scoped_to([:track, :series]).with_message('track series email has already been sent')
+ }
+ end
+
+ context 'for a campaign email' do
+ subject { build(:in_product_marketing_email, :campaign) }
+
+ it { is_expected.to validate_presence_of(:campaign) }
+ it { is_expected.not_to validate_presence_of(:track) }
+ it { is_expected.not_to validate_presence_of(:series) }
+ it {
+ is_expected.to validate_uniqueness_of(:user_id)
+ .scoped_to(:campaign).with_message('campaign email has already been sent')
+ }
+ it { is_expected.to validate_inclusion_of(:campaign).in_array(described_class::CAMPAIGNS) }
+ end
+
+ context 'when mixing campaign and track+series' do
+ it 'is not valid' do
+ expect(build(:in_product_marketing_email, :campaign, track: :create)).not_to be_valid
+ expect(build(:in_product_marketing_email, :campaign, series: 0)).not_to be_valid
+ end
+ end
end
describe '.without_track_and_series' do
@@ -58,6 +84,27 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
end
end
+ describe '.without_campaign' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
+ let(:campaign) { Users::InProductMarketingEmail::BUILD_IOS_APP_GUIDE }
+
+ subject(:without_campaign) { User.merge(described_class.without_campaign(campaign)) }
+
+ context 'when record for campaign already exists' do
+ before do
+ create(:in_product_marketing_email, :campaign, campaign: campaign, user: user)
+ end
+
+ it { is_expected.to match_array [other_user] }
+ end
+
+ context 'when record for campaign does not exist' do
+ it { is_expected.to match_array [user, other_user] }
+ end
+ end
+
describe '.for_user_with_track_and_series' do
let_it_be(:user) { create(:user) }
let_it_be(:in_product_marketing_email) { create(:in_product_marketing_email, series: 0, track: 0, user: user) }
diff --git a/spec/models/users/merge_request_interaction_spec.rb b/spec/models/users/merge_request_interaction_spec.rb
index 12c7fa43a60..a499a7c68e8 100644
--- a/spec/models/users/merge_request_interaction_spec.rb
+++ b/spec/models/users/merge_request_interaction_spec.rb
@@ -59,6 +59,7 @@ RSpec.describe ::Users::MergeRequestInteraction do
context 'when the user has been asked to review the MR' do
before do
merge_request.reviewers << user
+ merge_request.find_reviewer(user).update!(state: :attention_requested)
end
it { is_expected.to eq(Types::MergeRequestReviewStateEnum.values['ATTENTION_REQUESTED'].value) }